1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees 2 tree representation into the GIMPLE form. 3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 4 2012 Free Software Foundation, Inc. 5 Major work done by Sebastian Pop <s.pop@laposte.net>, 6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. 7 8 This file is part of GCC. 9 10 GCC is free software; you can redistribute it and/or modify it under 11 the terms of the GNU General Public License as published by the Free 12 Software Foundation; either version 3, or (at your option) any later 13 version. 14 15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 16 WARRANTY; without even the implied warranty of MERCHANTABILITY or 17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 18 for more details. 19 20 You should have received a copy of the GNU General Public License 21 along with GCC; see the file COPYING3. If not see 22 <http://www.gnu.org/licenses/>. */ 23 24 #include "config.h" 25 #include "system.h" 26 #include "coretypes.h" 27 #include "tm.h" 28 #include "tree.h" 29 #include "gimple.h" 30 #include "tree-iterator.h" 31 #include "tree-inline.h" 32 #include "tree-pretty-print.h" 33 #include "langhooks.h" 34 #include "tree-flow.h" 35 #include "cgraph.h" 36 #include "timevar.h" 37 #include "hashtab.h" 38 #include "flags.h" 39 #include "function.h" 40 #include "output.h" 41 #include "ggc.h" 42 #include "diagnostic-core.h" 43 #include "target.h" 44 #include "pointer-set.h" 45 #include "splay-tree.h" 46 #include "vec.h" 47 #include "gimple.h" 48 #include "tree-pass.h" 49 50 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name. */ 51 #include "expr.h" /* FIXME: for can_move_by_pieces 52 and STACK_CHECK_MAX_VAR_SIZE. 
*/ 53 54 enum gimplify_omp_var_data 55 { 56 GOVD_SEEN = 1, 57 GOVD_EXPLICIT = 2, 58 GOVD_SHARED = 4, 59 GOVD_PRIVATE = 8, 60 GOVD_FIRSTPRIVATE = 16, 61 GOVD_LASTPRIVATE = 32, 62 GOVD_REDUCTION = 64, 63 GOVD_LOCAL = 128, 64 GOVD_DEBUG_PRIVATE = 256, 65 GOVD_PRIVATE_OUTER_REF = 512, 66 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE 67 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL) 68 }; 69 70 71 enum omp_region_type 72 { 73 ORT_WORKSHARE = 0, 74 ORT_PARALLEL = 2, 75 ORT_COMBINED_PARALLEL = 3, 76 ORT_TASK = 4, 77 ORT_UNTIED_TASK = 5 78 }; 79 80 struct gimplify_omp_ctx 81 { 82 struct gimplify_omp_ctx *outer_context; 83 splay_tree variables; 84 struct pointer_set_t *privatized_types; 85 location_t location; 86 enum omp_clause_default_kind default_kind; 87 enum omp_region_type region_type; 88 }; 89 90 static struct gimplify_ctx *gimplify_ctxp; 91 static struct gimplify_omp_ctx *gimplify_omp_ctxp; 92 93 94 /* Formal (expression) temporary table handling: multiple occurrences of 95 the same scalar expression are evaluated into the same temporary. */ 96 97 typedef struct gimple_temp_hash_elt 98 { 99 tree val; /* Key */ 100 tree temp; /* Value */ 101 } elt_t; 102 103 /* Forward declaration. */ 104 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool); 105 106 /* Mark X addressable. Unlike the langhook we expect X to be in gimple 107 form and we don't do any syntax checking. */ 108 109 void 110 mark_addressable (tree x) 111 { 112 while (handled_component_p (x)) 113 x = TREE_OPERAND (x, 0); 114 if (TREE_CODE (x) == MEM_REF 115 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR) 116 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0); 117 if (TREE_CODE (x) != VAR_DECL 118 && TREE_CODE (x) != PARM_DECL 119 && TREE_CODE (x) != RESULT_DECL) 120 return; 121 TREE_ADDRESSABLE (x) = 1; 122 123 /* Also mark the artificial SSA_NAME that points to the partition of X. 
*/ 124 if (TREE_CODE (x) == VAR_DECL 125 && !DECL_EXTERNAL (x) 126 && !TREE_STATIC (x) 127 && cfun->gimple_df != NULL 128 && cfun->gimple_df->decls_to_pointers != NULL) 129 { 130 void *namep 131 = pointer_map_contains (cfun->gimple_df->decls_to_pointers, x); 132 if (namep) 133 TREE_ADDRESSABLE (*(tree *)namep) = 1; 134 } 135 } 136 137 /* Return a hash value for a formal temporary table entry. */ 138 139 static hashval_t 140 gimple_tree_hash (const void *p) 141 { 142 tree t = ((const elt_t *) p)->val; 143 return iterative_hash_expr (t, 0); 144 } 145 146 /* Compare two formal temporary table entries. */ 147 148 static int 149 gimple_tree_eq (const void *p1, const void *p2) 150 { 151 tree t1 = ((const elt_t *) p1)->val; 152 tree t2 = ((const elt_t *) p2)->val; 153 enum tree_code code = TREE_CODE (t1); 154 155 if (TREE_CODE (t2) != code 156 || TREE_TYPE (t1) != TREE_TYPE (t2)) 157 return 0; 158 159 if (!operand_equal_p (t1, t2, 0)) 160 return 0; 161 162 #ifdef ENABLE_CHECKING 163 /* Only allow them to compare equal if they also hash equal; otherwise 164 results are nondeterminate, and we fail bootstrap comparison. */ 165 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2)); 166 #endif 167 168 return 1; 169 } 170 171 /* Link gimple statement GS to the end of the sequence *SEQ_P. If 172 *SEQ_P is NULL, a new sequence is allocated. This function is 173 similar to gimple_seq_add_stmt, but does not scan the operands. 174 During gimplification, we need to manipulate statement sequences 175 before the def/use vectors have been constructed. */ 176 177 void 178 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs) 179 { 180 gimple_stmt_iterator si; 181 182 if (gs == NULL) 183 return; 184 185 if (*seq_p == NULL) 186 *seq_p = gimple_seq_alloc (); 187 188 si = gsi_last (*seq_p); 189 190 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT); 191 } 192 193 /* Shorter alias name for the above function for use in gimplify.c 194 only. 
*/ 195 196 static inline void 197 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) 198 { 199 gimple_seq_add_stmt_without_update (seq_p, gs); 200 } 201 202 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is 203 NULL, a new sequence is allocated. This function is 204 similar to gimple_seq_add_seq, but does not scan the operands. 205 During gimplification, we need to manipulate statement sequences 206 before the def/use vectors have been constructed. */ 207 208 static void 209 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src) 210 { 211 gimple_stmt_iterator si; 212 213 if (src == NULL) 214 return; 215 216 if (*dst_p == NULL) 217 *dst_p = gimple_seq_alloc (); 218 219 si = gsi_last (*dst_p); 220 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); 221 } 222 223 /* Set up a context for the gimplifier. */ 224 225 void 226 push_gimplify_context (struct gimplify_ctx *c) 227 { 228 memset (c, '\0', sizeof (*c)); 229 c->prev_context = gimplify_ctxp; 230 gimplify_ctxp = c; 231 } 232 233 /* Tear down a context for the gimplifier. If BODY is non-null, then 234 put the temporaries into the outer BIND_EXPR. Otherwise, put them 235 in the local_decls. 236 237 BODY is not a sequence, but the first tuple in a sequence. */ 238 239 void 240 pop_gimplify_context (gimple body) 241 { 242 struct gimplify_ctx *c = gimplify_ctxp; 243 244 gcc_assert (c && (c->bind_expr_stack == NULL 245 || VEC_empty (gimple, c->bind_expr_stack))); 246 VEC_free (gimple, heap, c->bind_expr_stack); 247 gimplify_ctxp = c->prev_context; 248 249 if (body) 250 declare_vars (c->temps, body, false); 251 else 252 record_vars (c->temps); 253 254 if (c->temp_htab) 255 htab_delete (c->temp_htab); 256 } 257 258 /* Push a GIMPLE_BIND tuple onto the stack of bindings. 
*/ 259 260 static void 261 gimple_push_bind_expr (gimple gimple_bind) 262 { 263 if (gimplify_ctxp->bind_expr_stack == NULL) 264 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8); 265 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind); 266 } 267 268 /* Pop the first element off the stack of bindings. */ 269 270 static void 271 gimple_pop_bind_expr (void) 272 { 273 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack); 274 } 275 276 /* Return the first element of the stack of bindings. */ 277 278 gimple 279 gimple_current_bind_expr (void) 280 { 281 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack); 282 } 283 284 /* Return the stack of bindings created during gimplification. */ 285 286 VEC(gimple, heap) * 287 gimple_bind_expr_stack (void) 288 { 289 return gimplify_ctxp->bind_expr_stack; 290 } 291 292 /* Return true iff there is a COND_EXPR between us and the innermost 293 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */ 294 295 static bool 296 gimple_conditional_context (void) 297 { 298 return gimplify_ctxp->conditions > 0; 299 } 300 301 /* Note that we've entered a COND_EXPR. */ 302 303 static void 304 gimple_push_condition (void) 305 { 306 #ifdef ENABLE_GIMPLE_CHECKING 307 if (gimplify_ctxp->conditions == 0) 308 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups)); 309 #endif 310 ++(gimplify_ctxp->conditions); 311 } 312 313 /* Note that we've left a COND_EXPR. If we're back at unconditional scope 314 now, add any conditional cleanups we've seen to the prequeue. */ 315 316 static void 317 gimple_pop_condition (gimple_seq *pre_p) 318 { 319 int conds = --(gimplify_ctxp->conditions); 320 321 gcc_assert (conds >= 0); 322 if (conds == 0) 323 { 324 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups); 325 gimplify_ctxp->conditional_cleanups = NULL; 326 } 327 } 328 329 /* A stable comparison routine for use with splay trees and DECLs. 
*/ 330 331 static int 332 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) 333 { 334 tree a = (tree) xa; 335 tree b = (tree) xb; 336 337 return DECL_UID (a) - DECL_UID (b); 338 } 339 340 /* Create a new omp construct that deals with variable remapping. */ 341 342 static struct gimplify_omp_ctx * 343 new_omp_context (enum omp_region_type region_type) 344 { 345 struct gimplify_omp_ctx *c; 346 347 c = XCNEW (struct gimplify_omp_ctx); 348 c->outer_context = gimplify_omp_ctxp; 349 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); 350 c->privatized_types = pointer_set_create (); 351 c->location = input_location; 352 c->region_type = region_type; 353 if ((region_type & ORT_TASK) == 0) 354 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; 355 else 356 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; 357 358 return c; 359 } 360 361 /* Destroy an omp construct that deals with variable remapping. */ 362 363 static void 364 delete_omp_context (struct gimplify_omp_ctx *c) 365 { 366 splay_tree_delete (c->variables); 367 pointer_set_destroy (c->privatized_types); 368 XDELETE (c); 369 } 370 371 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); 372 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); 373 374 /* Both gimplify the statement T and append it to *SEQ_P. This function 375 behaves exactly as gimplify_stmt, but you don't have to pass T as a 376 reference. */ 377 378 void 379 gimplify_and_add (tree t, gimple_seq *seq_p) 380 { 381 gimplify_stmt (&t, seq_p); 382 } 383 384 /* Gimplify statement T into sequence *SEQ_P, and return the first 385 tuple in the sequence of generated tuples for this statement. 386 Return NULL if gimplifying T produced no tuples. 
*/ 387 388 static gimple 389 gimplify_and_return_first (tree t, gimple_seq *seq_p) 390 { 391 gimple_stmt_iterator last = gsi_last (*seq_p); 392 393 gimplify_and_add (t, seq_p); 394 395 if (!gsi_end_p (last)) 396 { 397 gsi_next (&last); 398 return gsi_stmt (last); 399 } 400 else 401 return gimple_seq_first_stmt (*seq_p); 402 } 403 404 /* Strip off a legitimate source ending from the input string NAME of 405 length LEN. Rather than having to know the names used by all of 406 our front ends, we strip off an ending of a period followed by 407 up to five characters. (Java uses ".class".) */ 408 409 static inline void 410 remove_suffix (char *name, int len) 411 { 412 int i; 413 414 for (i = 2; i < 8 && len > i; i++) 415 { 416 if (name[len - i] == '.') 417 { 418 name[len - i] = '\0'; 419 break; 420 } 421 } 422 } 423 424 /* Create a new temporary name with PREFIX. Return an identifier. */ 425 426 static GTY(()) unsigned int tmp_var_id_num; 427 428 tree 429 create_tmp_var_name (const char *prefix) 430 { 431 char *tmp_name; 432 433 if (prefix) 434 { 435 char *preftmp = ASTRDUP (prefix); 436 437 remove_suffix (preftmp, strlen (preftmp)); 438 clean_symbol_name (preftmp); 439 440 prefix = preftmp; 441 } 442 443 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++); 444 return get_identifier (tmp_name); 445 } 446 447 /* Create a new temporary variable declaration of type TYPE. 448 Do NOT push it into the current binding. */ 449 450 tree 451 create_tmp_var_raw (tree type, const char *prefix) 452 { 453 tree tmp_var; 454 455 tmp_var = build_decl (input_location, 456 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL, 457 type); 458 459 /* The variable was declared by the compiler. */ 460 DECL_ARTIFICIAL (tmp_var) = 1; 461 /* And we don't want debug info for it. */ 462 DECL_IGNORED_P (tmp_var) = 1; 463 464 /* Make the variable writable. 
*/ 465 TREE_READONLY (tmp_var) = 0; 466 467 DECL_EXTERNAL (tmp_var) = 0; 468 TREE_STATIC (tmp_var) = 0; 469 TREE_USED (tmp_var) = 1; 470 471 return tmp_var; 472 } 473 474 /* Create a new temporary variable declaration of type TYPE. DO push the 475 variable into the current binding. Further, assume that this is called 476 only from gimplification or optimization, at which point the creation of 477 certain types are bugs. */ 478 479 tree 480 create_tmp_var (tree type, const char *prefix) 481 { 482 tree tmp_var; 483 484 /* We don't allow types that are addressable (meaning we can't make copies), 485 or incomplete. We also used to reject every variable size objects here, 486 but now support those for which a constant upper bound can be obtained. 487 The processing for variable sizes is performed in gimple_add_tmp_var, 488 point at which it really matters and possibly reached via paths not going 489 through this function, e.g. after direct calls to create_tmp_var_raw. */ 490 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); 491 492 tmp_var = create_tmp_var_raw (type, prefix); 493 gimple_add_tmp_var (tmp_var); 494 return tmp_var; 495 } 496 497 /* Create a new temporary variable declaration of type TYPE by calling 498 create_tmp_var and if TYPE is a vector or a complex number, mark the new 499 temporary as gimple register. */ 500 501 tree 502 create_tmp_reg (tree type, const char *prefix) 503 { 504 tree tmp; 505 506 tmp = create_tmp_var (type, prefix); 507 if (TREE_CODE (type) == COMPLEX_TYPE 508 || TREE_CODE (type) == VECTOR_TYPE) 509 DECL_GIMPLE_REG_P (tmp) = 1; 510 511 return tmp; 512 } 513 514 /* Create a temporary with a name derived from VAL. Subroutine of 515 lookup_tmp_var; nobody else should call this function. */ 516 517 static inline tree 518 create_tmp_from_val (tree val) 519 { 520 /* Drop all qualifiers and address-space information from the value type. 
*/ 521 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val)); 522 } 523 524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse 525 an existing expression temporary. */ 526 527 static tree 528 lookup_tmp_var (tree val, bool is_formal) 529 { 530 tree ret; 531 532 /* If not optimizing, never really reuse a temporary. local-alloc 533 won't allocate any variable that is used in more than one basic 534 block, which means it will go into memory, causing much extra 535 work in reload and final and poorer code generation, outweighing 536 the extra memory allocation here. */ 537 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val)) 538 ret = create_tmp_from_val (val); 539 else 540 { 541 elt_t elt, *elt_p; 542 void **slot; 543 544 elt.val = val; 545 if (gimplify_ctxp->temp_htab == NULL) 546 gimplify_ctxp->temp_htab 547 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free); 548 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT); 549 if (*slot == NULL) 550 { 551 elt_p = XNEW (elt_t); 552 elt_p->val = val; 553 elt_p->temp = ret = create_tmp_from_val (val); 554 *slot = (void *) elt_p; 555 } 556 else 557 { 558 elt_p = (elt_t *) *slot; 559 ret = elt_p->temp; 560 } 561 } 562 563 return ret; 564 } 565 566 /* Return true if T is a CALL_EXPR or an expression that can be 567 assigned to a temporary. Note that this predicate should only be 568 used during gimplification. See the rationale for this in 569 gimplify_modify_expr. */ 570 571 static bool 572 is_gimple_reg_rhs_or_call (tree t) 573 { 574 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS 575 || TREE_CODE (t) == CALL_EXPR); 576 } 577 578 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that 579 this predicate should only be used during gimplification. See the 580 rationale for this in gimplify_modify_expr. 
*/ 581 582 static bool 583 is_gimple_mem_rhs_or_call (tree t) 584 { 585 /* If we're dealing with a renamable type, either source or dest must be 586 a renamed variable. */ 587 if (is_gimple_reg_type (TREE_TYPE (t))) 588 return is_gimple_val (t); 589 else 590 return (is_gimple_val (t) || is_gimple_lvalue (t) 591 || TREE_CODE (t) == CALL_EXPR); 592 } 593 594 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */ 595 596 static tree 597 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p, 598 bool is_formal) 599 { 600 tree t, mod; 601 602 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we 603 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */ 604 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call, 605 fb_rvalue); 606 607 t = lookup_tmp_var (val, is_formal); 608 609 if (is_formal 610 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE 611 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)) 612 DECL_GIMPLE_REG_P (t) = 1; 613 614 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val)); 615 616 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val)); 617 618 /* gimplify_modify_expr might want to reduce this further. */ 619 gimplify_and_add (mod, pre_p); 620 ggc_free (mod); 621 622 /* If we're gimplifying into ssa, gimplify_modify_expr will have 623 given our temporary an SSA name. Find and return it. */ 624 if (gimplify_ctxp->into_ssa) 625 { 626 gimple last = gimple_seq_last_stmt (*pre_p); 627 t = gimple_get_lhs (last); 628 } 629 630 return t; 631 } 632 633 /* Return a formal temporary variable initialized with VAL. PRE_P is as 634 in gimplify_expr. Only use this function if: 635 636 1) The value of the unfactored expression represented by VAL will not 637 change between the initialization and use of the temporary, and 638 2) The temporary will not be otherwise modified. 639 640 For instance, #1 means that this is inappropriate for SAVE_EXPR temps, 641 and #2 means it is inappropriate for && temps. 
642 643 For other cases, use get_initialized_tmp_var instead. */ 644 645 tree 646 get_formal_tmp_var (tree val, gimple_seq *pre_p) 647 { 648 return internal_get_tmp_var (val, pre_p, NULL, true); 649 } 650 651 /* Return a temporary variable initialized with VAL. PRE_P and POST_P 652 are as in gimplify_expr. */ 653 654 tree 655 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p) 656 { 657 return internal_get_tmp_var (val, pre_p, post_p, false); 658 } 659 660 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true, 661 generate debug info for them; otherwise don't. */ 662 663 void 664 declare_vars (tree vars, gimple scope, bool debug_info) 665 { 666 tree last = vars; 667 if (last) 668 { 669 tree temps, block; 670 671 gcc_assert (gimple_code (scope) == GIMPLE_BIND); 672 673 temps = nreverse (last); 674 675 block = gimple_bind_block (scope); 676 gcc_assert (!block || TREE_CODE (block) == BLOCK); 677 if (!block || !debug_info) 678 { 679 DECL_CHAIN (last) = gimple_bind_vars (scope); 680 gimple_bind_set_vars (scope, temps); 681 } 682 else 683 { 684 /* We need to attach the nodes both to the BIND_EXPR and to its 685 associated BLOCK for debugging purposes. The key point here 686 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR 687 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */ 688 if (BLOCK_VARS (block)) 689 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps); 690 else 691 { 692 gimple_bind_set_vars (scope, 693 chainon (gimple_bind_vars (scope), temps)); 694 BLOCK_VARS (block) = temps; 695 } 696 } 697 } 698 } 699 700 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound 701 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if 702 no such upper bound can be obtained. */ 703 704 static void 705 force_constant_size (tree var) 706 { 707 /* The only attempt we make is by querying the maximum size of objects 708 of the variable's type. 
*/ 709 710 HOST_WIDE_INT max_size; 711 712 gcc_assert (TREE_CODE (var) == VAR_DECL); 713 714 max_size = max_int_size_in_bytes (TREE_TYPE (var)); 715 716 gcc_assert (max_size >= 0); 717 718 DECL_SIZE_UNIT (var) 719 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size); 720 DECL_SIZE (var) 721 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT); 722 } 723 724 /* Push the temporary variable TMP into the current binding. */ 725 726 void 727 gimple_add_tmp_var (tree tmp) 728 { 729 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); 730 731 /* Later processing assumes that the object size is constant, which might 732 not be true at this point. Force the use of a constant upper bound in 733 this case. */ 734 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1)) 735 force_constant_size (tmp); 736 737 DECL_CONTEXT (tmp) = current_function_decl; 738 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; 739 740 if (gimplify_ctxp) 741 { 742 DECL_CHAIN (tmp) = gimplify_ctxp->temps; 743 gimplify_ctxp->temps = tmp; 744 745 /* Mark temporaries local within the nearest enclosing parallel. */ 746 if (gimplify_omp_ctxp) 747 { 748 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 749 while (ctx && ctx->region_type == ORT_WORKSHARE) 750 ctx = ctx->outer_context; 751 if (ctx) 752 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN); 753 } 754 } 755 else if (cfun) 756 record_vars (tmp); 757 else 758 { 759 gimple_seq body_seq; 760 761 /* This case is for nested functions. We need to expose the locals 762 they create. */ 763 body_seq = gimple_body (current_function_decl); 764 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false); 765 } 766 } 767 768 /* Determine whether to assign a location to the statement GS. */ 769 770 static bool 771 should_carry_location_p (gimple gs) 772 { 773 /* Don't emit a line note for a label. We particularly don't want to 774 emit one for the break label, since it doesn't actually correspond 775 to the beginning of the loop/switch. 
*/ 776 if (gimple_code (gs) == GIMPLE_LABEL) 777 return false; 778 779 return true; 780 } 781 782 /* Return true if a location should not be emitted for this statement 783 by annotate_one_with_location. */ 784 785 static inline bool 786 gimple_do_not_emit_location_p (gimple g) 787 { 788 return gimple_plf (g, GF_PLF_1); 789 } 790 791 /* Mark statement G so a location will not be emitted by 792 annotate_one_with_location. */ 793 794 static inline void 795 gimple_set_do_not_emit_location (gimple g) 796 { 797 /* The PLF flags are initialized to 0 when a new tuple is created, 798 so no need to initialize it anywhere. */ 799 gimple_set_plf (g, GF_PLF_1, true); 800 } 801 802 /* Set the location for gimple statement GS to LOCATION. */ 803 804 static void 805 annotate_one_with_location (gimple gs, location_t location) 806 { 807 if (!gimple_has_location (gs) 808 && !gimple_do_not_emit_location_p (gs) 809 && should_carry_location_p (gs)) 810 gimple_set_location (gs, location); 811 } 812 813 /* Set LOCATION for all the statements after iterator GSI in sequence 814 SEQ. If GSI is pointing to the end of the sequence, start with the 815 first statement in SEQ. */ 816 817 static void 818 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi, 819 location_t location) 820 { 821 if (gsi_end_p (gsi)) 822 gsi = gsi_start (seq); 823 else 824 gsi_next (&gsi); 825 826 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 827 annotate_one_with_location (gsi_stmt (gsi), location); 828 } 829 830 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */ 831 832 void 833 annotate_all_with_location (gimple_seq stmt_p, location_t location) 834 { 835 gimple_stmt_iterator i; 836 837 if (gimple_seq_empty_p (stmt_p)) 838 return; 839 840 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i)) 841 { 842 gimple gs = gsi_stmt (i); 843 annotate_one_with_location (gs, location); 844 } 845 } 846 847 /* This page contains routines to unshare tree nodes, i.e. 
to duplicate tree 848 nodes that are referenced more than once in GENERIC functions. This is 849 necessary because gimplification (translation into GIMPLE) is performed 850 by modifying tree nodes in-place, so gimplication of a shared node in a 851 first context could generate an invalid GIMPLE form in a second context. 852 853 This is achieved with a simple mark/copy/unmark algorithm that walks the 854 GENERIC representation top-down, marks nodes with TREE_VISITED the first 855 time it encounters them, duplicates them if they already have TREE_VISITED 856 set, and finally removes the TREE_VISITED marks it has set. 857 858 The algorithm works only at the function level, i.e. it generates a GENERIC 859 representation of a function with no nodes shared within the function when 860 passed a GENERIC function (except for nodes that are allowed to be shared). 861 862 At the global level, it is also necessary to unshare tree nodes that are 863 referenced in more than one function, for the same aforementioned reason. 864 This requires some cooperation from the front-end. There are 2 strategies: 865 866 1. Manual unsharing. The front-end needs to call unshare_expr on every 867 expression that might end up being shared across functions. 868 869 2. Deep unsharing. This is an extension of regular unsharing. Instead 870 of calling unshare_expr on expressions that might be shared across 871 functions, the front-end pre-marks them with TREE_VISITED. This will 872 ensure that they are unshared on the first reference within functions 873 when the regular unsharing algorithm runs. The counterpart is that 874 this algorithm must look deeper than for manual unsharing, which is 875 specified by LANG_HOOKS_DEEP_UNSHARING. 876 877 If there are only few specific cases of node sharing across functions, it is 878 probably easier for a front-end to unshare the expressions manually. 
On the 879 contrary, if the expressions generated at the global level are as widespread 880 as expressions generated within functions, deep unsharing is very likely the 881 way to go. */ 882 883 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes. 884 These nodes model computations that must be done once. If we were to 885 unshare something like SAVE_EXPR(i++), the gimplification process would 886 create wrong code. However, if DATA is non-null, it must hold a pointer 887 set that is used to unshare the subtrees of these nodes. */ 888 889 static tree 890 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) 891 { 892 tree t = *tp; 893 enum tree_code code = TREE_CODE (t); 894 895 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but 896 copy their subtrees if we can make sure to do it only once. */ 897 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR) 898 { 899 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t)) 900 ; 901 else 902 *walk_subtrees = 0; 903 } 904 905 /* Stop at types, decls, constants like copy_tree_r. */ 906 else if (TREE_CODE_CLASS (code) == tcc_type 907 || TREE_CODE_CLASS (code) == tcc_declaration 908 || TREE_CODE_CLASS (code) == tcc_constant 909 /* We can't do anything sensible with a BLOCK used as an 910 expression, but we also can't just die when we see it 911 because of non-expression uses. So we avert our eyes 912 and cross our fingers. Silly Java. */ 913 || code == BLOCK) 914 *walk_subtrees = 0; 915 916 /* Cope with the statement expression extension. */ 917 else if (code == STATEMENT_LIST) 918 ; 919 920 /* Leave the bulk of the work to copy_tree_r itself. */ 921 else 922 copy_tree_r (tp, walk_subtrees, NULL); 923 924 return NULL_TREE; 925 } 926 927 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP. 928 If *TP has been visited already, then *TP is deeply copied by calling 929 mostly_copy_tree_r. 
DATA is passed to mostly_copy_tree_r unmodified. */ 930 931 static tree 932 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data) 933 { 934 tree t = *tp; 935 enum tree_code code = TREE_CODE (t); 936 937 /* Skip types, decls, and constants. But we do want to look at their 938 types and the bounds of types. Mark them as visited so we properly 939 unmark their subtrees on the unmark pass. If we've already seen them, 940 don't look down further. */ 941 if (TREE_CODE_CLASS (code) == tcc_type 942 || TREE_CODE_CLASS (code) == tcc_declaration 943 || TREE_CODE_CLASS (code) == tcc_constant) 944 { 945 if (TREE_VISITED (t)) 946 *walk_subtrees = 0; 947 else 948 TREE_VISITED (t) = 1; 949 } 950 951 /* If this node has been visited already, unshare it and don't look 952 any deeper. */ 953 else if (TREE_VISITED (t)) 954 { 955 walk_tree (tp, mostly_copy_tree_r, data, NULL); 956 *walk_subtrees = 0; 957 } 958 959 /* Otherwise, mark the node as visited and keep looking. */ 960 else 961 TREE_VISITED (t) = 1; 962 963 return NULL_TREE; 964 } 965 966 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the 967 copy_if_shared_r callback unmodified. */ 968 969 static inline void 970 copy_if_shared (tree *tp, void *data) 971 { 972 walk_tree (tp, copy_if_shared_r, data, NULL); 973 } 974 975 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of 976 any nested functions. */ 977 978 static void 979 unshare_body (tree fndecl) 980 { 981 struct cgraph_node *cgn = cgraph_get_node (fndecl); 982 /* If the language requires deep unsharing, we need a pointer set to make 983 sure we don't repeatedly unshare subtrees of unshareable nodes. */ 984 struct pointer_set_t *visited 985 = lang_hooks.deep_unsharing ? 
pointer_set_create () : NULL; 986 987 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited); 988 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited); 989 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited); 990 991 if (visited) 992 pointer_set_destroy (visited); 993 994 if (cgn) 995 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) 996 unshare_body (cgn->decl); 997 } 998 999 /* Callback for walk_tree to unmark the visited trees rooted at *TP. 1000 Subtrees are walked until the first unvisited node is encountered. */ 1001 1002 static tree 1003 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) 1004 { 1005 tree t = *tp; 1006 1007 /* If this node has been visited, unmark it and keep looking. */ 1008 if (TREE_VISITED (t)) 1009 TREE_VISITED (t) = 0; 1010 1011 /* Otherwise, don't look any deeper. */ 1012 else 1013 *walk_subtrees = 0; 1014 1015 return NULL_TREE; 1016 } 1017 1018 /* Unmark the visited trees rooted at *TP. */ 1019 1020 static inline void 1021 unmark_visited (tree *tp) 1022 { 1023 walk_tree (tp, unmark_visited_r, NULL, NULL); 1024 } 1025 1026 /* Likewise, but mark all trees as not visited. */ 1027 1028 static void 1029 unvisit_body (tree fndecl) 1030 { 1031 struct cgraph_node *cgn = cgraph_get_node (fndecl); 1032 1033 unmark_visited (&DECL_SAVED_TREE (fndecl)); 1034 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl))); 1035 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl))); 1036 1037 if (cgn) 1038 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) 1039 unvisit_body (cgn->decl); 1040 } 1041 1042 /* Unconditionally make an unshared copy of EXPR. This is used when using 1043 stored expressions which span multiple functions, such as BINFO_VTABLE, 1044 as the normal unsharing process can't tell that they're shared. 
*/

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement;
		   an empty list yields no value at all.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  Splice the innermost value into it and push the whole
	     assignment inside the wrapper.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment was supplied; invent a temporary to receive the
	     wrapper's value.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gimple *save, gimple *restore)
{
  tree tmp_var;

  /* The save call's result (the saved stack pointer) is passed as the
     sole argument of the restore call.  */
  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Reset so we can detect whether anything inside this bind requested
     a stack save/restore (e.g. a VLA declaration).  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t))
	{
	  /* An empty CONSTRUCTOR marked TREE_THIS_VOLATILE is the
	     representation of a clobber on the LHS of an assignment.  */
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
	}
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      /* Wrap the body in a TRY_FINALLY so the cleanups (stack restore,
	 clobbers) run on every exit path.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
	                     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  /* Restore the enclosing bind's save-stack state.  */
  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.
*/ 1292 1293 static enum gimplify_status 1294 gimplify_return_expr (tree stmt, gimple_seq *pre_p) 1295 { 1296 gimple ret; 1297 tree ret_expr = TREE_OPERAND (stmt, 0); 1298 tree result_decl, result; 1299 1300 if (ret_expr == error_mark_node) 1301 return GS_ERROR; 1302 1303 if (!ret_expr 1304 || TREE_CODE (ret_expr) == RESULT_DECL 1305 || ret_expr == error_mark_node) 1306 { 1307 gimple ret = gimple_build_return (ret_expr); 1308 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); 1309 gimplify_seq_add_stmt (pre_p, ret); 1310 return GS_ALL_DONE; 1311 } 1312 1313 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))) 1314 result_decl = NULL_TREE; 1315 else 1316 { 1317 result_decl = TREE_OPERAND (ret_expr, 0); 1318 1319 /* See through a return by reference. */ 1320 if (TREE_CODE (result_decl) == INDIRECT_REF) 1321 result_decl = TREE_OPERAND (result_decl, 0); 1322 1323 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR 1324 || TREE_CODE (ret_expr) == INIT_EXPR) 1325 && TREE_CODE (result_decl) == RESULT_DECL); 1326 } 1327 1328 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL. 1329 Recall that aggregate_value_p is FALSE for any aggregate type that is 1330 returned in registers. If we're returning values in registers, then 1331 we don't want to extend the lifetime of the RESULT_DECL, particularly 1332 across another call. In addition, for those aggregates for which 1333 hard_function_value generates a PARALLEL, we'll die during normal 1334 expansion of structure assignments; there's special code in expand_return 1335 to handle this case that does not exist in expand_expr. 
*/ 1336 if (!result_decl) 1337 result = NULL_TREE; 1338 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) 1339 { 1340 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST) 1341 { 1342 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl))) 1343 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p); 1344 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL 1345 should be effectively allocated by the caller, i.e. all calls to 1346 this function must be subject to the Return Slot Optimization. */ 1347 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p); 1348 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p); 1349 } 1350 result = result_decl; 1351 } 1352 else if (gimplify_ctxp->return_temp) 1353 result = gimplify_ctxp->return_temp; 1354 else 1355 { 1356 result = create_tmp_reg (TREE_TYPE (result_decl), NULL); 1357 1358 /* ??? With complex control flow (usually involving abnormal edges), 1359 we can wind up warning about an uninitialized value for this. Due 1360 to how this variable is constructed and initialized, this is never 1361 true. Give up and never warn. */ 1362 TREE_NO_WARNING (result) = 1; 1363 1364 gimplify_ctxp->return_temp = result; 1365 } 1366 1367 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use. 1368 Then gimplify the whole thing. */ 1369 if (result != result_decl) 1370 TREE_OPERAND (ret_expr, 0) = result; 1371 1372 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p); 1373 1374 ret = gimple_build_return (result); 1375 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); 1376 gimplify_seq_add_stmt (pre_p, ret); 1377 1378 return GS_ALL_DONE; 1379 } 1380 1381 /* Gimplify a variable-length array DECL. */ 1382 1383 static void 1384 gimplify_vla_decl (tree decl, gimple_seq *seq_p) 1385 { 1386 /* This is a variable-sized decl. Simplify its size and mark it 1387 for deferred expansion. Note that mudflap depends on the format 1388 of the emitted code: see mx_register_decls(). 
*/ 1389 tree t, addr, ptr_type; 1390 1391 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p); 1392 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p); 1393 1394 /* All occurrences of this decl in final gimplified code will be 1395 replaced by indirection. Setting DECL_VALUE_EXPR does two 1396 things: First, it lets the rest of the gimplifier know what 1397 replacement to use. Second, it lets the debug info know 1398 where to find the value. */ 1399 ptr_type = build_pointer_type (TREE_TYPE (decl)); 1400 addr = create_tmp_var (ptr_type, get_name (decl)); 1401 DECL_IGNORED_P (addr) = 0; 1402 t = build_fold_indirect_ref (addr); 1403 TREE_THIS_NOTRAP (t) = 1; 1404 SET_DECL_VALUE_EXPR (decl, t); 1405 DECL_HAS_VALUE_EXPR_P (decl) = 1; 1406 1407 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN); 1408 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl), 1409 size_int (DECL_ALIGN (decl))); 1410 /* The call has been built for a variable-sized object. */ 1411 CALL_ALLOCA_FOR_VAR_P (t) = 1; 1412 t = fold_convert (ptr_type, t); 1413 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); 1414 1415 gimplify_and_add (t, seq_p); 1416 1417 /* Indicate that we need to restore the stack level when the 1418 enclosing BIND_EXPR is exited. */ 1419 gimplify_ctxp->save_stack = true; 1420 } 1421 1422 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation 1423 and initialization explicit. */ 1424 1425 static enum gimplify_status 1426 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p) 1427 { 1428 tree stmt = *stmt_p; 1429 tree decl = DECL_EXPR_DECL (stmt); 1430 1431 *stmt_p = NULL_TREE; 1432 1433 if (TREE_TYPE (decl) == error_mark_node) 1434 return GS_ERROR; 1435 1436 if ((TREE_CODE (decl) == TYPE_DECL 1437 || TREE_CODE (decl) == VAR_DECL) 1438 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl))) 1439 gimplify_type_sizes (TREE_TYPE (decl), seq_p); 1440 1441 /* ??? 
DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified 1442 in case its size expressions contain problematic nodes like CALL_EXPR. */ 1443 if (TREE_CODE (decl) == TYPE_DECL 1444 && DECL_ORIGINAL_TYPE (decl) 1445 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl))) 1446 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p); 1447 1448 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl)) 1449 { 1450 tree init = DECL_INITIAL (decl); 1451 1452 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST 1453 || (!TREE_STATIC (decl) 1454 && flag_stack_check == GENERIC_STACK_CHECK 1455 && compare_tree_int (DECL_SIZE_UNIT (decl), 1456 STACK_CHECK_MAX_VAR_SIZE) > 0)) 1457 gimplify_vla_decl (decl, seq_p); 1458 1459 /* Some front ends do not explicitly declare all anonymous 1460 artificial variables. We compensate here by declaring the 1461 variables, though it would be better if the front ends would 1462 explicitly declare them. */ 1463 if (!DECL_SEEN_IN_BIND_EXPR_P (decl) 1464 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE) 1465 gimple_add_tmp_var (decl); 1466 1467 if (init && init != error_mark_node) 1468 { 1469 if (!TREE_STATIC (decl)) 1470 { 1471 DECL_INITIAL (decl) = NULL_TREE; 1472 init = build2 (INIT_EXPR, void_type_node, decl, init); 1473 gimplify_and_add (init, seq_p); 1474 ggc_free (init); 1475 } 1476 else 1477 /* We must still examine initializers for static variables 1478 as they may contain a label address. */ 1479 walk_tree (&init, force_labels_r, NULL, NULL); 1480 } 1481 } 1482 1483 return GS_ALL_DONE; 1484 } 1485 1486 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body 1487 and replacing the LOOP_EXPR with goto, but if the loop contains an 1488 EXIT_EXPR, we need to append a label for it to jump to. 
*/

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  /* Clear the exit label so we can tell whether the body contained an
     EXIT_EXPR (which would set it via build_and_jump).  */
  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  /* Restore the enclosing loop's exit label (loops may nest).  */
  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Compare two case labels.  Because the front end should already have
   made sure that case ranges do not overlap, it is enough to only compare
   the CASE_LOW values of each case label.  */

static int
compare_case_labels (const void *p1, const void *p2)
{
  const_tree const case1 = *(const_tree const*)p1;
  const_tree const case2 = *(const_tree const*)p2;

  /* The 'default' case label always goes first.  (Callers remove the
     default label before sorting, so at most one element here can have
     a null CASE_LOW.)  */
  if (!CASE_LOW (case1))
    return -1;
  else if (!CASE_LOW (case2))
    return 1;
  else
    return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
}

/* Sort the case labels in LABEL_VEC in place in ascending order.  */

void
sort_case_labels (VEC(tree,heap)* label_vec)
{
  VEC_qsort (tree, label_vec, compare_case_labels);
}

/* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      VEC (tree,heap) *labels;
      VEC (tree,heap) *saved_labels;
      tree default_case = NULL_TREE;
      size_t i, len;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);

      /* CASE_LABEL_EXPRs in the body register themselves into
	 gimplify_ctxp->case_labels as they are gimplified.  */
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Drop empty ranges, and pull out the default label so only real
	 case labels remain for sorting.  */
      i = 0;
      while (i < VEC_length (tree, labels))
	{
	  tree elt = VEC_index (tree, labels, i);
	  tree low = CASE_LOW (elt);
	  bool remove_element = FALSE;

	  if (low)
	    {
	      /* Discard empty ranges.  */
	      tree high = CASE_HIGH (elt);
	      if (high && tree_int_cst_lt (high, low))
		remove_element = TRUE;
	    }
	  else
	    {
	      /* The default case must be the last label in the list.  */
	      gcc_assert (!default_case);
	      default_case = elt;
	      remove_element = TRUE;
	    }

	  if (remove_element)
	    VEC_ordered_remove (tree, labels, i);
	  else
	    i++;
	}
      len = i;

      if (!VEC_empty (tree, labels))
	sort_case_labels (labels);

      if (!default_case)
	{
	  tree type = TREE_TYPE (switch_expr);

	  /* If the switch has no default label, add one, so that we jump
	     around the switch body.  If the labels already cover the whole
	     range of type, add the default label pointing to one of the
	     existing labels.  */
	  if (type == void_type_node)
	    type = TREE_TYPE (SWITCH_COND (switch_expr));
	  if (len
	      && INTEGRAL_TYPE_P (type)
	      && TYPE_MIN_VALUE (type)
	      && TYPE_MAX_VALUE (type)
	      && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
				     TYPE_MIN_VALUE (type)))
	    {
	      tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
	      if (!high)
		high = CASE_LOW (VEC_index (tree, labels, len - 1));
	      if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
		{
		  /* Check that every label's low bound is exactly one more
		     than the previous label's high bound, i.e. the labels
		     form a gapless cover of the whole type.  The LOW/HIGH
		     pair below is a double-word integer; the second compare
		     handles the carry out of the low word.  */
		  for (i = 1; i < len; i++)
		    {
		      high = CASE_LOW (VEC_index (tree, labels, i));
		      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
		      if (!low)
			low = CASE_LOW (VEC_index (tree, labels, i - 1));
		      if ((TREE_INT_CST_LOW (low) + 1
			   != TREE_INT_CST_LOW (high))
			  || (TREE_INT_CST_HIGH (low)
			      + (TREE_INT_CST_LOW (high) == 0)
			      != TREE_INT_CST_HIGH (high)))
			break;
		    }
		  if (i == len)
		    {
		      /* Gapless: reuse the first label as the default.  */
		      tree label = CASE_LABEL (VEC_index (tree, labels, 0));
		      default_case = build_case_label (NULL_TREE, NULL_TREE,
						       label);
		    }
		}
	    }

	  if (!default_case)
	    {
	      gimple new_default;

	      default_case
	        = build_case_label (NULL_TREE, NULL_TREE,
				    create_artificial_label (UNKNOWN_LOCATION));
	      new_default = gimple_build_label (CASE_LABEL (default_case));
	      gimplify_seq_add_stmt (&switch_body_seq, new_default);
	    }
	}

      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
                                               default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      VEC_free(tree, heap, labels);
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  gimple gimple_label;

  /* Invalid OpenMP programs can play Duff's Device type games with
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels)
      break;

  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
  VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
  gimplify_seq_add_stmt (pre_p, gimple_label);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  /* build_and_jump creates gimplify_ctxp->exit_label as a side effect if
     it does not exist yet; gimplify_loop_expr emits it.  */
  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}

/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}

/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}

/* Nonlocal VLAs seen in the current function.  */
static struct pointer_set_t *nonlocal_vlas;

/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  /* Only add the debug copy once per decl; pointer_set_insert
	     returns nonzero if DECL was already in the set.  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy into the outermost block of the current
		 function so debug info can find it.  */
	      block = DECL_INITIAL (current_function_decl);
	      DECL_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.
   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack = VEC_alloc (tree, heap, 10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  gcc_assert (VEC_length (tree, stack));

  /* Now STACK is a stack of pointers to all the refs we've walked through
     and P points to the innermost expression.

     Java requires that we elaborate nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */

  /* Step 1: walk from the outermost ref to the innermost, filling in
     and gimplifying the cached low bound / element size (ARRAY_REF
     operands 2 and 3) and field offset (COMPONENT_REF operand 2).  */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
     loop we also remove any useless conversions.  Popping the stack
     visits the refs from innermost to outermost.  */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  VEC_free (tree, heap, stack);

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}

/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.
 */

static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  /* POST collects the inner expression's post-queue when gimplifying a
     postfix operator; ORIG_POST_P remembers the caller's queue so the
     update can be appended there after those side effects.  */
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  We also
     make sure to make lvalue a minimal lval, see
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
  if (postfix)
    {
      if (!is_gimple_min_lval (lvalue))
	{
	  mark_addressable (lvalue);
	  lvalue = build_fold_addr_expr_loc (input_location, lvalue);
	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
	  lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
	}
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      arith_code = POINTER_PLUS_EXPR;
    }

  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);

  if (postfix)
    {
      /* The update goes on the caller's post queue, followed by the
	 inner expression's own post side effects; the pre-update value
	 in LHS becomes the result.  */
      gimplify_assign (lvalue, t1, orig_post_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}

/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || TREE_CODE (size) == INTEGER_CST)
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}

/* Helper for gimplify_call_expr.
Gimplify a single argument *ARG_P 2331 Store any side-effects in PRE_P. CALL_LOCATION is the location of 2332 the CALL_EXPR. */ 2333 2334 static enum gimplify_status 2335 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location) 2336 { 2337 bool (*test) (tree); 2338 fallback_t fb; 2339 2340 /* In general, we allow lvalues for function arguments to avoid 2341 extra overhead of copying large aggregates out of even larger 2342 aggregates into temporaries only to copy the temporaries to 2343 the argument list. Make optimizers happy by pulling out to 2344 temporaries those types that fit in registers. */ 2345 if (is_gimple_reg_type (TREE_TYPE (*arg_p))) 2346 test = is_gimple_val, fb = fb_rvalue; 2347 else 2348 { 2349 test = is_gimple_lvalue, fb = fb_either; 2350 /* Also strip a TARGET_EXPR that would force an extra copy. */ 2351 if (TREE_CODE (*arg_p) == TARGET_EXPR) 2352 { 2353 tree init = TARGET_EXPR_INITIAL (*arg_p); 2354 if (init 2355 && !VOID_TYPE_P (TREE_TYPE (init))) 2356 *arg_p = init; 2357 } 2358 } 2359 2360 /* If this is a variable sized type, we must remember the size. */ 2361 maybe_with_size_expr (arg_p); 2362 2363 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ 2364 /* Make sure arguments have the same location as the function call 2365 itself. */ 2366 protected_set_expr_location (*arg_p, call_location); 2367 2368 /* There is a sequence point before a function call. Side effects in 2369 the argument list must occur before the actual call. So, when 2370 gimplifying arguments, force gimplify_expr to use an internal 2371 post queue which is then appended to the end of PRE_P. */ 2372 return gimplify_expr (arg_p, pre_p, NULL, test, fb); 2373 } 2374 2375 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. 2376 WANT_VALUE is true if the result of the call is desired. 
 */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = FALSE;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk the formals alongside the actuals; P becomes NULL when the
     actuals outnumber the declared formals.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}

/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Simple predicate: emit a single conditional jump.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we generated a label of our own, emit it after the expression.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_HERE (expr);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_HERE (expr);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_HERE (expr));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_HERE (expr));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}

/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.
*/ 2957 2958 static enum gimplify_status 2959 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p) 2960 { 2961 tree expr = *expr_p, cond; 2962 enum gimplify_status ret, tret; 2963 enum tree_code code; 2964 2965 cond = gimple_boolify (COND_EXPR_COND (expr)); 2966 2967 /* We need to handle && and || specially, as their gimplification 2968 creates pure cond_expr, thus leading to an infinite cycle otherwise. */ 2969 code = TREE_CODE (cond); 2970 if (code == TRUTH_ANDIF_EXPR) 2971 TREE_SET_CODE (cond, TRUTH_AND_EXPR); 2972 else if (code == TRUTH_ORIF_EXPR) 2973 TREE_SET_CODE (cond, TRUTH_OR_EXPR); 2974 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue); 2975 COND_EXPR_COND (*expr_p) = cond; 2976 2977 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL, 2978 is_gimple_val, fb_rvalue); 2979 ret = MIN (ret, tret); 2980 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL, 2981 is_gimple_val, fb_rvalue); 2982 2983 return MIN (ret, tret); 2984 } 2985 2986 /* Return true if evaluating EXPR could trap. 2987 EXPR is GENERIC, while tree_could_trap_p can be called 2988 only on GIMPLE. */ 2989 2990 static bool 2991 generic_expr_could_trap_p (tree expr) 2992 { 2993 unsigned i, n; 2994 2995 if (!expr || is_gimple_val (expr)) 2996 return false; 2997 2998 if (!EXPR_P (expr) || tree_could_trap_p (expr)) 2999 return true; 3000 3001 n = TREE_OPERAND_LENGTH (expr); 3002 for (i = 0; i < n; i++) 3003 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) 3004 return true; 3005 3006 return false; 3007 } 3008 3009 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;' 3010 into 3011 3012 if (p) if (p) 3013 t1 = a; a; 3014 else or else 3015 t1 = b; b; 3016 t1; 3017 3018 The second form is used when *EXPR_P is of type void. 3019 3020 PRE_P points to the list where side effects that must happen before 3021 *EXPR_P should be stored. 
   */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* When an arm is already a goto to a local label, branch straight to
     that label instead of creating an artificial one; the have_*_clause_p
     flags record that the arm needs no further expansion below.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
				   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}

/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  /* Walk down through component references to the base object.  */
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);
  /* A gimple register cannot be marked addressable; replace it with an
     initialized temporary that can be.  */
  if (is_gimple_reg (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.
   */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy(); the value of the whole expression is then a
	 dereference of memcpy's return value (== the destination).  */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset(); the expression's value is a dereference of
	 memset's return value (== the destination).  */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and declarations need not be walked into.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}

/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}

/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

	var = lower;
	loop_entry:
	  object[var] = value;
	  if (var == upper)
	    goto loop_exit;
	  var = var + 1;
	  goto loop_entry;
	loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type, NULL);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}

/* Return true if FDECL is accessing a field that is zero sized.  */

static bool
zero_sized_field_decl (const_tree fdecl)
{
  if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
      && integer_zerop (DECL_SIZE (fdecl)))
    return true;
  return false;
}

/* Return true if TYPE is zero sized.  */

static bool
zero_sized_type (const_tree type)
{
  if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
      && integer_zerop (TYPE_SIZE (type)))
    return true;
  return false;
}

/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.
   */

static void
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so ARRAY_REFs can be built
     below; non-NULL array_elt_type also selects the array path later.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Zero stores are redundant if the object was already cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse into nested aggregate constructors; vectors keep their
	 CONSTRUCTOR form and are assigned whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}

/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}

/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = VEC_length (constructor_elt, elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = VEC_index (constructor_elt, elts, idx)->value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Replace the compound literal by its initializer, but only
	     when neither the literal nor its decl can have its address
	     taken.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy the constructor lazily, only once a change is made.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      VEC_index (constructor_elt, elts, idx)->value = newval;
    }
  return ctor;
}

/* A subroutine of
gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  VEC(constructor_elt,gc) *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (VEC_empty (constructor_elt, elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks a FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    if (size > 0
		&& num_nonzero_elements > 1
		&& !can_move_by_pieces (size, align))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && VEC_length (constructor_elt, elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (VEC_length (constructor_elt, elts) == 2);
	r = VEC_index (constructor_elt, elts, 0)->value;
	i = VEC_index (constructor_elt, elts, 1)->value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}

/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.
*/ 4126 4127 tree 4128 gimple_fold_indirect_ref (tree t) 4129 { 4130 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype); 4131 tree sub = t; 4132 tree subtype; 4133 4134 STRIP_NOPS (sub); 4135 subtype = TREE_TYPE (sub); 4136 if (!POINTER_TYPE_P (subtype)) 4137 return NULL_TREE; 4138 4139 if (TREE_CODE (sub) == ADDR_EXPR) 4140 { 4141 tree op = TREE_OPERAND (sub, 0); 4142 tree optype = TREE_TYPE (op); 4143 /* *&p => p */ 4144 if (useless_type_conversion_p (type, optype)) 4145 return op; 4146 4147 /* *(foo *)&fooarray => fooarray[0] */ 4148 if (TREE_CODE (optype) == ARRAY_TYPE 4149 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST 4150 && useless_type_conversion_p (type, TREE_TYPE (optype))) 4151 { 4152 tree type_domain = TYPE_DOMAIN (optype); 4153 tree min_val = size_zero_node; 4154 if (type_domain && TYPE_MIN_VALUE (type_domain)) 4155 min_val = TYPE_MIN_VALUE (type_domain); 4156 if (TREE_CODE (min_val) == INTEGER_CST) 4157 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); 4158 } 4159 /* *(foo *)&complexfoo => __real__ complexfoo */ 4160 else if (TREE_CODE (optype) == COMPLEX_TYPE 4161 && useless_type_conversion_p (type, TREE_TYPE (optype))) 4162 return fold_build1 (REALPART_EXPR, type, op); 4163 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */ 4164 else if (TREE_CODE (optype) == VECTOR_TYPE 4165 && useless_type_conversion_p (type, TREE_TYPE (optype))) 4166 { 4167 tree part_width = TYPE_SIZE (type); 4168 tree index = bitsize_int (0); 4169 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index); 4170 } 4171 } 4172 4173 /* *(p + CST) -> ... 
*/ 4174 if (TREE_CODE (sub) == POINTER_PLUS_EXPR 4175 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) 4176 { 4177 tree addr = TREE_OPERAND (sub, 0); 4178 tree off = TREE_OPERAND (sub, 1); 4179 tree addrtype; 4180 4181 STRIP_NOPS (addr); 4182 addrtype = TREE_TYPE (addr); 4183 4184 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */ 4185 if (TREE_CODE (addr) == ADDR_EXPR 4186 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE 4187 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))) 4188 && host_integerp (off, 1)) 4189 { 4190 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1); 4191 tree part_width = TYPE_SIZE (type); 4192 unsigned HOST_WIDE_INT part_widthi 4193 = tree_low_cst (part_width, 0) / BITS_PER_UNIT; 4194 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; 4195 tree index = bitsize_int (indexi); 4196 if (offset / part_widthi 4197 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))) 4198 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0), 4199 part_width, index); 4200 } 4201 4202 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */ 4203 if (TREE_CODE (addr) == ADDR_EXPR 4204 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE 4205 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))) 4206 { 4207 tree size = TYPE_SIZE_UNIT (type); 4208 if (tree_int_cst_equal (size, off)) 4209 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0)); 4210 } 4211 4212 /* *(p + CST) -> MEM_REF <p, CST>. 
*/ 4213 if (TREE_CODE (addr) != ADDR_EXPR 4214 || DECL_P (TREE_OPERAND (addr, 0))) 4215 return fold_build2 (MEM_REF, type, 4216 addr, 4217 build_int_cst_wide (ptype, 4218 TREE_INT_CST_LOW (off), 4219 TREE_INT_CST_HIGH (off))); 4220 } 4221 4222 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ 4223 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE 4224 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST 4225 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) 4226 { 4227 tree type_domain; 4228 tree min_val = size_zero_node; 4229 tree osub = sub; 4230 sub = gimple_fold_indirect_ref (sub); 4231 if (! sub) 4232 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); 4233 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); 4234 if (type_domain && TYPE_MIN_VALUE (type_domain)) 4235 min_val = TYPE_MIN_VALUE (type_domain); 4236 if (TREE_CODE (min_val) == INTEGER_CST) 4237 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); 4238 } 4239 4240 return NULL_TREE; 4241 } 4242 4243 /* Given a pointer value OP0, return a simplified version of an 4244 indirection through OP0, or NULL_TREE if no simplification is 4245 possible. This may only be applied to a rhs of an expression. 4246 Note that the resulting type may be different from the type pointed 4247 to in the sense that it is still compatible from the langhooks 4248 point of view. */ 4249 4250 static tree 4251 gimple_fold_indirect_ref_rhs (tree t) 4252 { 4253 return gimple_fold_indirect_ref (t); 4254 } 4255 4256 /* Subroutine of gimplify_modify_expr to do simplifications of 4257 MODIFY_EXPRs based on the code of the RHS. We loop for as long as 4258 something changes. 
*/

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Keep simplifying until one full pass over the RHS code makes no
     further change.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field
	     basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    /* Preserve the volatility of the original
		       indirection on the folded replacement.  */
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This
	       can't be done if the initializer is void, since that implies
	       that the temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR
	       as an initializer.  If I'm wrong, we'll die because the temp
	       won't have any RTL.  In that case, I guess we'll need to
	       replace references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will
	     be caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE
	     types, since we cannot generate temporaries for such, but it
	     saves a copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an
		 NRV.  */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized
		   types.  GIMPLE cannot deal with a variable-sized
		   assignment embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant
	     size, since we would not be able to generate a temporary at
	     all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* Falls through to default, which only breaks.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}

/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
   DECL_GIMPLE_REG_P set.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.
Then the fully-fledged complex operations lowering pass 4548 followed by a DCE pass are necessary in order to fix things up. */ 4549 4550 static enum gimplify_status 4551 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p, 4552 bool want_value) 4553 { 4554 enum tree_code code, ocode; 4555 tree lhs, rhs, new_rhs, other, realpart, imagpart; 4556 4557 lhs = TREE_OPERAND (*expr_p, 0); 4558 rhs = TREE_OPERAND (*expr_p, 1); 4559 code = TREE_CODE (lhs); 4560 lhs = TREE_OPERAND (lhs, 0); 4561 4562 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR; 4563 other = build1 (ocode, TREE_TYPE (rhs), lhs); 4564 TREE_NO_WARNING (other) = 1; 4565 other = get_formal_tmp_var (other, pre_p); 4566 4567 realpart = code == REALPART_EXPR ? rhs : other; 4568 imagpart = code == REALPART_EXPR ? other : rhs; 4569 4570 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart)) 4571 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart); 4572 else 4573 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart); 4574 4575 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs)); 4576 *expr_p = (want_value) ? rhs : NULL_TREE; 4577 4578 return GS_ALL_DONE; 4579 } 4580 4581 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P. 4582 4583 modify_expr 4584 : varname '=' rhs 4585 | '*' ID '=' rhs 4586 4587 PRE_P points to the list where side effects that must happen before 4588 *EXPR_P should be stored. 4589 4590 POST_P points to the list where side effects that must happen after 4591 *EXPR_P should be stored. 4592 4593 WANT_VALUE is nonzero iff we want to use the value of this expression 4594 in another expression. 
*/

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the LHS first.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle
     specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value,
					       pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  /* A volatile LHS must not be re-read to produce the wanted value;
     evaluate the RHS into a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
      CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
      STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
      assign = gimple_build_call_from_tree (*from_p);
      gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
      if (!gimple_call_noreturn_p (assign))
	gimple_call_set_lhs (assign, *to_p);
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  gimplify_seq_add_stmt (pre_p, assign);

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* If we've somehow already got an SSA_NAME on the LHS, then
	 we've probably modified it twice.  Not good.  */
      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
      *to_p = make_ssa_name (*to_p, assign);
      gimple_set_lhs (assign, *to_p);
    }

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}

/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.
*/ 4769 4770 static enum gimplify_status 4771 gimplify_variable_sized_compare (tree *expr_p) 4772 { 4773 location_t loc = EXPR_LOCATION (*expr_p); 4774 tree op0 = TREE_OPERAND (*expr_p, 0); 4775 tree op1 = TREE_OPERAND (*expr_p, 1); 4776 tree t, arg, dest, src, expr; 4777 4778 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); 4779 arg = unshare_expr (arg); 4780 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); 4781 src = build_fold_addr_expr_loc (loc, op1); 4782 dest = build_fold_addr_expr_loc (loc, op0); 4783 t = builtin_decl_implicit (BUILT_IN_MEMCMP); 4784 t = build_call_expr_loc (loc, t, 3, dest, src, arg); 4785 4786 expr 4787 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); 4788 SET_EXPR_LOCATION (expr, loc); 4789 *expr_p = expr; 4790 4791 return GS_OK; 4792 } 4793 4794 /* Gimplify a comparison between two aggregate objects of integral scalar 4795 mode as a comparison between the bitwise equivalent scalar values. */ 4796 4797 static enum gimplify_status 4798 gimplify_scalar_mode_aggregate_compare (tree *expr_p) 4799 { 4800 location_t loc = EXPR_LOCATION (*expr_p); 4801 tree op0 = TREE_OPERAND (*expr_p, 0); 4802 tree op1 = TREE_OPERAND (*expr_p, 1); 4803 4804 tree type = TREE_TYPE (op0); 4805 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); 4806 4807 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0); 4808 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1); 4809 4810 *expr_p 4811 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); 4812 4813 return GS_OK; 4814 } 4815 4816 /* Gimplify an expression sequence. This function gimplifies each 4817 expression and rewrites the original expression with the last 4818 expression of the sequence in GIMPLE form. 4819 4820 PRE_P points to the list where the side effects for all the 4821 expressions in the sequence will be emitted. 4822 4823 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. 
*/

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  /* Gimplify every left operand of the (possibly nested, left- or
     right-leaning) COMPOUND_EXPR chain as a statement; only the last
     expression's value survives.  */
  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
	gimplify_compound_expr (sub_p, pre_p, false);
      else
	gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      /* The value is unused: gimplify the trailing expression for its
	 side effects only.  */
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}

/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression such as SAVE_EXPRs
	 generated by the Java frontend for class initialization.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  val = NULL;
	}
      else
	val = get_initialized_tmp_var (val, pre_p, post_p);

      /* Record the temporary so later references to this SAVE_EXPR
	 reuse the same value.  */
      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}

/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less
	 clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}

/* Gimplify the operands of an ASM_EXPR.
   Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  VEC(tree, gc) *inputs;
  VEC(tree, gc) *outputs;
  VEC(tree, gc) *clobbers;
  VEC(tree, gc) *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = outputs = clobbers = labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify the outputs.  I counts operands across all the
     loops below, matching the operand numbering used in constraints.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      VEC_safe_push (tree, gc, outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First walk: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second walk: rewrite each alternative, substituting the
		     operand number for register-allowing alternatives.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Second pass: gimplify the inputs.  I keeps counting from the
     outputs above.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Pre/post inc/dec cannot be a directly addressable memory
	     operand; replace with error_mark_node so gimplify_expr
	     diagnoses it below.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      VEC_safe_push (tree, gc, inputs, link);
    }

  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, clobbers, link);

  for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, labels, link);

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}

/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each GIMPLE_WITH_CLEANUP_EXPR in the body into a
     GIMPLE_TRY covering the statements that follow it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      *gsi_stmt_ptr (&iter) = gtry;
	      iter = gsi_start (seq);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}

/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.
   EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);

      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}

/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  /* The INIT_EXPR wrapper was fully gimplified into *pre_p;
	     release it back to the GC allocator.  */
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  tree clobber = build_constructor (TREE_TYPE (temp), NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}

/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  if (!*seq_p)
    *seq_p = gimple_seq_alloc ();

  /* Compare the sequence tail before and after gimplification to
     detect whether anything was emitted.  */
  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}

/* Add FIRSTPRIVATE entries for DECL in the OpenMP contexts of the
   surrounding parallels to CTX.  If entries already exist, force them to be
   some flavor of private.  If there is no enclosing parallel, do nothing.
 */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl))
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  /* An existing SHARED entry is demoted to FIRSTPRIVATE (keeping
	     GOVD_SEEN); any other existing class is already private
	     enough, so stop walking outward.  */
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else
	    return;
	}
      else if (ctx->region_type != ORT_WORKSHARE)
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}

/* Similarly for each of the type sizes of TYPE.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid processing the same type twice within CTX.  */
  if (pointer_set_insert (ctx->privatized_types, type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}

/* Add an entry for DECL in the OpenMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & GOVD_LOCAL)
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      gcc_assert ((flags & GOVD_LOCAL) == 0);
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}

/* Notice a threadprivate variable DECL used in OpenMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      /* Record DECL so the diagnostic is emitted only once per task.  */
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}

/* Record the fact that DECL was used within the OpenMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.
 */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      /* First use of DECL in CTX: determine its data-sharing class from
	 the applicable default clause or predetermined sharing.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  error ("%qE not specified in enclosing parallel",
		 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	  if ((ctx->region_type & ORT_TASK) != 0)
	    error_at (ctx->location, "enclosing task");
	  else
	    error_at (ctx->location, "enclosing parallel");
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & ORT_PARALLEL) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Mark the pointer replacement variable of a variable-sized DECL as
     seen the first time DECL itself is seen by real code.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}

/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      error ("iteration variable %qE should be private",
		     DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl);
  return false;
}

/* Return true if DECL is private within a parallel region
   that binds to the current
   construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  /* Walk outward through enclosing contexts, starting at CTX's parent.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	return !(is_global_var (decl)
		 /* References might be private, but might be shared too.  */
		 || lang_hooks.decls.omp_privatize_by_reference (decl));

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
      if (n != NULL)
	return (n->value & GOVD_SHARED) == 0;
    }
  while (ctx->region_type == ORT_WORKSHARE);
  return false;
}

/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  struct gimplify_ctx gctx;
  tree c;

  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Gimplify the reduction init and merge expressions in the
		 new context, caching the result on the clause.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context (&gctx);
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_MERGEABLE:
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  gimplify_omp_ctxp = ctx;
}

/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.
 */

/* Splay-tree callback for gimplify_adjust_omp_clauses: N is a
   (decl -> GOVD_* flags) mapping, DATA is the clause-list tail
   (tree *).  Synthesize an implicit data-sharing clause for decls
   that were seen but have no explicit clause, prepending it to the
   list.  Always returns 0 so the traversal continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = (tree *) data;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses and context-local decls need no implicit clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an explicit SHARED clause if some
	     enclosing context privatizes it; otherwise it is shared
	     by default and the clause would be redundant.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else
    gcc_unreachable ();

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  *list_p = clause;
  lang_hooks.decls.omp_finish_clause (clause);

  return 0;
}

/* Post-gimplification clause fixup for the current omp context:
   drop PRIVATE/SHARED/FIRSTPRIVATE clauses for decls never seen in
   the body, demote debug-only privates, record whether LASTPRIVATE
   decls are also FIRSTPRIVATE, then append implicit data-sharing
   clauses and pop/delete the context.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Unused in the region: the clause can be dropped.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  /* Keep the decl around for debugging as a private
		     copy, marked DEBUG so later passes know.  */
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	  /* Nothing to adjust.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}

/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  Replaces *EXPR_P
   with NULL_TREE and emits a GIMPLE_OMP_PARALLEL into PRE_P.  */

static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple g;
  gimple_seq body = NULL;
  struct gimplify_ctx gctx;

  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
			     OMP_PARALLEL_COMBINED (expr)
			     ? ORT_COMBINED_PARALLEL
			     : ORT_PARALLEL);

  push_gimplify_context (&gctx);

  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  /* Pop with the body's bind if there is one, so its temporaries are
     declared there; otherwise pop without a bind.  */
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));

  g = gimple_build_omp_parallel (body,
				 OMP_PARALLEL_CLAUSES (expr),
				 NULL_TREE, NULL_TREE);
  if (OMP_PARALLEL_COMBINED (expr))
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}

/* Gimplify the contents of an OMP_TASK statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  Replaces *EXPR_P
   with NULL_TREE and emits a GIMPLE_OMP_TASK into PRE_P.  */

static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple g;
  gimple_seq body = NULL;
  struct gimplify_ctx gctx;

  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
			     find_omp_clause (OMP_TASK_CLAUSES (expr),
					      OMP_CLAUSE_UNTIED)
			     ? ORT_UNTIED_TASK : ORT_TASK);

  push_gimplify_context (&gctx);

  g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));

  g = gimple_build_omp_task (body,
			     OMP_TASK_CLAUSES (expr),
			     NULL_TREE, NULL_TREE,
			     NULL_TREE, NULL_TREE, NULL_TREE);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}

/* Gimplify the gross structure of an OMP_FOR statement.  Emits a
   GIMPLE_OMP_FOR into PRE_P and returns GS_ALL_DONE on success or
   GS_ERROR if any operand failed to gimplify.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;

  for_stmt = *expr_p;

  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     ORT_WORKSHARE);

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  for_body = gimple_seq_alloc ();
  /* INIT, COND and INCR are parallel vectors, one slot per collapsed
     loop dimension.  */
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      if (omp_is_private (gimplify_omp_ctxp, decl))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  */
      if (!is_gimple_reg (decl))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the top of the body so
	     the body still sees the original variable.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR: canonicalize every increment form into
	 VAR = VAR +/- STEP.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), 1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      /* DECL may be either operand of a commutative PLUS;
		 normalize so DECL (now VAR) is operand 0.  */
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	{
	  /* A lastprivate clause on the iteration variable needs an
	     explicit final assignment from the substituted counter.  */
	  tree c;
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_DECL (c) == decl
		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimplify_assign (decl, t,
				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      }
	}
    }

  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);

  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));

  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);

  /* Transfer the per-dimension index/bound/cond/increment data from
     the GENERIC vectors into the GIMPLE_OMP_FOR.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
}

/* Gimplify the gross structure of other OpenMP worksharing constructs.
   In particular, OMP_SECTIONS and OMP_SINGLE.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple stmt;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
  gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));

  if (TREE_CODE (expr) == OMP_SECTIONS)
    stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
  else if (TREE_CODE (expr) == OMP_SINGLE)
    stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
  else
    gcc_unreachable ();

  gimplify_seq_add_stmt (pre_p, stmt);
}

/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      /* Strip matching conversions off both EXPR and ADDR in lockstep
	 so a wrapped pointer still compares equal.  */
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}

/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.
 */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The stabilized lhs itself: substitute LHS_VAR.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Gimple values are already stable; nothing to do.  */
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: operand 0 is handled with the unary case.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: operand 0 is shared with TRUTH_NOT_EXPR.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* An lhs-free subexpression: force it into a temporary so the atomic
     region only touches the lhs.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}

/* Gimplify an OMP_ATOMIC statement.  Lowers it to a GIMPLE_OMP_ATOMIC_LOAD
   / GIMPLE_OMP_ATOMIC_STORE pair in PRE_P; for the capture/read forms,
   *EXPR_P becomes the captured value, otherwise NULL.  Returns GS_ERROR
   if any operand fails to gimplify, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* An atomic read has no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gimple loadstmt, storestmt;

  tmp_load = create_tmp_reg (type, NULL);
  /* Pre-evaluate everything in RHS except the lhs itself (replaced by
     TMP_LOAD), so only the lhs access sits inside the atomic region.  */
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      /* The result is the value before the store.  */
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      /* The result is the newly stored value.  */
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}

/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  Emits a GIMPLE_TRANSACTION into
   PRE_P; returns GS_OK with *EXPR_P set to the voidified wrapper
   temporary when the transaction yields a value, GS_ALL_DONE with
   *EXPR_P cleared otherwise.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple g;
  gimple_seq body = NULL;
  struct gimplify_ctx gctx;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OpenMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context (&gctx);
  temp = voidify_wrapper_expr (*expr_p, NULL);

  g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (g);

  g = gimple_build_transaction (body, NULL);
  /* OUTER and RELAXED are mutually exclusive subcodes; OUTER wins.  */
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (g, subcode);

  gimplify_seq_add_stmt (pre_p, g);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
   the expression needs to evaluate to a value to be used as
   an operand in a GIMPLE statement, this value will be stored in
   *EXPR_P on exit.  This happens when the caller specifies one
   of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of EXPR and all the side-effects that must
   be executed before the main expression.  On exit, the last
   statement of PRE_P is the core statement being gimplified.  For
   instance, when gimplifying 'if (++a)' the last statement in
   PRE_P will be 'if (t.1)' where t.1 is the result of
   pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of all the side-effects that must be executed
   after the main expression.  If this is NULL, the post
   side-effects are stored at the end of PRE_P.

   The reason why the output is split in two is to handle post
   side-effects explicitly.  In some cases, an expression may have
   inner and outer post side-effects which need to be emitted in
   an order different from the one given by the recursive
   traversal.  For instance, for the expression (*p--)++ the post
   side-effects of '--' must actually occur *after* the post
   side-effects of '++'.  However, gimplification will first visit
   the inner expression, so if a separate POST sequence was not
   used, the resulting sequence would be:

       1	t.1 = *p
       2	p = p - 1
       3	t.2 = t.1 + 1
       4	*p = t.2

   However, the post-decrement operation in line #2 must not be
   evaluated until after the store to *p at line #4, so the
   correct sequence should be:

       1	t.1 = *p
       2	t.2 = t.1 + 1
       3	*p = t.2
       4	p = p - 1

   So, by specifying a separate post queue, it is possible
   to emit the post side-effects in the correct order.
   If POST_P is NULL, an internal queue will be used.  Before
   returning to the caller, the sequence POST_P is appended to
   the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
   returns nonzero if T is in the GIMPLE form requested by the
   caller.  The GIMPLE predicates are in gimple.c.
6767 6768 FALLBACK tells the function what sort of a temporary we want if 6769 gimplification cannot produce an expression that complies with 6770 GIMPLE_TEST_F. 6771 6772 fb_none means that no temporary should be generated 6773 fb_rvalue means that an rvalue is OK to generate 6774 fb_lvalue means that an lvalue is OK to generate 6775 fb_either means that either is OK, but an lvalue is preferable. 6776 fb_mayfail means that gimplification may fail (in which case 6777 GS_ERROR will be returned) 6778 6779 The return value is either GS_ERROR or GS_ALL_DONE, since this 6780 function iterates until EXPR is completely gimplified or an error 6781 occurs. */ 6782 6783 enum gimplify_status 6784 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 6785 bool (*gimple_test_f) (tree), fallback_t fallback) 6786 { 6787 tree tmp; 6788 gimple_seq internal_pre = NULL; 6789 gimple_seq internal_post = NULL; 6790 tree save_expr; 6791 bool is_statement; 6792 location_t saved_location; 6793 enum gimplify_status ret; 6794 gimple_stmt_iterator pre_last_gsi, post_last_gsi; 6795 6796 save_expr = *expr_p; 6797 if (save_expr == NULL_TREE) 6798 return GS_ALL_DONE; 6799 6800 /* If we are gimplifying a top-level statement, PRE_P must be valid. */ 6801 is_statement = gimple_test_f == is_gimple_stmt; 6802 if (is_statement) 6803 gcc_assert (pre_p); 6804 6805 /* Consistency checks. 
*/ 6806 if (gimple_test_f == is_gimple_reg) 6807 gcc_assert (fallback & (fb_rvalue | fb_lvalue)); 6808 else if (gimple_test_f == is_gimple_val 6809 || gimple_test_f == is_gimple_call_addr 6810 || gimple_test_f == is_gimple_condexpr 6811 || gimple_test_f == is_gimple_mem_rhs 6812 || gimple_test_f == is_gimple_mem_rhs_or_call 6813 || gimple_test_f == is_gimple_reg_rhs 6814 || gimple_test_f == is_gimple_reg_rhs_or_call 6815 || gimple_test_f == is_gimple_asm_val 6816 || gimple_test_f == is_gimple_mem_ref_addr) 6817 gcc_assert (fallback & fb_rvalue); 6818 else if (gimple_test_f == is_gimple_min_lval 6819 || gimple_test_f == is_gimple_lvalue) 6820 gcc_assert (fallback & fb_lvalue); 6821 else if (gimple_test_f == is_gimple_addressable) 6822 gcc_assert (fallback & fb_either); 6823 else if (gimple_test_f == is_gimple_stmt) 6824 gcc_assert (fallback == fb_none); 6825 else 6826 { 6827 /* We should have recognized the GIMPLE_TEST_F predicate to 6828 know what kind of fallback to use in case a temporary is 6829 needed to hold the value or address of *EXPR_P. */ 6830 gcc_unreachable (); 6831 } 6832 6833 /* We used to check the predicate here and return immediately if it 6834 succeeds. This is wrong; the design is for gimplification to be 6835 idempotent, and for the predicates to only test for valid forms, not 6836 whether they are fully simplified. */ 6837 if (pre_p == NULL) 6838 pre_p = &internal_pre; 6839 6840 if (post_p == NULL) 6841 post_p = &internal_post; 6842 6843 /* Remember the last statements added to PRE_P and POST_P. Every 6844 new statement added by the gimplification helpers needs to be 6845 annotated with location information. To centralize the 6846 responsibility, we remember the last statement that had been 6847 added to both queues before gimplifying *EXPR_P. If 6848 gimplification produces new statements in PRE_P and POST_P, those 6849 statements will be annotated with the same location information 6850 as *EXPR_P. 
*/ 6851 pre_last_gsi = gsi_last (*pre_p); 6852 post_last_gsi = gsi_last (*post_p); 6853 6854 saved_location = input_location; 6855 if (save_expr != error_mark_node 6856 && EXPR_HAS_LOCATION (*expr_p)) 6857 input_location = EXPR_LOCATION (*expr_p); 6858 6859 /* Loop over the specific gimplifiers until the toplevel node 6860 remains the same. */ 6861 do 6862 { 6863 /* Strip away as many useless type conversions as possible 6864 at the toplevel. */ 6865 STRIP_USELESS_TYPE_CONVERSION (*expr_p); 6866 6867 /* Remember the expr. */ 6868 save_expr = *expr_p; 6869 6870 /* Die, die, die, my darling. */ 6871 if (save_expr == error_mark_node 6872 || (TREE_TYPE (save_expr) 6873 && TREE_TYPE (save_expr) == error_mark_node)) 6874 { 6875 ret = GS_ERROR; 6876 break; 6877 } 6878 6879 /* Do any language-specific gimplification. */ 6880 ret = ((enum gimplify_status) 6881 lang_hooks.gimplify_expr (expr_p, pre_p, post_p)); 6882 if (ret == GS_OK) 6883 { 6884 if (*expr_p == NULL_TREE) 6885 break; 6886 if (*expr_p != save_expr) 6887 continue; 6888 } 6889 else if (ret != GS_UNHANDLED) 6890 break; 6891 6892 /* Make sure that all the cases set 'ret' appropriately. */ 6893 ret = GS_UNHANDLED; 6894 switch (TREE_CODE (*expr_p)) 6895 { 6896 /* First deal with the special cases. */ 6897 6898 case POSTINCREMENT_EXPR: 6899 case POSTDECREMENT_EXPR: 6900 case PREINCREMENT_EXPR: 6901 case PREDECREMENT_EXPR: 6902 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p, 6903 fallback != fb_none); 6904 break; 6905 6906 case ARRAY_REF: 6907 case ARRAY_RANGE_REF: 6908 case REALPART_EXPR: 6909 case IMAGPART_EXPR: 6910 case COMPONENT_REF: 6911 case VIEW_CONVERT_EXPR: 6912 ret = gimplify_compound_lval (expr_p, pre_p, post_p, 6913 fallback ? 
fallback : fb_rvalue); 6914 break; 6915 6916 case COND_EXPR: 6917 ret = gimplify_cond_expr (expr_p, pre_p, fallback); 6918 6919 /* C99 code may assign to an array in a structure value of a 6920 conditional expression, and this has undefined behavior 6921 only on execution, so create a temporary if an lvalue is 6922 required. */ 6923 if (fallback == fb_lvalue) 6924 { 6925 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); 6926 mark_addressable (*expr_p); 6927 ret = GS_OK; 6928 } 6929 break; 6930 6931 case CALL_EXPR: 6932 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); 6933 6934 /* C99 code may assign to an array in a structure returned 6935 from a function, and this has undefined behavior only on 6936 execution, so create a temporary if an lvalue is 6937 required. */ 6938 if (fallback == fb_lvalue) 6939 { 6940 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); 6941 mark_addressable (*expr_p); 6942 ret = GS_OK; 6943 } 6944 break; 6945 6946 case TREE_LIST: 6947 gcc_unreachable (); 6948 6949 case COMPOUND_EXPR: 6950 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none); 6951 break; 6952 6953 case COMPOUND_LITERAL_EXPR: 6954 ret = gimplify_compound_literal_expr (expr_p, pre_p); 6955 break; 6956 6957 case MODIFY_EXPR: 6958 case INIT_EXPR: 6959 ret = gimplify_modify_expr (expr_p, pre_p, post_p, 6960 fallback != fb_none); 6961 break; 6962 6963 case TRUTH_ANDIF_EXPR: 6964 case TRUTH_ORIF_EXPR: 6965 { 6966 /* Preserve the original type of the expression and the 6967 source location of the outer expression. 
*/ 6968 tree org_type = TREE_TYPE (*expr_p); 6969 *expr_p = gimple_boolify (*expr_p); 6970 *expr_p = build3_loc (input_location, COND_EXPR, 6971 org_type, *expr_p, 6972 fold_convert_loc 6973 (input_location, 6974 org_type, boolean_true_node), 6975 fold_convert_loc 6976 (input_location, 6977 org_type, boolean_false_node)); 6978 ret = GS_OK; 6979 break; 6980 } 6981 6982 case TRUTH_NOT_EXPR: 6983 { 6984 tree type = TREE_TYPE (*expr_p); 6985 /* The parsers are careful to generate TRUTH_NOT_EXPR 6986 only with operands that are always zero or one. 6987 We do not fold here but handle the only interesting case 6988 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */ 6989 *expr_p = gimple_boolify (*expr_p); 6990 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1) 6991 *expr_p = build1_loc (input_location, BIT_NOT_EXPR, 6992 TREE_TYPE (*expr_p), 6993 TREE_OPERAND (*expr_p, 0)); 6994 else 6995 *expr_p = build2_loc (input_location, BIT_XOR_EXPR, 6996 TREE_TYPE (*expr_p), 6997 TREE_OPERAND (*expr_p, 0), 6998 build_int_cst (TREE_TYPE (*expr_p), 1)); 6999 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p))) 7000 *expr_p = fold_convert_loc (input_location, type, *expr_p); 7001 ret = GS_OK; 7002 break; 7003 } 7004 7005 case ADDR_EXPR: 7006 ret = gimplify_addr_expr (expr_p, pre_p, post_p); 7007 break; 7008 7009 case VA_ARG_EXPR: 7010 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p); 7011 break; 7012 7013 CASE_CONVERT: 7014 if (IS_EMPTY_STMT (*expr_p)) 7015 { 7016 ret = GS_ALL_DONE; 7017 break; 7018 } 7019 7020 if (VOID_TYPE_P (TREE_TYPE (*expr_p)) 7021 || fallback == fb_none) 7022 { 7023 /* Just strip a conversion to void (or in void context) and 7024 try again. */ 7025 *expr_p = TREE_OPERAND (*expr_p, 0); 7026 ret = GS_OK; 7027 break; 7028 } 7029 7030 ret = gimplify_conversion (expr_p); 7031 if (ret == GS_ERROR) 7032 break; 7033 if (*expr_p != save_expr) 7034 break; 7035 /* FALLTHRU */ 7036 7037 case FIX_TRUNC_EXPR: 7038 /* unary_expr: ... | '(' cast ')' val | ... 
*/ 7039 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 7040 is_gimple_val, fb_rvalue); 7041 recalculate_side_effects (*expr_p); 7042 break; 7043 7044 case INDIRECT_REF: 7045 { 7046 bool volatilep = TREE_THIS_VOLATILE (*expr_p); 7047 bool notrap = TREE_THIS_NOTRAP (*expr_p); 7048 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0)); 7049 7050 *expr_p = fold_indirect_ref_loc (input_location, *expr_p); 7051 if (*expr_p != save_expr) 7052 { 7053 ret = GS_OK; 7054 break; 7055 } 7056 7057 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 7058 is_gimple_reg, fb_rvalue); 7059 if (ret == GS_ERROR) 7060 break; 7061 7062 recalculate_side_effects (*expr_p); 7063 *expr_p = fold_build2_loc (input_location, MEM_REF, 7064 TREE_TYPE (*expr_p), 7065 TREE_OPERAND (*expr_p, 0), 7066 build_int_cst (saved_ptr_type, 0)); 7067 TREE_THIS_VOLATILE (*expr_p) = volatilep; 7068 TREE_THIS_NOTRAP (*expr_p) = notrap; 7069 ret = GS_OK; 7070 break; 7071 } 7072 7073 /* We arrive here through the various re-gimplifcation paths. */ 7074 case MEM_REF: 7075 /* First try re-folding the whole thing. */ 7076 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p), 7077 TREE_OPERAND (*expr_p, 0), 7078 TREE_OPERAND (*expr_p, 1)); 7079 if (tmp) 7080 { 7081 *expr_p = tmp; 7082 recalculate_side_effects (*expr_p); 7083 ret = GS_OK; 7084 break; 7085 } 7086 /* Avoid re-gimplifying the address operand if it is already 7087 in suitable form. Re-gimplifying would mark the address 7088 operand addressable. Always gimplify when not in SSA form 7089 as we still may have to gimplify decls with value-exprs. 
*/ 7090 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa 7091 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0))) 7092 { 7093 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 7094 is_gimple_mem_ref_addr, fb_rvalue); 7095 if (ret == GS_ERROR) 7096 break; 7097 } 7098 recalculate_side_effects (*expr_p); 7099 ret = GS_ALL_DONE; 7100 break; 7101 7102 /* Constants need not be gimplified. */ 7103 case INTEGER_CST: 7104 case REAL_CST: 7105 case FIXED_CST: 7106 case STRING_CST: 7107 case COMPLEX_CST: 7108 case VECTOR_CST: 7109 ret = GS_ALL_DONE; 7110 break; 7111 7112 case CONST_DECL: 7113 /* If we require an lvalue, such as for ADDR_EXPR, retain the 7114 CONST_DECL node. Otherwise the decl is replaceable by its 7115 value. */ 7116 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */ 7117 if (fallback & fb_lvalue) 7118 ret = GS_ALL_DONE; 7119 else 7120 { 7121 *expr_p = DECL_INITIAL (*expr_p); 7122 ret = GS_OK; 7123 } 7124 break; 7125 7126 case DECL_EXPR: 7127 ret = gimplify_decl_expr (expr_p, pre_p); 7128 break; 7129 7130 case BIND_EXPR: 7131 ret = gimplify_bind_expr (expr_p, pre_p); 7132 break; 7133 7134 case LOOP_EXPR: 7135 ret = gimplify_loop_expr (expr_p, pre_p); 7136 break; 7137 7138 case SWITCH_EXPR: 7139 ret = gimplify_switch_expr (expr_p, pre_p); 7140 break; 7141 7142 case EXIT_EXPR: 7143 ret = gimplify_exit_expr (expr_p); 7144 break; 7145 7146 case GOTO_EXPR: 7147 /* If the target is not LABEL, then it is a computed jump 7148 and the target needs to be gimplified. 
*/ 7149 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL) 7150 { 7151 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p, 7152 NULL, is_gimple_val, fb_rvalue); 7153 if (ret == GS_ERROR) 7154 break; 7155 } 7156 gimplify_seq_add_stmt (pre_p, 7157 gimple_build_goto (GOTO_DESTINATION (*expr_p))); 7158 ret = GS_ALL_DONE; 7159 break; 7160 7161 case PREDICT_EXPR: 7162 gimplify_seq_add_stmt (pre_p, 7163 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p), 7164 PREDICT_EXPR_OUTCOME (*expr_p))); 7165 ret = GS_ALL_DONE; 7166 break; 7167 7168 case LABEL_EXPR: 7169 ret = GS_ALL_DONE; 7170 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) 7171 == current_function_decl); 7172 gimplify_seq_add_stmt (pre_p, 7173 gimple_build_label (LABEL_EXPR_LABEL (*expr_p))); 7174 break; 7175 7176 case CASE_LABEL_EXPR: 7177 ret = gimplify_case_label_expr (expr_p, pre_p); 7178 break; 7179 7180 case RETURN_EXPR: 7181 ret = gimplify_return_expr (*expr_p, pre_p); 7182 break; 7183 7184 case CONSTRUCTOR: 7185 /* Don't reduce this in place; let gimplify_init_constructor work its 7186 magic. Buf if we're just elaborating this for side effects, just 7187 gimplify any element that has side-effects. */ 7188 if (fallback == fb_none) 7189 { 7190 unsigned HOST_WIDE_INT ix; 7191 tree val; 7192 tree temp = NULL_TREE; 7193 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val) 7194 if (TREE_SIDE_EFFECTS (val)) 7195 append_to_statement_list (val, &temp); 7196 7197 *expr_p = temp; 7198 ret = temp ? GS_OK : GS_ALL_DONE; 7199 } 7200 /* C99 code may assign to an array in a constructed 7201 structure or union, and this has undefined behavior only 7202 on execution, so create a temporary if an lvalue is 7203 required. 
*/ 7204 else if (fallback == fb_lvalue) 7205 { 7206 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); 7207 mark_addressable (*expr_p); 7208 ret = GS_OK; 7209 } 7210 else 7211 ret = GS_ALL_DONE; 7212 break; 7213 7214 /* The following are special cases that are not handled by the 7215 original GIMPLE grammar. */ 7216 7217 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and 7218 eliminated. */ 7219 case SAVE_EXPR: 7220 ret = gimplify_save_expr (expr_p, pre_p, post_p); 7221 break; 7222 7223 case BIT_FIELD_REF: 7224 { 7225 enum gimplify_status r0, r1, r2; 7226 7227 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7228 post_p, is_gimple_lvalue, fb_either); 7229 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 7230 post_p, is_gimple_val, fb_rvalue); 7231 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 7232 post_p, is_gimple_val, fb_rvalue); 7233 recalculate_side_effects (*expr_p); 7234 7235 ret = MIN (r0, MIN (r1, r2)); 7236 } 7237 break; 7238 7239 case TARGET_MEM_REF: 7240 { 7241 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE; 7242 7243 if (TMR_BASE (*expr_p)) 7244 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p, 7245 post_p, is_gimple_mem_ref_addr, fb_either); 7246 if (TMR_INDEX (*expr_p)) 7247 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p, 7248 post_p, is_gimple_val, fb_rvalue); 7249 if (TMR_INDEX2 (*expr_p)) 7250 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p, 7251 post_p, is_gimple_val, fb_rvalue); 7252 /* TMR_STEP and TMR_OFFSET are always integer constants. */ 7253 ret = MIN (r0, r1); 7254 } 7255 break; 7256 7257 case NON_LVALUE_EXPR: 7258 /* This should have been stripped above. 
*/ 7259 gcc_unreachable (); 7260 7261 case ASM_EXPR: 7262 ret = gimplify_asm_expr (expr_p, pre_p, post_p); 7263 break; 7264 7265 case TRY_FINALLY_EXPR: 7266 case TRY_CATCH_EXPR: 7267 { 7268 gimple_seq eval, cleanup; 7269 gimple try_; 7270 7271 eval = cleanup = NULL; 7272 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval); 7273 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup); 7274 /* Don't create bogus GIMPLE_TRY with empty cleanup. */ 7275 if (gimple_seq_empty_p (cleanup)) 7276 { 7277 gimple_seq_add_seq (pre_p, eval); 7278 ret = GS_ALL_DONE; 7279 break; 7280 } 7281 try_ = gimple_build_try (eval, cleanup, 7282 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR 7283 ? GIMPLE_TRY_FINALLY 7284 : GIMPLE_TRY_CATCH); 7285 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR) 7286 gimple_try_set_catch_is_cleanup (try_, 7287 TRY_CATCH_IS_CLEANUP (*expr_p)); 7288 gimplify_seq_add_stmt (pre_p, try_); 7289 ret = GS_ALL_DONE; 7290 break; 7291 } 7292 7293 case CLEANUP_POINT_EXPR: 7294 ret = gimplify_cleanup_point_expr (expr_p, pre_p); 7295 break; 7296 7297 case TARGET_EXPR: 7298 ret = gimplify_target_expr (expr_p, pre_p, post_p); 7299 break; 7300 7301 case CATCH_EXPR: 7302 { 7303 gimple c; 7304 gimple_seq handler = NULL; 7305 gimplify_and_add (CATCH_BODY (*expr_p), &handler); 7306 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler); 7307 gimplify_seq_add_stmt (pre_p, c); 7308 ret = GS_ALL_DONE; 7309 break; 7310 } 7311 7312 case EH_FILTER_EXPR: 7313 { 7314 gimple ehf; 7315 gimple_seq failure = NULL; 7316 7317 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure); 7318 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure); 7319 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p)); 7320 gimplify_seq_add_stmt (pre_p, ehf); 7321 ret = GS_ALL_DONE; 7322 break; 7323 } 7324 7325 case OBJ_TYPE_REF: 7326 { 7327 enum gimplify_status r0, r1; 7328 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, 7329 post_p, is_gimple_val, fb_rvalue); 7330 r1 = gimplify_expr 
(&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, 7331 post_p, is_gimple_val, fb_rvalue); 7332 TREE_SIDE_EFFECTS (*expr_p) = 0; 7333 ret = MIN (r0, r1); 7334 } 7335 break; 7336 7337 case LABEL_DECL: 7338 /* We get here when taking the address of a label. We mark 7339 the label as "forced"; meaning it can never be removed and 7340 it is a potential target for any computed goto. */ 7341 FORCED_LABEL (*expr_p) = 1; 7342 ret = GS_ALL_DONE; 7343 break; 7344 7345 case STATEMENT_LIST: 7346 ret = gimplify_statement_list (expr_p, pre_p); 7347 break; 7348 7349 case WITH_SIZE_EXPR: 7350 { 7351 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7352 post_p == &internal_post ? NULL : post_p, 7353 gimple_test_f, fallback); 7354 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 7355 is_gimple_val, fb_rvalue); 7356 ret = GS_ALL_DONE; 7357 } 7358 break; 7359 7360 case VAR_DECL: 7361 case PARM_DECL: 7362 ret = gimplify_var_or_parm_decl (expr_p); 7363 break; 7364 7365 case RESULT_DECL: 7366 /* When within an OpenMP context, notice uses of variables. */ 7367 if (gimplify_omp_ctxp) 7368 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); 7369 ret = GS_ALL_DONE; 7370 break; 7371 7372 case SSA_NAME: 7373 /* Allow callbacks into the gimplifier during optimization. 
*/ 7374 ret = GS_ALL_DONE; 7375 break; 7376 7377 case OMP_PARALLEL: 7378 gimplify_omp_parallel (expr_p, pre_p); 7379 ret = GS_ALL_DONE; 7380 break; 7381 7382 case OMP_TASK: 7383 gimplify_omp_task (expr_p, pre_p); 7384 ret = GS_ALL_DONE; 7385 break; 7386 7387 case OMP_FOR: 7388 ret = gimplify_omp_for (expr_p, pre_p); 7389 break; 7390 7391 case OMP_SECTIONS: 7392 case OMP_SINGLE: 7393 gimplify_omp_workshare (expr_p, pre_p); 7394 ret = GS_ALL_DONE; 7395 break; 7396 7397 case OMP_SECTION: 7398 case OMP_MASTER: 7399 case OMP_ORDERED: 7400 case OMP_CRITICAL: 7401 { 7402 gimple_seq body = NULL; 7403 gimple g; 7404 7405 gimplify_and_add (OMP_BODY (*expr_p), &body); 7406 switch (TREE_CODE (*expr_p)) 7407 { 7408 case OMP_SECTION: 7409 g = gimple_build_omp_section (body); 7410 break; 7411 case OMP_MASTER: 7412 g = gimple_build_omp_master (body); 7413 break; 7414 case OMP_ORDERED: 7415 g = gimple_build_omp_ordered (body); 7416 break; 7417 case OMP_CRITICAL: 7418 g = gimple_build_omp_critical (body, 7419 OMP_CRITICAL_NAME (*expr_p)); 7420 break; 7421 default: 7422 gcc_unreachable (); 7423 } 7424 gimplify_seq_add_stmt (pre_p, g); 7425 ret = GS_ALL_DONE; 7426 break; 7427 } 7428 7429 case OMP_ATOMIC: 7430 case OMP_ATOMIC_READ: 7431 case OMP_ATOMIC_CAPTURE_OLD: 7432 case OMP_ATOMIC_CAPTURE_NEW: 7433 ret = gimplify_omp_atomic (expr_p, pre_p); 7434 break; 7435 7436 case TRANSACTION_EXPR: 7437 ret = gimplify_transaction (expr_p, pre_p); 7438 break; 7439 7440 case TRUTH_AND_EXPR: 7441 case TRUTH_OR_EXPR: 7442 case TRUTH_XOR_EXPR: 7443 { 7444 tree orig_type = TREE_TYPE (*expr_p); 7445 tree new_type, xop0, xop1; 7446 *expr_p = gimple_boolify (*expr_p); 7447 new_type = TREE_TYPE (*expr_p); 7448 if (!useless_type_conversion_p (orig_type, new_type)) 7449 { 7450 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p); 7451 ret = GS_OK; 7452 break; 7453 } 7454 7455 /* Boolified binary truth expressions are semantically equivalent 7456 to bitwise binary expressions. 
Canonicalize them to the 7457 bitwise variant. */ 7458 switch (TREE_CODE (*expr_p)) 7459 { 7460 case TRUTH_AND_EXPR: 7461 TREE_SET_CODE (*expr_p, BIT_AND_EXPR); 7462 break; 7463 case TRUTH_OR_EXPR: 7464 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR); 7465 break; 7466 case TRUTH_XOR_EXPR: 7467 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR); 7468 break; 7469 default: 7470 break; 7471 } 7472 /* Now make sure that operands have compatible type to 7473 expression's new_type. */ 7474 xop0 = TREE_OPERAND (*expr_p, 0); 7475 xop1 = TREE_OPERAND (*expr_p, 1); 7476 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0))) 7477 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location, 7478 new_type, 7479 xop0); 7480 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1))) 7481 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location, 7482 new_type, 7483 xop1); 7484 /* Continue classified as tcc_binary. */ 7485 goto expr_2; 7486 } 7487 7488 case FMA_EXPR: 7489 case VEC_PERM_EXPR: 7490 /* Classified as tcc_expression. */ 7491 goto expr_3; 7492 7493 case POINTER_PLUS_EXPR: 7494 { 7495 enum gimplify_status r0, r1; 7496 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7497 post_p, is_gimple_val, fb_rvalue); 7498 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 7499 post_p, is_gimple_val, fb_rvalue); 7500 recalculate_side_effects (*expr_p); 7501 ret = MIN (r0, r1); 7502 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this 7503 after gimplifying operands - this is similar to how 7504 it would be folding all gimplified stmts on creation 7505 to have them canonicalized, which is what we eventually 7506 should do anyway. 
*/ 7507 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST 7508 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0))) 7509 { 7510 *expr_p = build_fold_addr_expr_with_type_loc 7511 (input_location, 7512 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)), 7513 TREE_OPERAND (*expr_p, 0), 7514 fold_convert (ptr_type_node, 7515 TREE_OPERAND (*expr_p, 1))), 7516 TREE_TYPE (*expr_p)); 7517 ret = MIN (ret, GS_OK); 7518 } 7519 break; 7520 } 7521 7522 default: 7523 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) 7524 { 7525 case tcc_comparison: 7526 /* Handle comparison of objects of non scalar mode aggregates 7527 with a call to memcmp. It would be nice to only have to do 7528 this for variable-sized objects, but then we'd have to allow 7529 the same nest of reference nodes we allow for MODIFY_EXPR and 7530 that's too complex. 7531 7532 Compare scalar mode aggregates as scalar mode values. Using 7533 memcmp for them would be very inefficient at best, and is 7534 plain wrong if bitfields are involved. */ 7535 { 7536 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); 7537 7538 /* Vector comparisons need no boolification. */ 7539 if (TREE_CODE (type) == VECTOR_TYPE) 7540 goto expr_2; 7541 else if (!AGGREGATE_TYPE_P (type)) 7542 { 7543 tree org_type = TREE_TYPE (*expr_p); 7544 *expr_p = gimple_boolify (*expr_p); 7545 if (!useless_type_conversion_p (org_type, 7546 TREE_TYPE (*expr_p))) 7547 { 7548 *expr_p = fold_convert_loc (input_location, 7549 org_type, *expr_p); 7550 ret = GS_OK; 7551 } 7552 else 7553 goto expr_2; 7554 } 7555 else if (TYPE_MODE (type) != BLKmode) 7556 ret = gimplify_scalar_mode_aggregate_compare (expr_p); 7557 else 7558 ret = gimplify_variable_sized_compare (expr_p); 7559 7560 break; 7561 } 7562 7563 /* If *EXPR_P does not need to be special-cased, handle it 7564 according to its class. 
*/ 7565 case tcc_unary: 7566 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7567 post_p, is_gimple_val, fb_rvalue); 7568 break; 7569 7570 case tcc_binary: 7571 expr_2: 7572 { 7573 enum gimplify_status r0, r1; 7574 7575 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7576 post_p, is_gimple_val, fb_rvalue); 7577 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 7578 post_p, is_gimple_val, fb_rvalue); 7579 7580 ret = MIN (r0, r1); 7581 break; 7582 } 7583 7584 expr_3: 7585 { 7586 enum gimplify_status r0, r1, r2; 7587 7588 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7589 post_p, is_gimple_val, fb_rvalue); 7590 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 7591 post_p, is_gimple_val, fb_rvalue); 7592 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 7593 post_p, is_gimple_val, fb_rvalue); 7594 7595 ret = MIN (MIN (r0, r1), r2); 7596 break; 7597 } 7598 7599 case tcc_declaration: 7600 case tcc_constant: 7601 ret = GS_ALL_DONE; 7602 goto dont_recalculate; 7603 7604 default: 7605 gcc_unreachable (); 7606 } 7607 7608 recalculate_side_effects (*expr_p); 7609 7610 dont_recalculate: 7611 break; 7612 } 7613 7614 gcc_assert (*expr_p || ret != GS_OK); 7615 } 7616 while (ret == GS_OK); 7617 7618 /* If we encountered an error_mark somewhere nested inside, either 7619 stub out the statement or propagate the error back out. */ 7620 if (ret == GS_ERROR) 7621 { 7622 if (is_statement) 7623 *expr_p = NULL; 7624 goto out; 7625 } 7626 7627 /* This was only valid as a return value from the langhook, which 7628 we handled. Make sure it doesn't escape from any other context. */ 7629 gcc_assert (ret != GS_UNHANDLED); 7630 7631 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p)) 7632 { 7633 /* We aren't looking for a value, and we don't have a valid 7634 statement. If it doesn't have side-effects, throw it away. 
*/ 7635 if (!TREE_SIDE_EFFECTS (*expr_p)) 7636 *expr_p = NULL; 7637 else if (!TREE_THIS_VOLATILE (*expr_p)) 7638 { 7639 /* This is probably a _REF that contains something nested that 7640 has side effects. Recurse through the operands to find it. */ 7641 enum tree_code code = TREE_CODE (*expr_p); 7642 7643 switch (code) 7644 { 7645 case COMPONENT_REF: 7646 case REALPART_EXPR: 7647 case IMAGPART_EXPR: 7648 case VIEW_CONVERT_EXPR: 7649 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 7650 gimple_test_f, fallback); 7651 break; 7652 7653 case ARRAY_REF: 7654 case ARRAY_RANGE_REF: 7655 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 7656 gimple_test_f, fallback); 7657 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 7658 gimple_test_f, fallback); 7659 break; 7660 7661 default: 7662 /* Anything else with side-effects must be converted to 7663 a valid statement before we get here. */ 7664 gcc_unreachable (); 7665 } 7666 7667 *expr_p = NULL; 7668 } 7669 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)) 7670 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode) 7671 { 7672 /* Historically, the compiler has treated a bare reference 7673 to a non-BLKmode volatile lvalue as forcing a load. */ 7674 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p)); 7675 7676 /* Normally, we do not want to create a temporary for a 7677 TREE_ADDRESSABLE type because such a type should not be 7678 copied by bitwise-assignment. However, we make an 7679 exception here, as all we are doing here is ensuring that 7680 we read the bytes that make up the type. We use 7681 create_tmp_var_raw because create_tmp_var will abort when 7682 given a TREE_ADDRESSABLE type. */ 7683 tree tmp = create_tmp_var_raw (type, "vol"); 7684 gimple_add_tmp_var (tmp); 7685 gimplify_assign (tmp, *expr_p, pre_p); 7686 *expr_p = NULL; 7687 } 7688 else 7689 /* We can't do anything useful with a volatile reference to 7690 an incomplete type, so just throw it away. 
Likewise for 7691 a BLKmode type, since any implicit inner load should 7692 already have been turned into an explicit one by the 7693 gimplification process. */ 7694 *expr_p = NULL; 7695 } 7696 7697 /* If we are gimplifying at the statement level, we're done. Tack 7698 everything together and return. */ 7699 if (fallback == fb_none || is_statement) 7700 { 7701 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear 7702 it out for GC to reclaim it. */ 7703 *expr_p = NULL_TREE; 7704 7705 if (!gimple_seq_empty_p (internal_pre) 7706 || !gimple_seq_empty_p (internal_post)) 7707 { 7708 gimplify_seq_add_seq (&internal_pre, internal_post); 7709 gimplify_seq_add_seq (pre_p, internal_pre); 7710 } 7711 7712 /* The result of gimplifying *EXPR_P is going to be the last few 7713 statements in *PRE_P and *POST_P. Add location information 7714 to all the statements that were added by the gimplification 7715 helpers. */ 7716 if (!gimple_seq_empty_p (*pre_p)) 7717 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location); 7718 7719 if (!gimple_seq_empty_p (*post_p)) 7720 annotate_all_with_location_after (*post_p, post_last_gsi, 7721 input_location); 7722 7723 goto out; 7724 } 7725 7726 #ifdef ENABLE_GIMPLE_CHECKING 7727 if (*expr_p) 7728 { 7729 enum tree_code code = TREE_CODE (*expr_p); 7730 /* These expressions should already be in gimple IR form. 
*/ 7731 gcc_assert (code != MODIFY_EXPR 7732 && code != ASM_EXPR 7733 && code != BIND_EXPR 7734 && code != CATCH_EXPR 7735 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr) 7736 && code != EH_FILTER_EXPR 7737 && code != GOTO_EXPR 7738 && code != LABEL_EXPR 7739 && code != LOOP_EXPR 7740 && code != SWITCH_EXPR 7741 && code != TRY_FINALLY_EXPR 7742 && code != OMP_CRITICAL 7743 && code != OMP_FOR 7744 && code != OMP_MASTER 7745 && code != OMP_ORDERED 7746 && code != OMP_PARALLEL 7747 && code != OMP_SECTIONS 7748 && code != OMP_SECTION 7749 && code != OMP_SINGLE); 7750 } 7751 #endif 7752 7753 /* Otherwise we're gimplifying a subexpression, so the resulting 7754 value is interesting. If it's a valid operand that matches 7755 GIMPLE_TEST_F, we're done. Unless we are handling some 7756 post-effects internally; if that's the case, we need to copy into 7757 a temporary before adding the post-effects to POST_P. */ 7758 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p)) 7759 goto out; 7760 7761 /* Otherwise, we need to create a new temporary for the gimplified 7762 expression. */ 7763 7764 /* We can't return an lvalue if we have an internal postqueue. The 7765 object the lvalue refers to would (probably) be modified by the 7766 postqueue; we need to copy the value out first, which means an 7767 rvalue. */ 7768 if ((fallback & fb_lvalue) 7769 && gimple_seq_empty_p (internal_post) 7770 && is_gimple_addressable (*expr_p)) 7771 { 7772 /* An lvalue will do. Take the address of the expression, store it 7773 in a temporary, and replace the expression with an INDIRECT_REF of 7774 that temporary. */ 7775 tmp = build_fold_addr_expr_loc (input_location, *expr_p); 7776 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue); 7777 *expr_p = build_simple_mem_ref (tmp); 7778 } 7779 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p)) 7780 { 7781 /* An rvalue will do. 
Assign the gimplified expression into a 7782 new temporary TMP and replace the original expression with 7783 TMP. First, make sure that the expression has a type so that 7784 it can be assigned into a temporary. */ 7785 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p))); 7786 7787 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue)) 7788 /* The postqueue might change the value of the expression between 7789 the initialization and use of the temporary, so we can't use a 7790 formal temp. FIXME do we care? */ 7791 { 7792 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); 7793 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE 7794 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE) 7795 DECL_GIMPLE_REG_P (*expr_p) = 1; 7796 } 7797 else 7798 *expr_p = get_formal_tmp_var (*expr_p, pre_p); 7799 } 7800 else 7801 { 7802 #ifdef ENABLE_GIMPLE_CHECKING 7803 if (!(fallback & fb_mayfail)) 7804 { 7805 fprintf (stderr, "gimplification failed:\n"); 7806 print_generic_expr (stderr, *expr_p, 0); 7807 debug_tree (*expr_p); 7808 internal_error ("gimplification failed"); 7809 } 7810 #endif 7811 gcc_assert (fallback & fb_mayfail); 7812 7813 /* If this is an asm statement, and the user asked for the 7814 impossible, don't die. Fail and let gimplify_asm_expr 7815 issue an error. */ 7816 ret = GS_ERROR; 7817 goto out; 7818 } 7819 7820 /* Make sure the temporary matches our predicate. */ 7821 gcc_assert ((*gimple_test_f) (*expr_p)); 7822 7823 if (!gimple_seq_empty_p (internal_post)) 7824 { 7825 annotate_all_with_location (internal_post, input_location); 7826 gimplify_seq_add_seq (pre_p, internal_post); 7827 } 7828 7829 out: 7830 input_location = saved_location; 7831 return ret; 7832 } 7833 7834 /* Look through TYPE for variable-sized objects and gimplify each such 7835 size that we find. Add to LIST_P any statements generated. 
*/ 7836 7837 void 7838 gimplify_type_sizes (tree type, gimple_seq *list_p) 7839 { 7840 tree field, t; 7841 7842 if (type == NULL || type == error_mark_node) 7843 return; 7844 7845 /* We first do the main variant, then copy into any other variants. */ 7846 type = TYPE_MAIN_VARIANT (type); 7847 7848 /* Avoid infinite recursion. */ 7849 if (TYPE_SIZES_GIMPLIFIED (type)) 7850 return; 7851 7852 TYPE_SIZES_GIMPLIFIED (type) = 1; 7853 7854 switch (TREE_CODE (type)) 7855 { 7856 case INTEGER_TYPE: 7857 case ENUMERAL_TYPE: 7858 case BOOLEAN_TYPE: 7859 case REAL_TYPE: 7860 case FIXED_POINT_TYPE: 7861 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p); 7862 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p); 7863 7864 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 7865 { 7866 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type); 7867 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type); 7868 } 7869 break; 7870 7871 case ARRAY_TYPE: 7872 /* These types may not have declarations, so handle them here. */ 7873 gimplify_type_sizes (TREE_TYPE (type), list_p); 7874 gimplify_type_sizes (TYPE_DOMAIN (type), list_p); 7875 /* Ensure VLA bounds aren't removed, for -O0 they should be variables 7876 with assigned stack slots, for -O1+ -g they should be tracked 7877 by VTA. 
*/ 7878 if (!(TYPE_NAME (type) 7879 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL 7880 && DECL_IGNORED_P (TYPE_NAME (type))) 7881 && TYPE_DOMAIN (type) 7882 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type))) 7883 { 7884 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type)); 7885 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t)) 7886 DECL_IGNORED_P (t) = 0; 7887 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 7888 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t)) 7889 DECL_IGNORED_P (t) = 0; 7890 } 7891 break; 7892 7893 case RECORD_TYPE: 7894 case UNION_TYPE: 7895 case QUAL_UNION_TYPE: 7896 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 7897 if (TREE_CODE (field) == FIELD_DECL) 7898 { 7899 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p); 7900 gimplify_one_sizepos (&DECL_SIZE (field), list_p); 7901 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p); 7902 gimplify_type_sizes (TREE_TYPE (field), list_p); 7903 } 7904 break; 7905 7906 case POINTER_TYPE: 7907 case REFERENCE_TYPE: 7908 /* We used to recurse on the pointed-to type here, which turned out to 7909 be incorrect because its definition might refer to variables not 7910 yet initialized at this point if a forward declaration is involved. 7911 7912 It was actually useful for anonymous pointed-to types to ensure 7913 that the sizes evaluation dominates every possible later use of the 7914 values. Restricting to such types here would be safe since there 7915 is no possible forward declaration around, but would introduce an 7916 undesirable middle-end semantic to anonymity. We then defer to 7917 front-ends the responsibility of ensuring that the sizes are 7918 evaluated both early and late enough, e.g. by attaching artificial 7919 type declarations to the tree. 
*/ 7920 break; 7921 7922 default: 7923 break; 7924 } 7925 7926 gimplify_one_sizepos (&TYPE_SIZE (type), list_p); 7927 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p); 7928 7929 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 7930 { 7931 TYPE_SIZE (t) = TYPE_SIZE (type); 7932 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type); 7933 TYPE_SIZES_GIMPLIFIED (t) = 1; 7934 } 7935 } 7936 7937 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P, 7938 a size or position, has had all of its SAVE_EXPRs evaluated. 7939 We add any required statements to *STMT_P. */ 7940 7941 void 7942 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p) 7943 { 7944 tree type, expr = *expr_p; 7945 7946 /* We don't do anything if the value isn't there, is constant, or contains 7947 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already 7948 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier 7949 will want to replace it with a new variable, but that will cause problems 7950 if this type is from outside the function. It's OK to have that here. */ 7951 if (is_gimple_sizepos (expr)) 7952 return; 7953 7954 type = TREE_TYPE (expr); 7955 *expr_p = unshare_expr (expr); 7956 7957 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue); 7958 expr = *expr_p; 7959 7960 /* Verify that we've an exact type match with the original expression. 7961 In particular, we do not wish to drop a "sizetype" in favour of a 7962 type of similar dimensions. We don't want to pollute the generic 7963 type-stripping code with this knowledge because it doesn't matter 7964 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT 7965 and friends retain their "sizetype-ness". 
*/ 7966 if (TREE_TYPE (expr) != type 7967 && TREE_CODE (type) == INTEGER_TYPE 7968 && TYPE_IS_SIZETYPE (type)) 7969 { 7970 tree tmp; 7971 gimple stmt; 7972 7973 *expr_p = create_tmp_var (type, NULL); 7974 tmp = build1 (NOP_EXPR, type, expr); 7975 stmt = gimplify_assign (*expr_p, tmp, stmt_p); 7976 gimple_set_location (stmt, EXPR_LOC_OR_HERE (expr)); 7977 } 7978 } 7979 7980 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node 7981 containing the sequence of corresponding GIMPLE statements. If DO_PARMS 7982 is true, also gimplify the parameters. */ 7983 7984 gimple 7985 gimplify_body (tree fndecl, bool do_parms) 7986 { 7987 location_t saved_location = input_location; 7988 gimple_seq parm_stmts, seq; 7989 gimple outer_bind; 7990 struct gimplify_ctx gctx; 7991 struct cgraph_node *cgn; 7992 7993 timevar_push (TV_TREE_GIMPLIFY); 7994 7995 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during 7996 gimplification. */ 7997 default_rtl_profile (); 7998 7999 gcc_assert (gimplify_ctxp == NULL); 8000 push_gimplify_context (&gctx); 8001 8002 /* Unshare most shared trees in the body and in that of any nested functions. 8003 It would seem we don't have to do this for nested functions because 8004 they are supposed to be output and then the outer function gimplified 8005 first, but the g++ front end doesn't always do it that way. */ 8006 unshare_body (fndecl); 8007 unvisit_body (fndecl); 8008 8009 cgn = cgraph_get_node (fndecl); 8010 if (cgn && cgn->origin) 8011 nonlocal_vlas = pointer_set_create (); 8012 8013 /* Make sure input_location isn't set to something weird. */ 8014 input_location = DECL_SOURCE_LOCATION (fndecl); 8015 8016 /* Resolve callee-copies. This has to be done before processing 8017 the body so that DECL_VALUE_EXPR gets processed correctly. */ 8018 parm_stmts = do_parms ? gimplify_parameters () : NULL; 8019 8020 /* Gimplify the function's body. 
*/ 8021 seq = NULL; 8022 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq); 8023 outer_bind = gimple_seq_first_stmt (seq); 8024 if (!outer_bind) 8025 { 8026 outer_bind = gimple_build_nop (); 8027 gimplify_seq_add_stmt (&seq, outer_bind); 8028 } 8029 8030 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is 8031 not the case, wrap everything in a GIMPLE_BIND to make it so. */ 8032 if (gimple_code (outer_bind) == GIMPLE_BIND 8033 && gimple_seq_first (seq) == gimple_seq_last (seq)) 8034 ; 8035 else 8036 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL); 8037 8038 DECL_SAVED_TREE (fndecl) = NULL_TREE; 8039 8040 /* If we had callee-copies statements, insert them at the beginning 8041 of the function and clear DECL_VALUE_EXPR_P on the parameters. */ 8042 if (!gimple_seq_empty_p (parm_stmts)) 8043 { 8044 tree parm; 8045 8046 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind)); 8047 gimple_bind_set_body (outer_bind, parm_stmts); 8048 8049 for (parm = DECL_ARGUMENTS (current_function_decl); 8050 parm; parm = DECL_CHAIN (parm)) 8051 if (DECL_HAS_VALUE_EXPR_P (parm)) 8052 { 8053 DECL_HAS_VALUE_EXPR_P (parm) = 0; 8054 DECL_IGNORED_P (parm) = 0; 8055 } 8056 } 8057 8058 if (nonlocal_vlas) 8059 { 8060 pointer_set_destroy (nonlocal_vlas); 8061 nonlocal_vlas = NULL; 8062 } 8063 8064 pop_gimplify_context (outer_bind); 8065 gcc_assert (gimplify_ctxp == NULL); 8066 8067 if (!seen_error ()) 8068 verify_gimple_in_seq (gimple_bind_body (outer_bind)); 8069 8070 timevar_pop (TV_TREE_GIMPLIFY); 8071 input_location = saved_location; 8072 8073 return outer_bind; 8074 } 8075 8076 typedef char *char_p; /* For DEF_VEC_P. */ 8077 DEF_VEC_P(char_p); 8078 DEF_VEC_ALLOC_P(char_p,heap); 8079 8080 /* Return whether we should exclude FNDECL from instrumentation. 
*/

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  VEC(char_p,heap) *vec;

  /* Exclude FNDECL if its printable name contains any of the substrings
     the user listed in -finstrument-functions-exclude-function-list.  */
  vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
  if (VEC_length (char_p, vec) > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 0);
      FOR_EACH_VEC_ELT (char_p, vec, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  /* Likewise if its source file name contains any of the substrings
     listed in -finstrument-functions-exclude-file-list.  */
  vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
  if (VEC_length (char_p, vec) > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (char_p, vec, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting sequence of GIMPLE statements is installed as the body
   of FNDECL with gimple_set_body and DECL_SAVED_TREE is cleared; nothing
   is returned.  */

void
gimplify_function_tree (tree fndecl)
{
  tree oldfn, parm, ret;
  gimple_seq seq;
  gimple bind;

  /* FNDECL must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Make FNDECL the current function for the duration of the pass;
     restored before returning.  */
  oldfn = current_function_decl;
  current_function_decl = fndecl;
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = gimple_seq_alloc ();
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      /* Build the exit-hook sequence: capture __builtin_return_address (0)
	 into a temporary, then call the BUILT_IN_PROFILE_FUNC_EXIT hook
	 with the function address and that return address.  This runs as
	 the cleanup of a try/finally wrapping the body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook sequence analogously with
	 BUILT_IN_PROFILE_FUNC_ENTER, executed before the try/finally.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  current_function_decl = oldfn;
  pop_cfun ();
}

/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P.
*/

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree orig_lhs = NULL_TREE, lhs, t;
  gimple_seq pre = NULL;	/* New statements to emit before STMT.  */
  gimple post_stmt = NULL;	/* Copy-back assignment emitted after STMT.  */
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands must be gimple values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	/* Re-gimplify every output operand according to its constraint.  */
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	/* Likewise for the input operands.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An operand of addressable type must stay in memory when the
	       constraint allows it.  */
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      orig_lhs = gimple_get_lhs (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 0 of a call or assignment is the LHS.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      /* Operand 1 of a call is the callee address; a bare
		 FUNCTION_DECL is already valid as-is.  */
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (is_gimple_reg (lhs))
	    {
	      /* NOTE(review): this branch looks unreachable — the enclosing
		 condition already requires !is_gimple_reg (lhs) and nothing
		 in between changes LHS.  Kept as-is; verify before removing.  */
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  if (is_gimple_call (stmt))
		    {
		      i = gimple_call_flags (stmt);
		      /* A temporary is needed unless the call is const or
			 pure and not looping.  */
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* No temporary is needed when the callee returns the
			 aggregate, or its DECL_RESULT, by reference.  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      /* Let STMT store into a fresh temporary and copy the
		 temporary to the original LHS afterwards.  */
	      tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);

	      if (TREE_CODE (orig_lhs) == SSA_NAME)
		orig_lhs = SSA_NAME_VAR (orig_lhs);

	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  /* Declare any temporaries created during re-gimplification.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  /* Emit the new statements before STMT; in SSA form, first schedule
     their symbols for renaming.  */
  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
	{
	  gimple_stmt_iterator i;

	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
	    mark_symbols_for_renaming (gsi_stmt (i));
	}
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  /* And the copy-back assignment, if any, right after STMT.  */
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}

/* Expand EXPR to list of gimple statements STMTS.  GIMPLE_TEST_F specifies
   the predicate that will hold for the result.  If VAR is not NULL, make the
   base variable of the final destination be VAR if suitable.  */

tree
force_gimple_operand_1 (tree expr, gimple_seq *stmts,
			gimple_predicate gimple_test_f, tree var)
{
  tree t;
  enum gimplify_status ret;
  struct gimplify_ctx gctx;

  *stmts = NULL;

  /* gimple_test_f might be more strict than is_gimple_val, make
     sure we pass both.  Just checking gimple_test_f doesn't work
     because most gimple predicates do not work recursively.  */
  if (is_gimple_val (expr)
      && (*gimple_test_f) (expr))
    return expr;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
  gimplify_ctxp->allow_rhs_cond_expr = true;

  /* Gimplify VAR = EXPR so the result is stored in VAR.  */
  if (var)
    expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);

  if (TREE_CODE (expr) != MODIFY_EXPR
      && TREE_TYPE (expr) == void_type_node)
    {
      /* A void expression yields no value; gimplify it for its side
	 effects only and return NULL_TREE.  */
      gimplify_and_add (expr, stmts);
      expr = NULL_TREE;
    }
  else
    {
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
      gcc_assert (ret != GS_ERROR);
    }

  /* Declare any new temporaries.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
      add_referenced_var (t);

  pop_gimplify_context (NULL);

  return expr;
}

/* Expand EXPR to list of gimple statements STMTS.  If SIMPLE is true,
   force the result to be either ssa_name or an invariant, otherwise
   just force it to be a rhs expression.  If VAR is not NULL, make the
   base variable of the final destination be VAR if suitable.
*/ 8463 8464 tree 8465 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var) 8466 { 8467 return force_gimple_operand_1 (expr, stmts, 8468 simple ? is_gimple_val : is_gimple_reg_rhs, 8469 var); 8470 } 8471 8472 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F 8473 and VAR. If some statements are produced, emits them at GSI. 8474 If BEFORE is true. the statements are appended before GSI, otherwise 8475 they are appended after it. M specifies the way GSI moves after 8476 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */ 8477 8478 tree 8479 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr, 8480 gimple_predicate gimple_test_f, 8481 tree var, bool before, 8482 enum gsi_iterator_update m) 8483 { 8484 gimple_seq stmts; 8485 8486 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var); 8487 8488 if (!gimple_seq_empty_p (stmts)) 8489 { 8490 if (gimple_in_ssa_p (cfun)) 8491 { 8492 gimple_stmt_iterator i; 8493 8494 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i)) 8495 mark_symbols_for_renaming (gsi_stmt (i)); 8496 } 8497 8498 if (before) 8499 gsi_insert_seq_before (gsi, stmts, m); 8500 else 8501 gsi_insert_seq_after (gsi, stmts, m); 8502 } 8503 8504 return expr; 8505 } 8506 8507 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR. 8508 If SIMPLE is true, force the result to be either ssa_name or an invariant, 8509 otherwise just force it to be a rhs expression. If some statements are 8510 produced, emits them at GSI. If BEFORE is true, the statements are 8511 appended before GSI, otherwise they are appended after it. M specifies 8512 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING 8513 are the usual values). 
*/ 8514 8515 tree 8516 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr, 8517 bool simple_p, tree var, bool before, 8518 enum gsi_iterator_update m) 8519 { 8520 return force_gimple_operand_gsi_1 (gsi, expr, 8521 simple_p 8522 ? is_gimple_val : is_gimple_reg_rhs, 8523 var, before, m); 8524 } 8525 8526 8527 #include "gt-gimplify.h" 8528