1 /* Perform the semantic phase of parsing, i.e., the process of 2 building tree structure, checking semantic consistency, and 3 building RTL. These routines are used both during actual parsing 4 and during the instantiation of template functions. 5 6 Copyright (C) 1998-2018 Free Software Foundation, Inc. 7 Written by Mark Mitchell (mmitchell@usa.net) based on code found 8 formerly in parse.y and pt.c. 9 10 This file is part of GCC. 11 12 GCC is free software; you can redistribute it and/or modify it 13 under the terms of the GNU General Public License as published by 14 the Free Software Foundation; either version 3, or (at your option) 15 any later version. 16 17 GCC is distributed in the hope that it will be useful, but 18 WITHOUT ANY WARRANTY; without even the implied warranty of 19 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 General Public License for more details. 21 22 You should have received a copy of the GNU General Public License 23 along with GCC; see the file COPYING3. If not see 24 <http://www.gnu.org/licenses/>. */ 25 26 #include "config.h" 27 #include "system.h" 28 #include "coretypes.h" 29 #include "target.h" 30 #include "bitmap.h" 31 #include "cp-tree.h" 32 #include "stringpool.h" 33 #include "cgraph.h" 34 #include "stmt.h" 35 #include "varasm.h" 36 #include "stor-layout.h" 37 #include "c-family/c-objc.h" 38 #include "tree-inline.h" 39 #include "intl.h" 40 #include "tree-iterator.h" 41 #include "omp-general.h" 42 #include "convert.h" 43 #include "stringpool.h" 44 #include "attribs.h" 45 #include "gomp-constants.h" 46 #include "predict.h" 47 48 /* There routines provide a modular interface to perform many parsing 49 operations. They may therefore be used during actual parsing, or 50 during template instantiation, which may be regarded as a 51 degenerate form of parsing. 
*/ 52 53 static tree maybe_convert_cond (tree); 54 static tree finalize_nrv_r (tree *, int *, void *); 55 static tree capture_decltype (tree); 56 57 /* Used for OpenMP non-static data member privatization. */ 58 59 static hash_map<tree, tree> *omp_private_member_map; 60 static vec<tree> omp_private_member_vec; 61 static bool omp_private_member_ignore_next; 62 63 64 /* Deferred Access Checking Overview 65 --------------------------------- 66 67 Most C++ expressions and declarations require access checking 68 to be performed during parsing. However, in several cases, 69 this has to be treated differently. 70 71 For member declarations, access checking has to be deferred 72 until more information about the declaration is known. For 73 example: 74 75 class A { 76 typedef int X; 77 public: 78 X f(); 79 }; 80 81 A::X A::f(); 82 A::X g(); 83 84 When we are parsing the function return type `A::X', we don't 85 really know if this is allowed until we parse the function name. 86 87 Furthermore, some contexts require that access checking is 88 never performed at all. These include class heads, and template 89 instantiations. 90 91 Typical use of access checking functions is described here: 92 93 1. When we enter a context that requires certain access checking 94 mode, the function `push_deferring_access_checks' is called with 95 DEFERRING argument specifying the desired mode. Access checking 96 may be performed immediately (dk_no_deferred), deferred 97 (dk_deferred), or not performed (dk_no_check). 98 99 2. When a declaration such as a type, or a variable, is encountered, 100 the function `perform_or_defer_access_check' is called. It 101 maintains a vector of all deferred checks. 102 103 3. The global `current_class_type' or `current_function_decl' is then 104 setup by the parser. `enforce_access' relies on these information 105 to check access. 106 107 4. 
Upon exiting the context mentioned in step 1, 108 `perform_deferred_access_checks' is called to check all declaration 109 stored in the vector. `pop_deferring_access_checks' is then 110 called to restore the previous access checking mode. 111 112 In case of parsing error, we simply call `pop_deferring_access_checks' 113 without `perform_deferred_access_checks'. */ 114 115 struct GTY(()) deferred_access { 116 /* A vector representing name-lookups for which we have deferred 117 checking access controls. We cannot check the accessibility of 118 names used in a decl-specifier-seq until we know what is being 119 declared because code like: 120 121 class A { 122 class B {}; 123 B* f(); 124 } 125 126 A::B* A::f() { return 0; } 127 128 is valid, even though `A::B' is not generally accessible. */ 129 vec<deferred_access_check, va_gc> * GTY(()) deferred_access_checks; 130 131 /* The current mode of access checks. */ 132 enum deferring_kind deferring_access_checks_kind; 133 134 }; 135 136 /* Data for deferred access checking. */ 137 static GTY(()) vec<deferred_access, va_gc> *deferred_access_stack; 138 static GTY(()) unsigned deferred_access_no_check; 139 140 /* Save the current deferred access states and start deferred 141 access checking iff DEFER_P is true. */ 142 143 void 144 push_deferring_access_checks (deferring_kind deferring) 145 { 146 /* For context like template instantiation, access checking 147 disabling applies to all nested context. */ 148 if (deferred_access_no_check || deferring == dk_no_check) 149 deferred_access_no_check++; 150 else 151 { 152 deferred_access e = {NULL, deferring}; 153 vec_safe_push (deferred_access_stack, e); 154 } 155 } 156 157 /* Save the current deferred access states and start deferred access 158 checking, continuing the set of deferred checks in CHECKS. 
*/ 159 160 void 161 reopen_deferring_access_checks (vec<deferred_access_check, va_gc> * checks) 162 { 163 push_deferring_access_checks (dk_deferred); 164 if (!deferred_access_no_check) 165 deferred_access_stack->last().deferred_access_checks = checks; 166 } 167 168 /* Resume deferring access checks again after we stopped doing 169 this previously. */ 170 171 void 172 resume_deferring_access_checks (void) 173 { 174 if (!deferred_access_no_check) 175 deferred_access_stack->last().deferring_access_checks_kind = dk_deferred; 176 } 177 178 /* Stop deferring access checks. */ 179 180 void 181 stop_deferring_access_checks (void) 182 { 183 if (!deferred_access_no_check) 184 deferred_access_stack->last().deferring_access_checks_kind = dk_no_deferred; 185 } 186 187 /* Discard the current deferred access checks and restore the 188 previous states. */ 189 190 void 191 pop_deferring_access_checks (void) 192 { 193 if (deferred_access_no_check) 194 deferred_access_no_check--; 195 else 196 deferred_access_stack->pop (); 197 } 198 199 /* Returns a TREE_LIST representing the deferred checks. 200 The TREE_PURPOSE of each node is the type through which the 201 access occurred; the TREE_VALUE is the declaration named. 202 */ 203 204 vec<deferred_access_check, va_gc> * 205 get_deferred_access_checks (void) 206 { 207 if (deferred_access_no_check) 208 return NULL; 209 else 210 return (deferred_access_stack->last().deferred_access_checks); 211 } 212 213 /* Take current deferred checks and combine with the 214 previous states if we also defer checks previously. 215 Otherwise perform checks now. 
*/ 216 217 void 218 pop_to_parent_deferring_access_checks (void) 219 { 220 if (deferred_access_no_check) 221 deferred_access_no_check--; 222 else 223 { 224 vec<deferred_access_check, va_gc> *checks; 225 deferred_access *ptr; 226 227 checks = (deferred_access_stack->last ().deferred_access_checks); 228 229 deferred_access_stack->pop (); 230 ptr = &deferred_access_stack->last (); 231 if (ptr->deferring_access_checks_kind == dk_no_deferred) 232 { 233 /* Check access. */ 234 perform_access_checks (checks, tf_warning_or_error); 235 } 236 else 237 { 238 /* Merge with parent. */ 239 int i, j; 240 deferred_access_check *chk, *probe; 241 242 FOR_EACH_VEC_SAFE_ELT (checks, i, chk) 243 { 244 FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, j, probe) 245 { 246 if (probe->binfo == chk->binfo && 247 probe->decl == chk->decl && 248 probe->diag_decl == chk->diag_decl) 249 goto found; 250 } 251 /* Insert into parent's checks. */ 252 vec_safe_push (ptr->deferred_access_checks, *chk); 253 found:; 254 } 255 } 256 } 257 } 258 259 /* Perform the access checks in CHECKS. The TREE_PURPOSE of each node 260 is the BINFO indicating the qualifying scope used to access the 261 DECL node stored in the TREE_VALUE of the node. If CHECKS is empty 262 or we aren't in SFINAE context or all the checks succeed return TRUE, 263 otherwise FALSE. */ 264 265 bool 266 perform_access_checks (vec<deferred_access_check, va_gc> *checks, 267 tsubst_flags_t complain) 268 { 269 int i; 270 deferred_access_check *chk; 271 location_t loc = input_location; 272 bool ok = true; 273 274 if (!checks) 275 return true; 276 277 FOR_EACH_VEC_SAFE_ELT (checks, i, chk) 278 { 279 input_location = chk->loc; 280 ok &= enforce_access (chk->binfo, chk->decl, chk->diag_decl, complain); 281 } 282 283 input_location = loc; 284 return (complain & tf_error) ? true : ok; 285 } 286 287 /* Perform the deferred access checks. 
288 289 After performing the checks, we still have to keep the list 290 `deferred_access_stack->deferred_access_checks' since we may want 291 to check access for them again later in a different context. 292 For example: 293 294 class A { 295 typedef int X; 296 static X a; 297 }; 298 A::X A::a, x; // No error for `A::a', error for `x' 299 300 We have to perform deferred access of `A::X', first with `A::a', 301 next with `x'. Return value like perform_access_checks above. */ 302 303 bool 304 perform_deferred_access_checks (tsubst_flags_t complain) 305 { 306 return perform_access_checks (get_deferred_access_checks (), complain); 307 } 308 309 /* Defer checking the accessibility of DECL, when looked up in 310 BINFO. DIAG_DECL is the declaration to use to print diagnostics. 311 Return value like perform_access_checks above. 312 If non-NULL, report failures to AFI. */ 313 314 bool 315 perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl, 316 tsubst_flags_t complain, 317 access_failure_info *afi) 318 { 319 int i; 320 deferred_access *ptr; 321 deferred_access_check *chk; 322 323 324 /* Exit if we are in a context that no access checking is performed. 325 */ 326 if (deferred_access_no_check) 327 return true; 328 329 gcc_assert (TREE_CODE (binfo) == TREE_BINFO); 330 331 ptr = &deferred_access_stack->last (); 332 333 /* If we are not supposed to defer access checks, just check now. */ 334 if (ptr->deferring_access_checks_kind == dk_no_deferred) 335 { 336 bool ok = enforce_access (binfo, decl, diag_decl, complain, afi); 337 return (complain & tf_error) ? true : ok; 338 } 339 340 /* See if we are already going to perform this check. */ 341 FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, i, chk) 342 { 343 if (chk->decl == decl && chk->binfo == binfo && 344 chk->diag_decl == diag_decl) 345 { 346 return true; 347 } 348 } 349 /* If not, record the check. 
*/ 350 deferred_access_check new_access = {binfo, decl, diag_decl, input_location}; 351 vec_safe_push (ptr->deferred_access_checks, new_access); 352 353 return true; 354 } 355 356 /* Returns nonzero if the current statement is a full expression, 357 i.e. temporaries created during that statement should be destroyed 358 at the end of the statement. */ 359 360 int 361 stmts_are_full_exprs_p (void) 362 { 363 return current_stmt_tree ()->stmts_are_full_exprs_p; 364 } 365 366 /* T is a statement. Add it to the statement-tree. This is the C++ 367 version. The C/ObjC frontends have a slightly different version of 368 this function. */ 369 370 tree 371 add_stmt (tree t) 372 { 373 enum tree_code code = TREE_CODE (t); 374 375 if (EXPR_P (t) && code != LABEL_EXPR) 376 { 377 if (!EXPR_HAS_LOCATION (t)) 378 SET_EXPR_LOCATION (t, input_location); 379 380 /* When we expand a statement-tree, we must know whether or not the 381 statements are full-expressions. We record that fact here. */ 382 STMT_IS_FULL_EXPR_P (t) = stmts_are_full_exprs_p (); 383 } 384 385 if (code == LABEL_EXPR || code == CASE_LABEL_EXPR) 386 STATEMENT_LIST_HAS_LABEL (cur_stmt_list) = 1; 387 388 /* Add T to the statement-tree. Non-side-effect statements need to be 389 recorded during statement expressions. */ 390 gcc_checking_assert (!stmt_list_stack->is_empty ()); 391 append_to_statement_list_force (t, &cur_stmt_list); 392 393 return t; 394 } 395 396 /* Returns the stmt_tree to which statements are currently being added. */ 397 398 stmt_tree 399 current_stmt_tree (void) 400 { 401 return (cfun 402 ? &cfun->language->base.x_stmt_tree 403 : &scope_chain->x_stmt_tree); 404 } 405 406 /* If statements are full expressions, wrap STMT in a CLEANUP_POINT_EXPR. 
*/ 407 408 static tree 409 maybe_cleanup_point_expr (tree expr) 410 { 411 if (!processing_template_decl && stmts_are_full_exprs_p ()) 412 expr = fold_build_cleanup_point_expr (TREE_TYPE (expr), expr); 413 return expr; 414 } 415 416 /* Like maybe_cleanup_point_expr except have the type of the new expression be 417 void so we don't need to create a temporary variable to hold the inner 418 expression. The reason why we do this is because the original type might be 419 an aggregate and we cannot create a temporary variable for that type. */ 420 421 tree 422 maybe_cleanup_point_expr_void (tree expr) 423 { 424 if (!processing_template_decl && stmts_are_full_exprs_p ()) 425 expr = fold_build_cleanup_point_expr (void_type_node, expr); 426 return expr; 427 } 428 429 430 431 /* Create a declaration statement for the declaration given by the DECL. */ 432 433 void 434 add_decl_expr (tree decl) 435 { 436 tree r = build_stmt (DECL_SOURCE_LOCATION (decl), DECL_EXPR, decl); 437 if (DECL_INITIAL (decl) 438 || (DECL_SIZE (decl) && TREE_SIDE_EFFECTS (DECL_SIZE (decl)))) 439 r = maybe_cleanup_point_expr_void (r); 440 add_stmt (r); 441 } 442 443 /* Finish a scope. */ 444 445 tree 446 do_poplevel (tree stmt_list) 447 { 448 tree block = NULL; 449 450 if (stmts_are_full_exprs_p ()) 451 block = poplevel (kept_level_p (), 1, 0); 452 453 stmt_list = pop_stmt_list (stmt_list); 454 455 if (!processing_template_decl) 456 { 457 stmt_list = c_build_bind_expr (input_location, block, stmt_list); 458 /* ??? See c_end_compound_stmt re statement expressions. */ 459 } 460 461 return stmt_list; 462 } 463 464 /* Begin a new scope. */ 465 466 static tree 467 do_pushlevel (scope_kind sk) 468 { 469 tree ret = push_stmt_list (); 470 if (stmts_are_full_exprs_p ()) 471 begin_scope (sk, NULL); 472 return ret; 473 } 474 475 /* Queue a cleanup. CLEANUP is an expression/statement to be executed 476 when the current scope is exited. 
EH_ONLY is true when this is not 477 meant to apply to normal control flow transfer. */ 478 479 void 480 push_cleanup (tree decl, tree cleanup, bool eh_only) 481 { 482 tree stmt = build_stmt (input_location, CLEANUP_STMT, NULL, cleanup, decl); 483 CLEANUP_EH_ONLY (stmt) = eh_only; 484 add_stmt (stmt); 485 CLEANUP_BODY (stmt) = push_stmt_list (); 486 } 487 488 /* Simple infinite loop tracking for -Wreturn-type. We keep a stack of all 489 the current loops, represented by 'NULL_TREE' if we've seen a possible 490 exit, and 'error_mark_node' if not. This is currently used only to 491 suppress the warning about a function with no return statements, and 492 therefore we don't bother noting returns as possible exits. We also 493 don't bother with gotos. */ 494 495 static void 496 begin_maybe_infinite_loop (tree cond) 497 { 498 /* Only track this while parsing a function, not during instantiation. */ 499 if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl) 500 && !processing_template_decl)) 501 return; 502 bool maybe_infinite = true; 503 if (cond) 504 { 505 cond = fold_non_dependent_expr (cond); 506 maybe_infinite = integer_nonzerop (cond); 507 } 508 vec_safe_push (cp_function_chain->infinite_loops, 509 maybe_infinite ? error_mark_node : NULL_TREE); 510 511 } 512 513 /* A break is a possible exit for the current loop. */ 514 515 void 516 break_maybe_infinite_loop (void) 517 { 518 if (!cfun) 519 return; 520 cp_function_chain->infinite_loops->last() = NULL_TREE; 521 } 522 523 /* If we reach the end of the loop without seeing a possible exit, we have 524 an infinite loop. 
*/ 525 526 static void 527 end_maybe_infinite_loop (tree cond) 528 { 529 if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl) 530 && !processing_template_decl)) 531 return; 532 tree current = cp_function_chain->infinite_loops->pop(); 533 if (current != NULL_TREE) 534 { 535 cond = fold_non_dependent_expr (cond); 536 if (integer_nonzerop (cond)) 537 current_function_infinite_loop = 1; 538 } 539 } 540 541 542 /* Begin a conditional that might contain a declaration. When generating 543 normal code, we want the declaration to appear before the statement 544 containing the conditional. When generating template code, we want the 545 conditional to be rendered as the raw DECL_EXPR. */ 546 547 static void 548 begin_cond (tree *cond_p) 549 { 550 if (processing_template_decl) 551 *cond_p = push_stmt_list (); 552 } 553 554 /* Finish such a conditional. */ 555 556 static void 557 finish_cond (tree *cond_p, tree expr) 558 { 559 if (processing_template_decl) 560 { 561 tree cond = pop_stmt_list (*cond_p); 562 563 if (expr == NULL_TREE) 564 /* Empty condition in 'for'. */ 565 gcc_assert (empty_expr_stmt_p (cond)); 566 else if (check_for_bare_parameter_packs (expr)) 567 expr = error_mark_node; 568 else if (!empty_expr_stmt_p (cond)) 569 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), cond, expr); 570 } 571 *cond_p = expr; 572 } 573 574 /* If *COND_P specifies a conditional with a declaration, transform the 575 loop such that 576 while (A x = 42) { } 577 for (; A x = 42;) { } 578 becomes 579 while (true) { A x = 42; if (!x) break; } 580 for (;;) { A x = 42; if (!x) break; } 581 The statement list for BODY will be empty if the conditional did 582 not declare anything. 
*/ 583 584 static void 585 simplify_loop_decl_cond (tree *cond_p, tree body) 586 { 587 tree cond, if_stmt; 588 589 if (!TREE_SIDE_EFFECTS (body)) 590 return; 591 592 cond = *cond_p; 593 *cond_p = boolean_true_node; 594 595 if_stmt = begin_if_stmt (); 596 cond = cp_build_unary_op (TRUTH_NOT_EXPR, cond, false, tf_warning_or_error); 597 finish_if_stmt_cond (cond, if_stmt); 598 finish_break_stmt (); 599 finish_then_clause (if_stmt); 600 finish_if_stmt (if_stmt); 601 } 602 603 /* Finish a goto-statement. */ 604 605 tree 606 finish_goto_stmt (tree destination) 607 { 608 if (identifier_p (destination)) 609 destination = lookup_label (destination); 610 611 /* We warn about unused labels with -Wunused. That means we have to 612 mark the used labels as used. */ 613 if (TREE_CODE (destination) == LABEL_DECL) 614 TREE_USED (destination) = 1; 615 else 616 { 617 destination = mark_rvalue_use (destination); 618 if (!processing_template_decl) 619 { 620 destination = cp_convert (ptr_type_node, destination, 621 tf_warning_or_error); 622 if (error_operand_p (destination)) 623 return NULL_TREE; 624 destination 625 = fold_build_cleanup_point_expr (TREE_TYPE (destination), 626 destination); 627 } 628 } 629 630 check_goto (destination); 631 632 add_stmt (build_predict_expr (PRED_GOTO, NOT_TAKEN)); 633 return add_stmt (build_stmt (input_location, GOTO_EXPR, destination)); 634 } 635 636 /* COND is the condition-expression for an if, while, etc., 637 statement. Convert it to a boolean value, if appropriate. 638 In addition, verify sequence points if -Wsequence-point is enabled. */ 639 640 static tree 641 maybe_convert_cond (tree cond) 642 { 643 /* Empty conditions remain empty. */ 644 if (!cond) 645 return NULL_TREE; 646 647 /* Wait until we instantiate templates before doing conversion. */ 648 if (processing_template_decl) 649 return cond; 650 651 if (warn_sequence_point) 652 verify_sequence_points (cond); 653 654 /* Do the conversion. 
*/ 655 cond = convert_from_reference (cond); 656 657 if (TREE_CODE (cond) == MODIFY_EXPR 658 && !TREE_NO_WARNING (cond) 659 && warn_parentheses) 660 { 661 warning_at (EXPR_LOC_OR_LOC (cond, input_location), OPT_Wparentheses, 662 "suggest parentheses around assignment used as truth value"); 663 TREE_NO_WARNING (cond) = 1; 664 } 665 666 return condition_conversion (cond); 667 } 668 669 /* Finish an expression-statement, whose EXPRESSION is as indicated. */ 670 671 tree 672 finish_expr_stmt (tree expr) 673 { 674 tree r = NULL_TREE; 675 location_t loc = EXPR_LOCATION (expr); 676 677 if (expr != NULL_TREE) 678 { 679 /* If we ran into a problem, make sure we complained. */ 680 gcc_assert (expr != error_mark_node || seen_error ()); 681 682 if (!processing_template_decl) 683 { 684 if (warn_sequence_point) 685 verify_sequence_points (expr); 686 expr = convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error); 687 } 688 else if (!type_dependent_expression_p (expr)) 689 convert_to_void (build_non_dependent_expr (expr), ICV_STATEMENT, 690 tf_warning_or_error); 691 692 if (check_for_bare_parameter_packs (expr)) 693 expr = error_mark_node; 694 695 /* Simplification of inner statement expressions, compound exprs, 696 etc can result in us already having an EXPR_STMT. */ 697 if (TREE_CODE (expr) != CLEANUP_POINT_EXPR) 698 { 699 if (TREE_CODE (expr) != EXPR_STMT) 700 expr = build_stmt (loc, EXPR_STMT, expr); 701 expr = maybe_cleanup_point_expr_void (expr); 702 } 703 704 r = add_stmt (expr); 705 } 706 707 return r; 708 } 709 710 711 /* Begin an if-statement. Returns a newly created IF_STMT if 712 appropriate. */ 713 714 tree 715 begin_if_stmt (void) 716 { 717 tree r, scope; 718 scope = do_pushlevel (sk_cond); 719 r = build_stmt (input_location, IF_STMT, NULL_TREE, 720 NULL_TREE, NULL_TREE, scope); 721 current_binding_level->this_entity = r; 722 begin_cond (&IF_COND (r)); 723 return r; 724 } 725 726 /* Process the COND of an if-statement, which may be given by 727 IF_STMT. 
*/ 728 729 tree 730 finish_if_stmt_cond (tree cond, tree if_stmt) 731 { 732 cond = maybe_convert_cond (cond); 733 if (IF_STMT_CONSTEXPR_P (if_stmt) 734 && !type_dependent_expression_p (cond) 735 && require_constant_expression (cond) 736 && !instantiation_dependent_expression_p (cond) 737 /* Wait until instantiation time, since only then COND has been 738 converted to bool. */ 739 && TREE_TYPE (cond) == boolean_type_node) 740 { 741 cond = instantiate_non_dependent_expr (cond); 742 cond = cxx_constant_value (cond, NULL_TREE); 743 } 744 finish_cond (&IF_COND (if_stmt), cond); 745 add_stmt (if_stmt); 746 THEN_CLAUSE (if_stmt) = push_stmt_list (); 747 return cond; 748 } 749 750 /* Finish the then-clause of an if-statement, which may be given by 751 IF_STMT. */ 752 753 tree 754 finish_then_clause (tree if_stmt) 755 { 756 THEN_CLAUSE (if_stmt) = pop_stmt_list (THEN_CLAUSE (if_stmt)); 757 return if_stmt; 758 } 759 760 /* Begin the else-clause of an if-statement. */ 761 762 void 763 begin_else_clause (tree if_stmt) 764 { 765 ELSE_CLAUSE (if_stmt) = push_stmt_list (); 766 } 767 768 /* Finish the else-clause of an if-statement, which may be given by 769 IF_STMT. */ 770 771 void 772 finish_else_clause (tree if_stmt) 773 { 774 ELSE_CLAUSE (if_stmt) = pop_stmt_list (ELSE_CLAUSE (if_stmt)); 775 } 776 777 /* Finish an if-statement. */ 778 779 void 780 finish_if_stmt (tree if_stmt) 781 { 782 tree scope = IF_SCOPE (if_stmt); 783 IF_SCOPE (if_stmt) = NULL; 784 add_stmt (do_poplevel (scope)); 785 } 786 787 /* Begin a while-statement. Returns a newly created WHILE_STMT if 788 appropriate. */ 789 790 tree 791 begin_while_stmt (void) 792 { 793 tree r; 794 r = build_stmt (input_location, WHILE_STMT, NULL_TREE, NULL_TREE); 795 add_stmt (r); 796 WHILE_BODY (r) = do_pushlevel (sk_block); 797 begin_cond (&WHILE_COND (r)); 798 return r; 799 } 800 801 /* Process the COND of a while-statement, which may be given by 802 WHILE_STMT. 
*/ 803 804 void 805 finish_while_stmt_cond (tree cond, tree while_stmt, bool ivdep, 806 unsigned short unroll) 807 { 808 cond = maybe_convert_cond (cond); 809 finish_cond (&WHILE_COND (while_stmt), cond); 810 begin_maybe_infinite_loop (cond); 811 if (ivdep && cond != error_mark_node) 812 WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR, 813 TREE_TYPE (WHILE_COND (while_stmt)), 814 WHILE_COND (while_stmt), 815 build_int_cst (integer_type_node, 816 annot_expr_ivdep_kind), 817 integer_zero_node); 818 if (unroll && cond != error_mark_node) 819 WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR, 820 TREE_TYPE (WHILE_COND (while_stmt)), 821 WHILE_COND (while_stmt), 822 build_int_cst (integer_type_node, 823 annot_expr_unroll_kind), 824 build_int_cst (integer_type_node, 825 unroll)); 826 simplify_loop_decl_cond (&WHILE_COND (while_stmt), WHILE_BODY (while_stmt)); 827 } 828 829 /* Finish a while-statement, which may be given by WHILE_STMT. */ 830 831 void 832 finish_while_stmt (tree while_stmt) 833 { 834 end_maybe_infinite_loop (boolean_true_node); 835 WHILE_BODY (while_stmt) = do_poplevel (WHILE_BODY (while_stmt)); 836 } 837 838 /* Begin a do-statement. Returns a newly created DO_STMT if 839 appropriate. */ 840 841 tree 842 begin_do_stmt (void) 843 { 844 tree r = build_stmt (input_location, DO_STMT, NULL_TREE, NULL_TREE); 845 begin_maybe_infinite_loop (boolean_true_node); 846 add_stmt (r); 847 DO_BODY (r) = push_stmt_list (); 848 return r; 849 } 850 851 /* Finish the body of a do-statement, which may be given by DO_STMT. 
*/ 852 853 void 854 finish_do_body (tree do_stmt) 855 { 856 tree body = DO_BODY (do_stmt) = pop_stmt_list (DO_BODY (do_stmt)); 857 858 if (TREE_CODE (body) == STATEMENT_LIST && STATEMENT_LIST_TAIL (body)) 859 body = STATEMENT_LIST_TAIL (body)->stmt; 860 861 if (IS_EMPTY_STMT (body)) 862 warning (OPT_Wempty_body, 863 "suggest explicit braces around empty body in %<do%> statement"); 864 } 865 866 /* Finish a do-statement, which may be given by DO_STMT, and whose 867 COND is as indicated. */ 868 869 void 870 finish_do_stmt (tree cond, tree do_stmt, bool ivdep, unsigned short unroll) 871 { 872 cond = maybe_convert_cond (cond); 873 end_maybe_infinite_loop (cond); 874 if (ivdep && cond != error_mark_node) 875 cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, 876 build_int_cst (integer_type_node, annot_expr_ivdep_kind), 877 integer_zero_node); 878 if (unroll && cond != error_mark_node) 879 cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, 880 build_int_cst (integer_type_node, annot_expr_unroll_kind), 881 build_int_cst (integer_type_node, unroll)); 882 DO_COND (do_stmt) = cond; 883 } 884 885 /* Finish a return-statement. The EXPRESSION returned, if any, is as 886 indicated. */ 887 888 tree 889 finish_return_stmt (tree expr) 890 { 891 tree r; 892 bool no_warning; 893 894 expr = check_return_expr (expr, &no_warning); 895 896 if (error_operand_p (expr) 897 || (flag_openmp && !check_omp_return ())) 898 { 899 /* Suppress -Wreturn-type for this function. */ 900 if (warn_return_type) 901 TREE_NO_WARNING (current_function_decl) = true; 902 return error_mark_node; 903 } 904 905 if (!processing_template_decl) 906 { 907 if (warn_sequence_point) 908 verify_sequence_points (expr); 909 910 if (DECL_DESTRUCTOR_P (current_function_decl) 911 || (DECL_CONSTRUCTOR_P (current_function_decl) 912 && targetm.cxx.cdtor_returns_this ())) 913 { 914 /* Similarly, all destructors must run destructors for 915 base-classes before returning. 
So, all returns in a 916 destructor get sent to the DTOR_LABEL; finish_function emits 917 code to return a value there. */ 918 return finish_goto_stmt (cdtor_label); 919 } 920 } 921 922 r = build_stmt (input_location, RETURN_EXPR, expr); 923 TREE_NO_WARNING (r) |= no_warning; 924 r = maybe_cleanup_point_expr_void (r); 925 r = add_stmt (r); 926 927 return r; 928 } 929 930 /* Begin the scope of a for-statement or a range-for-statement. 931 Both the returned trees are to be used in a call to 932 begin_for_stmt or begin_range_for_stmt. */ 933 934 tree 935 begin_for_scope (tree *init) 936 { 937 tree scope = NULL_TREE; 938 if (flag_new_for_scope) 939 scope = do_pushlevel (sk_for); 940 941 if (processing_template_decl) 942 *init = push_stmt_list (); 943 else 944 *init = NULL_TREE; 945 946 return scope; 947 } 948 949 /* Begin a for-statement. Returns a new FOR_STMT. 950 SCOPE and INIT should be the return of begin_for_scope, 951 or both NULL_TREE */ 952 953 tree 954 begin_for_stmt (tree scope, tree init) 955 { 956 tree r; 957 958 r = build_stmt (input_location, FOR_STMT, NULL_TREE, NULL_TREE, 959 NULL_TREE, NULL_TREE, NULL_TREE); 960 961 if (scope == NULL_TREE) 962 { 963 gcc_assert (!init || !flag_new_for_scope); 964 if (!init) 965 scope = begin_for_scope (&init); 966 } 967 FOR_INIT_STMT (r) = init; 968 FOR_SCOPE (r) = scope; 969 970 return r; 971 } 972 973 /* Finish the init-statement of a for-statement, which may be 974 given by FOR_STMT. */ 975 976 void 977 finish_init_stmt (tree for_stmt) 978 { 979 if (processing_template_decl) 980 FOR_INIT_STMT (for_stmt) = pop_stmt_list (FOR_INIT_STMT (for_stmt)); 981 add_stmt (for_stmt); 982 FOR_BODY (for_stmt) = do_pushlevel (sk_block); 983 begin_cond (&FOR_COND (for_stmt)); 984 } 985 986 /* Finish the COND of a for-statement, which may be given by 987 FOR_STMT. 
*/ 988 989 void 990 finish_for_cond (tree cond, tree for_stmt, bool ivdep, unsigned short unroll) 991 { 992 cond = maybe_convert_cond (cond); 993 finish_cond (&FOR_COND (for_stmt), cond); 994 begin_maybe_infinite_loop (cond); 995 if (ivdep && cond != error_mark_node) 996 FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR, 997 TREE_TYPE (FOR_COND (for_stmt)), 998 FOR_COND (for_stmt), 999 build_int_cst (integer_type_node, 1000 annot_expr_ivdep_kind), 1001 integer_zero_node); 1002 if (unroll && cond != error_mark_node) 1003 FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR, 1004 TREE_TYPE (FOR_COND (for_stmt)), 1005 FOR_COND (for_stmt), 1006 build_int_cst (integer_type_node, 1007 annot_expr_unroll_kind), 1008 build_int_cst (integer_type_node, 1009 unroll)); 1010 simplify_loop_decl_cond (&FOR_COND (for_stmt), FOR_BODY (for_stmt)); 1011 } 1012 1013 /* Finish the increment-EXPRESSION in a for-statement, which may be 1014 given by FOR_STMT. */ 1015 1016 void 1017 finish_for_expr (tree expr, tree for_stmt) 1018 { 1019 if (!expr) 1020 return; 1021 /* If EXPR is an overloaded function, issue an error; there is no 1022 context available to use to perform overload resolution. */ 1023 if (type_unknown_p (expr)) 1024 { 1025 cxx_incomplete_type_error (expr, TREE_TYPE (expr)); 1026 expr = error_mark_node; 1027 } 1028 if (!processing_template_decl) 1029 { 1030 if (warn_sequence_point) 1031 verify_sequence_points (expr); 1032 expr = convert_to_void (expr, ICV_THIRD_IN_FOR, 1033 tf_warning_or_error); 1034 } 1035 else if (!type_dependent_expression_p (expr)) 1036 convert_to_void (build_non_dependent_expr (expr), ICV_THIRD_IN_FOR, 1037 tf_warning_or_error); 1038 expr = maybe_cleanup_point_expr_void (expr); 1039 if (check_for_bare_parameter_packs (expr)) 1040 expr = error_mark_node; 1041 FOR_EXPR (for_stmt) = expr; 1042 } 1043 1044 /* Finish the body of a for-statement, which may be given by 1045 FOR_STMT. The increment-EXPR for the loop must be 1046 provided. 1047 It can also finish RANGE_FOR_STMT. 
*/ 1048 1049 void 1050 finish_for_stmt (tree for_stmt) 1051 { 1052 end_maybe_infinite_loop (boolean_true_node); 1053 1054 if (TREE_CODE (for_stmt) == RANGE_FOR_STMT) 1055 RANGE_FOR_BODY (for_stmt) = do_poplevel (RANGE_FOR_BODY (for_stmt)); 1056 else 1057 FOR_BODY (for_stmt) = do_poplevel (FOR_BODY (for_stmt)); 1058 1059 /* Pop the scope for the body of the loop. */ 1060 if (flag_new_for_scope) 1061 { 1062 tree scope; 1063 tree *scope_ptr = (TREE_CODE (for_stmt) == RANGE_FOR_STMT 1064 ? &RANGE_FOR_SCOPE (for_stmt) 1065 : &FOR_SCOPE (for_stmt)); 1066 scope = *scope_ptr; 1067 *scope_ptr = NULL; 1068 add_stmt (do_poplevel (scope)); 1069 } 1070 } 1071 1072 /* Begin a range-for-statement. Returns a new RANGE_FOR_STMT. 1073 SCOPE and INIT should be the return of begin_for_scope, 1074 or both NULL_TREE . 1075 To finish it call finish_for_stmt(). */ 1076 1077 tree 1078 begin_range_for_stmt (tree scope, tree init) 1079 { 1080 tree r; 1081 1082 begin_maybe_infinite_loop (boolean_false_node); 1083 1084 r = build_stmt (input_location, RANGE_FOR_STMT, 1085 NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE); 1086 1087 if (scope == NULL_TREE) 1088 { 1089 gcc_assert (!init || !flag_new_for_scope); 1090 if (!init) 1091 scope = begin_for_scope (&init); 1092 } 1093 1094 /* RANGE_FOR_STMTs do not use nor save the init tree, so we 1095 pop it now. */ 1096 if (init) 1097 pop_stmt_list (init); 1098 RANGE_FOR_SCOPE (r) = scope; 1099 1100 return r; 1101 } 1102 1103 /* Finish the head of a range-based for statement, which may 1104 be given by RANGE_FOR_STMT. DECL must be the declaration 1105 and EXPR must be the loop expression. */ 1106 1107 void 1108 finish_range_for_decl (tree range_for_stmt, tree decl, tree expr) 1109 { 1110 RANGE_FOR_DECL (range_for_stmt) = decl; 1111 RANGE_FOR_EXPR (range_for_stmt) = expr; 1112 add_stmt (range_for_stmt); 1113 RANGE_FOR_BODY (range_for_stmt) = do_pushlevel (sk_block); 1114 } 1115 1116 /* Finish a break-statement. 
*/

tree
finish_break_stmt (void)
{
  /* In switch statements break is sometimes stylistically used after
     a return statement.  This can lead to spurious warnings about
     control reaching the end of a non-void function when it is
     inlined.  Note that we are calling block_may_fallthru with
     language specific tree nodes; this works because
     block_may_fallthru returns true when given something it does not
     understand.  */
  if (!block_may_fallthru (cur_stmt_list))
    return void_node;
  note_break_stmt ();
  return add_stmt (build_stmt (input_location, BREAK_STMT));
}

/* Finish a continue-statement.  */

tree
finish_continue_stmt (void)
{
  return add_stmt (build_stmt (input_location, CONTINUE_STMT));
}

/* Begin a switch-statement.  Returns a new SWITCH_STMT if
   appropriate.  */

tree
begin_switch_stmt (void)
{
  tree r, scope;

  /* The condition gets its own scope so declarations in it are
     popped when the switch ends.  */
  scope = do_pushlevel (sk_cond);
  r = build_stmt (input_location, SWITCH_STMT, NULL_TREE, NULL_TREE, NULL_TREE, scope);

  begin_cond (&SWITCH_STMT_COND (r));

  return r;
}

/* Finish the cond of a switch-statement.  */

void
finish_switch_cond (tree cond, tree switch_stmt)
{
  tree orig_type = NULL;

  if (!processing_template_decl)
    {
      /* Convert the condition to an integer or enumeration type.  */
      cond = build_expr_type_conversion (WANT_INT | WANT_ENUM, cond, true);
      if (cond == NULL_TREE)
        {
          error ("switch quantity not an integer");
          cond = error_mark_node;
        }
      /* We want unlowered type here to handle enum bit-fields.  */
      orig_type = unlowered_expr_type (cond);
      if (TREE_CODE (orig_type) != ENUMERAL_TYPE)
        orig_type = TREE_TYPE (cond);
      if (cond != error_mark_node)
        {
          /* [stmt.switch]

             Integral promotions are performed.  */
          cond = perform_integral_promotions (cond);
          cond = maybe_cleanup_point_expr (cond);
        }
    }
  if (check_for_bare_parameter_packs (cond))
    cond = error_mark_node;
  else if (!processing_template_decl && warn_sequence_point)
    verify_sequence_points (cond);

  finish_cond (&SWITCH_STMT_COND (switch_stmt), cond);
  /* Remember the (possibly enum) type for case-label checking.  */
  SWITCH_STMT_TYPE (switch_stmt) = orig_type;
  add_stmt (switch_stmt);
  push_switch (switch_stmt);
  SWITCH_STMT_BODY (switch_stmt) = push_stmt_list ();
}

/* Finish the body of a switch-statement, which may be given by
   SWITCH_STMT.  The COND to switch on is indicated.  */

void
finish_switch_stmt (tree switch_stmt)
{
  tree scope;

  SWITCH_STMT_BODY (switch_stmt) =
    pop_stmt_list (SWITCH_STMT_BODY (switch_stmt));
  pop_switch ();

  /* Pop the condition scope opened by begin_switch_stmt.  */
  scope = SWITCH_STMT_SCOPE (switch_stmt);
  SWITCH_STMT_SCOPE (switch_stmt) = NULL;
  add_stmt (do_poplevel (scope));
}

/* Begin a try-block.  Returns a newly-created TRY_BLOCK if
   appropriate.  */

tree
begin_try_block (void)
{
  tree r = build_stmt (input_location, TRY_BLOCK, NULL_TREE, NULL_TREE);
  add_stmt (r);
  TRY_STMTS (r) = push_stmt_list ();
  return r;
}

/* Likewise, for a function-try-block.  The block returned in
   *COMPOUND_STMT is an artificial outer scope, containing the
   function-try-block.  */

tree
begin_function_try_block (tree *compound_stmt)
{
  tree r;
  /* This outer scope does not exist in the C++ standard, but we need
     a place to put __FUNCTION__ and similar variables.  */
  *compound_stmt = begin_compound_stmt (0);
  r = begin_try_block ();
  FN_TRY_BLOCK_P (r) = 1;
  return r;
}

/* Finish a try-block, which may be given by TRY_BLOCK.
*/

void
finish_try_block (tree try_block)
{
  /* The statements gathered since begin_try_block become the guarded
     body; subsequent statements form the handlers.  */
  TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block));
  TRY_HANDLERS (try_block) = push_stmt_list ();
}

/* Finish the body of a cleanup try-block, which may be given by
   TRY_BLOCK.  */

void
finish_cleanup_try_block (tree try_block)
{
  TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block));
}

/* Finish an implicitly generated try-block, with a cleanup is given
   by CLEANUP.  */

void
finish_cleanup (tree cleanup, tree try_block)
{
  TRY_HANDLERS (try_block) = cleanup;
  CLEANUP_P (try_block) = 1;
}

/* Likewise, for a function-try-block.  */

void
finish_function_try_block (tree try_block)
{
  finish_try_block (try_block);
  /* FIXME : something queer about CTOR_INITIALIZER somehow following
     the try block, but moving it inside.  */
  in_function_try_handler = 1;
}

/* Finish a handler-sequence for a try-block, which may be given by
   TRY_BLOCK.  */

void
finish_handler_sequence (tree try_block)
{
  TRY_HANDLERS (try_block) = pop_stmt_list (TRY_HANDLERS (try_block));
  check_handlers (TRY_HANDLERS (try_block));
}

/* Finish the handler-seq for a function-try-block, given by
   TRY_BLOCK.  COMPOUND_STMT is the outer block created by
   begin_function_try_block.  */

void
finish_function_handler_sequence (tree try_block, tree compound_stmt)
{
  in_function_try_handler = 0;
  finish_handler_sequence (try_block);
  finish_compound_stmt (compound_stmt);
}

/* Begin a handler.  Returns a HANDLER if appropriate.  */

tree
begin_handler (void)
{
  tree r;

  r = build_stmt (input_location, HANDLER, NULL_TREE, NULL_TREE);
  add_stmt (r);

  /* Create a binding level for the eh_info and the exception object
     cleanup.  */
  HANDLER_BODY (r) = do_pushlevel (sk_catch);

  return r;
}

/* Finish the handler-parameters for a handler, which may be given by
   HANDLER.  DECL is the declaration for the catch parameter, or NULL
   if this is a `catch (...)' clause.  */

void
finish_handler_parms (tree decl, tree handler)
{
  tree type = NULL_TREE;
  if (processing_template_decl)
    {
      if (decl)
        {
          decl = pushdecl (decl);
          decl = push_template_decl (decl);
          HANDLER_PARMS (handler) = decl;
          type = TREE_TYPE (decl);
        }
    }
  else
    {
      type = expand_start_catch_block (decl);
      /* -Wcatch-value: warn about catching by value; higher warning
         levels (> 1, > 2) enable progressively broader diagnostics.  */
      if (warn_catch_value
          && type != NULL_TREE
          && type != error_mark_node
          && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE)
        {
          tree orig_type = TREE_TYPE (decl);
          if (CLASS_TYPE_P (orig_type))
            {
              if (TYPE_POLYMORPHIC_P (orig_type))
                warning (OPT_Wcatch_value_,
                         "catching polymorphic type %q#T by value", orig_type);
              else if (warn_catch_value > 1)
                warning (OPT_Wcatch_value_,
                         "catching type %q#T by value", orig_type);
            }
          else if (warn_catch_value > 2)
            warning (OPT_Wcatch_value_,
                     "catching non-reference type %q#T", orig_type);
        }
    }
  HANDLER_TYPE (handler) = type;
}

/* Finish a handler, which may be given by HANDLER.  The BLOCKs are
   the return value from the matching call to finish_handler_parms.  */

void
finish_handler (tree handler)
{
  if (!processing_template_decl)
    expand_end_catch_block ();
  HANDLER_BODY (handler) = do_poplevel (HANDLER_BODY (handler));
}

/* Begin a compound statement.  FLAGS contains some bits that control the
   behavior and context.  If BCS_NO_SCOPE is set, the compound statement
   does not define a scope.  If BCS_FN_BODY is set, this is the outermost
   block of a function.
If BCS_TRY_BLOCK is set, this is the block
   created on behalf of a TRY statement.  Returns a token to be passed to
   finish_compound_stmt.  */

tree
begin_compound_stmt (unsigned int flags)
{
  tree r;

  if (flags & BCS_NO_SCOPE)
    {
      r = push_stmt_list ();
      STATEMENT_LIST_NO_SCOPE (r) = 1;

      /* Normally, we try hard to keep the BLOCK for a statement-expression.
         But, if it's a statement-expression with a scopeless block, there's
         nothing to keep, and we don't want to accidentally keep a block
         *inside* the scopeless block.  */
      keep_next_level (false);
    }
  else
    {
      /* Choose the scope kind from the flags.  */
      scope_kind sk = sk_block;
      if (flags & BCS_TRY_BLOCK)
        sk = sk_try;
      else if (flags & BCS_TRANSACTION)
        sk = sk_transaction;
      r = do_pushlevel (sk);
    }

  /* When processing a template, we need to remember where the braces were,
     so that we can set up identical scopes when instantiating the template
     later.  BIND_EXPR is a handy candidate for this.
     Note that do_poplevel won't create a BIND_EXPR itself here (and thus
     result in nested BIND_EXPRs), since we don't build BLOCK nodes when
     processing templates.  */
  if (processing_template_decl)
    {
      r = build3 (BIND_EXPR, NULL, NULL, r, NULL);
      BIND_EXPR_TRY_BLOCK (r) = (flags & BCS_TRY_BLOCK) != 0;
      BIND_EXPR_BODY_BLOCK (r) = (flags & BCS_FN_BODY) != 0;
      TREE_SIDE_EFFECTS (r) = 1;
    }

  return r;
}

/* Finish a compound-statement, which is given by STMT.  */

void
finish_compound_stmt (tree stmt)
{
  if (TREE_CODE (stmt) == BIND_EXPR)
    {
      tree body = do_poplevel (BIND_EXPR_BODY (stmt));
      /* If the STATEMENT_LIST is empty and this BIND_EXPR isn't special,
         discard the BIND_EXPR so it can be merged with the containing
         STATEMENT_LIST.  */
      if (TREE_CODE (body) == STATEMENT_LIST
          && STATEMENT_LIST_HEAD (body) == NULL
          && !BIND_EXPR_BODY_BLOCK (stmt)
          && !BIND_EXPR_TRY_BLOCK (stmt))
        stmt = body;
      else
        BIND_EXPR_BODY (stmt) = body;
    }
  else if (STATEMENT_LIST_NO_SCOPE (stmt))
    stmt = pop_stmt_list (stmt);
  else
    {
      /* Destroy any ObjC "super" receivers that may have been
         created.  */
      objc_clear_super_receiver ();

      stmt = do_poplevel (stmt);
    }

  /* ??? See c_end_compound_stmt wrt statement expressions.  */
  add_stmt (stmt);
}

/* Finish an asm-statement, whose components are a STRING, some
   OUTPUT_OPERANDS, some INPUT_OPERANDS, some CLOBBERS and some
   LABELS.  Also note whether the asm-statement should be
   considered volatile.  */

tree
finish_asm_stmt (int volatile_p, tree string, tree output_operands,
                 tree input_operands, tree clobbers, tree labels)
{
  tree r;
  tree t;
  int ninputs = list_length (input_operands);
  int noutputs = list_length (output_operands);

  if (!processing_template_decl)
    {
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      tree operand;
      int i;

      oconstraints = XALLOCAVEC (const char *, noutputs);

      string = resolve_asm_operand_names (string, output_operands,
                                          input_operands, labels);

      /* Validate each output operand: it must be a modifiable lvalue
         whose constraint parses.  */
      for (i = 0, t = output_operands; t; t = TREE_CHAIN (t), ++i)
        {
          operand = TREE_VALUE (t);

          /* ??? Really, this should not be here.  Users should be using a
             proper lvalue, dammit.  But there's a long history of using
             casts in the output operands.  In cases like longlong.h, this
             becomes a primitive form of typechecking -- if the cast can be
             removed, then the output operand had a type of the proper width;
             otherwise we'll get an error.  Gross, but ...  */
          STRIP_NOPS (operand);

          operand = mark_lvalue_use (operand);

          if (!lvalue_or_else (operand, lv_asm, tf_warning_or_error))
            operand = error_mark_node;

          if (operand != error_mark_node
              && (TREE_READONLY (operand)
                  || CP_TYPE_CONST_P (TREE_TYPE (operand))
                  /* Functions are not modifiable, even though they are
                     lvalues.  */
                  || TREE_CODE (TREE_TYPE (operand)) == FUNCTION_TYPE
                  || TREE_CODE (TREE_TYPE (operand)) == METHOD_TYPE
                  /* If it's an aggregate and any field is const, then it is
                     effectively const.  */
                  || (CLASS_TYPE_P (TREE_TYPE (operand))
                      && C_TYPE_FIELDS_READONLY (TREE_TYPE (operand)))))
            cxx_readonly_error (operand, lv_asm);

          /* Look through COMPOUND_EXPRs and lower compound lvalues
             (pre-increment/decrement, assignment) so the underlying
             object can be marked addressable below.  */
          tree *op = &operand;
          while (TREE_CODE (*op) == COMPOUND_EXPR)
            op = &TREE_OPERAND (*op, 1);
          switch (TREE_CODE (*op))
            {
            case PREINCREMENT_EXPR:
            case PREDECREMENT_EXPR:
            case MODIFY_EXPR:
              *op = genericize_compound_lvalue (*op);
              op = &TREE_OPERAND (*op, 1);
              break;
            default:
              break;
            }

          constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
          oconstraints[i] = constraint;

          if (parse_output_constraint (&constraint, i, ninputs, noutputs,
                                       &allows_mem, &allows_reg, &is_inout))
            {
              /* If the operand is going to end up in memory,
                 mark it addressable.  */
              if (!allows_reg && !cxx_mark_addressable (*op))
                operand = error_mark_node;
            }
          else
            operand = error_mark_node;

          TREE_VALUE (t) = operand;
        }

      /* Validate each input operand against its constraint, using the
         output constraints gathered above for matching constraints.  */
      for (i = 0, t = input_operands; t; ++i, t = TREE_CHAIN (t))
        {
          constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
          bool constraint_parsed
            = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
                                      oconstraints, &allows_mem, &allows_reg);
          /* If the operand is going to end up in memory, don't call
             decay_conversion.  */
          if (constraint_parsed && !allows_reg && allows_mem)
            operand = mark_lvalue_use (TREE_VALUE (t));
          else
            operand = decay_conversion (TREE_VALUE (t), tf_warning_or_error);

          /* If the type of the operand hasn't been determined (e.g.,
             because it involves an overloaded function), then issue
             an error message.  There's no context available to
             resolve the overloading.  */
          if (TREE_TYPE (operand) == unknown_type_node)
            {
              error ("type of asm operand %qE could not be determined",
                     TREE_VALUE (t));
              operand = error_mark_node;
            }

          if (constraint_parsed)
            {
              /* If the operand is going to end up in memory,
                 mark it addressable.  */
              if (!allows_reg && allows_mem)
                {
                  /* Strip the nops as we allow this case.  FIXME, this really
                     should be rejected or made deprecated.  */
                  STRIP_NOPS (operand);

                  tree *op = &operand;
                  while (TREE_CODE (*op) == COMPOUND_EXPR)
                    op = &TREE_OPERAND (*op, 1);
                  switch (TREE_CODE (*op))
                    {
                    case PREINCREMENT_EXPR:
                    case PREDECREMENT_EXPR:
                    case MODIFY_EXPR:
                      *op = genericize_compound_lvalue (*op);
                      op = &TREE_OPERAND (*op, 1);
                      break;
                    default:
                      break;
                    }

                  if (!cxx_mark_addressable (*op))
                    operand = error_mark_node;
                }
              else if (!allows_reg && !allows_mem)
                {
                  /* If constraint allows neither register nor memory,
                     try harder to get a constant.  */
                  tree constop = maybe_constant_value (operand);
                  if (TREE_CONSTANT (constop))
                    operand = constop;
                }
            }
          else
            operand = error_mark_node;

          TREE_VALUE (t) = operand;
        }
    }

  r = build_stmt (input_location, ASM_EXPR, string,
                  output_operands, input_operands,
                  clobbers, labels);
  /* An asm with no outputs is treated as volatile.  */
  ASM_VOLATILE_P (r) = volatile_p || noutputs == 0;
  r = maybe_cleanup_point_expr_void (r);
  return add_stmt (r);
}

/* Finish a label with the indicated NAME.
Returns the new label.  */

tree
finish_label_stmt (tree name)
{
  tree decl = define_label (input_location, name);

  if (decl == error_mark_node)
    return error_mark_node;

  add_stmt (build_stmt (input_location, LABEL_EXPR, decl));

  return decl;
}

/* Finish a series of declarations for local labels.  G++ allows users
   to declare "local" labels, i.e., labels with scope.  This extension
   is useful when writing code involving statement-expressions.  */

void
finish_label_decl (tree name)
{
  if (!at_function_scope_p ())
    {
      error ("__label__ declarations are only allowed in function scopes");
      return;
    }

  add_decl_expr (declare_local_label (name));
}

/* When DECL goes out of scope, make sure that CLEANUP is executed.  */

void
finish_decl_cleanup (tree decl, tree cleanup)
{
  push_cleanup (decl, cleanup, false);
}

/* If the current scope exits with an exception, run CLEANUP.  */

void
finish_eh_cleanup (tree cleanup)
{
  push_cleanup (NULL, cleanup, true);
}

/* The MEM_INITS is a list of mem-initializers, in reverse of the
   order they were written by the user.  Each node is as for
   emit_mem_initializers.  */

void
finish_mem_initializers (tree mem_inits)
{
  /* Reorder the MEM_INITS so that they are in the order they appeared
     in the source program.  */
  mem_inits = nreverse (mem_inits);

  if (processing_template_decl)
    {
      tree mem;

      for (mem = mem_inits; mem; mem = TREE_CHAIN (mem))
        {
          /* If the TREE_PURPOSE is a TYPE_PACK_EXPANSION, skip the
             check for bare parameter packs in the TREE_VALUE, because
             any parameter packs in the TREE_VALUE have already been
             bound as part of the TREE_PURPOSE.  See
             make_pack_expansion for more information.  */
          if (TREE_CODE (TREE_PURPOSE (mem)) != TYPE_PACK_EXPANSION
              && check_for_bare_parameter_packs (TREE_VALUE (mem)))
            TREE_VALUE (mem) = error_mark_node;
        }

      add_stmt (build_min_nt_loc (UNKNOWN_LOCATION,
                                  CTOR_INITIALIZER, mem_inits));
    }
  else
    emit_mem_initializers (mem_inits);
}

/* Obfuscate EXPR if it looks like an id-expression or member access so
   that the call to finish_decltype in do_auto_deduction will give the
   right result.  */

tree
force_paren_expr (tree expr)
{
  /* This is only needed for decltype(auto) in C++14.  */
  if (cxx_dialect < cxx14)
    return expr;

  /* If we're in unevaluated context, we can't be deducing a
     return/initializer type, so we don't need to mess with this.  */
  if (cp_unevaluated_operand)
    return expr;

  if (!DECL_P (expr) && TREE_CODE (expr) != COMPONENT_REF
      && TREE_CODE (expr) != SCOPE_REF)
    return expr;

  if (TREE_CODE (expr) == COMPONENT_REF
      || TREE_CODE (expr) == SCOPE_REF)
    REF_PARENTHESIZED_P (expr) = true;
  else if (processing_template_decl)
    expr = build1 (PAREN_EXPR, TREE_TYPE (expr), expr);
  else if (VAR_P (expr) && DECL_HARD_REGISTER (expr))
    /* We can't bind a hard register variable to a reference.  */;
  else
    {
      cp_lvalue_kind kind = lvalue_kind (expr);
      if ((kind & ~clk_class) != clk_none)
        {
          /* Cast the expression to a reference of matching value
             category; the cast carries the REF_PARENTHESIZED_P mark.  */
          tree type = unlowered_expr_type (expr);
          bool rval = !!(kind & clk_rvalueref);
          type = cp_build_reference_type (type, rval);
          /* This inhibits warnings in, eg, cxx_mark_addressable
             (c++/60955).  */
          warning_sentinel s (extra_warnings);
          expr = build_static_cast (type, expr, tf_error);
          if (expr != error_mark_node)
            REF_PARENTHESIZED_P (expr) = true;
        }
    }

  return expr;
}

/* If T is an id-expression obfuscated by force_paren_expr, undo the
   obfuscation and return the underlying id-expression.  Otherwise
   return T.  */

tree
maybe_undo_parenthesized_ref (tree t)
{
  if (cxx_dialect < cxx14)
    return t;

  if (INDIRECT_REF_P (t) && REF_PARENTHESIZED_P (t))
    {
      /* Strip the conversion wrappers added by force_paren_expr to
         recover the original operand.  */
      t = TREE_OPERAND (t, 0);
      while (TREE_CODE (t) == NON_LVALUE_EXPR
             || TREE_CODE (t) == NOP_EXPR)
        t = TREE_OPERAND (t, 0);

      gcc_assert (TREE_CODE (t) == ADDR_EXPR
                  || TREE_CODE (t) == STATIC_CAST_EXPR);
      t = TREE_OPERAND (t, 0);
    }
  else if (TREE_CODE (t) == PAREN_EXPR)
    t = TREE_OPERAND (t, 0);

  return t;
}

/* Finish a parenthesized expression EXPR.  */

cp_expr
finish_parenthesized_expr (cp_expr expr)
{
  if (EXPR_P (expr))
    /* This inhibits warnings in c_common_truthvalue_conversion.  */
    TREE_NO_WARNING (expr) = 1;

  if (TREE_CODE (expr) == OFFSET_REF
      || TREE_CODE (expr) == SCOPE_REF)
    /* [expr.unary.op]/3 The qualified id of a pointer-to-member must not be
       enclosed in parentheses.  */
    PTRMEM_OK_P (expr) = 0;

  if (TREE_CODE (expr) == STRING_CST)
    PAREN_STRING_LITERAL_P (expr) = 1;

  expr = cp_expr (force_paren_expr (expr), expr.get_location ());

  return expr;
}

/* Finish a reference to a non-static data member (DECL) that is not
   preceded by `.' or `->'.
*/

tree
finish_non_static_data_member (tree decl, tree object, tree qualifying_scope)
{
  gcc_assert (TREE_CODE (decl) == FIELD_DECL);
  /* Only consider OpenMP member privatization when no explicit object
     was supplied and a privatization map is active.  */
  bool try_omp_private = !object && omp_private_member_map;
  tree ret;

  if (!object)
    {
      /* Synthesize a dummy object of the appropriate class type.  */
      tree scope = qualifying_scope;
      if (scope == NULL_TREE)
        scope = context_for_name_lookup (decl);
      object = maybe_dummy_object (scope, NULL);
    }

  object = maybe_resolve_dummy (object, true);
  if (object == error_mark_node)
    return error_mark_node;

  /* DR 613/850: Can use non-static data members without an associated
     object in sizeof/decltype/alignof.  */
  if (is_dummy_object (object) && cp_unevaluated_operand == 0
      && (!processing_template_decl || !current_class_ref))
    {
      if (current_function_decl
          && DECL_STATIC_FUNCTION_P (current_function_decl))
        error ("invalid use of member %qD in static member function", decl);
      else
        error ("invalid use of non-static data member %qD", decl);
      inform (DECL_SOURCE_LOCATION (decl), "declared here");

      return error_mark_node;
    }

  if (current_class_ptr)
    TREE_USED (current_class_ptr) = 1;
  if (processing_template_decl && !qualifying_scope)
    {
      tree type = TREE_TYPE (decl);

      if (TREE_CODE (type) == REFERENCE_TYPE)
        /* Quals on the object don't matter.  */;
      else if (PACK_EXPANSION_P (type))
        /* Don't bother trying to represent this.  */
        type = NULL_TREE;
      else
        {
          /* Set the cv qualifiers.  */
          int quals = cp_type_quals (TREE_TYPE (object));

          if (DECL_MUTABLE_P (decl))
            quals &= ~TYPE_QUAL_CONST;

          quals |= cp_type_quals (TREE_TYPE (decl));
          type = cp_build_qualified_type (type, quals);
        }

      ret = (convert_from_reference
             (build_min (COMPONENT_REF, type, object, decl, NULL_TREE)));
    }
  /* If PROCESSING_TEMPLATE_DECL is nonzero here, then
     QUALIFYING_SCOPE is also non-null.  Wrap this in a SCOPE_REF
     for now.  */
  else if (processing_template_decl)
    ret = build_qualified_name (TREE_TYPE (decl),
                                qualifying_scope,
                                decl,
                                /*template_p=*/false);
  else
    {
      tree access_type = TREE_TYPE (object);

      perform_or_defer_access_check (TYPE_BINFO (access_type), decl,
                                     decl, tf_warning_or_error);

      /* If the data member was named `C::M', convert `*this' to `C'
         first.  */
      if (qualifying_scope)
        {
          tree binfo = NULL_TREE;
          object = build_scoped_ref (object, qualifying_scope,
                                     &binfo);
        }

      ret = build_class_member_access_expr (object, decl,
                                            /*access_path=*/NULL_TREE,
                                            /*preserve_reference=*/false,
                                            tf_warning_or_error);
    }
  /* If the member has an OpenMP private copy, use that instead.  */
  if (try_omp_private)
    {
      tree *v = omp_private_member_map->get (decl);
      if (v)
        ret = convert_from_reference (*v);
    }
  return ret;
}

/* If we are currently parsing a template and we encountered a typedef
   TYPEDEF_DECL that is being accessed though CONTEXT, this function
   adds the typedef to a list tied to the current template.
   At template instantiation time, that list is walked and access check
   performed for each typedef.
   LOCATION is the location of the usage point of TYPEDEF_DECL.
*/

void
add_typedef_to_current_template_for_access_check (tree typedef_decl,
                                                  tree context,
                                                  location_t location)
{
  tree template_info = NULL;
  tree cs = current_scope ();

  /* Nothing to record unless TYPEDEF_DECL really is a typedef accessed
     through a class, and we have a current scope.  */
  if (!is_typedef_decl (typedef_decl)
      || !context
      || !CLASS_TYPE_P (context)
      || !cs)
    return;

  if (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL)
    template_info = get_template_info (cs);

  if (template_info
      && TI_TEMPLATE (template_info)
      && !currently_open_class (context))
    append_type_to_template_for_access_check (cs, typedef_decl,
                                              context, location);
}

/* DECL was the declaration to which a qualified-id resolved.  Issue
   an error message if it is not accessible.  If OBJECT_TYPE is
   non-NULL, we have just seen `x->' or `x.' and OBJECT_TYPE is the
   type of `*x', or `x', respectively.  If the DECL was named as
   `A::B' then NESTED_NAME_SPECIFIER is `A'.  */

void
check_accessibility_of_qualified_id (tree decl,
                                     tree object_type,
                                     tree nested_name_specifier)
{
  tree scope;
  tree qualifying_type = NULL_TREE;

  /* If we are parsing a template declaration and if decl is a typedef,
     add it to a list tied to the template.
     At template instantiation time, that list will be walked and
     access check performed.  */
  add_typedef_to_current_template_for_access_check (decl,
                                                    nested_name_specifier
                                                    ? nested_name_specifier
                                                    : DECL_CONTEXT (decl),
                                                    input_location);

  /* If we're not checking, return immediately.  */
  if (deferred_access_no_check)
    return;

  /* Determine the SCOPE of DECL.  */
  scope = context_for_name_lookup (decl);
  /* If the SCOPE is not a type, then DECL is not a member.  */
  if (!TYPE_P (scope))
    return;
  /* Compute the scope through which DECL is being accessed.  */
  if (object_type
      /* OBJECT_TYPE might not be a class type; consider:

           class A { typedef int I; };
           I *p;
           p->A::I::~I();

         In this case, we will have "A::I" as the DECL, but "I" as the
         OBJECT_TYPE.  */
      && CLASS_TYPE_P (object_type)
      && DERIVED_FROM_P (scope, object_type))
    /* If we are processing a `->' or `.' expression, use the type of the
       left-hand side.  */
    qualifying_type = object_type;
  else if (nested_name_specifier)
    {
      /* If the reference is to a non-static member of the
         current class, treat it as if it were referenced through
         `this'.  */
      tree ct;
      if (DECL_NONSTATIC_MEMBER_P (decl)
          && current_class_ptr
          && DERIVED_FROM_P (scope, ct = current_nonlambda_class_type ()))
        qualifying_type = ct;
      /* Otherwise, use the type indicated by the
         nested-name-specifier.  */
      else
        qualifying_type = nested_name_specifier;
    }
  else
    /* Otherwise, the name must be from the current class or one of
       its bases.  */
    qualifying_type = currently_open_derived_class (scope);

  if (qualifying_type
      /* It is possible for qualifying type to be a TEMPLATE_TYPE_PARM
         or similar in a default argument value.  */
      && CLASS_TYPE_P (qualifying_type)
      && !dependent_type_p (qualifying_type))
    perform_or_defer_access_check (TYPE_BINFO (qualifying_type), decl,
                                   decl, tf_warning_or_error);
}

/* EXPR is the result of a qualified-id.  The QUALIFYING_CLASS was the
   class named to the left of the "::" operator.  DONE is true if this
   expression is a complete postfix-expression; it is false if this
   expression is followed by '->', '[', '(', etc.  ADDRESS_P is true
   iff this expression is the operand of '&'.  TEMPLATE_P is true iff
   the qualified-id was of the form "A::template B".  TEMPLATE_ARG_P
   is true iff this qualified name appears as a template argument.
*/

tree
finish_qualified_id_expr (tree qualifying_class,
                          tree expr,
                          bool done,
                          bool address_p,
                          bool template_p,
                          bool template_arg_p,
                          tsubst_flags_t complain)
{
  gcc_assert (TYPE_P (qualifying_class));

  if (error_operand_p (expr))
    return error_mark_node;

  if ((DECL_P (expr) || BASELINK_P (expr))
      && !mark_used (expr, complain))
    return error_mark_node;

  if (template_p)
    {
      if (TREE_CODE (expr) == UNBOUND_CLASS_TEMPLATE)
        {
          /* cp_parser_lookup_name thought we were looking for a type,
             but we're actually looking for a declaration.  */
          qualifying_class = TYPE_CONTEXT (expr);
          expr = TYPE_IDENTIFIER (expr);
        }
      else
        check_template_keyword (expr);
    }

  /* If EXPR occurs as the operand of '&', use special handling that
     permits a pointer-to-member.  */
  if (address_p && done)
    {
      if (TREE_CODE (expr) == SCOPE_REF)
        expr = TREE_OPERAND (expr, 1);
      expr = build_offset_ref (qualifying_class, expr,
                               /*address_p=*/true, complain);
      return expr;
    }

  /* No need to check access within an enum.  */
  if (TREE_CODE (qualifying_class) == ENUMERAL_TYPE
      && TREE_CODE (expr) != IDENTIFIER_NODE)
    return expr;

  /* Within the scope of a class, turn references to non-static
     members into expression of the form "this->...".  */
  if (template_arg_p)
    /* But, within a template argument, we do not want make the
       transformation, as there is no "this" pointer.  */
    ;
  else if (TREE_CODE (expr) == FIELD_DECL)
    {
      /* Access was already checked when the qualified-id resolved;
         suppress a second check here.  */
      push_deferring_access_checks (dk_no_check);
      expr = finish_non_static_data_member (expr, NULL_TREE,
                                            qualifying_class);
      pop_deferring_access_checks ();
    }
  else if (BASELINK_P (expr))
    {
      /* See if any of the functions are non-static members.  */
      /* If so, the expression may be relative to 'this'.  */
      if (!shared_member_p (expr)
          && current_class_ptr
          && DERIVED_FROM_P (qualifying_class,
                             current_nonlambda_class_type ()))
        expr = (build_class_member_access_expr
                (maybe_dummy_object (qualifying_class, NULL),
                 expr,
                 BASELINK_ACCESS_BINFO (expr),
                 /*preserve_reference=*/false,
                 complain));
      else if (done)
        /* The expression is a qualified name whose address is not
           being taken.  */
        expr = build_offset_ref (qualifying_class, expr, /*address_p=*/false,
                                 complain);
    }
  else
    {
      /* In a template, return a SCOPE_REF for most qualified-ids
         so that we can check access at instantiation time.  But if
         we're looking at a member of the current instantiation, we
         know we have access and building up the SCOPE_REF confuses
         non-type template argument handling.  */
      if (processing_template_decl
          && (!currently_open_class (qualifying_class)
              || TREE_CODE (expr) == BIT_NOT_EXPR))
        expr = build_qualified_name (TREE_TYPE (expr),
                                     qualifying_class, expr,
                                     template_p);

      expr = convert_from_reference (expr);
    }

  return expr;
}

/* Begin a statement-expression.  The value returned must be passed to
   finish_stmt_expr.  */

tree
begin_stmt_expr (void)
{
  return push_stmt_list ();
}

/* Process the final expression of a statement expression.  EXPR can be
   NULL, if the final expression is empty.  Return a STATEMENT_LIST
   containing all the statements in the statement-expression, or
   ERROR_MARK_NODE if there was an error.  */

tree
finish_stmt_expr_expr (tree expr, tree stmt_expr)
{
  if (error_operand_p (expr))
    {
      /* The type of the statement-expression is the type of the last
         expression.  */
      TREE_TYPE (stmt_expr) = error_mark_node;
      return error_mark_node;
    }

  /* If the last statement does not have "void" type, then the value
     of the last statement is the value of the entire expression.  */
  if (expr)
    {
      tree type = TREE_TYPE (expr);

      if (type && type_unknown_p (type))
        {
          error ("a statement expression is an insufficient context"
                 " for overload resolution");
          TREE_TYPE (stmt_expr) = error_mark_node;
          return error_mark_node;
        }
      else if (processing_template_decl)
        {
          expr = build_stmt (input_location, EXPR_STMT, expr);
          expr = add_stmt (expr);
          /* Mark the last statement so that we can recognize it as such at
             template-instantiation time.  */
          EXPR_STMT_STMT_EXPR_RESULT (expr) = 1;
        }
      else if (VOID_TYPE_P (type))
        {
          /* Just treat this like an ordinary statement.  */
          expr = finish_expr_stmt (expr);
        }
      else
        {
          /* It actually has a value we need to deal with.  First, force it
             to be an rvalue so that we won't need to build up a copy
             constructor call later when we try to assign it to something.  */
          expr = force_rvalue (expr, tf_warning_or_error);
          if (error_operand_p (expr))
            return error_mark_node;

          /* Update for array-to-pointer decay.  */
          type = TREE_TYPE (expr);

          /* Wrap it in a CLEANUP_POINT_EXPR and add it to the list like a
             normal statement, but don't convert to void or actually add
             the EXPR_STMT.  */
          if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
            expr = maybe_cleanup_point_expr (expr);
          add_stmt (expr);
        }

      /* The type of the statement-expression is the type of the last
         expression.  */
      TREE_TYPE (stmt_expr) = type;
    }

  return stmt_expr;
}

/* Finish a statement-expression.  EXPR should be the value returned
   by the previous begin_stmt_expr.
   Returns an expression
   representing the statement-expression.  */

tree
finish_stmt_expr (tree stmt_expr, bool has_no_scope)
{
  tree type;
  tree result;

  if (error_operand_p (stmt_expr))
    {
      pop_stmt_list (stmt_expr);
      return error_mark_node;
    }

  gcc_assert (TREE_CODE (stmt_expr) == STATEMENT_LIST);

  /* The type was stored on the STATEMENT_LIST by finish_stmt_expr_expr;
     carry it over onto whatever pop_stmt_list returns.  */
  type = TREE_TYPE (stmt_expr);
  result = pop_stmt_list (stmt_expr);
  TREE_TYPE (result) = type;

  if (processing_template_decl)
    {
      result = build_min (STMT_EXPR, type, result);
      TREE_SIDE_EFFECTS (result) = 1;
      STMT_EXPR_NO_SCOPE (result) = has_no_scope;
    }
  else if (CLASS_TYPE_P (type))
    {
      /* Wrap the statement-expression in a TARGET_EXPR so that the
	 temporary object created by the final expression is destroyed at
	 the end of the full-expression containing the
	 statement-expression.  */
      result = force_target_expr (type, result, tf_warning_or_error);
    }

  return result;
}

/* Returns the expression which provides the value of STMT_EXPR.  */

tree
stmt_expr_value_expr (tree stmt_expr)
{
  tree t = STMT_EXPR_STMT (stmt_expr);

  /* Peel off the wrappers added while building the statement-expression:
     an enclosing BIND_EXPR, the trailing statement of a STATEMENT_LIST,
     and a final EXPR_STMT, in that order.  */
  if (TREE_CODE (t) == BIND_EXPR)
    t = BIND_EXPR_BODY (t);

  if (TREE_CODE (t) == STATEMENT_LIST && STATEMENT_LIST_TAIL (t))
    t = STATEMENT_LIST_TAIL (t)->stmt;

  if (TREE_CODE (t) == EXPR_STMT)
    t = EXPR_STMT_EXPR (t);

  return t;
}

/* Return TRUE iff EXPR_STMT is an empty list of
   expression statements.  */

bool
empty_expr_stmt_p (tree expr_stmt)
{
  tree body = NULL_TREE;

  if (expr_stmt == void_node)
    return true;

  if (expr_stmt)
    {
      if (TREE_CODE (expr_stmt) == EXPR_STMT)
	body = EXPR_STMT_EXPR (expr_stmt);
      else if (TREE_CODE (expr_stmt) == STATEMENT_LIST)
	body = expr_stmt;
    }

  if (body)
    {
      if (TREE_CODE (body) == STATEMENT_LIST)
	/* An empty STATEMENT_LIST has no first iterator position.  */
	return tsi_end_p (tsi_start (body));
      else
	return empty_expr_stmt_p (body);
    }
  return false;
}

/* Perform Koenig lookup.  FN is the postfix-expression representing
   the function (or functions) to call; ARGS are the arguments to the
   call.  Returns the functions to be considered by overload resolution.  */

cp_expr
perform_koenig_lookup (cp_expr fn, vec<tree, va_gc> *args,
		       tsubst_flags_t complain)
{
  tree identifier = NULL_TREE;
  tree functions = NULL_TREE;
  tree tmpl_args = NULL_TREE;
  bool template_id = false;
  location_t loc = fn.get_location ();

  if (TREE_CODE (fn) == TEMPLATE_ID_EXPR)
    {
      /* Use a separate flag to handle null args.  */
      template_id = true;
      tmpl_args = TREE_OPERAND (fn, 1);
      fn = TREE_OPERAND (fn, 0);
    }

  /* Find the name of the overloaded function.  */
  if (identifier_p (fn))
    identifier = fn;
  else
    {
      functions = fn;
      identifier = OVL_NAME (functions);
    }

  /* A call to a namespace-scope function using an unqualified name.

     Do Koenig lookup -- unless any of the arguments are
     type-dependent.  */
  if (!any_type_dependent_arguments_p (args)
      && !any_dependent_template_arguments_p (tmpl_args))
    {
      fn = lookup_arg_dependent (identifier, functions, args);
      if (!fn)
	{
	  /* The unqualified name could not be resolved.  */
	  if (complain & tf_error)
	    fn = unqualified_fn_lookup_error (cp_expr (identifier, loc));
	  else
	    fn = identifier;
	}
    }

  /* Rebuild the TEMPLATE_ID_EXPR that was unwrapped above, now around
     the looked-up function(s).  */
  if (fn && template_id && fn != error_mark_node)
    fn = build2 (TEMPLATE_ID_EXPR, unknown_type_node, fn, tmpl_args);

  return fn;
}

/* Generate an expression for `FN (ARGS)'.  This may change the
   contents of ARGS.

   If DISALLOW_VIRTUAL is true, the call to FN will be not generated
   as a virtual call, even if FN is virtual.  (This flag is set when
   encountering an expression where the function name is explicitly
   qualified.  For example a call to `X::f' never generates a virtual
   call.)

   Returns code for the call.  */

tree
finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
		  bool koenig_p, tsubst_flags_t complain)
{
  tree result;
  tree orig_fn;
  vec<tree, va_gc> *orig_args = NULL;

  if (fn == error_mark_node)
    return error_mark_node;

  gcc_assert (!TYPE_P (fn));

  /* If FN may be a FUNCTION_DECL obfuscated by force_paren_expr, undo
     it so that we can tell this is a call to a known function.  */
  fn = maybe_undo_parenthesized_ref (fn);

  orig_fn = fn;

  if (processing_template_decl)
    {
      /* If FN is a local extern declaration or set thereof, look them up
	 again at instantiation time.  */
      if (is_overloaded_fn (fn))
	{
	  tree ifn = get_first_fn (fn);
	  if (TREE_CODE (ifn) == FUNCTION_DECL
	      && DECL_LOCAL_FUNCTION_P (ifn))
	    orig_fn = DECL_NAME (ifn);
	}

      /* If the call expression is dependent, build a CALL_EXPR node
	 with no type; type_dependent_expression_p recognizes
	 expressions with no type as being dependent.  */
      if (type_dependent_expression_p (fn)
	  || any_type_dependent_arguments_p (*args))
	{
	  result = build_min_nt_call_vec (orig_fn, *args);
	  SET_EXPR_LOCATION (result, EXPR_LOC_OR_LOC (fn, input_location));
	  KOENIG_LOOKUP_P (result) = koenig_p;
	  if (is_overloaded_fn (fn))
	    {
	      fn = get_fns (fn);
	      lookup_keep (fn, true);
	    }

	  if (cfun)
	    {
	      /* ABNORMAL remains set only if every candidate is a
		 FUNCTION_DECL marked noreturn (TREE_THIS_VOLATILE).  */
	      bool abnormal = true;
	      for (lkp_iterator iter (fn); abnormal && iter; ++iter)
		{
		  tree fndecl = *iter;
		  if (TREE_CODE (fndecl) != FUNCTION_DECL
		      || !TREE_THIS_VOLATILE (fndecl))
		    abnormal = false;
		}
	      /* FIXME: Stop warning about falling off end of non-void
		 function.   But this is wrong.  Even if we only see
		 no-return fns at this point, we could select a
		 future-defined return fn during instantiation.  Or
		 vice-versa.  */
	      if (abnormal)
		current_function_returns_abnormally = 1;
	    }
	  return result;
	}
      orig_args = make_tree_vector_copy (*args);
      if (!BASELINK_P (fn)
	  && TREE_CODE (fn) != PSEUDO_DTOR_EXPR
	  && TREE_TYPE (fn) != unknown_type_node)
	fn = build_non_dependent_expr (fn);
      make_args_non_dependent (*args);
    }

  if (TREE_CODE (fn) == COMPONENT_REF)
    {
      tree member = TREE_OPERAND (fn, 1);
      if (BASELINK_P (member))
	{
	  tree object = TREE_OPERAND (fn, 0);
	  return build_new_method_call (object, member,
					args, NULL_TREE,
					(disallow_virtual
					 ? LOOKUP_NORMAL | LOOKUP_NONVIRTUAL
					 : LOOKUP_NORMAL),
					/*fn_p=*/NULL,
					complain);
	}
    }

  /* Per 13.3.1.1, '(&f)(...)' is the same as '(f)(...)'.  */
  if (TREE_CODE (fn) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (fn, 0)) == OVERLOAD)
    fn = TREE_OPERAND (fn, 0);

  if (is_overloaded_fn (fn))
    fn = baselink_for_fns (fn);

  result = NULL_TREE;
  if (BASELINK_P (fn))
    {
      tree object;

      /* A call to a member function.  From [over.call.func]:

	   If the keyword this is in scope and refers to the class of
	   that member function, or a derived class thereof, then the
	   function call is transformed into a qualified function call
	   using (*this) as the postfix-expression to the left of the
	   . operator....  [Otherwise] a contrived object of type T
	   becomes the implied object argument.

	 In this situation:

	   struct A { void f(); };
	   struct B : public A {};
	   struct C : public A { void g() { B::f(); }};

	 "the class of that member function" refers to `A'.  But 11.2
	 [class.access.base] says that we need to convert 'this' to B* as
	 part of the access, so we pass 'B' to maybe_dummy_object.  */

      if (DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (get_first_fn (fn)))
	{
	  /* A constructor call always uses a dummy object.  (This constructor
	     call which has the form A::A () is actually invalid and we are
	     going to reject it later in build_new_method_call.)  */
	  object = build_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)));
	}
      else
	object = maybe_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)),
				     NULL);

      result = build_new_method_call (object, fn, args, NULL_TREE,
				      (disallow_virtual
				       ? LOOKUP_NORMAL|LOOKUP_NONVIRTUAL
				       : LOOKUP_NORMAL),
				      /*fn_p=*/NULL,
				      complain);
    }
  else if (is_overloaded_fn (fn))
    {
      /* If the function is an overloaded builtin, resolve it.  */
      if (TREE_CODE (fn) == FUNCTION_DECL
	  && (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	      || DECL_BUILT_IN_CLASS (fn) == BUILT_IN_MD))
	result = resolve_overloaded_builtin (input_location, fn, *args);

      if (!result)
	{
	  /* Collect up to the first three sizeof arguments so that
	     -Wsizeof-pointer-memaccess can check calls like
	     memset (p, 0, sizeof (p)).  */
	  if (warn_sizeof_pointer_memaccess
	      && (complain & tf_warning)
	      && !vec_safe_is_empty (*args)
	      && !processing_template_decl)
	    {
	      location_t sizeof_arg_loc[3];
	      tree sizeof_arg[3];
	      unsigned int i;
	      for (i = 0; i < 3; i++)
		{
		  tree t;

		  sizeof_arg_loc[i] = UNKNOWN_LOCATION;
		  sizeof_arg[i] = NULL_TREE;
		  if (i >= (*args)->length ())
		    continue;
		  t = (**args)[i];
		  if (TREE_CODE (t) != SIZEOF_EXPR)
		    continue;
		  if (SIZEOF_EXPR_TYPE_P (t))
		    sizeof_arg[i] = TREE_TYPE (TREE_OPERAND (t, 0));
		  else
		    sizeof_arg[i] = TREE_OPERAND (t, 0);
		  sizeof_arg_loc[i] = EXPR_LOCATION (t);
		}
	      sizeof_pointer_memaccess_warning
		(sizeof_arg_loc, fn, *args,
		 sizeof_arg, same_type_ignoring_top_level_qualifiers_p);
	    }

	  /* A call to a namespace-scope function.  */
	  result = build_new_function_call (fn, args, complain);
	}
    }
  else if (TREE_CODE (fn) == PSEUDO_DTOR_EXPR)
    {
      if (!vec_safe_is_empty (*args))
	error ("arguments to destructor are not allowed");
      /* Mark the pseudo-destructor call as having side-effects so
	 that we do not issue warnings about its use.  */
      result = build1 (NOP_EXPR,
		       void_type_node,
		       TREE_OPERAND (fn, 0));
      TREE_SIDE_EFFECTS (result) = 1;
    }
  else if (CLASS_TYPE_P (TREE_TYPE (fn)))
    /* If the "function" is really an object of class type, it might
       have an overloaded `operator ()'.  */
    result = build_op_call (fn, args, complain);

  if (!result)
    /* A call where the function is unknown.  */
    result = cp_build_function_call_vec (fn, args, complain);

  if (processing_template_decl && result != error_mark_node)
    {
      /* Rebuild the call with the original (dependent-preserving)
	 function and arguments so it can be re-processed at
	 instantiation time.  */
      if (INDIRECT_REF_P (result))
	result = TREE_OPERAND (result, 0);
      result = build_call_vec (TREE_TYPE (result), orig_fn, orig_args);
      SET_EXPR_LOCATION (result, input_location);
      KOENIG_LOOKUP_P (result) = koenig_p;
      release_tree_vector (orig_args);
      result = convert_from_reference (result);
    }

  /* Free or retain OVERLOADs from lookup.  */
  if (is_overloaded_fn (orig_fn))
    lookup_keep (get_fns (orig_fn), processing_template_decl);

  return result;
}

/* Finish a call to a postfix increment or decrement or EXPR.  (Which
   is indicated by CODE, which should be POSTINCREMENT_EXPR or
   POSTDECREMENT_EXPR.)  */

cp_expr
finish_increment_expr (cp_expr expr, enum tree_code code)
{
  /* input_location holds the location of the trailing operator token.
     Build a location of the form:
       expr++
       ~~~~^~
     with the caret at the operator token, ranging from the start
     of EXPR to the end of the operator token.  */
  location_t combined_loc = make_location (input_location,
					   expr.get_start (),
					   get_finish (input_location));
  cp_expr result = build_x_unary_op (combined_loc, code, expr,
				     tf_warning_or_error);
  /* TODO: build_x_unary_op doesn't honor the location, so set it here.  */
  result.set_location (combined_loc);
  return result;
}

/* Finish a use of `this'.  Returns an expression for `this'.  */

tree
finish_this_expr (void)
{
  tree result = NULL_TREE;

  if (current_class_ptr)
    {
      tree type = TREE_TYPE (current_class_ref);

      /* In a lambda expression, 'this' refers to the captured 'this'.  */
      if (LAMBDA_TYPE_P (type))
	result = lambda_expr_this_capture (CLASSTYPE_LAMBDA_EXPR (type), true);
      else
	result = current_class_ptr;
    }

  if (result)
    /* The keyword 'this' is a prvalue expression.  */
    return rvalue (result);

  /* No 'this' is available; diagnose why as precisely as we can.  */
  tree fn = current_nonlambda_function ();
  if (fn && DECL_STATIC_FUNCTION_P (fn))
    error ("%<this%> is unavailable for static member functions");
  else if (fn)
    error ("invalid use of %<this%> in non-member function");
  else
    error ("invalid use of %<this%> at top level");
  return error_mark_node;
}

/* Finish a pseudo-destructor expression.  If SCOPE is NULL, the
   expression was of the form `OBJECT.~DESTRUCTOR' where DESTRUCTOR is
   the TYPE for the type given.  If SCOPE is non-NULL, the expression
   was of the form `OBJECT.SCOPE::~DESTRUCTOR'.  */

tree
finish_pseudo_destructor_expr (tree object, tree scope, tree destructor,
			       location_t loc)
{
  if (object == error_mark_node || destructor == error_mark_node)
    return error_mark_node;

  gcc_assert (TYPE_P (destructor));

  if (!processing_template_decl)
    {
      if (scope == error_mark_node)
	{
	  error_at (loc, "invalid qualifying scope in pseudo-destructor name");
	  return error_mark_node;
	}
      if (is_auto (destructor))
	destructor = TREE_TYPE (object);
      if (scope && TYPE_P (scope) && !check_dtor_name (scope, destructor))
	{
	  error_at (loc,
		    "qualified type %qT does not match destructor name ~%qT",
		    scope, destructor);
	  return error_mark_node;
	}


      /* [expr.pseudo] says both:

	   The type designated by the pseudo-destructor-name shall be
	   the same as the object type.

	 and:

	   The cv-unqualified versions of the object type and of the
	   type designated by the pseudo-destructor-name shall be the
	   same type.

	 We implement the more generous second sentence, since that is
	 what most other compilers do.  */
      if (!same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (object),
						      destructor))
	{
	  error_at (loc, "%qE is not of type %qT", object, destructor);
	  return error_mark_node;
	}
    }

  return build3_loc (loc, PSEUDO_DTOR_EXPR, void_type_node, object,
		     scope, destructor);
}

/* Finish an expression of the form CODE EXPR.  */

cp_expr
finish_unary_op_expr (location_t op_loc, enum tree_code code, cp_expr expr,
		      tsubst_flags_t complain)
{
  /* Build a location of the form:
       ++expr
       ^~~~~~
     with the caret at the operator token, ranging from the start
     of the operator token to the end of EXPR.  */
  location_t combined_loc = make_location (op_loc,
					   op_loc, expr.get_finish ());
  cp_expr result = build_x_unary_op (combined_loc, code, expr, complain);
  /* TODO: build_x_unary_op doesn't always honor the location.  */
  result.set_location (combined_loc);

  tree result_ovl, expr_ovl;

  if (!(complain & tf_warning))
    return result;

  /* Fold the operand and the result to see whether the operation
     overflowed on a constant operand; if so, warn.  */
  result_ovl = result;
  expr_ovl = expr;

  if (!processing_template_decl)
    expr_ovl = cp_fully_fold (expr_ovl);

  if (!CONSTANT_CLASS_P (expr_ovl)
      || TREE_OVERFLOW_P (expr_ovl))
    return result;

  if (!processing_template_decl)
    result_ovl = cp_fully_fold (result_ovl);

  if (CONSTANT_CLASS_P (result_ovl) && TREE_OVERFLOW_P (result_ovl))
    overflow_warning (combined_loc, result_ovl);

  return result;
}

/* Finish a compound-literal expression or C++11 functional cast with aggregate
   initializer.  TYPE is the type to which the CONSTRUCTOR in COMPOUND_LITERAL
   is being cast.
   */

tree
finish_compound_literal (tree type, tree compound_literal,
			 tsubst_flags_t complain,
			 fcl_t fcl_context)
{
  if (type == error_mark_node)
    return error_mark_node;

  /* For a reference target, build the literal of the referent type and
     then cast, which binds the reference.  */
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      compound_literal
	= finish_compound_literal (TREE_TYPE (type), compound_literal,
				   complain, fcl_context);
      return cp_build_c_cast (type, compound_literal, complain);
    }

  if (!TYPE_OBJ_P (type))
    {
      if (complain & tf_error)
	error ("compound literal of non-object type %qT", type);
      return error_mark_node;
    }

  /* Class template argument deduction for something like X{1, 2}.  */
  if (tree anode = type_uses_auto (type))
    if (CLASS_PLACEHOLDER_TEMPLATE (anode))
      {
	type = do_auto_deduction (type, compound_literal, anode, complain,
				  adc_variable_type);
	if (type == error_mark_node)
	  return error_mark_node;
      }

  if (processing_template_decl)
    {
      TREE_TYPE (compound_literal) = type;
      /* Mark the expression as a compound literal.  */
      TREE_HAS_CONSTRUCTOR (compound_literal) = 1;
      if (fcl_context == fcl_c99)
	CONSTRUCTOR_C99_COMPOUND_LITERAL (compound_literal) = 1;
      return compound_literal;
    }

  type = complete_type (type);

  if (TYPE_NON_AGGREGATE_CLASS (type))
    {
      /* Trying to deal with a CONSTRUCTOR instead of a TREE_LIST
	 everywhere that deals with function arguments would be a pain, so
	 just wrap it in a TREE_LIST.  The parser set a flag so we know
	 that it came from T{} rather than T({}).  */
      CONSTRUCTOR_IS_DIRECT_INIT (compound_literal) = 1;
      compound_literal = build_tree_list (NULL_TREE, compound_literal);
      return build_functional_cast (type, compound_literal, complain);
    }

  if (TREE_CODE (type) == ARRAY_TYPE
      && check_array_initializer (NULL_TREE, type, compound_literal))
    return error_mark_node;
  compound_literal = reshape_init (type, compound_literal, complain);
  if (SCALAR_TYPE_P (type)
      && !BRACE_ENCLOSED_INITIALIZER_P (compound_literal)
      && !check_narrowing (type, compound_literal, complain))
    return error_mark_node;
  /* Deduce the array bound from the initializer if none was given.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type) == NULL_TREE)
    {
      cp_complete_array_type_or_error (&type, compound_literal,
				       false, complain);
      if (type == error_mark_node)
	return error_mark_node;
    }
  compound_literal = digest_init_flags (type, compound_literal, LOOKUP_NORMAL,
					complain);
  if (TREE_CODE (compound_literal) == CONSTRUCTOR)
    {
      TREE_HAS_CONSTRUCTOR (compound_literal) = true;
      if (fcl_context == fcl_c99)
	CONSTRUCTOR_C99_COMPOUND_LITERAL (compound_literal) = 1;
    }

  /* Put static/constant array temporaries in static variables.  */
  /* FIXME all C99 compound literals should be variables rather than C++
     temporaries, unless they are used as an aggregate initializer.  */
  if ((!at_function_scope_p () || CP_TYPE_CONST_P (type))
      && fcl_context == fcl_c99
      && TREE_CODE (type) == ARRAY_TYPE
      && !TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)
      && initializer_constant_valid_p (compound_literal, type))
    {
      tree decl = create_temporary_var (type);
      DECL_INITIAL (decl) = compound_literal;
      TREE_STATIC (decl) = 1;
      if (literal_type_p (type) && CP_TYPE_CONST_NON_VOLATILE_P (type))
	{
	  /* 5.19 says that a constant expression can include an
	     lvalue-rvalue conversion applied to "a glvalue of literal type
	     that refers to a non-volatile temporary object initialized
	     with a constant expression".  Rather than try to communicate
	     that this VAR_DECL is a temporary, just mark it constexpr.  */
	  DECL_DECLARED_CONSTEXPR_P (decl) = true;
	  DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true;
	  TREE_CONSTANT (decl) = true;
	}
      cp_apply_type_quals_to_decl (cp_type_quals (type), decl);
      decl = pushdecl_top_level (decl);
      DECL_NAME (decl) = make_anon_name ();
      SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl));
      /* Make sure the destructor is callable.  */
      tree clean = cxx_maybe_build_cleanup (decl, complain);
      if (clean == error_mark_node)
	return error_mark_node;
      return decl;
    }

  /* Represent other compound literals with TARGET_EXPR so we produce
     an lvalue, but can elide copies.  */
  if (!VECTOR_TYPE_P (type))
    compound_literal = get_target_expr_sfinae (compound_literal, complain);

  return compound_literal;
}

/* Return the declaration for the function-name variable indicated by
   ID.
   */

tree
finish_fname (tree id)
{
  tree decl;

  decl = fname_decl (input_location, C_RID_CODE (id), id);
  /* In a template, return just the identifier so the name is looked up
     again at instantiation time.  */
  if (processing_template_decl && current_function_decl
      && decl != error_mark_node)
    decl = DECL_NAME (decl);
  return decl;
}

/* Finish a translation unit.  */

void
finish_translation_unit (void)
{
  /* In case there were missing closebraces,
     get us back to the global binding level.  */
  pop_everything ();
  while (current_namespace != global_namespace)
    pop_namespace ();

  /* Do file scope __FUNCTION__ et al.  */
  finish_fname_decls ();
}

/* Finish a template type parameter, specified as AGGR IDENTIFIER.
   Returns the parameter.  */

tree
finish_template_type_parm (tree aggr, tree identifier)
{
  if (aggr != class_type_node)
    {
      permerror (input_location, "template type parameters must use the keyword %<class%> or %<typename%>");
      aggr = class_type_node;
    }

  return build_tree_list (aggr, identifier);
}

/* Finish a template template parameter, specified as AGGR IDENTIFIER.
   Returns the parameter.  */

tree
finish_template_template_parm (tree aggr, tree identifier)
{
  tree decl = build_decl (input_location,
			  TYPE_DECL, identifier, NULL_TREE);

  tree tmpl = build_lang_decl (TEMPLATE_DECL, identifier, NULL_TREE);
  DECL_TEMPLATE_PARMS (tmpl) = current_template_parms;
  DECL_TEMPLATE_RESULT (tmpl) = decl;
  DECL_ARTIFICIAL (decl) = 1;

  // Associate the constraints with the underlying declaration,
  // not the template.
  tree reqs = TEMPLATE_PARMS_CONSTRAINTS (current_template_parms);
  tree constr = build_constraints (reqs, NULL_TREE);
  set_constraints (decl, constr);

  end_template_decl ();

  gcc_assert (DECL_TEMPLATE_PARMS (tmpl));

  check_default_tmpl_args (decl, DECL_TEMPLATE_PARMS (tmpl),
			   /*is_primary=*/true, /*is_partial=*/false,
			   /*is_friend=*/0);

  return finish_template_type_parm (aggr, tmpl);
}

/* ARGUMENT is the default-argument value for a template template
   parameter.  If ARGUMENT is invalid, issue error messages and return
   the ERROR_MARK_NODE.  Otherwise, ARGUMENT itself is returned.  */

tree
check_template_template_default_arg (tree argument)
{
  if (TREE_CODE (argument) != TEMPLATE_DECL
      && TREE_CODE (argument) != TEMPLATE_TEMPLATE_PARM
      && TREE_CODE (argument) != UNBOUND_CLASS_TEMPLATE)
    {
      if (TREE_CODE (argument) == TYPE_DECL)
	error ("invalid use of type %qT as a default value for a template "
	       "template-parameter", TREE_TYPE (argument));
      else
	error ("invalid default argument for a template template parameter");
      return error_mark_node;
    }

  return argument;
}

/* Begin a class definition, as indicated by T.  */

tree
begin_class_definition (tree t)
{
  if (error_operand_p (t) || error_operand_p (TYPE_MAIN_DECL (t)))
    return error_mark_node;

  if (processing_template_parmlist)
    {
      error ("definition of %q#T inside template parameter list", t);
      return error_mark_node;
    }

  /* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733
     are passed the same as decimal scalar types.  */
  if (TREE_CODE (t) == RECORD_TYPE
      && !processing_template_decl)
    {
      tree ns = TYPE_CONTEXT (t);
      if (ns && TREE_CODE (ns) == NAMESPACE_DECL
	  && DECL_CONTEXT (ns) == std_node
	  && DECL_NAME (ns)
	  && id_equal (DECL_NAME (ns), "decimal"))
	{
	  const char *n = TYPE_NAME_STRING (t);
	  if ((strcmp (n, "decimal32") == 0)
	      || (strcmp (n, "decimal64") == 0)
	      || (strcmp (n, "decimal128") == 0))
	    TYPE_TRANSPARENT_AGGR (t) = 1;
	}
    }

  /* A non-implicit typename comes from code like:

       template <typename T> struct A {
	 template <typename U> struct A<T>::B ...

     This is erroneous.  */
  else if (TREE_CODE (t) == TYPENAME_TYPE)
    {
      error ("invalid definition of qualified type %qT", t);
      t = error_mark_node;
    }

  if (t == error_mark_node || ! MAYBE_CLASS_TYPE_P (t))
    {
      t = make_class_type (RECORD_TYPE);
      pushtag (make_anon_name (), t, /*tag_scope=*/ts_current);
    }

  if (TYPE_BEING_DEFINED (t))
    {
      t = make_class_type (TREE_CODE (t));
      pushtag (TYPE_IDENTIFIER (t), t, /*tag_scope=*/ts_current);
    }
  maybe_process_partial_specialization (t);
  pushclass (t);
  TYPE_BEING_DEFINED (t) = 1;
  class_binding_level->defining_class_p = 1;

  if (flag_pack_struct)
    {
      tree v;
      TYPE_PACKED (t) = 1;
      /* Even though the type is being defined for the first time
	 here, there might have been a forward declaration, so there
	 might be cv-qualified variants of T.  */
      for (v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v))
	TYPE_PACKED (v) = 1;
    }
  /* Reset the interface data, at the earliest possible
     moment, as it might have been set via a class foo;
     before.  */
  if (! TYPE_UNNAMED_P (t))
    {
      struct c_fileinfo *finfo = \
	get_fileinfo (LOCATION_FILE (input_location));
      CLASSTYPE_INTERFACE_ONLY (t) = finfo->interface_only;
      SET_CLASSTYPE_INTERFACE_UNKNOWN_X
	(t, finfo->interface_unknown);
    }
  reset_specialization();

  /* Make a declaration for this class in its own scope.  */
  build_self_reference ();

  return t;
}

/* Finish the member declaration given by DECL.  */

void
finish_member_declaration (tree decl)
{
  if (decl == error_mark_node || decl == NULL_TREE)
    return;

  if (decl == void_type_node)
    /* The COMPONENT was a friend, not a member, and so there's
       nothing for us to do.  */
    return;

  /* We should see only one DECL at a time.  */
  gcc_assert (DECL_CHAIN (decl) == NULL_TREE);

  /* Don't add decls after definition.  */
  gcc_assert (TYPE_BEING_DEFINED (current_class_type)
	      /* We can add lambda types when late parsing default
		 arguments.  */
	      || LAMBDA_TYPE_P (TREE_TYPE (decl)));

  /* Set up access control for DECL.  */
  TREE_PRIVATE (decl)
    = (current_access_specifier == access_private_node);
  TREE_PROTECTED (decl)
    = (current_access_specifier == access_protected_node);
  if (TREE_CODE (decl) == TEMPLATE_DECL)
    {
      TREE_PRIVATE (DECL_TEMPLATE_RESULT (decl)) = TREE_PRIVATE (decl);
      TREE_PROTECTED (DECL_TEMPLATE_RESULT (decl)) = TREE_PROTECTED (decl);
    }

  /* Mark the DECL as a member of the current class, unless it's
     a member of an enumeration.  */
  if (TREE_CODE (decl) != CONST_DECL)
    DECL_CONTEXT (decl) = current_class_type;

  if (TREE_CODE (decl) == USING_DECL)
    /* For now, ignore class-scope USING_DECLS, so that debugging
       backends do not see them.  */
    DECL_IGNORED_P (decl) = 1;

  /* Check for bare parameter packs in the non-static data member
     declaration.  */
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      if (check_for_bare_parameter_packs (TREE_TYPE (decl)))
	TREE_TYPE (decl) = error_mark_node;
      if (check_for_bare_parameter_packs (DECL_ATTRIBUTES (decl)))
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* [dcl.link]

     A C language linkage is ignored for the names of class members
     and the member function type of class member functions.  */
  if (DECL_LANG_SPECIFIC (decl))
    SET_DECL_LANGUAGE (decl, lang_cplusplus);

  bool add = false;

  /* Functions and non-functions are added differently.  */
  if (DECL_DECLARES_FUNCTION_P (decl))
    add = add_method (current_class_type, decl, false);
  /* Enter the DECL into the scope of the class, if the class
     isn't a closure (whose fields are supposed to be unnamed).  */
  else if (CLASSTYPE_LAMBDA_EXPR (current_class_type)
	   || pushdecl_class_level (decl))
    add = true;

  if (add)
    {
      /* All TYPE_DECLs go at the end of TYPE_FIELDS.  Ordinary fields
	 go at the beginning.  The reason is that
	 legacy_nonfn_member_lookup searches the list in order, and we
	 want a field name to override a type name so that the "struct
	 stat hack" will work.  In particular:

	   struct S { enum E { }; static const int E = 5; int ary[S::E]; } s;

	 is valid.  */

      if (TREE_CODE (decl) == TYPE_DECL)
	TYPE_FIELDS (current_class_type)
	  = chainon (TYPE_FIELDS (current_class_type), decl);
      else
	{
	  DECL_CHAIN (decl) = TYPE_FIELDS (current_class_type);
	  TYPE_FIELDS (current_class_type) = decl;
	}

      maybe_add_class_template_decl_list (current_class_type, decl,
					  /*friend_p=*/0);
    }
}

/* Finish processing a complete template declaration.  The PARMS are
   the template parameters.
   */

void
finish_template_decl (tree parms)
{
  if (parms)
    end_template_decl ();
  else
    end_specialization ();
}

// Returns the template type of the class scope being entered.  If we're
// entering a constrained class scope, TYPE is the class template
// scope being entered and we may need to match the intended type with
// a constrained specialization.  For example:
//
//    template<Object T>
//      struct S { void f(); }; #1
//
//    template<Object T>
//      void S<T>::f() { }      #2
//
// We check, in #2, that S<T> refers precisely to the type declared by
// #1 (i.e., that the constraints match).  Note that the following should
// be an error since there is no specialization of S<T> that is
// unconstrained, but this is not diagnosed here.
//
//    template<typename T>
//      void S<T>::f() { }
//
// We cannot diagnose this problem here since this function also matches
// qualified template names that are not part of a definition.  For example:
//
//    template<Integral T, Floating_point U>
//      typename pair<T, U>::first_type void f(T, U);
//
// Here, it is unlikely that there is a partial specialization of
// pair constrained for Integral and Floating_point arguments.
//
// The general rule is: if a constrained specialization with matching
// constraints is found return that type.  Also note that if TYPE is not a
// class-type (e.g. a typename type), then no fixup is needed.

static tree
fixup_template_type (tree type)
{
  // Find the template parameter list at a depth appropriate to
  // the scope we're trying to enter.
  tree parms = current_template_parms;
  int depth = template_class_depth (type);
  for (int n = processing_template_decl; n > depth && parms; --n)
    parms = TREE_CHAIN (parms);
  if (!parms)
    return type;
  tree cur_reqs = TEMPLATE_PARMS_CONSTRAINTS (parms);
  tree cur_constr = build_constraints (cur_reqs, NULL_TREE);

  // Search for a specialization whose type and constraints match.
  tree tmpl = CLASSTYPE_TI_TEMPLATE (type);
  tree specs = DECL_TEMPLATE_SPECIALIZATIONS (tmpl);
  while (specs)
    {
      tree spec_constr = get_constraints (TREE_VALUE (specs));

      // If the type and constraints match a specialization, then we
      // are entering that type.
      if (same_type_p (type, TREE_TYPE (specs))
	  && equivalent_constraints (cur_constr, spec_constr))
	return TREE_TYPE (specs);
      specs = TREE_CHAIN (specs);
    }

  // If no specialization matches, then must return the type
  // previously found.
  return type;
}

/* Finish processing a template-id (which names a type) of the form
   NAME < ARGS >.  Return the TYPE_DECL for the type named by the
   template-id.  If ENTERING_SCOPE is nonzero we are about to enter
   the scope of template-id indicated.  */

tree
finish_template_type (tree name, tree args, int entering_scope)
{
  tree type;

  type = lookup_template_class (name, args,
				NULL_TREE, NULL_TREE, entering_scope,
				tf_warning_or_error | tf_user);

  /* If we might be entering the scope of a partial specialization,
     find the one with the right constraints.  */
  if (flag_concepts
      && entering_scope
      && CLASS_TYPE_P (type)
      && CLASSTYPE_TEMPLATE_INFO (type)
      && dependent_type_p (type)
      && PRIMARY_TEMPLATE_P (CLASSTYPE_TI_TEMPLATE (type)))
    type = fixup_template_type (type);

  if (type == error_mark_node)
    return type;
  else if (CLASS_TYPE_P (type) && !alias_type_or_template_p (type))
    return TYPE_STUB_DECL (type);
  else
    return TYPE_NAME (type);
}

/* Finish processing a BASE_CLASS with the indicated ACCESS_SPECIFIER.
   Return a TREE_LIST containing the ACCESS_SPECIFIER and the
   BASE_CLASS, or NULL_TREE if an error occurred.  The
   ACCESS_SPECIFIER is one of
   access_{default,public,protected_private}_node.  For a virtual base
   we set TREE_TYPE.  */

tree
finish_base_specifier (tree base, tree access, bool virtual_p)
{
  tree result;

  if (base == error_mark_node)
    {
      error ("invalid base-class specification");
      result = NULL_TREE;
    }
  else if (! MAYBE_CLASS_TYPE_P (base))
    {
      error ("%qT is not a class type", base);
      result = NULL_TREE;
    }
  else
    {
      if (cp_type_quals (base) != 0)
	{
	  /* DR 484: Can a base-specifier name a cv-qualified
	     class type?  */
	  base = TYPE_MAIN_VARIANT (base);
	}
      result = build_tree_list (access, base);
      /* TREE_TYPE is (ab)used as the virtual-base flag here.  */
      if (virtual_p)
	TREE_TYPE (result) = integer_type_node;
    }

  return result;
}

/* If FNS is a member function, a set of member functions, or a
   template-id referring to one or more member functions, return a
   BASELINK for FNS, incorporating the current access context.
   Otherwise, return FNS unchanged.
*/

tree
baselink_for_fns (tree fns)
{
  tree scope;
  tree cl;

  /* Already a BASELINK (or an error): nothing to do.  */
  if (BASELINK_P (fns)
      || error_operand_p (fns))
    return fns;

  scope = ovl_scope (fns);
  if (!CLASS_TYPE_P (scope))
    /* Not member functions; return unchanged per the contract above.  */
    return fns;

  /* Prefer a currently-open class derived from SCOPE as the access
     context, falling back to SCOPE itself.  */
  cl = currently_open_derived_class (scope);
  if (!cl)
    cl = scope;
  cl = TYPE_BINFO (cl);
  return build_baselink (cl, cl, fns, /*optype=*/NULL_TREE);
}

/* Returns true iff DECL is a variable from a function outside
   the current one.  */

static bool
outer_var_p (tree decl)
{
  return ((VAR_P (decl) || TREE_CODE (decl) == PARM_DECL)
          && DECL_FUNCTION_SCOPE_P (decl)
          /* Don't get confused by temporaries.  */
          && DECL_NAME (decl)
          && (DECL_CONTEXT (decl) != current_function_decl
              /* In an NSDMI the "current function" is not the real
                 context of the variable.  */
              || parsing_nsdmi ()));
}

/* As above, but also checks that DECL is automatic.  */

bool
outer_automatic_var_p (tree decl)
{
  return (outer_var_p (decl)
          && !TREE_STATIC (decl));
}

/* DECL satisfies outer_automatic_var_p.  Possibly complain about it or
   rewrite it for lambda capture.

   If ODR_USE is true, we're being called from mark_use, and we complain about
   use of constant variables.  If ODR_USE is false, we're being called for the
   id-expression, and we do lambda capture.

   NOTE(review): callers in this file pass only two arguments, so ODR_USE
   presumably has a default argument in the prototype — confirm against
   cp-tree.h.  */

tree
process_outer_var_ref (tree decl, tsubst_flags_t complain, bool odr_use)
{
  if (cp_unevaluated_operand)
    /* It's not a use (3.2) if we're in an unevaluated context.  */
    return decl;
  if (decl == error_mark_node)
    return decl;

  tree context = DECL_CONTEXT (decl);
  tree containing_function = current_function_decl;
  tree lambda_stack = NULL_TREE;
  tree lambda_expr = NULL_TREE;
  tree initializer = convert_from_reference (decl);

  /* Mark it as used now even if the use is ill-formed.  */
  if (!mark_used (decl, complain))
    return error_mark_node;

  if (parsing_nsdmi ())
    containing_function = NULL_TREE;

  if (containing_function && LAMBDA_FUNCTION_P (containing_function))
    {
      /* Check whether we've already built a proxy.  */
      tree var = decl;
      while (is_normal_capture_proxy (var))
        var = DECL_CAPTURED_VARIABLE (var);
      tree d = retrieve_local_specialization (var);

      if (d && d != decl && is_capture_proxy (d))
        {
          if (DECL_CONTEXT (d) == containing_function)
            /* We already have an inner proxy.  */
            return d;
          else
            /* We need to capture an outer proxy.  */
            return process_outer_var_ref (d, complain, odr_use);
        }
    }

  /* If we are in a lambda function, we can move out until we hit
     1. the context,
     2. a non-lambda function, or
     3. a non-default capturing lambda function.  */
  while (context != containing_function
         /* containing_function can be null with invalid generic lambdas.  */
         && containing_function
         && LAMBDA_FUNCTION_P (containing_function))
    {
      tree closure = DECL_CONTEXT (containing_function);
      lambda_expr = CLASSTYPE_LAMBDA_EXPR (closure);

      if (TYPE_CLASS_SCOPE_P (closure))
        /* A lambda in an NSDMI (c++/64496).  */
        break;

      if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr)
          == CPLD_NONE)
        break;

      /* Remember each enclosing lambda we may need to capture through.  */
      lambda_stack = tree_cons (NULL_TREE,
                                lambda_expr,
                                lambda_stack);

      containing_function
        = decl_function_context (containing_function);
    }

  /* In a lambda within a template, wait until instantiation
     time to implicitly capture.  */
  if (context == containing_function
      && DECL_TEMPLATE_INFO (containing_function)
      && uses_template_parms (DECL_TI_ARGS (containing_function)))
    return decl;

  if (lambda_expr && VAR_P (decl)
      && DECL_ANON_UNION_VAR_P (decl))
    {
      if (complain & tf_error)
        error ("cannot capture member %qD of anonymous union", decl);
      return error_mark_node;
    }
  /* Do lambda capture when processing the id-expression, not when
     odr-using a variable.  */
  if (!odr_use && context == containing_function)
    {
      decl = add_default_capture (lambda_stack,
                                  /*id=*/DECL_NAME (decl),
                                  initializer);
    }
  /* Only an odr-use of an outer automatic variable causes an
     error, and a constant variable can decay to a prvalue
     constant without odr-use.  So don't complain yet.  */
  else if (!odr_use && decl_constant_var_p (decl))
    return decl;
  else if (lambda_expr)
    {
      /* We stopped at a lambda that cannot implicitly capture DECL;
         explain why in the diagnostics.  */
      if (complain & tf_error)
        {
          error ("%qD is not captured", decl);
          tree closure = LAMBDA_EXPR_CLOSURE (lambda_expr);
          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr)
              == CPLD_NONE)
            inform (location_of (closure),
                    "the lambda has no capture-default");
          else if (TYPE_CLASS_SCOPE_P (closure))
            inform (UNKNOWN_LOCATION, "lambda in local class %q+T cannot "
                    "capture variables from the enclosing context",
                    TYPE_CONTEXT (closure));
          inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl);
        }
      return error_mark_node;
    }
  else
    {
      /* Not inside a lambda at all: plain use of an outer local.  */
      if (complain & tf_error)
        {
          error (VAR_P (decl)
                 ? G_("use of local variable with automatic storage from "
                      "containing function")
                 : G_("use of parameter from containing function"));
          inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl);
        }
      return error_mark_node;
    }
  return decl;
}

/* ID_EXPRESSION is a representation of parsed, but unprocessed,
   id-expression.  (See cp_parser_id_expression for details.)  SCOPE,
   if non-NULL, is the type or namespace used to explicitly qualify
   ID_EXPRESSION.  DECL is the entity to which that name has been
   resolved.

   *CONSTANT_EXPRESSION_P is true if we are presently parsing a
   constant-expression.  In that case, *NON_CONSTANT_EXPRESSION_P will
   be set to true if this expression isn't permitted in a
   constant-expression, but it is otherwise not set by this function.
   *ALLOW_NON_CONSTANT_EXPRESSION_P is true if we are parsing a
   constant-expression, but a non-constant expression is also
   permissible.

   DONE is true if this expression is a complete postfix-expression;
   it is false if this expression is followed by '->', '[', '(', etc.
   ADDRESS_P is true iff this expression is the operand of '&'.
   TEMPLATE_P is true iff the qualified-id was of the form
   "A::template B".  TEMPLATE_ARG_P is true iff this qualified name
   appears as a template argument.

   If an error occurs, and it is the kind of error that might cause
   the parser to abort a tentative parse, *ERROR_MSG is filled in.  It
   is the caller's responsibility to issue the message.  *ERROR_MSG
   will be a string with static storage duration, so the caller need
   not "free" it.

   Return an expression for the entity, after issuing appropriate
   diagnostics.  This function is also responsible for transforming a
   reference to a non-static member into a COMPONENT_REF that makes
   the use of "this" explicit.

   Upon return, *IDK will be filled in appropriately.
*/

cp_expr
finish_id_expression (tree id_expression,
                      tree decl,
                      tree scope,
                      cp_id_kind *idk,
                      bool integral_constant_expression_p,
                      bool allow_non_integral_constant_expression_p,
                      bool *non_integral_constant_expression_p,
                      bool template_p,
                      bool done,
                      bool address_p,
                      bool template_arg_p,
                      const char **error_msg,
                      location_t location)
{
  decl = strip_using_decl (decl);

  /* Initialize the output parameters.  */
  *idk = CP_ID_KIND_NONE;
  *error_msg = NULL;

  if (id_expression == error_mark_node)
    return error_mark_node;
  /* If we have a template-id, then no further lookup is
     required.  If the template-id was for a template-class, we
     will sometimes have a TYPE_DECL at this point.  */
  else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
           || TREE_CODE (decl) == TYPE_DECL)
    ;
  /* Look up the name.  */
  else
    {
      if (decl == error_mark_node)
        {
          /* Name lookup failed.  */
          if (scope
              && (!TYPE_P (scope)
                  || (!dependent_type_p (scope)
                      && !(identifier_p (id_expression)
                           && IDENTIFIER_CONV_OP_P (id_expression)
                           && dependent_type_p (TREE_TYPE (id_expression))))))
            {
              /* If the qualifying type is non-dependent (and the name
                 does not name a conversion operator to a dependent
                 type), issue an error.  */
              qualified_name_lookup_error (scope, id_expression, decl, location);
              return error_mark_node;
            }
          else if (!scope)
            {
              /* It may be resolved via Koenig lookup.  */
              *idk = CP_ID_KIND_UNQUALIFIED;
              return id_expression;
            }
          else
            decl = id_expression;
        }
      /* If DECL is a variable that would be out of scope under
         ANSI/ISO rules, but in scope in the ARM, name lookup
         will succeed.  Issue a diagnostic here.  */
      else
        decl = check_for_out_of_scope_variable (decl);

      /* Remember that the name was used in the definition of
         the current class so that we can check later to see if
         the meaning would have been different after the class
         was entirely defined.  */
      if (!scope && decl != error_mark_node && identifier_p (id_expression))
        maybe_note_name_used_in_class (id_expression, decl);

      /* A use in unevaluated operand might not be instantiated appropriately
         if tsubst_copy builds a dummy parm, or if we never instantiate a
         generic lambda, so mark it now.  */
      if (processing_template_decl && cp_unevaluated_operand)
        mark_type_use (decl);

      /* Disallow uses of local variables from containing functions, except
         within lambda-expressions.  */
      if (outer_automatic_var_p (decl))
        {
          decl = process_outer_var_ref (decl, tf_warning_or_error);
          if (decl == error_mark_node)
            return error_mark_node;
        }

      /* Also disallow uses of function parameters outside the function
         body, except inside an unevaluated context (i.e. decltype).  */
      if (TREE_CODE (decl) == PARM_DECL
          && DECL_CONTEXT (decl) == NULL_TREE
          && !cp_unevaluated_operand)
        {
          *error_msg = G_("use of parameter outside function body");
          return error_mark_node;
        }
    }

  /* If we didn't find anything, or what we found was a type,
     then this wasn't really an id-expression.  */
  if (TREE_CODE (decl) == TEMPLATE_DECL
      && !DECL_FUNCTION_TEMPLATE_P (decl))
    {
      *error_msg = G_("missing template arguments");
      return error_mark_node;
    }
  else if (TREE_CODE (decl) == TYPE_DECL
           || TREE_CODE (decl) == NAMESPACE_DECL)
    {
      *error_msg = G_("expected primary-expression");
      return error_mark_node;
    }

  /* If the name resolved to a template parameter, there is no
     need to look it up again later.  */
  if ((TREE_CODE (decl) == CONST_DECL && DECL_TEMPLATE_PARM_P (decl))
      || TREE_CODE (decl) == TEMPLATE_PARM_INDEX)
    {
      tree r;

      *idk = CP_ID_KIND_NONE;
      if (TREE_CODE (decl) == TEMPLATE_PARM_INDEX)
        decl = TEMPLATE_PARM_DECL (decl);
      r = convert_from_reference (DECL_INITIAL (decl));

      if (integral_constant_expression_p
          && !dependent_type_p (TREE_TYPE (decl))
          && !(INTEGRAL_OR_ENUMERATION_TYPE_P (TREE_TYPE (r))))
        {
          if (!allow_non_integral_constant_expression_p)
            error ("template parameter %qD of type %qT is not allowed in "
                   "an integral constant expression because it is not of "
                   "integral or enumeration type", decl, TREE_TYPE (decl));
          *non_integral_constant_expression_p = true;
        }
      return r;
    }
  else
    {
      bool dependent_p = type_dependent_expression_p (decl);

      /* If the declaration was explicitly qualified indicate
         that.  The semantics of `A::f(3)' are different than
         `f(3)' if `f' is virtual.  */
      *idk = (scope
              ? CP_ID_KIND_QUALIFIED
              : (TREE_CODE (decl) == TEMPLATE_ID_EXPR
                 ? CP_ID_KIND_TEMPLATE_ID
                 : (dependent_p
                    ? CP_ID_KIND_UNQUALIFIED_DEPENDENT
                    : CP_ID_KIND_UNQUALIFIED)));

      if (dependent_p
          && DECL_P (decl)
          && any_dependent_type_attributes_p (DECL_ATTRIBUTES (decl)))
        /* Dependent type attributes on the decl mean that the TREE_TYPE is
           wrong, so just return the identifier.  */
        return id_expression;

      if (TREE_CODE (decl) == NAMESPACE_DECL)
        {
          error ("use of namespace %qD as expression", decl);
          return error_mark_node;
        }
      else if (DECL_CLASS_TEMPLATE_P (decl))
        {
          error ("use of class template %qT as expression", decl);
          return error_mark_node;
        }
      else if (TREE_CODE (decl) == TREE_LIST)
        {
          /* Ambiguous reference to base members.  */
          error ("request for member %qD is ambiguous in "
                 "multiple inheritance lattice", id_expression);
          print_candidates (decl);
          return error_mark_node;
        }

      /* Mark variable-like entities as used.  Functions are similarly
         marked either below or after overload resolution.  */
      if ((VAR_P (decl)
           || TREE_CODE (decl) == PARM_DECL
           || TREE_CODE (decl) == CONST_DECL
           || TREE_CODE (decl) == RESULT_DECL)
          && !mark_used (decl))
        return error_mark_node;

      /* Only certain kinds of names are allowed in constant
         expression.  Template parameters have already
         been handled above.  */
      if (! error_operand_p (decl)
          && !dependent_p
          && integral_constant_expression_p
          && ! decl_constant_var_p (decl)
          && TREE_CODE (decl) != CONST_DECL
          && ! builtin_valid_in_constant_expr_p (decl))
        {
          if (!allow_non_integral_constant_expression_p)
            {
              error ("%qD cannot appear in a constant-expression", decl);
              return error_mark_node;
            }
          *non_integral_constant_expression_p = true;
        }

      tree wrap;
      if (VAR_P (decl)
          && !cp_unevaluated_operand
          && !processing_template_decl
          && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
          && CP_DECL_THREAD_LOCAL_P (decl)
          && (wrap = get_tls_wrapper_fn (decl)))
        {
          /* Replace an evaluated use of the thread_local variable with
             a call to its wrapper.  */
          decl = build_cxx_call (wrap, 0, NULL, tf_warning_or_error);
        }
      else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
               && !dependent_p
               && variable_template_p (TREE_OPERAND (decl, 0)))
        {
          /* A non-dependent variable template-id: instantiate it now.  */
          decl = finish_template_variable (decl);
          mark_used (decl);
          decl = convert_from_reference (decl);
        }
      else if (scope)
        {
          if (TREE_CODE (decl) == SCOPE_REF)
            {
              gcc_assert (same_type_p (scope, TREE_OPERAND (decl, 0)));
              decl = TREE_OPERAND (decl, 1);
            }

          decl = (adjust_result_of_qualified_name_lookup
                  (decl, scope, current_nonlambda_class_type()));

          if (TREE_CODE (decl) == FUNCTION_DECL)
            mark_used (decl);

          if (TYPE_P (scope))
            decl = finish_qualified_id_expr (scope,
                                             decl,
                                             done,
                                             address_p,
                                             template_p,
                                             template_arg_p,
                                             tf_warning_or_error);
          else
            decl = convert_from_reference (decl);
        }
      else if (TREE_CODE (decl) == FIELD_DECL)
        {
          /* Since SCOPE is NULL here, this is an unqualified name.
             Access checking has been performed during name lookup
             already.  Turn off checking to avoid duplicate errors.  */
          push_deferring_access_checks (dk_no_check);
          decl = finish_non_static_data_member (decl, NULL_TREE,
                                                /*qualifying_scope=*/NULL_TREE);
          pop_deferring_access_checks ();
        }
      else if (is_overloaded_fn (decl))
        {
          tree first_fn = get_first_fn (decl);

          if (TREE_CODE (first_fn) == TEMPLATE_DECL)
            first_fn = DECL_TEMPLATE_RESULT (first_fn);

          /* [basic.def.odr]: "A function whose name appears as a
             potentially-evaluated expression is odr-used if it is the unique
             lookup result".

             But only mark it if it's a complete postfix-expression; in a call,
             ADL might select a different function, and we'll call mark_used in
             build_over_call.  */
          if (done
              && !really_overloaded_fn (decl)
              && !mark_used (first_fn))
            return error_mark_node;

          if (!template_arg_p
              && TREE_CODE (first_fn) == FUNCTION_DECL
              && DECL_FUNCTION_MEMBER_P (first_fn)
              && !shared_member_p (decl))
            {
              /* A set of member functions.  */
              decl = maybe_dummy_object (DECL_CONTEXT (first_fn), 0);
              return finish_class_member_access_expr (decl, id_expression,
                                                      /*template_p=*/false,
                                                      tf_warning_or_error);
            }

          decl = baselink_for_fns (decl);
        }
      else
        {
          if (DECL_P (decl) && DECL_NONLOCAL (decl)
              && DECL_CLASS_SCOPE_P (decl))
            {
              /* A class-scope entity named from a different class:
                 check access through the currently-open derived class.  */
              tree context = context_for_name_lookup (decl);
              if (context != current_class_type)
                {
                  tree path = currently_open_derived_class (context);
                  perform_or_defer_access_check (TYPE_BINFO (path),
                                                 decl, decl,
                                                 tf_warning_or_error);
                }
            }

          decl = convert_from_reference (decl);
        }
    }

  return cp_expr (decl, location);
}

/* Implement the __typeof keyword: Return the type of EXPR, suitable for
   use as a type-specifier.  */

tree
finish_typeof (tree expr)
{
  tree type;

  if (type_dependent_expression_p (expr))
    {
      /* Defer: build a placeholder TYPEOF_TYPE to be resolved at
         instantiation time.  */
      type = cxx_make_type (TYPEOF_TYPE);
      TYPEOF_TYPE_EXPR (type) = expr;
      SET_TYPE_STRUCTURAL_EQUALITY (type);

      return type;
    }

  expr = mark_type_use (expr);

  type = unlowered_expr_type (expr);

  if (!type || type == unknown_type_node)
    {
      error ("type of %qE is unknown", expr);
      return error_mark_node;
    }

  return type;
}

/* Implement the __underlying_type keyword: Return the underlying
   type of TYPE, suitable for use as a type-specifier.
*/

tree
finish_underlying_type (tree type)
{
  /* Inside a template we cannot compute the underlying type yet;
     build a placeholder UNDERLYING_TYPE node to be resolved at
     instantiation time.  */
  if (processing_template_decl)
    {
      tree placeholder = cxx_make_type (UNDERLYING_TYPE);
      UNDERLYING_TYPE_TYPE (placeholder) = type;
      SET_TYPE_STRUCTURAL_EQUALITY (placeholder);
      return placeholder;
    }

  /* The operand must be a complete enumeration type.  */
  if (!complete_type_or_else (type, NULL_TREE))
    return error_mark_node;

  if (TREE_CODE (type) != ENUMERAL_TYPE)
    {
      error ("%qT is not an enumeration type", type);
      return error_mark_node;
    }

  tree utype = ENUM_UNDERLYING_TYPE (type);

  /* Fixup necessary in this case because ENUM_UNDERLYING_TYPE
     includes TYPE_MIN_VALUE and TYPE_MAX_VALUE information.
     See finish_enum_value_list for details.  */
  if (!ENUM_FIXED_UNDERLYING_TYPE_P (type))
    utype = c_common_type_for_mode (TYPE_MODE (utype),
                                    TYPE_UNSIGNED (utype));

  return utype;
}

/* Implement the __direct_bases keyword: Return the direct base classes
   of type.
*/

tree
calculate_direct_bases (tree type, tsubst_flags_t complain)
{
  /* Incomplete or non-class (or union) types have no bases.  */
  if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
      || !NON_UNION_CLASS_TYPE_P (type))
    return make_tree_vec (0);

  vec<tree, va_gc> *vector = make_tree_vector ();
  vec<tree, va_gc> *base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
  tree binfo;
  unsigned i;

  /* Virtual bases are initialized first */
  for (i = 0; base_binfos->iterate (i, &binfo); i++)
    if (BINFO_VIRTUAL_P (binfo))
      vec_safe_push (vector, binfo);

  /* Now non-virtuals */
  for (i = 0; base_binfos->iterate (i, &binfo); i++)
    if (!BINFO_VIRTUAL_P (binfo))
      vec_safe_push (vector, binfo);

  /* Copy the collected binfos' types into a TREE_VEC result.  */
  tree bases_vec = make_tree_vec (vector->length ());

  for (i = 0; i < vector->length (); ++i)
    TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);

  release_tree_vector (vector);
  return bases_vec;
}

/* Implement the __bases keyword: Return the base classes
   of type */

/* Find morally non-virtual base classes by walking binfo hierarchy */
/* Virtual base classes are handled separately in finish_bases */

static tree
dfs_calculate_bases_pre (tree binfo, void * /*data_*/)
{
  /* Don't walk bases of virtual bases */
  return BINFO_VIRTUAL_P (binfo) ? dfs_skip_bases : NULL_TREE;
}

/* Post-order callback: record each non-virtual base's type in the
   vec pointed to by DATA_.  */

static tree
dfs_calculate_bases_post (tree binfo, void *data_)
{
  vec<tree, va_gc> **data = ((vec<tree, va_gc> **) data_);
  if (!BINFO_VIRTUAL_P (binfo))
    vec_safe_push (*data, BINFO_TYPE (binfo));
  return NULL_TREE;
}

/* Calculates the morally non-virtual base classes of a class */
static vec<tree, va_gc> *
calculate_bases_helper (tree type)
{
  vec<tree, va_gc> *vector = make_tree_vector ();

  /* Now add non-virtual base classes in order of construction */
  if (TYPE_BINFO (type))
    dfs_walk_all (TYPE_BINFO (type),
                  dfs_calculate_bases_pre, dfs_calculate_bases_post, &vector);
  return vector;
}

tree
calculate_bases (tree type, tsubst_flags_t complain)
{
  if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
      || !NON_UNION_CLASS_TYPE_P (type))
    return make_tree_vec (0);

  vec<tree, va_gc> *vector = make_tree_vector ();
  tree bases_vec = NULL_TREE;
  unsigned i;
  vec<tree, va_gc> *vbases;
  vec<tree, va_gc> *nonvbases;
  tree binfo;

  /* First go through virtual base classes */
  for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0;
       vec_safe_iterate (vbases, i, &binfo); i++)
    {
      vec<tree, va_gc> *vbase_bases
        = calculate_bases_helper (BINFO_TYPE (binfo));
      vec_safe_splice (vector, vbase_bases);
      release_tree_vector (vbase_bases);
    }

  /* Now for the non-virtual bases */
  nonvbases = calculate_bases_helper (type);
  vec_safe_splice (vector, nonvbases);
  release_tree_vector (nonvbases);

  /* Note that during error recovery vector->length can even be zero.  */
  if (vector->length () > 1)
    {
      /* Last element is entire class, so don't copy */
      bases_vec = make_tree_vec (vector->length () - 1);

      for (i = 0; i < vector->length () - 1; ++i)
        TREE_VEC_ELT (bases_vec, i) = (*vector)[i];
    }
  else
    bases_vec = make_tree_vec (0);

  release_tree_vector (vector);
  return bases_vec;
}

tree
finish_bases (tree type, bool direct)
{
  tree bases = NULL_TREE;

  if (!processing_template_decl)
    {
      /* Parameter packs can only be used in templates */
      /* NOTE(review): this diagnostic starts with a capital letter,
         contrary to the usual GCC diagnostic convention of starting
         lowercase — consider fixing separately.  */
      error ("Parameter pack __bases only valid in template declaration");
      return error_mark_node;
    }

  /* Build a placeholder BASES node; DIRECT selects __direct_bases
     vs. __bases expansion at instantiation time.  */
  bases = cxx_make_type (BASES);
  BASES_TYPE (bases) = type;
  BASES_DIRECT (bases) = direct;
  SET_TYPE_STRUCTURAL_EQUALITY (bases);

  return bases;
}

/* Perform C++-specific checks for __builtin_offsetof before calling
   fold_offsetof.  */

tree
finish_offsetof (tree object_ptr, tree expr, location_t loc)
{
  /* If we're processing a template, we can't finish the semantics yet.
     Otherwise we can fold the entire expression now.  */
  if (processing_template_decl)
    {
      expr = build2 (OFFSETOF_EXPR, size_type_node, expr, object_ptr);
      SET_EXPR_LOCATION (expr, loc);
      return expr;
    }

  if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
    {
      error ("cannot apply %<offsetof%> to destructor %<~%T%>",
             TREE_OPERAND (expr, 2));
      return error_mark_node;
    }
  if (TREE_CODE (TREE_TYPE (expr)) == FUNCTION_TYPE
      || TREE_CODE (TREE_TYPE (expr)) == METHOD_TYPE
      || TREE_TYPE (expr) == unknown_type_node)
    {
      /* Dig through COMPONENT_REF/COMPOUND_EXPR wrappers to find the
         member function decl for a better diagnostic.  */
      while (TREE_CODE (expr) == COMPONENT_REF
             || TREE_CODE (expr) == COMPOUND_EXPR)
        expr = TREE_OPERAND (expr, 1);

      if (DECL_P (expr))
        {
          error ("cannot apply %<offsetof%> to member function %qD", expr);
          inform (DECL_SOURCE_LOCATION (expr), "declared here");
        }
      else
        error ("cannot apply %<offsetof%> to member function");
      return error_mark_node;
    }
  if (TREE_CODE (expr) == CONST_DECL)
    {
      error ("cannot apply %<offsetof%> to an enumerator %qD", expr);
      return error_mark_node;
    }
  if (REFERENCE_REF_P (expr))
    expr = TREE_OPERAND (expr, 0);
  if (!complete_type_or_else (TREE_TYPE (TREE_TYPE (object_ptr)), object_ptr))
    return error_mark_node;
  if (warn_invalid_offsetof
      && CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (object_ptr)))
      && CLASSTYPE_NON_STD_LAYOUT (TREE_TYPE (TREE_TYPE (object_ptr)))
      && cp_unevaluated_operand == 0)
    warning_at (loc, OPT_Winvalid_offsetof, "offsetof within "
                "non-standard-layout type %qT is conditionally-supported",
                TREE_TYPE (TREE_TYPE (object_ptr)));
  return fold_offsetof (expr);
}

/* Replace the AGGR_INIT_EXPR at *TP with an equivalent CALL_EXPR.  This
   function is broken out from the above for the benefit of the tree-ssa
   project.  */

void
simplify_aggr_init_expr (tree *tp)
{
  tree aggr_init_expr = *tp;

  /* Form an appropriate CALL_EXPR.  */
  tree fn = AGGR_INIT_EXPR_FN (aggr_init_expr);
  tree slot = AGGR_INIT_EXPR_SLOT (aggr_init_expr);
  tree type = TREE_TYPE (slot);

  tree call_expr;
  enum style_t { ctor, arg, pcc } style;

  if (AGGR_INIT_VIA_CTOR_P (aggr_init_expr))
    style = ctor;
#ifdef PCC_STATIC_STRUCT_RETURN
  else if (1)
    style = pcc;
#endif
  else
    {
      gcc_assert (TREE_ADDRESSABLE (type));
      style = arg;
    }

  call_expr = build_call_array_loc (input_location,
                                    TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
                                    fn,
                                    aggr_init_expr_nargs (aggr_init_expr),
                                    AGGR_INIT_EXPR_ARGP (aggr_init_expr));
  /* Copy flag bits from the AGGR_INIT_EXPR to the new CALL_EXPR.  */
  TREE_NOTHROW (call_expr) = TREE_NOTHROW (aggr_init_expr);
  CALL_FROM_THUNK_P (call_expr) = AGGR_INIT_FROM_THUNK_P (aggr_init_expr);
  CALL_EXPR_OPERATOR_SYNTAX (call_expr)
    = CALL_EXPR_OPERATOR_SYNTAX (aggr_init_expr);
  CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (aggr_init_expr);
  CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (aggr_init_expr);

  if (style == ctor)
    {
      /* Replace the first argument to the ctor with the address of the
         slot.  */
      cxx_mark_addressable (slot);
      CALL_EXPR_ARG (call_expr, 0) =
        build1 (ADDR_EXPR, build_pointer_type (type), slot);
    }
  else if (style == arg)
    {
      /* Just mark it addressable here, and leave the rest to
         expand_call{,_inline}.  */
      cxx_mark_addressable (slot);
      CALL_EXPR_RETURN_SLOT_OPT (call_expr) = true;
      call_expr = build2 (INIT_EXPR, TREE_TYPE (call_expr), slot, call_expr);
    }
  else if (style == pcc)
    {
      /* If we're using the non-reentrant PCC calling convention, then we
         need to copy the returned value out of the static buffer into the
         SLOT.  */
      push_deferring_access_checks (dk_no_check);
      call_expr = build_aggr_init (slot, call_expr,
                                   DIRECT_BIND | LOOKUP_ONLYCONVERTING,
                                   tf_warning_or_error);
      pop_deferring_access_checks ();
      call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (slot), call_expr, slot);
    }

  if (AGGR_INIT_ZERO_FIRST (aggr_init_expr))
    {
      /* Zero-initialize the slot before running the call.  */
      tree init = build_zero_init (type, NULL_TREE,
                                   /*static_storage_p=*/false);
      init = build2 (INIT_EXPR, void_type_node, slot, init);
      call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (call_expr),
                          init, call_expr);
    }

  *tp = call_expr;
}

/* Emit all thunks to FN that should be emitted when FN is emitted.  */

void
emit_associated_thunks (tree fn)
{
  /* When we use vcall offsets, we emit thunks with the virtual
     functions to which they thunk.  The whole point of vcall offsets
     is so that you can know statically the entire set of thunks that
     will ever be needed for a given virtual function, thereby
     enabling you to output all the thunks with the function itself.  */
  if (DECL_VIRTUAL_P (fn)
      /* Do not emit thunks for extern template instantiations.  */
      && ! DECL_REALLY_EXTERN (fn))
    {
      tree thunk;

      for (thunk = DECL_THUNKS (fn); thunk; thunk = DECL_CHAIN (thunk))
        {
          if (!THUNK_ALIAS (thunk))
            {
              use_thunk (thunk, /*emit_p=*/1);
              if (DECL_RESULT_THUNK_P (thunk))
                {
                  /* A covariant thunk may itself have thunks.  */
                  tree probe;

                  for (probe = DECL_THUNKS (thunk);
                       probe; probe = DECL_CHAIN (probe))
                    use_thunk (probe, /*emit_p=*/1);
                }
            }
          else
            gcc_assert (!DECL_THUNKS (thunk));
        }
    }
}

/* Generate RTL for FN.  Returns true if the caller should actually
   expand the function; false if it has been handled (or deferred)
   here.  */

bool
expand_or_defer_fn_1 (tree fn)
{
  /* When the parser calls us after finishing the body of a template
     function, we don't really want to expand the body.  */
  if (processing_template_decl)
    {
      /* Normally, collection only occurs in rest_of_compilation.  So,
         if we don't collect here, we never collect junk generated
         during the processing of templates until we hit a
         non-template function.  It's not safe to do this inside a
         nested class, though, as the parser may have local state that
         is not a GC root.  */
      if (!function_depth)
        ggc_collect ();
      return false;
    }

  gcc_assert (DECL_SAVED_TREE (fn));

  /* We make a decision about linkage for these functions at the end
     of the compilation.  Until that point, we do not want the back
     end to output them -- but we do want it to see the bodies of
     these functions so that it can inline them as appropriate.  */
  if (DECL_DECLARED_INLINE_P (fn) || DECL_IMPLICIT_INSTANTIATION (fn))
    {
      if (DECL_INTERFACE_KNOWN (fn))
        /* We've already made a decision as to how this function will
           be handled.  */;
      else if (!at_eof)
        tentative_decl_linkage (fn);
      else
        import_export_decl (fn);

      /* If the user wants us to keep all inline functions, then mark
         this function as needed so that finish_file will make sure to
         output it later.  Similarly, all dllexport'd functions must
         be emitted; there may be callers in other DLLs.  */
      if (DECL_DECLARED_INLINE_P (fn)
          && !DECL_REALLY_EXTERN (fn)
          && (flag_keep_inline_functions
              || (flag_keep_inline_dllexport
                  && lookup_attribute ("dllexport", DECL_ATTRIBUTES (fn)))))
        {
          mark_needed (fn);
          DECL_EXTERNAL (fn) = 0;
        }
    }

  /* If this is a constructor or destructor body, we have to clone
     it.  */
  if (maybe_clone_body (fn))
    {
      /* We don't want to process FN again, so pretend we've written
         it out, even though we haven't.  */
      TREE_ASM_WRITTEN (fn) = 1;
      /* If this is a constexpr function, keep DECL_SAVED_TREE.  */
      if (!DECL_DECLARED_CONSTEXPR_P (fn))
        DECL_SAVED_TREE (fn) = NULL_TREE;
      return false;
    }

  /* There's no reason to do any of the work here if we're only doing
     semantic analysis; this code just generates RTL.  */
  if (flag_syntax_only)
    return false;

  return true;
}

void
expand_or_defer_fn (tree fn)
{
  if (expand_or_defer_fn_1 (fn))
    {
      function_depth++;

      /* Expand or defer, at the whim of the compilation unit manager.  */
      cgraph_node::finalize_function (fn, function_depth > 1);
      emit_associated_thunks (fn);

      function_depth--;
    }
}

/* State shared by finalize_nrv and its walk_tree callback.  */

struct nrv_data
{
  nrv_data () : visited (37) {}

  tree var;     /* The local variable being replaced.  */
  tree result;  /* The function's RESULT_DECL.  */
  hash_table<nofree_ptr_hash <tree_node> > visited;
};

/* Helper function for walk_tree, used by finalize_nrv below.  */

static tree
finalize_nrv_r (tree* tp, int* walk_subtrees, void* data)
{
  struct nrv_data *dp = (struct nrv_data *)data;
  tree_node **slot;

  /* No need to walk into types.  There wouldn't be any need to walk into
     non-statements, except that we have to consider STMT_EXPRs.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  /* Change all returns to just refer to the RESULT_DECL; this is a nop,
     but differs from using NULL_TREE in that it indicates that we care
     about the value of the RESULT_DECL.  */
  else if (TREE_CODE (*tp) == RETURN_EXPR)
    TREE_OPERAND (*tp, 0) = dp->result;
  /* Change all cleanups for the NRV to only run when an exception is
     thrown.  */
  else if (TREE_CODE (*tp) == CLEANUP_STMT
           && CLEANUP_DECL (*tp) == dp->var)
    CLEANUP_EH_ONLY (*tp) = 1;
  /* Replace the DECL_EXPR for the NRV with an initialization of the
     RESULT_DECL, if needed.  */
  else if (TREE_CODE (*tp) == DECL_EXPR
           && DECL_EXPR_DECL (*tp) == dp->var)
    {
      tree init;
      if (DECL_INITIAL (dp->var)
          && DECL_INITIAL (dp->var) != error_mark_node)
        init = build2 (INIT_EXPR, void_type_node, dp->result,
                       DECL_INITIAL (dp->var));
      else
        init = build_empty_stmt (EXPR_LOCATION (*tp));
      DECL_INITIAL (dp->var) = NULL_TREE;
      SET_EXPR_LOCATION (init, EXPR_LOCATION (*tp));
      *tp = init;
    }
  /* And replace all uses of the NRV with the RESULT_DECL.  */
  else if (*tp == dp->var)
    *tp = dp->result;

  /* Avoid walking into the same tree more than once.  Unfortunately, we
     can't just use walk_tree_without duplicates because it would only call
     us for the first occurrence of dp->var in the function body.  */
  slot = dp->visited.find_slot (*tp, INSERT);
  if (*slot)
    *walk_subtrees = 0;
  else
    *slot = *tp;

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Called from finish_function to implement the named return value
   optimization by overriding all the RETURN_EXPRs and pertinent
   CLEANUP_STMTs and replacing all occurrences of VAR with RESULT, the
   RESULT_DECL for the function.  */

void
finalize_nrv (tree *tp, tree var, tree result)
{
  struct nrv_data data;

  /* Copy name from VAR to RESULT.  */
  DECL_NAME (result) = DECL_NAME (var);
  /* Don't forget that we take its address.  */
  TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (var);
  /* Finally set DECL_VALUE_EXPR to avoid assigning
     a stack slot at -O0 for the original var and debug info
     uses RESULT location for VAR.  */
  SET_DECL_VALUE_EXPR (var, result);
  DECL_HAS_VALUE_EXPR_P (var) = 1;

  data.var = var;
  data.result = result;
  cp_walk_tree (tp, finalize_nrv_r, &data, 0);
}

/* Create CP_OMP_CLAUSE_INFO for clause C.  Returns true if it is invalid.
*/ 4374 4375 bool 4376 cxx_omp_create_clause_info (tree c, tree type, bool need_default_ctor, 4377 bool need_copy_ctor, bool need_copy_assignment, 4378 bool need_dtor) 4379 { 4380 int save_errorcount = errorcount; 4381 tree info, t; 4382 4383 /* Always allocate 3 elements for simplicity. These are the 4384 function decls for the ctor, dtor, and assignment op. 4385 This layout is known to the three lang hooks, 4386 cxx_omp_clause_default_init, cxx_omp_clause_copy_init, 4387 and cxx_omp_clause_assign_op. */ 4388 info = make_tree_vec (3); 4389 CP_OMP_CLAUSE_INFO (c) = info; 4390 4391 if (need_default_ctor || need_copy_ctor) 4392 { 4393 if (need_default_ctor) 4394 t = get_default_ctor (type); 4395 else 4396 t = get_copy_ctor (type, tf_warning_or_error); 4397 4398 if (t && !trivial_fn_p (t)) 4399 TREE_VEC_ELT (info, 0) = t; 4400 } 4401 4402 if (need_dtor && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)) 4403 TREE_VEC_ELT (info, 1) = get_dtor (type, tf_warning_or_error); 4404 4405 if (need_copy_assignment) 4406 { 4407 t = get_copy_assign (type); 4408 4409 if (t && !trivial_fn_p (t)) 4410 TREE_VEC_ELT (info, 2) = t; 4411 } 4412 4413 return errorcount != save_errorcount; 4414 } 4415 4416 /* If DECL is DECL_OMP_PRIVATIZED_MEMBER, return corresponding 4417 FIELD_DECL, otherwise return DECL itself. */ 4418 4419 static tree 4420 omp_clause_decl_field (tree decl) 4421 { 4422 if (VAR_P (decl) 4423 && DECL_HAS_VALUE_EXPR_P (decl) 4424 && DECL_ARTIFICIAL (decl) 4425 && DECL_LANG_SPECIFIC (decl) 4426 && DECL_OMP_PRIVATIZED_MEMBER (decl)) 4427 { 4428 tree f = DECL_VALUE_EXPR (decl); 4429 if (INDIRECT_REF_P (f)) 4430 f = TREE_OPERAND (f, 0); 4431 if (TREE_CODE (f) == COMPONENT_REF) 4432 { 4433 f = TREE_OPERAND (f, 1); 4434 gcc_assert (TREE_CODE (f) == FIELD_DECL); 4435 return f; 4436 } 4437 } 4438 return NULL_TREE; 4439 } 4440 4441 /* Adjust DECL if needed for printing using %qE. 
*/ 4442 4443 static tree 4444 omp_clause_printable_decl (tree decl) 4445 { 4446 tree t = omp_clause_decl_field (decl); 4447 if (t) 4448 return t; 4449 return decl; 4450 } 4451 4452 /* For a FIELD_DECL F and corresponding DECL_OMP_PRIVATIZED_MEMBER 4453 VAR_DECL T that doesn't need a DECL_EXPR added, record it for 4454 privatization. */ 4455 4456 static void 4457 omp_note_field_privatization (tree f, tree t) 4458 { 4459 if (!omp_private_member_map) 4460 omp_private_member_map = new hash_map<tree, tree>; 4461 tree &v = omp_private_member_map->get_or_insert (f); 4462 if (v == NULL_TREE) 4463 { 4464 v = t; 4465 omp_private_member_vec.safe_push (f); 4466 /* Signal that we don't want to create DECL_EXPR for this dummy var. */ 4467 omp_private_member_vec.safe_push (integer_zero_node); 4468 } 4469 } 4470 4471 /* Privatize FIELD_DECL T, return corresponding DECL_OMP_PRIVATIZED_MEMBER 4472 dummy VAR_DECL. */ 4473 4474 tree 4475 omp_privatize_field (tree t, bool shared) 4476 { 4477 tree m = finish_non_static_data_member (t, NULL_TREE, NULL_TREE); 4478 if (m == error_mark_node) 4479 return error_mark_node; 4480 if (!omp_private_member_map && !shared) 4481 omp_private_member_map = new hash_map<tree, tree>; 4482 if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE) 4483 { 4484 gcc_assert (INDIRECT_REF_P (m)); 4485 m = TREE_OPERAND (m, 0); 4486 } 4487 tree vb = NULL_TREE; 4488 tree &v = shared ? vb : omp_private_member_map->get_or_insert (t); 4489 if (v == NULL_TREE) 4490 { 4491 v = create_temporary_var (TREE_TYPE (m)); 4492 retrofit_lang_decl (v); 4493 DECL_OMP_PRIVATIZED_MEMBER (v) = 1; 4494 SET_DECL_VALUE_EXPR (v, m); 4495 DECL_HAS_VALUE_EXPR_P (v) = 1; 4496 if (!shared) 4497 omp_private_member_vec.safe_push (t); 4498 } 4499 return v; 4500 } 4501 4502 /* Helper function for handle_omp_array_sections. Called recursively 4503 to handle multiple array-section-subscripts. 
   C is the clause,
   T current expression (initially OMP_CLAUSE_DECL), which is either
   a TREE_LIST for array-section-subscript (TREE_PURPOSE is low-bound
   expression if specified, TREE_VALUE length expression if specified),
   TREE_CHAIN is what it has been specified after, or some decl.
   TYPES vector is populated with array section types, MAYBE_ZERO_LEN
   set to true if any of the array-section-subscript could have length
   of zero (explicit or implicit), FIRST_NON_ONE is the index of the
   first array-section-subscript which is known not to have length
   of one.  Given say:
   map(a[:b][2:1][:c][:2][:d][e:f][2:5])
   FIRST_NON_ONE will be 3, array-section-subscript [:b], [2:1] and [:c]
   all are or may have length of 1, array-section-subscript [:2] is the
   first one known not to have length 1.  For array-section-subscript
   <= FIRST_NON_ONE we diagnose non-contiguous arrays if low bound isn't
   0 or length isn't the array domain max + 1, for > FIRST_NON_ONE we
   can if MAYBE_ZERO_LEN is false.  MAYBE_ZERO_LEN will be true in the above
   case though, as some lengths could be zero.

   Returns error_mark_node after diagnosing an error, NULL_TREE when the
   expression is type-dependent (to be redone at instantiation time), and
   otherwise the rewritten base expression.  */

static tree
handle_omp_array_sections_1 (tree c, tree t, vec<tree> &types,
			     bool &maybe_zero_len, unsigned int &first_non_one,
			     enum c_omp_region_type ort)
{
  tree ret, low_bound, length, type;
  /* Non-TREE_LIST T is the innermost base decl/expression; validate it
     and stop the recursion.  */
  if (TREE_CODE (t) != TREE_LIST)
    {
      if (error_operand_p (t))
	return error_mark_node;
      if (REFERENCE_REF_P (t)
	  && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
	t = TREE_OPERAND (t, 0);
      ret = t;
      if (TREE_CODE (t) == COMPONENT_REF
	  && ort == C_ORT_OMP
	  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM)
	  && !type_dependent_expression_p (t))
	{
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
	      && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"bit-field %qE in %qs clause",
			t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	      return error_mark_node;
	    }
	  /* Walk down to the outermost object, rejecting union members
	     anywhere along the access path.  */
	  while (TREE_CODE (t) == COMPONENT_REF)
	    {
	      if (TREE_TYPE (TREE_OPERAND (t, 0))
		  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qE is a member of a union", t);
		  return error_mark_node;
		}
	      t = TREE_OPERAND (t, 0);
	    }
	  if (REFERENCE_REF_P (t))
	    t = TREE_OPERAND (t, 0);
	}
      if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
	{
	  if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
	    return NULL_TREE;
	  if (DECL_P (t))
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qD is not a variable in %qs clause", t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  else
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qE is not a variable in %qs clause", t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      else if (TREE_CODE (t) == PARM_DECL
	       && DECL_ARTIFICIAL (t)
	       && DECL_NAME (t) == this_identifier)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<this%> allowed in OpenMP only in %<declare simd%>"
		    " clauses");
	  return error_mark_node;
	}
      else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	       && VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%qD is threadprivate variable in %qs clause", t,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (type_dependent_expression_p (ret))
	return NULL_TREE;
      ret = convert_from_reference (ret);
      return ret;
    }

  if (ort == C_ORT_OMP
      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
      && TREE_CODE (TREE_CHAIN (t)) == FIELD_DECL)
    TREE_CHAIN (t) = omp_privatize_field (TREE_CHAIN (t), false);
  /* Recurse on the inner subscripts first, so TYPES is built from the
     innermost dimension outwards.  */
  ret = handle_omp_array_sections_1 (c, TREE_CHAIN (t), types,
				     maybe_zero_len, first_non_one, ort);
  if (ret == error_mark_node || ret == NULL_TREE)
    return ret;

  type = TREE_TYPE (ret);
  low_bound = TREE_PURPOSE (t);
  length = TREE_VALUE (t);
  if ((low_bound && type_dependent_expression_p (low_bound))
      || (length && type_dependent_expression_p (length)))
    return NULL_TREE;

  if (low_bound == error_mark_node || length == error_mark_node)
    return error_mark_node;

  if (low_bound && !INTEGRAL_TYPE_P (TREE_TYPE (low_bound)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"low bound %qE of array section does not have integral type",
		low_bound);
      return error_mark_node;
    }
  if (length && !INTEGRAL_TYPE_P (TREE_TYPE (length)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"length %qE of array section does not have integral type",
		length);
      return error_mark_node;
    }
  if (low_bound)
    low_bound = mark_rvalue_use (low_bound);
  if (length)
    length = mark_rvalue_use (length);
  /* We need to reduce to real constant-values for checks below.  */
  if (length)
    length = fold_simple (length);
  if (low_bound)
    low_bound = fold_simple (low_bound);
  /* Normalize overly-wide integer constants to sizetype so the
     tree_int_cst_* comparisons below are well-defined.  */
  if (low_bound
      && TREE_CODE (low_bound) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (low_bound))
	 > TYPE_PRECISION (sizetype))
    low_bound = fold_convert (sizetype, low_bound);
  if (length
      && TREE_CODE (length) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (length))
	 > TYPE_PRECISION (sizetype))
    length = fold_convert (sizetype, length);
  /* An omitted low bound defaults to zero.  */
  if (low_bound == NULL_TREE)
    low_bound = integer_zero_node;

  if (length != NULL_TREE)
    {
      if (!integer_nonzerop (length))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    {
	      if (integer_zerop (length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "zero length array section in %qs clause",
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	    }
	  else
	    maybe_zero_len = true;
	}
      if (first_non_one == types.length ()
	  && (TREE_CODE (length) != INTEGER_CST || integer_onep (length)))
	first_non_one++;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      if (length == NULL_TREE
	  && (TYPE_DOMAIN (type) == NULL_TREE
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "for unknown bound array type length expression must "
		    "be specified");
	  return error_mark_node;
	}
      if (TREE_CODE (low_bound) == INTEGER_CST
	  && tree_int_cst_sgn (low_bound) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative low bound in array section in %qs clause",
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause",
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (TYPE_DOMAIN (type)
	  && TYPE_MAX_VALUE (TYPE_DOMAIN (type))
	  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
	     == INTEGER_CST)
	{
	  /* SIZE is the number of elements in this array dimension
	     (domain max + 1).  */
	  tree size
	    = fold_convert (sizetype, TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
	  size = size_binop (PLUS_EXPR, size, size_one_node);
	  if (TREE_CODE (low_bound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (size, low_bound))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "low bound %qE above array section size "
			    "in %qs clause", low_bound,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      if (tree_int_cst_equal (size, low_bound))
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"zero length array section in %qs clause",
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		  maybe_zero_len = true;
		}
	      else if (length == NULL_TREE
		       && first_non_one == types.length ()
		       && tree_int_cst_equal
			    (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
			     low_bound))
		first_non_one++;
	    }
	  else if (length == NULL_TREE)
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		maybe_zero_len = true;
	      if (first_non_one == types.length ())
		first_non_one++;
	    }
	  if (length && TREE_CODE (length) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (size, length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "length %qE above array section size "
			    "in %qs clause", length,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      if (TREE_CODE (low_bound) == INTEGER_CST)
		{
		  tree lbpluslen
		    = size_binop (PLUS_EXPR,
				  fold_convert (sizetype, low_bound),
				  fold_convert (sizetype, length));
		  if (TREE_CODE (lbpluslen) == INTEGER_CST
		      && tree_int_cst_lt (size, lbpluslen))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"high bound %qE above array section size "
				"in %qs clause", lbpluslen,
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		}
	    }
	}
      else if (length == NULL_TREE)
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
	    maybe_zero_len = true;
	  if (first_non_one == types.length ())
	    first_non_one++;
	}

      /* For [lb:] we will need to evaluate lb more than once.  */
      if (length == NULL_TREE && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	{
	  tree lb = cp_save_expr (low_bound);
	  if (lb != low_bound)
	    {
	      TREE_PURPOSE (t) = lb;
	      low_bound = lb;
	    }
	}
    }
  else if (TREE_CODE (type) == POINTER_TYPE)
    {
      if (length == NULL_TREE)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "for pointer type length expression must be specified");
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause",
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      /* If there is a pointer type anywhere but in the very first
	 array-section-subscript, the array section can't be contiguous.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	  && TREE_CODE (TREE_CHAIN (t)) == TREE_LIST)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "array section is not contiguous in %qs clause",
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
    }
  else
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"%qE does not have pointer or array type", ret);
      return error_mark_node;
    }
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
    types.safe_push (TREE_TYPE (ret));
  /* We will need to evaluate lb more than once.  */
  tree lb = cp_save_expr (low_bound);
  if (lb != low_bound)
    {
      TREE_PURPOSE (t) = lb;
      low_bound = lb;
    }
  ret = grok_array_decl (OMP_CLAUSE_LOCATION (c), ret, low_bound, false);
  return ret;
}

/* Handle array sections for clause C.  Returns true if an error has
   been diagnosed.  */

static bool
handle_omp_array_sections (tree c, enum c_omp_region_type ort)
{
  bool maybe_zero_len = false;
  unsigned int first_non_one = 0;
  auto_vec<tree, 10> types;
  tree first = handle_omp_array_sections_1 (c, OMP_CLAUSE_DECL (c), types,
					    maybe_zero_len, first_non_one,
					    ort);
  if (first == error_mark_node)
    return true;
  /* NULL_TREE means type-dependent; retry at instantiation time.  */
  if (first == NULL_TREE)
    return false;
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
    {
      tree t = OMP_CLAUSE_DECL (c);
      tree tem = NULL_TREE;
      if (processing_template_decl)
	return false;
      /* Need to evaluate side effects in the length expressions
	 if any.  */
      while (TREE_CODE (t) == TREE_LIST)
	{
	  if (TREE_VALUE (t) && TREE_SIDE_EFFECTS (TREE_VALUE (t)))
	    {
	      if (tem == NULL_TREE)
		tem = TREE_VALUE (t);
	      else
		tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem),
			      TREE_VALUE (t), tem);
	    }
	  t = TREE_CHAIN (t);
	}
      if (tem)
	first = build2 (COMPOUND_EXPR, TREE_TYPE (first), tem, first);
      OMP_CLAUSE_DECL (c) = first;
    }
  else
    {
      unsigned int num = types.length (), i;
      tree t, side_effects = NULL_TREE, size = NULL_TREE;
      tree condition = NULL_TREE;

      if (int_size_in_bytes (TREE_TYPE (first)) <= 0)
	maybe_zero_len = true;
      if (processing_template_decl && maybe_zero_len)
	return false;

      /* Walk the subscript list outermost-first; TYPES[i] is the type of
	 dimension I, so I counts down as we advance along the chain.  */
      for (i = num, t = OMP_CLAUSE_DECL (c); i > 0;
	   t = TREE_CHAIN (t))
	{
	  tree low_bound = TREE_PURPOSE (t);
	  tree length = TREE_VALUE (t);

	  i--;
	  if (low_bound
	      && TREE_CODE (low_bound) == INTEGER_CST
	      && TYPE_PRECISION (TREE_TYPE (low_bound))
		 > TYPE_PRECISION (sizetype))
	    low_bound = fold_convert (sizetype, low_bound);
	  if (length
	      && TREE_CODE (length) == INTEGER_CST
	      && TYPE_PRECISION (TREE_TYPE (length))
		 > TYPE_PRECISION (sizetype))
	    length = fold_convert (sizetype, length);
	  if (low_bound == NULL_TREE)
	    low_bound = integer_zero_node;
	  if (!maybe_zero_len && i > first_non_one)
	    {
	      if (integer_nonzerop (low_bound))
		goto do_warn_noncontiguous;
	      if (length != NULL_TREE
		  && TREE_CODE (length) == INTEGER_CST
		  && TYPE_DOMAIN (types[i])
		  && TYPE_MAX_VALUE (TYPE_DOMAIN (types[i]))
		  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])))
		     == INTEGER_CST)
		{
		  tree size;
		  size = size_binop (PLUS_EXPR,
				     TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
				     size_one_node);
		  if (!tree_int_cst_equal (length, size))
		    {
		    do_warn_noncontiguous:
		      error_at (OMP_CLAUSE_LOCATION (c),
				"array section is not contiguous in %qs "
				"clause",
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return true;
		    }
		}
	      if (!processing_template_decl
		  && length != NULL_TREE
		  && TREE_SIDE_EFFECTS (length))
		{
		  if (side_effects == NULL_TREE)
		    side_effects = length;
		  else
		    side_effects = build2 (COMPOUND_EXPR,
					   TREE_TYPE (side_effects),
					   length, side_effects);
		}
	    }
	  else if (processing_template_decl)
	    continue;
	  else
	    {
	      tree l;

	      if (i > first_non_one
		  && ((length && integer_nonzerop (length))
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION))
		continue;
	      if (length)
		l = fold_convert (sizetype, length);
	      else
		{
		  l = size_binop (PLUS_EXPR,
				  TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
				  size_one_node);
		  l = size_binop (MINUS_EXPR, l,
				  fold_convert (sizetype, low_bound));
		}
	      if (i > first_non_one)
		{
		  /* Accumulate a condition testing that none of the
		     outer lengths is zero.  */
		  l = fold_build2 (NE_EXPR, boolean_type_node, l,
				   size_zero_node);
		  if (condition == NULL_TREE)
		    condition = l;
		  else
		    condition = fold_build2 (BIT_AND_EXPR, boolean_type_node,
					     l, condition);
		}
	      else if (size == NULL_TREE)
		{
		  size = size_in_bytes (TREE_TYPE (types[i]));
		  tree eltype = TREE_TYPE (types[num - 1]);
		  while (TREE_CODE (eltype) == ARRAY_TYPE)
		    eltype = TREE_TYPE (eltype);
		  /* For reductions, SIZE counts elements, not bytes.  */
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
		    size = size_binop (EXACT_DIV_EXPR, size,
				       size_in_bytes (eltype));
		  size = size_binop (MULT_EXPR, size, l);
		  if (condition)
		    size = fold_build3 (COND_EXPR, sizetype, condition,
					size, size_zero_node);
		}
	      else
		size = size_binop (MULT_EXPR, size, l);
	    }
	}
      if (!processing_template_decl)
	{
	  if (side_effects)
	    size = build2 (COMPOUND_EXPR, sizetype, side_effects, size);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    {
	      /* Rewrite the reduction decl as a MEM_REF of array type
		 covering the whole section, offset from the base.  */
	      size = size_binop (MINUS_EXPR, size, size_one_node);
	      tree index_type = build_index_type (size);
	      tree eltype = TREE_TYPE (first);
	      while (TREE_CODE (eltype) == ARRAY_TYPE)
		eltype = TREE_TYPE (eltype);
	      tree type = build_array_type (eltype, index_type);
	      tree ptype = build_pointer_type (eltype);
	      if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (t))))
		t = convert_from_reference (t);
	      else if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
		t = build_fold_addr_expr (t);
	      tree t2 = build_fold_addr_expr (first);
	      t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
				     ptrdiff_type_node, t2);
	      t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
				    ptrdiff_type_node, t2,
				    fold_convert_loc (OMP_CLAUSE_LOCATION (c),
						      ptrdiff_type_node, t));
	      if (tree_fits_shwi_p (t2))
		t = build2 (MEM_REF, type, t,
			    build_int_cst (ptype, tree_to_shwi (t2)));
	      else
		{
		  t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
					 sizetype, t2);
		  t = build2_loc (OMP_CLAUSE_LOCATION (c), POINTER_PLUS_EXPR,
				  TREE_TYPE (t), t, t2);
		  t = build2 (MEM_REF, type, t, build_int_cst (ptype, 0));
		}
	      OMP_CLAUSE_DECL (c) = t;
	      return false;
	    }
	  OMP_CLAUSE_DECL (c) = first;
	  OMP_CLAUSE_SIZE (c) = size;
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
	      || (TREE_CODE (t) == COMPONENT_REF
		  && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE))
	    return false;
	  if (ort == C_ORT_OMP || ort == C_ORT_ACC)
	    switch (OMP_CLAUSE_MAP_KIND (c))
	      {
	      case GOMP_MAP_ALLOC:
	      case GOMP_MAP_TO:
	      case GOMP_MAP_FROM:
	      case GOMP_MAP_TOFROM:
	      case GOMP_MAP_ALWAYS_TO:
	      case GOMP_MAP_ALWAYS_FROM:
	      case GOMP_MAP_ALWAYS_TOFROM:
	      case GOMP_MAP_RELEASE:
	      case GOMP_MAP_DELETE:
	      case GOMP_MAP_FORCE_TO:
	      case GOMP_MAP_FORCE_FROM:
	      case GOMP_MAP_FORCE_TOFROM:
	      case GOMP_MAP_FORCE_PRESENT:
		OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
		break;
	      default:
		break;
	      }
	  /* Chain an extra map clause describing the pointer/reference
	     through which the section is accessed.  */
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_MAP);
	  if ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP && ort != C_ORT_ACC)
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_POINTER);
	  else if (TREE_CODE (t) == COMPONENT_REF)
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
	  else if (REFERENCE_REF_P (t)
		   && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
	    {
	      t = TREE_OPERAND (t, 0);
	      OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
	    }
	  else
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  if (OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER
	      && !cxx_mark_addressable (t))
	    return false;
	  OMP_CLAUSE_DECL (c2) = t;
	  /* The new clause's size is the byte offset of the section start
	     from the mapped pointer.  */
	  t = build_fold_addr_expr (first);
	  t = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
				ptrdiff_type_node, t);
	  tree ptr = OMP_CLAUSE_DECL (c2);
	  ptr = convert_from_reference (ptr);
	  if (!POINTER_TYPE_P (TREE_TYPE (ptr)))
	    ptr = build_fold_addr_expr (ptr);
	  t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
			       ptrdiff_type_node, t,
			       fold_convert_loc (OMP_CLAUSE_LOCATION (c),
						 ptrdiff_type_node, ptr));
	  OMP_CLAUSE_SIZE (c2) = t;
	  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
	  OMP_CLAUSE_CHAIN (c) = c2;
	  ptr = OMP_CLAUSE_DECL (c2);
	  if (OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER
	      && TREE_CODE (TREE_TYPE (ptr)) == REFERENCE_TYPE
	      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (ptr))))
	    {
	      /* A reference to pointer needs yet another clause mapping
		 the reference itself.  */
	      tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_MAP);
	      OMP_CLAUSE_SET_MAP_KIND (c3, OMP_CLAUSE_MAP_KIND (c2));
	      OMP_CLAUSE_DECL (c3) = ptr;
	      if (OMP_CLAUSE_MAP_KIND (c2) == GOMP_MAP_ALWAYS_POINTER)
		OMP_CLAUSE_DECL (c2) = build_simple_mem_ref (ptr);
	      else
		OMP_CLAUSE_DECL (c2) = convert_from_reference (ptr);
	      OMP_CLAUSE_SIZE (c3) = size_zero_node;
	      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	      OMP_CLAUSE_CHAIN (c2) = c3;
	    }
	}
    }
  return false;
}

/* Return identifier to look up for omp declare reduction.  The name has
   the form "omp declare reduction OP" or, with a type,
   "omp declare reduction OP~MANGLED-TYPE".  */

tree
omp_reduction_id (enum tree_code reduction_code, tree reduction_id, tree type)
{
  const char *p = NULL;
  const char *m = NULL;
  switch (reduction_code)
    {
    case PLUS_EXPR:
    case MULT_EXPR:
    case MINUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_XOR_EXPR:
    case BIT_IOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      reduction_id = ovl_op_identifier (false, reduction_code);
      break;
    case MIN_EXPR:
      p = "min";
      break;
    case MAX_EXPR:
      p = "max";
      break;
    default:
      break;
    }

  if (p == NULL)
    {
      if (TREE_CODE (reduction_id) != IDENTIFIER_NODE)
	return error_mark_node;
      p = IDENTIFIER_POINTER (reduction_id);
    }

  if (type != NULL_TREE)
    m = mangle_type_string (TYPE_MAIN_VARIANT (type));

  const char prefix[] = "omp declare reduction ";
  size_t lenp = sizeof (prefix);
  /* If P already carries the prefix, don't prepend it a second time.  */
  if (strncmp (p, prefix, lenp - 1) == 0)
    lenp = 1;
  size_t len = strlen (p);
  size_t lenm = m ? strlen (m) + 1 : 0;
  char *name = XALLOCAVEC (char, lenp + len + lenm);
  if (lenp > 1)
    memcpy (name, prefix, lenp - 1);
  memcpy (name + lenp - 1, p, len + 1);
  if (m)
    {
      /* '~' separates the operator name from the mangled type.  */
      name[lenp + len - 1] = '~';
      memcpy (name + lenp + len, m, lenm);
    }
  return get_identifier (name);
}

/* Lookup OpenMP UDR ID for TYPE, return the corresponding artificial
   FUNCTION_DECL or NULL_TREE if not found.
*/

static tree
omp_reduction_lookup (location_t loc, tree id, tree type, tree *baselinkp,
		      vec<tree> *ambiguousp)
{
  tree orig_id = id;
  tree baselink = NULL_TREE;
  if (identifier_p (id))
    {
      cp_id_kind idk;
      bool nonint_cst_expression_p;
      const char *error_msg;
      /* Turn the plain identifier into the "omp declare reduction ..."
	 lookup name and resolve it in the current scope.  */
      id = omp_reduction_id (ERROR_MARK, id, type);
      tree decl = lookup_name (id);
      if (decl == NULL_TREE)
	decl = error_mark_node;
      id = finish_id_expression (id, decl, NULL_TREE, &idk, false, true,
				 &nonint_cst_expression_p, false, true, false,
				 false, &error_msg, loc);
      if (idk == CP_ID_KIND_UNQUALIFIED
	  && identifier_p (id))
	{
	  /* Unqualified and still unresolved: try argument-dependent
	     lookup with TYPE& as the argument.  */
	  vec<tree, va_gc> *args = NULL;
	  vec_safe_push (args, build_reference_type (type));
	  id = perform_koenig_lookup (id, args, tf_none);
	}
    }
  else if (TREE_CODE (id) == SCOPE_REF)
    id = lookup_qualified_name (TREE_OPERAND (id, 0),
				omp_reduction_id (ERROR_MARK,
						  TREE_OPERAND (id, 1),
						  type),
				false, false);
  tree fns = id;
  id = NULL_TREE;
  if (fns && is_overloaded_fn (fns))
    {
      /* Pick the overload whose first parameter type matches TYPE.  */
      for (lkp_iterator iter (get_fns (fns)); iter; ++iter)
	{
	  tree fndecl = *iter;
	  if (TREE_CODE (fndecl) == FUNCTION_DECL)
	    {
	      tree argtype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
	      if (same_type_p (TREE_TYPE (argtype), type))
		{
		  id = fndecl;
		  break;
		}
	    }
	}

      if (id && BASELINK_P (fns))
	{
	  if (baselinkp)
	    *baselinkp = fns;
	  else
	    baselink = fns;
	}
    }

  if (!id && CLASS_TYPE_P (type) && TYPE_BINFO (type))
    {
      /* Not found for TYPE itself: recurse into its direct bases.
	 AMBIGUOUSP accumulates candidates found along different base
	 paths; a non-empty vector means the lookup is ambiguous.  */
      vec<tree> ambiguous = vNULL;
      tree binfo = TYPE_BINFO (type), base_binfo, ret = NULL_TREE;
      unsigned int ix;
      if (ambiguousp == NULL)
	ambiguousp = &ambiguous;
      for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++)
	{
	  id = omp_reduction_lookup (loc, orig_id, BINFO_TYPE (base_binfo),
				     baselinkp ? baselinkp : &baselink,
				     ambiguousp);
	  if (id == NULL_TREE)
	    continue;
	  if (!ambiguousp->is_empty ())
	    ambiguousp->safe_push (id);
	  else if (ret != NULL_TREE)
	    {
	      /* A second distinct hit: demote both to the ambiguity
		 list.  */
	      ambiguousp->safe_push (ret);
	      ambiguousp->safe_push (id);
	      ret = NULL_TREE;
	    }
	  else
	    ret = id;
	}
      /* Only the outermost recursion level diagnoses ambiguity.  */
      if (ambiguousp != &ambiguous)
	return ret;
      if (!ambiguous.is_empty ())
	{
	  const char *str = _("candidates are:");
	  unsigned int idx;
	  tree udr;
	  error_at (loc, "user defined reduction lookup is ambiguous");
	  FOR_EACH_VEC_ELT (ambiguous, idx, udr)
	    {
	      inform (DECL_SOURCE_LOCATION (udr), "%s %#qD", str, udr);
	      if (idx == 0)
		str = get_spaces (str);
	    }
	  ambiguous.release ();
	  ret = error_mark_node;
	  baselink = NULL_TREE;
	}
      id = ret;
    }
  if (id && baselink)
    perform_or_defer_access_check (BASELINK_BINFO (baselink),
				   id, id, tf_warning_or_error);
  return id;
}

/* Helper function for cp_parser_omp_declare_reduction_exprs
   and tsubst_omp_udr.
   Remove CLEANUP_STMT for data (omp_priv variable).
   Also append INIT_EXPR for DECL_INITIAL of omp_priv after its
   DECL_EXPR.
*/ 5296 5297 tree 5298 cp_remove_omp_priv_cleanup_stmt (tree *tp, int *walk_subtrees, void *data) 5299 { 5300 if (TYPE_P (*tp)) 5301 *walk_subtrees = 0; 5302 else if (TREE_CODE (*tp) == CLEANUP_STMT && CLEANUP_DECL (*tp) == (tree) data) 5303 *tp = CLEANUP_BODY (*tp); 5304 else if (TREE_CODE (*tp) == DECL_EXPR) 5305 { 5306 tree decl = DECL_EXPR_DECL (*tp); 5307 if (!processing_template_decl 5308 && decl == (tree) data 5309 && DECL_INITIAL (decl) 5310 && DECL_INITIAL (decl) != error_mark_node) 5311 { 5312 tree list = NULL_TREE; 5313 append_to_statement_list_force (*tp, &list); 5314 tree init_expr = build2 (INIT_EXPR, void_type_node, 5315 decl, DECL_INITIAL (decl)); 5316 DECL_INITIAL (decl) = NULL_TREE; 5317 append_to_statement_list_force (init_expr, &list); 5318 *tp = list; 5319 } 5320 } 5321 return NULL_TREE; 5322 } 5323 5324 /* Data passed from cp_check_omp_declare_reduction to 5325 cp_check_omp_declare_reduction_r. */ 5326 5327 struct cp_check_omp_declare_reduction_data 5328 { 5329 location_t loc; 5330 tree stmts[7]; 5331 bool combiner_p; 5332 }; 5333 5334 /* Helper function for cp_check_omp_declare_reduction, called via 5335 cp_walk_tree. */ 5336 5337 static tree 5338 cp_check_omp_declare_reduction_r (tree *tp, int *, void *data) 5339 { 5340 struct cp_check_omp_declare_reduction_data *udr_data 5341 = (struct cp_check_omp_declare_reduction_data *) data; 5342 if (SSA_VAR_P (*tp) 5343 && !DECL_ARTIFICIAL (*tp) 5344 && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 0 : 3]) 5345 && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 
1 : 4])) 5346 { 5347 location_t loc = udr_data->loc; 5348 if (udr_data->combiner_p) 5349 error_at (loc, "%<#pragma omp declare reduction%> combiner refers to " 5350 "variable %qD which is not %<omp_out%> nor %<omp_in%>", 5351 *tp); 5352 else 5353 error_at (loc, "%<#pragma omp declare reduction%> initializer refers " 5354 "to variable %qD which is not %<omp_priv%> nor " 5355 "%<omp_orig%>", 5356 *tp); 5357 return *tp; 5358 } 5359 return NULL_TREE; 5360 } 5361 5362 /* Diagnose violation of OpenMP #pragma omp declare reduction restrictions. */ 5363 5364 void 5365 cp_check_omp_declare_reduction (tree udr) 5366 { 5367 tree type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (udr))); 5368 gcc_assert (TREE_CODE (type) == REFERENCE_TYPE); 5369 type = TREE_TYPE (type); 5370 int i; 5371 location_t loc = DECL_SOURCE_LOCATION (udr); 5372 5373 if (type == error_mark_node) 5374 return; 5375 if (ARITHMETIC_TYPE_P (type)) 5376 { 5377 static enum tree_code predef_codes[] 5378 = { PLUS_EXPR, MULT_EXPR, MINUS_EXPR, BIT_AND_EXPR, BIT_XOR_EXPR, 5379 BIT_IOR_EXPR, TRUTH_ANDIF_EXPR, TRUTH_ORIF_EXPR }; 5380 for (i = 0; i < 8; i++) 5381 { 5382 tree id = omp_reduction_id (predef_codes[i], NULL_TREE, NULL_TREE); 5383 const char *n1 = IDENTIFIER_POINTER (DECL_NAME (udr)); 5384 const char *n2 = IDENTIFIER_POINTER (id); 5385 if (strncmp (n1, n2, IDENTIFIER_LENGTH (id)) == 0 5386 && (n1[IDENTIFIER_LENGTH (id)] == '~' 5387 || n1[IDENTIFIER_LENGTH (id)] == '\0')) 5388 break; 5389 } 5390 5391 if (i == 8 5392 && TREE_CODE (type) != COMPLEX_EXPR) 5393 { 5394 const char prefix_minmax[] = "omp declare reduction m"; 5395 size_t prefix_size = sizeof (prefix_minmax) - 1; 5396 const char *n = IDENTIFIER_POINTER (DECL_NAME (udr)); 5397 if (strncmp (IDENTIFIER_POINTER (DECL_NAME (udr)), 5398 prefix_minmax, prefix_size) == 0 5399 && ((n[prefix_size] == 'i' && n[prefix_size + 1] == 'n') 5400 || (n[prefix_size] == 'a' && n[prefix_size + 1] == 'x')) 5401 && (n[prefix_size + 2] == '~' || n[prefix_size + 2] == '\0')) 
5402 i = 0; 5403 } 5404 if (i < 8) 5405 { 5406 error_at (loc, "predeclared arithmetic type %qT in " 5407 "%<#pragma omp declare reduction%>", type); 5408 return; 5409 } 5410 } 5411 else if (TREE_CODE (type) == FUNCTION_TYPE 5412 || TREE_CODE (type) == METHOD_TYPE 5413 || TREE_CODE (type) == ARRAY_TYPE) 5414 { 5415 error_at (loc, "function or array type %qT in " 5416 "%<#pragma omp declare reduction%>", type); 5417 return; 5418 } 5419 else if (TREE_CODE (type) == REFERENCE_TYPE) 5420 { 5421 error_at (loc, "reference type %qT in %<#pragma omp declare reduction%>", 5422 type); 5423 return; 5424 } 5425 else if (TYPE_QUALS_NO_ADDR_SPACE (type)) 5426 { 5427 error_at (loc, "const, volatile or __restrict qualified type %qT in " 5428 "%<#pragma omp declare reduction%>", type); 5429 return; 5430 } 5431 5432 tree body = DECL_SAVED_TREE (udr); 5433 if (body == NULL_TREE || TREE_CODE (body) != STATEMENT_LIST) 5434 return; 5435 5436 tree_stmt_iterator tsi; 5437 struct cp_check_omp_declare_reduction_data data; 5438 memset (data.stmts, 0, sizeof data.stmts); 5439 for (i = 0, tsi = tsi_start (body); 5440 i < 7 && !tsi_end_p (tsi); 5441 i++, tsi_next (&tsi)) 5442 data.stmts[i] = tsi_stmt (tsi); 5443 data.loc = loc; 5444 gcc_assert (tsi_end_p (tsi)); 5445 if (i >= 3) 5446 { 5447 gcc_assert (TREE_CODE (data.stmts[0]) == DECL_EXPR 5448 && TREE_CODE (data.stmts[1]) == DECL_EXPR); 5449 if (TREE_NO_WARNING (DECL_EXPR_DECL (data.stmts[0]))) 5450 return; 5451 data.combiner_p = true; 5452 if (cp_walk_tree (&data.stmts[2], cp_check_omp_declare_reduction_r, 5453 &data, NULL)) 5454 TREE_NO_WARNING (DECL_EXPR_DECL (data.stmts[0])) = 1; 5455 } 5456 if (i >= 6) 5457 { 5458 gcc_assert (TREE_CODE (data.stmts[3]) == DECL_EXPR 5459 && TREE_CODE (data.stmts[4]) == DECL_EXPR); 5460 data.combiner_p = false; 5461 if (cp_walk_tree (&data.stmts[5], cp_check_omp_declare_reduction_r, 5462 &data, NULL) 5463 || cp_walk_tree (&DECL_INITIAL (DECL_EXPR_DECL (data.stmts[3])), 5464 cp_check_omp_declare_reduction_r, 
&data, NULL)) 5465 TREE_NO_WARNING (DECL_EXPR_DECL (data.stmts[0])) = 1; 5466 if (i == 7) 5467 gcc_assert (TREE_CODE (data.stmts[6]) == DECL_EXPR); 5468 } 5469 } 5470 5471 /* Helper function of finish_omp_clauses. Clone STMT as if we were making 5472 an inline call. But, remap 5473 the OMP_DECL1 VAR_DECL (omp_out resp. omp_orig) to PLACEHOLDER 5474 and OMP_DECL2 VAR_DECL (omp_in resp. omp_priv) to DECL. */ 5475 5476 static tree 5477 clone_omp_udr (tree stmt, tree omp_decl1, tree omp_decl2, 5478 tree decl, tree placeholder) 5479 { 5480 copy_body_data id; 5481 hash_map<tree, tree> decl_map; 5482 5483 decl_map.put (omp_decl1, placeholder); 5484 decl_map.put (omp_decl2, decl); 5485 memset (&id, 0, sizeof (id)); 5486 id.src_fn = DECL_CONTEXT (omp_decl1); 5487 id.dst_fn = current_function_decl; 5488 id.src_cfun = DECL_STRUCT_FUNCTION (id.src_fn); 5489 id.decl_map = &decl_map; 5490 5491 id.copy_decl = copy_decl_no_change; 5492 id.transform_call_graph_edges = CB_CGE_DUPLICATE; 5493 id.transform_new_cfg = true; 5494 id.transform_return_to_modify = false; 5495 id.transform_lang_insert_block = NULL; 5496 id.eh_lp_nr = 0; 5497 walk_tree (&stmt, copy_tree_body_r, &id, NULL); 5498 return stmt; 5499 } 5500 5501 /* Helper function of finish_omp_clauses, called via cp_walk_tree. 5502 Find OMP_CLAUSE_PLACEHOLDER (passed in DATA) in *TP. */ 5503 5504 static tree 5505 find_omp_placeholder_r (tree *tp, int *, void *data) 5506 { 5507 if (*tp == (tree) data) 5508 return *tp; 5509 return NULL_TREE; 5510 } 5511 5512 /* Helper function of finish_omp_clauses. Handle OMP_CLAUSE_REDUCTION C. 5513 Return true if there is some error and the clause should be removed. 
*/

static bool
finish_omp_reduction_clause (tree c, bool *need_default_ctor, bool *need_dtor)
{
  tree t = OMP_CLAUSE_DECL (c);
  bool predefined = false;
  if (TREE_CODE (t) == TREE_LIST)
    {
      /* Array sections are still TREE_LISTs inside templates; nothing
	 to check yet.  */
      gcc_assert (processing_template_decl);
      return false;
    }
  tree type = TREE_TYPE (t);
  if (TREE_CODE (t) == MEM_REF)
    type = TREE_TYPE (type);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* Reduction over an array: rewrite the decl into a MEM_REF over
	 the element type, so the element count is explicit.  */
      tree oatype = type;
      gcc_assert (TREE_CODE (t) != MEM_REF);
      while (TREE_CODE (type) == ARRAY_TYPE)
	type = TREE_TYPE (type);
      if (!processing_template_decl)
	{
	  t = require_complete_type (t);
	  if (t == error_mark_node)
	    return true;
	  tree size = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (oatype),
				  TYPE_SIZE_UNIT (type));
	  if (integer_zerop (size))
	    {
	      error ("%qE in %<reduction%> clause is a zero size array",
		     omp_clause_printable_decl (t));
	      return true;
	    }
	  size = size_binop (MINUS_EXPR, size, size_one_node);
	  tree index_type = build_index_type (size);
	  tree atype = build_array_type (type, index_type);
	  tree ptype = build_pointer_type (type);
	  if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
	    t = build_fold_addr_expr (t);
	  t = build2 (MEM_REF, atype, t, build_int_cst (ptype, 0));
	  OMP_CLAUSE_DECL (c) = t;
	}
    }
  if (type == error_mark_node)
    return true;
  else if (ARITHMETIC_TYPE_P (type))
    /* Determine whether this operator/type pair is one of the OpenMP
       predefined reductions; those need no UDR lookup.  */
    switch (OMP_CLAUSE_REDUCTION_CODE (c))
      {
      case PLUS_EXPR:
      case MULT_EXPR:
      case MINUS_EXPR:
	predefined = true;
	break;
      case MIN_EXPR:
      case MAX_EXPR:
	/* min/max are not predefined for complex types.  */
	if (TREE_CODE (type) == COMPLEX_TYPE)
	  break;
	predefined = true;
	break;
      case BIT_AND_EXPR:
      case BIT_IOR_EXPR:
      case BIT_XOR_EXPR:
	/* Bitwise ops are not predefined for floating/complex types.  */
	if (FLOAT_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
	  break;
	predefined = true;
	break;
      case TRUTH_ANDIF_EXPR:
      case TRUTH_ORIF_EXPR:
	if (FLOAT_TYPE_P (type))
	  break;
	predefined = true;
	break;
      default:
	break;
      }
  else if (TYPE_READONLY (type))
    {
      error ("%qE has const type for %<reduction%>",
	     omp_clause_printable_decl (t));
      return true;
    }
  else if (!processing_template_decl)
    {
      t = require_complete_type (t);
      if (t == error_mark_node)
	return true;
      OMP_CLAUSE_DECL (c) = t;
    }

  if (predefined)
    {
      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
      return false;
    }
  else if (processing_template_decl)
    {
      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node)
	return true;
      return false;
    }

  tree id = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

  type = TYPE_MAIN_VARIANT (type);
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
  if (id == NULL_TREE)
    id = omp_reduction_id (OMP_CLAUSE_REDUCTION_CODE (c),
			   NULL_TREE, NULL_TREE);
  id = omp_reduction_lookup (OMP_CLAUSE_LOCATION (c), id, type, NULL, NULL);
  if (id)
    {
      if (id == error_mark_node)
	return true;
      mark_used (id);
      tree body = DECL_SAVED_TREE (id);
      if (!body)
	return true;
      if (TREE_CODE (body) == STATEMENT_LIST)
	{
	  /* STMTS layout mirrors cp_check_omp_declare_reduction:
	     [0] omp_out, [1] omp_in, [2] combiner,
	     [3] omp_priv, [4] omp_orig, [5] initializer,
	     [6] extra DECL_EXPR (constructor-initializer case).  */
	  tree_stmt_iterator tsi;
	  tree placeholder = NULL_TREE, decl_placeholder = NULL_TREE;
	  int i;
	  tree stmts[7];
	  tree atype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (id)));
	  atype = TREE_TYPE (atype);
	  /* The UDR may have been found on a base class of TYPE; then
	     accesses go through a static_cast to the base.  */
	  bool need_static_cast = !same_type_p (type, atype);
	  memset (stmts, 0, sizeof stmts);
	  for (i = 0, tsi = tsi_start (body);
	       i < 7 && !tsi_end_p (tsi);
	       i++, tsi_next (&tsi))
	    stmts[i] = tsi_stmt (tsi);
	  gcc_assert (tsi_end_p (tsi));

	  if (i >= 3)
	    {
	      gcc_assert (TREE_CODE (stmts[0]) == DECL_EXPR
			  && TREE_CODE (stmts[1]) == DECL_EXPR);
	      placeholder = build_lang_decl (VAR_DECL, NULL_TREE, type);
	      DECL_ARTIFICIAL (placeholder) = 1;
	      DECL_IGNORED_P (placeholder) = 1;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = placeholder;
	      if (TREE_CODE (t) == MEM_REF)
		{
		  decl_placeholder = build_lang_decl (VAR_DECL, NULL_TREE,
						      type);
		  DECL_ARTIFICIAL (decl_placeholder) = 1;
		  DECL_IGNORED_P (decl_placeholder) = 1;
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = decl_placeholder;
		}
	      /* Propagate addressability from the UDR's omp_out/omp_in.  */
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[0])))
		cxx_mark_addressable (placeholder);
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[1]))
		  && TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		     != REFERENCE_TYPE)
		cxx_mark_addressable (decl_placeholder ? decl_placeholder
				      : OMP_CLAUSE_DECL (c));
	      tree omp_out = placeholder;
	      tree omp_in = decl_placeholder ? decl_placeholder
			    : convert_from_reference (OMP_CLAUSE_DECL (c));
	      if (need_static_cast)
		{
		  tree rtype = build_reference_type (atype);
		  omp_out = build_static_cast (rtype, omp_out,
					       tf_warning_or_error);
		  omp_in = build_static_cast (rtype, omp_in,
					      tf_warning_or_error);
		  if (omp_out == error_mark_node || omp_in == error_mark_node)
		    return true;
		  omp_out = convert_from_reference (omp_out);
		  omp_in = convert_from_reference (omp_in);
		}
	      OMP_CLAUSE_REDUCTION_MERGE (c)
		= clone_omp_udr (stmts[2], DECL_EXPR_DECL (stmts[0]),
				 DECL_EXPR_DECL (stmts[1]), omp_in, omp_out);
	    }
	  if (i >= 6)
	    {
	      /* The UDR has an explicit initializer clause.  */
	      gcc_assert (TREE_CODE (stmts[3]) == DECL_EXPR
			  && TREE_CODE (stmts[4]) == DECL_EXPR);
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[3])))
		cxx_mark_addressable (decl_placeholder ? decl_placeholder
				      : OMP_CLAUSE_DECL (c));
	      if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[4])))
		cxx_mark_addressable (placeholder);
	      tree omp_priv = decl_placeholder ? decl_placeholder
			      : convert_from_reference (OMP_CLAUSE_DECL (c));
	      tree omp_orig = placeholder;
	      if (need_static_cast)
		{
		  if (i == 7)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"user defined reduction with constructor "
				"initializer for base class %qT", atype);
		      return true;
		    }
		  tree rtype = build_reference_type (atype);
		  omp_priv = build_static_cast (rtype, omp_priv,
						tf_warning_or_error);
		  omp_orig = build_static_cast (rtype, omp_orig,
						tf_warning_or_error);
		  if (omp_priv == error_mark_node
		      || omp_orig == error_mark_node)
		    return true;
		  omp_priv = convert_from_reference (omp_priv);
		  omp_orig = convert_from_reference (omp_orig);
		}
	      if (i == 6)
		*need_default_ctor = true;
	      OMP_CLAUSE_REDUCTION_INIT (c)
		= clone_omp_udr (stmts[5], DECL_EXPR_DECL (stmts[4]),
				 DECL_EXPR_DECL (stmts[3]),
				 omp_priv, omp_orig);
	      /* If the initializer mentions omp_orig, the expansion must
		 reference the original list item.  */
	      if (cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_omp_placeholder_r, placeholder, NULL))
		OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
	    }
	  else if (i >= 3)
	    {
	      /* No explicit initializer: default-construct class types,
	         zero-initialize everything else.  */
	      if (CLASS_TYPE_P (type) && !pod_type_p (type))
		*need_default_ctor = true;
	      else
		{
		  tree init;
		  tree v = decl_placeholder ? decl_placeholder
			   : convert_from_reference (t);
		  if (AGGREGATE_TYPE_P (TREE_TYPE (v)))
		    init = build_constructor (TREE_TYPE (v), NULL);
		  else
		    init = fold_convert (TREE_TYPE (v), integer_zero_node);
		  OMP_CLAUSE_REDUCTION_INIT (c)
		    = build2 (INIT_EXPR, TREE_TYPE (v), v, init);
		}
	    }
	}
    }
  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
    *need_dtor = true;
  else
    {
      error ("user defined reduction not found for %qE",
	     omp_clause_printable_decl (t));
      return true;
    }
  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
    gcc_assert (TYPE_SIZE_UNIT (type)
		&& TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
  return false;
}

/* Called from finish_struct_1.  linear(this) or linear(this:step)
   clauses might not be finalized yet because the class has been incomplete
   when parsing #pragma omp declare simd methods.  Fix those up now.  */

void
finish_omp_declare_simd_methods (tree t)
{
  if (processing_template_decl)
    return;

  for (tree x = TYPE_FIELDS (t); x; x = DECL_CHAIN (x))
    {
      if (TREE_CODE (TREE_TYPE (x)) != METHOD_TYPE)
	continue;
      tree ods = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (x));
      if (!ods || !TREE_VALUE (ods))
	continue;
      for (tree c = TREE_VALUE (TREE_VALUE (ods)); c; c = OMP_CLAUSE_CHAIN (c))
	/* A zero OMP_CLAUSE_DECL with a pointer-typed step identifies a
	   deferred linear(this[:step]) clause.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && integer_zerop (OMP_CLAUSE_DECL (c))
	    && OMP_CLAUSE_LINEAR_STEP (c)
	    && TREE_CODE (TREE_TYPE (OMP_CLAUSE_LINEAR_STEP (c)))
	       == POINTER_TYPE)
	  {
	    /* Scale the step by the now-known size of the class.  */
	    tree s = OMP_CLAUSE_LINEAR_STEP (c);
	    s = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, s);
	    s = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MULT_EXPR,
				 sizetype, s, TYPE_SIZE_UNIT (t));
	    OMP_CLAUSE_LINEAR_STEP (c) = s;
	  }
    }
}

/* Adjust sink depend clause to take into account pointer offsets.
5800 5801 Return TRUE if there was a problem processing the offset, and the 5802 whole clause should be removed. */ 5803 5804 static bool 5805 cp_finish_omp_clause_depend_sink (tree sink_clause) 5806 { 5807 tree t = OMP_CLAUSE_DECL (sink_clause); 5808 gcc_assert (TREE_CODE (t) == TREE_LIST); 5809 5810 /* Make sure we don't adjust things twice for templates. */ 5811 if (processing_template_decl) 5812 return false; 5813 5814 for (; t; t = TREE_CHAIN (t)) 5815 { 5816 tree decl = TREE_VALUE (t); 5817 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) 5818 { 5819 tree offset = TREE_PURPOSE (t); 5820 bool neg = wi::neg_p (wi::to_wide (offset)); 5821 offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset); 5822 decl = mark_rvalue_use (decl); 5823 decl = convert_from_reference (decl); 5824 tree t2 = pointer_int_sum (OMP_CLAUSE_LOCATION (sink_clause), 5825 neg ? MINUS_EXPR : PLUS_EXPR, 5826 decl, offset); 5827 t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (sink_clause), 5828 MINUS_EXPR, sizetype, 5829 fold_convert (sizetype, t2), 5830 fold_convert (sizetype, decl)); 5831 if (t2 == error_mark_node) 5832 return true; 5833 TREE_PURPOSE (t) = t2; 5834 } 5835 } 5836 return false; 5837 } 5838 5839 /* For all elements of CLAUSES, validate them vs OpenMP constraints. 5840 Remove any elements from the list that are invalid. 
*/ 5841 5842 tree 5843 finish_omp_clauses (tree clauses, enum c_omp_region_type ort) 5844 { 5845 bitmap_head generic_head, firstprivate_head, lastprivate_head; 5846 bitmap_head aligned_head, map_head, map_field_head, oacc_reduction_head; 5847 tree c, t, *pc; 5848 tree safelen = NULL_TREE; 5849 bool branch_seen = false; 5850 bool copyprivate_seen = false; 5851 bool ordered_seen = false; 5852 bool oacc_async = false; 5853 5854 bitmap_obstack_initialize (NULL); 5855 bitmap_initialize (&generic_head, &bitmap_default_obstack); 5856 bitmap_initialize (&firstprivate_head, &bitmap_default_obstack); 5857 bitmap_initialize (&lastprivate_head, &bitmap_default_obstack); 5858 bitmap_initialize (&aligned_head, &bitmap_default_obstack); 5859 bitmap_initialize (&map_head, &bitmap_default_obstack); 5860 bitmap_initialize (&map_field_head, &bitmap_default_obstack); 5861 bitmap_initialize (&oacc_reduction_head, &bitmap_default_obstack); 5862 5863 if (ort & C_ORT_ACC) 5864 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) 5865 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ASYNC) 5866 { 5867 oacc_async = true; 5868 break; 5869 } 5870 5871 for (pc = &clauses, c = clauses; c ; c = *pc) 5872 { 5873 bool remove = false; 5874 bool field_ok = false; 5875 5876 switch (OMP_CLAUSE_CODE (c)) 5877 { 5878 case OMP_CLAUSE_SHARED: 5879 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); 5880 goto check_dup_generic; 5881 case OMP_CLAUSE_PRIVATE: 5882 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); 5883 goto check_dup_generic; 5884 case OMP_CLAUSE_REDUCTION: 5885 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); 5886 t = OMP_CLAUSE_DECL (c); 5887 if (TREE_CODE (t) == TREE_LIST) 5888 { 5889 if (handle_omp_array_sections (c, ort)) 5890 { 5891 remove = true; 5892 break; 5893 } 5894 if (TREE_CODE (t) == TREE_LIST) 5895 { 5896 while (TREE_CODE (t) == TREE_LIST) 5897 t = TREE_CHAIN (t); 5898 } 5899 else 5900 { 5901 gcc_assert (TREE_CODE (t) == MEM_REF); 5902 t = TREE_OPERAND (t, 0); 5903 if 
(TREE_CODE (t) == POINTER_PLUS_EXPR) 5904 t = TREE_OPERAND (t, 0); 5905 if (TREE_CODE (t) == ADDR_EXPR 5906 || INDIRECT_REF_P (t)) 5907 t = TREE_OPERAND (t, 0); 5908 } 5909 tree n = omp_clause_decl_field (t); 5910 if (n) 5911 t = n; 5912 goto check_dup_generic_t; 5913 } 5914 if (oacc_async) 5915 cxx_mark_addressable (t); 5916 goto check_dup_generic; 5917 case OMP_CLAUSE_COPYPRIVATE: 5918 copyprivate_seen = true; 5919 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); 5920 goto check_dup_generic; 5921 case OMP_CLAUSE_COPYIN: 5922 goto check_dup_generic; 5923 case OMP_CLAUSE_LINEAR: 5924 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); 5925 t = OMP_CLAUSE_DECL (c); 5926 if (ort != C_ORT_OMP_DECLARE_SIMD 5927 && OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_DEFAULT) 5928 { 5929 error_at (OMP_CLAUSE_LOCATION (c), 5930 "modifier should not be specified in %<linear%> " 5931 "clause on %<simd%> or %<for%> constructs"); 5932 OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT; 5933 } 5934 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL) 5935 && !type_dependent_expression_p (t)) 5936 { 5937 tree type = TREE_TYPE (t); 5938 if ((OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF 5939 || OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_UVAL) 5940 && TREE_CODE (type) != REFERENCE_TYPE) 5941 { 5942 error ("linear clause with %qs modifier applied to " 5943 "non-reference variable with %qT type", 5944 OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF 5945 ? 
"ref" : "uval", TREE_TYPE (t)); 5946 remove = true; 5947 break; 5948 } 5949 if (TREE_CODE (type) == REFERENCE_TYPE) 5950 type = TREE_TYPE (type); 5951 if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_REF) 5952 { 5953 if (!INTEGRAL_TYPE_P (type) 5954 && TREE_CODE (type) != POINTER_TYPE) 5955 { 5956 error ("linear clause applied to non-integral non-pointer" 5957 " variable with %qT type", TREE_TYPE (t)); 5958 remove = true; 5959 break; 5960 } 5961 } 5962 } 5963 t = OMP_CLAUSE_LINEAR_STEP (c); 5964 if (t == NULL_TREE) 5965 t = integer_one_node; 5966 if (t == error_mark_node) 5967 { 5968 remove = true; 5969 break; 5970 } 5971 else if (!type_dependent_expression_p (t) 5972 && !INTEGRAL_TYPE_P (TREE_TYPE (t)) 5973 && (ort != C_ORT_OMP_DECLARE_SIMD 5974 || TREE_CODE (t) != PARM_DECL 5975 || TREE_CODE (TREE_TYPE (t)) != REFERENCE_TYPE 5976 || !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (t))))) 5977 { 5978 error ("linear step expression must be integral"); 5979 remove = true; 5980 break; 5981 } 5982 else 5983 { 5984 t = mark_rvalue_use (t); 5985 if (ort == C_ORT_OMP_DECLARE_SIMD && TREE_CODE (t) == PARM_DECL) 5986 { 5987 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) = 1; 5988 goto check_dup_generic; 5989 } 5990 if (!processing_template_decl 5991 && (VAR_P (OMP_CLAUSE_DECL (c)) 5992 || TREE_CODE (OMP_CLAUSE_DECL (c)) == PARM_DECL)) 5993 { 5994 if (ort == C_ORT_OMP_DECLARE_SIMD) 5995 { 5996 t = maybe_constant_value (t); 5997 if (TREE_CODE (t) != INTEGER_CST) 5998 { 5999 error_at (OMP_CLAUSE_LOCATION (c), 6000 "%<linear%> clause step %qE is neither " 6001 "constant nor a parameter", t); 6002 remove = true; 6003 break; 6004 } 6005 } 6006 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6007 tree type = TREE_TYPE (OMP_CLAUSE_DECL (c)); 6008 if (TREE_CODE (type) == REFERENCE_TYPE) 6009 type = TREE_TYPE (type); 6010 if (OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF) 6011 { 6012 type = build_pointer_type (type); 6013 tree d = fold_convert (type, OMP_CLAUSE_DECL (c)); 6014 t = 
pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, 6015 d, t); 6016 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), 6017 MINUS_EXPR, sizetype, 6018 fold_convert (sizetype, t), 6019 fold_convert (sizetype, d)); 6020 if (t == error_mark_node) 6021 { 6022 remove = true; 6023 break; 6024 } 6025 } 6026 else if (TREE_CODE (type) == POINTER_TYPE 6027 /* Can't multiply the step yet if *this 6028 is still incomplete type. */ 6029 && (ort != C_ORT_OMP_DECLARE_SIMD 6030 || TREE_CODE (OMP_CLAUSE_DECL (c)) != PARM_DECL 6031 || !DECL_ARTIFICIAL (OMP_CLAUSE_DECL (c)) 6032 || DECL_NAME (OMP_CLAUSE_DECL (c)) 6033 != this_identifier 6034 || !TYPE_BEING_DEFINED (TREE_TYPE (type)))) 6035 { 6036 tree d = convert_from_reference (OMP_CLAUSE_DECL (c)); 6037 t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, 6038 d, t); 6039 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), 6040 MINUS_EXPR, sizetype, 6041 fold_convert (sizetype, t), 6042 fold_convert (sizetype, d)); 6043 if (t == error_mark_node) 6044 { 6045 remove = true; 6046 break; 6047 } 6048 } 6049 else 6050 t = fold_convert (type, t); 6051 } 6052 OMP_CLAUSE_LINEAR_STEP (c) = t; 6053 } 6054 goto check_dup_generic; 6055 check_dup_generic: 6056 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); 6057 if (t) 6058 { 6059 if (!remove && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED) 6060 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c)); 6061 } 6062 else 6063 t = OMP_CLAUSE_DECL (c); 6064 check_dup_generic_t: 6065 if (t == current_class_ptr 6066 && (ort != C_ORT_OMP_DECLARE_SIMD 6067 || (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR 6068 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_UNIFORM))) 6069 { 6070 error ("%<this%> allowed in OpenMP only in %<declare simd%>" 6071 " clauses"); 6072 remove = true; 6073 break; 6074 } 6075 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL 6076 && (!field_ok || TREE_CODE (t) != FIELD_DECL)) 6077 { 6078 if (processing_template_decl && TREE_CODE (t) != OVERLOAD) 6079 break; 6080 if (DECL_P (t)) 6081 error ("%qD is not a 
variable in clause %qs", t, 6082 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6083 else 6084 error ("%qE is not a variable in clause %qs", t, 6085 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6086 remove = true; 6087 } 6088 else if (ort == C_ORT_ACC 6089 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) 6090 { 6091 if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) 6092 { 6093 error ("%qD appears more than once in reduction clauses", t); 6094 remove = true; 6095 } 6096 else 6097 bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); 6098 } 6099 else if (bitmap_bit_p (&generic_head, DECL_UID (t)) 6100 || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) 6101 || bitmap_bit_p (&lastprivate_head, DECL_UID (t))) 6102 { 6103 error ("%qD appears more than once in data clauses", t); 6104 remove = true; 6105 } 6106 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE 6107 && bitmap_bit_p (&map_head, DECL_UID (t))) 6108 { 6109 if (ort == C_ORT_ACC) 6110 error ("%qD appears more than once in data clauses", t); 6111 else 6112 error ("%qD appears both in data and map clauses", t); 6113 remove = true; 6114 } 6115 else 6116 bitmap_set_bit (&generic_head, DECL_UID (t)); 6117 if (!field_ok) 6118 break; 6119 handle_field_decl: 6120 if (!remove 6121 && TREE_CODE (t) == FIELD_DECL 6122 && t == OMP_CLAUSE_DECL (c) 6123 && ort != C_ORT_ACC) 6124 { 6125 OMP_CLAUSE_DECL (c) 6126 = omp_privatize_field (t, (OMP_CLAUSE_CODE (c) 6127 == OMP_CLAUSE_SHARED)); 6128 if (OMP_CLAUSE_DECL (c) == error_mark_node) 6129 remove = true; 6130 } 6131 break; 6132 6133 case OMP_CLAUSE_FIRSTPRIVATE: 6134 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); 6135 if (t) 6136 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c)); 6137 else 6138 t = OMP_CLAUSE_DECL (c); 6139 if (ort != C_ORT_ACC && t == current_class_ptr) 6140 { 6141 error ("%<this%> allowed in OpenMP only in %<declare simd%>" 6142 " clauses"); 6143 remove = true; 6144 break; 6145 } 6146 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL 6147 && ((ort & 
C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP 6148 || TREE_CODE (t) != FIELD_DECL)) 6149 { 6150 if (processing_template_decl && TREE_CODE (t) != OVERLOAD) 6151 break; 6152 if (DECL_P (t)) 6153 error ("%qD is not a variable in clause %<firstprivate%>", t); 6154 else 6155 error ("%qE is not a variable in clause %<firstprivate%>", t); 6156 remove = true; 6157 } 6158 else if (bitmap_bit_p (&generic_head, DECL_UID (t)) 6159 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))) 6160 { 6161 error ("%qD appears more than once in data clauses", t); 6162 remove = true; 6163 } 6164 else if (bitmap_bit_p (&map_head, DECL_UID (t))) 6165 { 6166 if (ort == C_ORT_ACC) 6167 error ("%qD appears more than once in data clauses", t); 6168 else 6169 error ("%qD appears both in data and map clauses", t); 6170 remove = true; 6171 } 6172 else 6173 bitmap_set_bit (&firstprivate_head, DECL_UID (t)); 6174 goto handle_field_decl; 6175 6176 case OMP_CLAUSE_LASTPRIVATE: 6177 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); 6178 if (t) 6179 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c)); 6180 else 6181 t = OMP_CLAUSE_DECL (c); 6182 if (t == current_class_ptr) 6183 { 6184 error ("%<this%> allowed in OpenMP only in %<declare simd%>" 6185 " clauses"); 6186 remove = true; 6187 break; 6188 } 6189 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL 6190 && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP 6191 || TREE_CODE (t) != FIELD_DECL)) 6192 { 6193 if (processing_template_decl && TREE_CODE (t) != OVERLOAD) 6194 break; 6195 if (DECL_P (t)) 6196 error ("%qD is not a variable in clause %<lastprivate%>", t); 6197 else 6198 error ("%qE is not a variable in clause %<lastprivate%>", t); 6199 remove = true; 6200 } 6201 else if (bitmap_bit_p (&generic_head, DECL_UID (t)) 6202 || bitmap_bit_p (&lastprivate_head, DECL_UID (t))) 6203 { 6204 error ("%qD appears more than once in data clauses", t); 6205 remove = true; 6206 } 6207 else 6208 bitmap_set_bit (&lastprivate_head, DECL_UID (t)); 6209 goto handle_field_decl; 6210 
6211 case OMP_CLAUSE_IF: 6212 t = OMP_CLAUSE_IF_EXPR (c); 6213 t = maybe_convert_cond (t); 6214 if (t == error_mark_node) 6215 remove = true; 6216 else if (!processing_template_decl) 6217 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6218 OMP_CLAUSE_IF_EXPR (c) = t; 6219 break; 6220 6221 case OMP_CLAUSE_FINAL: 6222 t = OMP_CLAUSE_FINAL_EXPR (c); 6223 t = maybe_convert_cond (t); 6224 if (t == error_mark_node) 6225 remove = true; 6226 else if (!processing_template_decl) 6227 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6228 OMP_CLAUSE_FINAL_EXPR (c) = t; 6229 break; 6230 6231 case OMP_CLAUSE_GANG: 6232 /* Operand 1 is the gang static: argument. */ 6233 t = OMP_CLAUSE_OPERAND (c, 1); 6234 if (t != NULL_TREE) 6235 { 6236 if (t == error_mark_node) 6237 remove = true; 6238 else if (!type_dependent_expression_p (t) 6239 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6240 { 6241 error ("%<gang%> static expression must be integral"); 6242 remove = true; 6243 } 6244 else 6245 { 6246 t = mark_rvalue_use (t); 6247 if (!processing_template_decl) 6248 { 6249 t = maybe_constant_value (t); 6250 if (TREE_CODE (t) == INTEGER_CST 6251 && tree_int_cst_sgn (t) != 1 6252 && t != integer_minus_one_node) 6253 { 6254 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6255 "%<gang%> static value must be " 6256 "positive"); 6257 t = integer_one_node; 6258 } 6259 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6260 } 6261 } 6262 OMP_CLAUSE_OPERAND (c, 1) = t; 6263 } 6264 /* Check operand 0, the num argument. 
*/ 6265 /* FALLTHRU */ 6266 6267 case OMP_CLAUSE_WORKER: 6268 case OMP_CLAUSE_VECTOR: 6269 if (OMP_CLAUSE_OPERAND (c, 0) == NULL_TREE) 6270 break; 6271 /* FALLTHRU */ 6272 6273 case OMP_CLAUSE_NUM_TASKS: 6274 case OMP_CLAUSE_NUM_TEAMS: 6275 case OMP_CLAUSE_NUM_THREADS: 6276 case OMP_CLAUSE_NUM_GANGS: 6277 case OMP_CLAUSE_NUM_WORKERS: 6278 case OMP_CLAUSE_VECTOR_LENGTH: 6279 t = OMP_CLAUSE_OPERAND (c, 0); 6280 if (t == error_mark_node) 6281 remove = true; 6282 else if (!type_dependent_expression_p (t) 6283 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6284 { 6285 switch (OMP_CLAUSE_CODE (c)) 6286 { 6287 case OMP_CLAUSE_GANG: 6288 error_at (OMP_CLAUSE_LOCATION (c), 6289 "%<gang%> num expression must be integral"); break; 6290 case OMP_CLAUSE_VECTOR: 6291 error_at (OMP_CLAUSE_LOCATION (c), 6292 "%<vector%> length expression must be integral"); 6293 break; 6294 case OMP_CLAUSE_WORKER: 6295 error_at (OMP_CLAUSE_LOCATION (c), 6296 "%<worker%> num expression must be integral"); 6297 break; 6298 default: 6299 error_at (OMP_CLAUSE_LOCATION (c), 6300 "%qs expression must be integral", 6301 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6302 } 6303 remove = true; 6304 } 6305 else 6306 { 6307 t = mark_rvalue_use (t); 6308 if (!processing_template_decl) 6309 { 6310 t = maybe_constant_value (t); 6311 if (TREE_CODE (t) == INTEGER_CST 6312 && tree_int_cst_sgn (t) != 1) 6313 { 6314 switch (OMP_CLAUSE_CODE (c)) 6315 { 6316 case OMP_CLAUSE_GANG: 6317 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6318 "%<gang%> num value must be positive"); 6319 break; 6320 case OMP_CLAUSE_VECTOR: 6321 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6322 "%<vector%> length value must be " 6323 "positive"); 6324 break; 6325 case OMP_CLAUSE_WORKER: 6326 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6327 "%<worker%> num value must be " 6328 "positive"); 6329 break; 6330 default: 6331 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6332 "%qs value must be positive", 6333 omp_clause_code_name 6334 [OMP_CLAUSE_CODE (c)]); 6335 } 6336 t = 
integer_one_node; 6337 } 6338 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6339 } 6340 OMP_CLAUSE_OPERAND (c, 0) = t; 6341 } 6342 break; 6343 6344 case OMP_CLAUSE_SCHEDULE: 6345 if (OMP_CLAUSE_SCHEDULE_KIND (c) & OMP_CLAUSE_SCHEDULE_NONMONOTONIC) 6346 { 6347 const char *p = NULL; 6348 switch (OMP_CLAUSE_SCHEDULE_KIND (c) & OMP_CLAUSE_SCHEDULE_MASK) 6349 { 6350 case OMP_CLAUSE_SCHEDULE_STATIC: p = "static"; break; 6351 case OMP_CLAUSE_SCHEDULE_DYNAMIC: break; 6352 case OMP_CLAUSE_SCHEDULE_GUIDED: break; 6353 case OMP_CLAUSE_SCHEDULE_AUTO: p = "auto"; break; 6354 case OMP_CLAUSE_SCHEDULE_RUNTIME: p = "runtime"; break; 6355 default: gcc_unreachable (); 6356 } 6357 if (p) 6358 { 6359 error_at (OMP_CLAUSE_LOCATION (c), 6360 "%<nonmonotonic%> modifier specified for %qs " 6361 "schedule kind", p); 6362 OMP_CLAUSE_SCHEDULE_KIND (c) 6363 = (enum omp_clause_schedule_kind) 6364 (OMP_CLAUSE_SCHEDULE_KIND (c) 6365 & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC); 6366 } 6367 } 6368 6369 t = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c); 6370 if (t == NULL) 6371 ; 6372 else if (t == error_mark_node) 6373 remove = true; 6374 else if (!type_dependent_expression_p (t) 6375 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6376 { 6377 error ("schedule chunk size expression must be integral"); 6378 remove = true; 6379 } 6380 else 6381 { 6382 t = mark_rvalue_use (t); 6383 if (!processing_template_decl) 6384 { 6385 t = maybe_constant_value (t); 6386 if (TREE_CODE (t) == INTEGER_CST 6387 && tree_int_cst_sgn (t) != 1) 6388 { 6389 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6390 "chunk size value must be positive"); 6391 t = integer_one_node; 6392 } 6393 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6394 } 6395 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t; 6396 } 6397 break; 6398 6399 case OMP_CLAUSE_SIMDLEN: 6400 case OMP_CLAUSE_SAFELEN: 6401 t = OMP_CLAUSE_OPERAND (c, 0); 6402 if (t == error_mark_node) 6403 remove = true; 6404 else if (!type_dependent_expression_p (t) 6405 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6406 
{ 6407 error ("%qs length expression must be integral", 6408 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6409 remove = true; 6410 } 6411 else 6412 { 6413 t = mark_rvalue_use (t); 6414 if (!processing_template_decl) 6415 { 6416 t = maybe_constant_value (t); 6417 if (TREE_CODE (t) != INTEGER_CST 6418 || tree_int_cst_sgn (t) != 1) 6419 { 6420 error ("%qs length expression must be positive constant" 6421 " integer expression", 6422 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6423 remove = true; 6424 } 6425 } 6426 OMP_CLAUSE_OPERAND (c, 0) = t; 6427 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SAFELEN) 6428 safelen = c; 6429 } 6430 break; 6431 6432 case OMP_CLAUSE_ASYNC: 6433 t = OMP_CLAUSE_ASYNC_EXPR (c); 6434 if (t == error_mark_node) 6435 remove = true; 6436 else if (!type_dependent_expression_p (t) 6437 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6438 { 6439 error ("%<async%> expression must be integral"); 6440 remove = true; 6441 } 6442 else 6443 { 6444 t = mark_rvalue_use (t); 6445 if (!processing_template_decl) 6446 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6447 OMP_CLAUSE_ASYNC_EXPR (c) = t; 6448 } 6449 break; 6450 6451 case OMP_CLAUSE_WAIT: 6452 t = OMP_CLAUSE_WAIT_EXPR (c); 6453 if (t == error_mark_node) 6454 remove = true; 6455 else if (!processing_template_decl) 6456 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6457 OMP_CLAUSE_WAIT_EXPR (c) = t; 6458 break; 6459 6460 case OMP_CLAUSE_THREAD_LIMIT: 6461 t = OMP_CLAUSE_THREAD_LIMIT_EXPR (c); 6462 if (t == error_mark_node) 6463 remove = true; 6464 else if (!type_dependent_expression_p (t) 6465 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6466 { 6467 error ("%<thread_limit%> expression must be integral"); 6468 remove = true; 6469 } 6470 else 6471 { 6472 t = mark_rvalue_use (t); 6473 if (!processing_template_decl) 6474 { 6475 t = maybe_constant_value (t); 6476 if (TREE_CODE (t) == INTEGER_CST 6477 && tree_int_cst_sgn (t) != 1) 6478 { 6479 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6480 "%<thread_limit%> value must be 
positive"); 6481 t = integer_one_node; 6482 } 6483 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6484 } 6485 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = t; 6486 } 6487 break; 6488 6489 case OMP_CLAUSE_DEVICE: 6490 t = OMP_CLAUSE_DEVICE_ID (c); 6491 if (t == error_mark_node) 6492 remove = true; 6493 else if (!type_dependent_expression_p (t) 6494 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6495 { 6496 error ("%<device%> id must be integral"); 6497 remove = true; 6498 } 6499 else 6500 { 6501 t = mark_rvalue_use (t); 6502 if (!processing_template_decl) 6503 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6504 OMP_CLAUSE_DEVICE_ID (c) = t; 6505 } 6506 break; 6507 6508 case OMP_CLAUSE_DIST_SCHEDULE: 6509 t = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c); 6510 if (t == NULL) 6511 ; 6512 else if (t == error_mark_node) 6513 remove = true; 6514 else if (!type_dependent_expression_p (t) 6515 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6516 { 6517 error ("%<dist_schedule%> chunk size expression must be " 6518 "integral"); 6519 remove = true; 6520 } 6521 else 6522 { 6523 t = mark_rvalue_use (t); 6524 if (!processing_template_decl) 6525 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6526 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = t; 6527 } 6528 break; 6529 6530 case OMP_CLAUSE_ALIGNED: 6531 t = OMP_CLAUSE_DECL (c); 6532 if (t == current_class_ptr && ort != C_ORT_OMP_DECLARE_SIMD) 6533 { 6534 error ("%<this%> allowed in OpenMP only in %<declare simd%>" 6535 " clauses"); 6536 remove = true; 6537 break; 6538 } 6539 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) 6540 { 6541 if (processing_template_decl && TREE_CODE (t) != OVERLOAD) 6542 break; 6543 if (DECL_P (t)) 6544 error ("%qD is not a variable in %<aligned%> clause", t); 6545 else 6546 error ("%qE is not a variable in %<aligned%> clause", t); 6547 remove = true; 6548 } 6549 else if (!type_dependent_expression_p (t) 6550 && TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE 6551 && TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE 6552 && (TREE_CODE 
(TREE_TYPE (t)) != REFERENCE_TYPE 6553 || (!POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (t))) 6554 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) 6555 != ARRAY_TYPE)))) 6556 { 6557 error_at (OMP_CLAUSE_LOCATION (c), 6558 "%qE in %<aligned%> clause is neither a pointer nor " 6559 "an array nor a reference to pointer or array", t); 6560 remove = true; 6561 } 6562 else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) 6563 { 6564 error ("%qD appears more than once in %<aligned%> clauses", t); 6565 remove = true; 6566 } 6567 else 6568 bitmap_set_bit (&aligned_head, DECL_UID (t)); 6569 t = OMP_CLAUSE_ALIGNED_ALIGNMENT (c); 6570 if (t == error_mark_node) 6571 remove = true; 6572 else if (t == NULL_TREE) 6573 break; 6574 else if (!type_dependent_expression_p (t) 6575 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6576 { 6577 error ("%<aligned%> clause alignment expression must " 6578 "be integral"); 6579 remove = true; 6580 } 6581 else 6582 { 6583 t = mark_rvalue_use (t); 6584 if (!processing_template_decl) 6585 { 6586 t = maybe_constant_value (t); 6587 if (TREE_CODE (t) != INTEGER_CST 6588 || tree_int_cst_sgn (t) != 1) 6589 { 6590 error ("%<aligned%> clause alignment expression must be " 6591 "positive constant integer expression"); 6592 remove = true; 6593 } 6594 } 6595 OMP_CLAUSE_ALIGNED_ALIGNMENT (c) = t; 6596 } 6597 break; 6598 6599 case OMP_CLAUSE_DEPEND: 6600 t = OMP_CLAUSE_DECL (c); 6601 if (t == NULL_TREE) 6602 { 6603 gcc_assert (OMP_CLAUSE_DEPEND_KIND (c) 6604 == OMP_CLAUSE_DEPEND_SOURCE); 6605 break; 6606 } 6607 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK) 6608 { 6609 if (cp_finish_omp_clause_depend_sink (c)) 6610 remove = true; 6611 break; 6612 } 6613 if (TREE_CODE (t) == TREE_LIST) 6614 { 6615 if (handle_omp_array_sections (c, ort)) 6616 remove = true; 6617 break; 6618 } 6619 if (t == error_mark_node) 6620 remove = true; 6621 else if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) 6622 { 6623 if (processing_template_decl && TREE_CODE (t) != OVERLOAD) 6624 break; 6625 if 
(DECL_P (t)) 6626 error ("%qD is not a variable in %<depend%> clause", t); 6627 else 6628 error ("%qE is not a variable in %<depend%> clause", t); 6629 remove = true; 6630 } 6631 else if (t == current_class_ptr) 6632 { 6633 error ("%<this%> allowed in OpenMP only in %<declare simd%>" 6634 " clauses"); 6635 remove = true; 6636 } 6637 else if (!processing_template_decl 6638 && !cxx_mark_addressable (t)) 6639 remove = true; 6640 break; 6641 6642 case OMP_CLAUSE_MAP: 6643 case OMP_CLAUSE_TO: 6644 case OMP_CLAUSE_FROM: 6645 case OMP_CLAUSE__CACHE_: 6646 t = OMP_CLAUSE_DECL (c); 6647 if (TREE_CODE (t) == TREE_LIST) 6648 { 6649 if (handle_omp_array_sections (c, ort)) 6650 remove = true; 6651 else 6652 { 6653 t = OMP_CLAUSE_DECL (c); 6654 if (TREE_CODE (t) != TREE_LIST 6655 && !type_dependent_expression_p (t) 6656 && !cp_omp_mappable_type (TREE_TYPE (t))) 6657 { 6658 error_at (OMP_CLAUSE_LOCATION (c), 6659 "array section does not have mappable type " 6660 "in %qs clause", 6661 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6662 remove = true; 6663 } 6664 while (TREE_CODE (t) == ARRAY_REF) 6665 t = TREE_OPERAND (t, 0); 6666 if (TREE_CODE (t) == COMPONENT_REF 6667 && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) 6668 { 6669 while (TREE_CODE (t) == COMPONENT_REF) 6670 t = TREE_OPERAND (t, 0); 6671 if (REFERENCE_REF_P (t)) 6672 t = TREE_OPERAND (t, 0); 6673 if (bitmap_bit_p (&map_field_head, DECL_UID (t))) 6674 break; 6675 if (bitmap_bit_p (&map_head, DECL_UID (t))) 6676 { 6677 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) 6678 error ("%qD appears more than once in motion" 6679 " clauses", t); 6680 else if (ort == C_ORT_ACC) 6681 error ("%qD appears more than once in data" 6682 " clauses", t); 6683 else 6684 error ("%qD appears more than once in map" 6685 " clauses", t); 6686 remove = true; 6687 } 6688 else 6689 { 6690 bitmap_set_bit (&map_head, DECL_UID (t)); 6691 bitmap_set_bit (&map_field_head, DECL_UID (t)); 6692 } 6693 } 6694 } 6695 break; 6696 } 6697 if (t == error_mark_node) 6698 
{ 6699 remove = true; 6700 break; 6701 } 6702 if (REFERENCE_REF_P (t) 6703 && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF) 6704 { 6705 t = TREE_OPERAND (t, 0); 6706 OMP_CLAUSE_DECL (c) = t; 6707 } 6708 if (TREE_CODE (t) == COMPONENT_REF 6709 && (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP 6710 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE__CACHE_) 6711 { 6712 if (type_dependent_expression_p (t)) 6713 break; 6714 if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL 6715 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))) 6716 { 6717 error_at (OMP_CLAUSE_LOCATION (c), 6718 "bit-field %qE in %qs clause", 6719 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6720 remove = true; 6721 } 6722 else if (!cp_omp_mappable_type (TREE_TYPE (t))) 6723 { 6724 error_at (OMP_CLAUSE_LOCATION (c), 6725 "%qE does not have a mappable type in %qs clause", 6726 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6727 remove = true; 6728 } 6729 while (TREE_CODE (t) == COMPONENT_REF) 6730 { 6731 if (TREE_TYPE (TREE_OPERAND (t, 0)) 6732 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) 6733 == UNION_TYPE)) 6734 { 6735 error_at (OMP_CLAUSE_LOCATION (c), 6736 "%qE is a member of a union", t); 6737 remove = true; 6738 break; 6739 } 6740 t = TREE_OPERAND (t, 0); 6741 } 6742 if (remove) 6743 break; 6744 if (REFERENCE_REF_P (t)) 6745 t = TREE_OPERAND (t, 0); 6746 if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) 6747 { 6748 if (bitmap_bit_p (&map_field_head, DECL_UID (t))) 6749 goto handle_map_references; 6750 } 6751 } 6752 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) 6753 { 6754 if (processing_template_decl && TREE_CODE (t) != OVERLOAD) 6755 break; 6756 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 6757 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER 6758 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)) 6759 break; 6760 if (DECL_P (t)) 6761 error ("%qD is not a variable in %qs clause", t, 6762 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6763 else 6764 error ("%qE is not a variable in %qs clause", t, 6765 
omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6766 remove = true; 6767 } 6768 else if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t)) 6769 { 6770 error ("%qD is threadprivate variable in %qs clause", t, 6771 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6772 remove = true; 6773 } 6774 else if (ort != C_ORT_ACC && t == current_class_ptr) 6775 { 6776 error ("%<this%> allowed in OpenMP only in %<declare simd%>" 6777 " clauses"); 6778 remove = true; 6779 break; 6780 } 6781 else if (!processing_template_decl 6782 && TREE_CODE (TREE_TYPE (t)) != REFERENCE_TYPE 6783 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP 6784 || (OMP_CLAUSE_MAP_KIND (c) 6785 != GOMP_MAP_FIRSTPRIVATE_POINTER)) 6786 && !cxx_mark_addressable (t)) 6787 remove = true; 6788 else if (!(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 6789 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER 6790 || (OMP_CLAUSE_MAP_KIND (c) 6791 == GOMP_MAP_FIRSTPRIVATE_POINTER))) 6792 && t == OMP_CLAUSE_DECL (c) 6793 && !type_dependent_expression_p (t) 6794 && !cp_omp_mappable_type ((TREE_CODE (TREE_TYPE (t)) 6795 == REFERENCE_TYPE) 6796 ? 
TREE_TYPE (TREE_TYPE (t)) 6797 : TREE_TYPE (t))) 6798 { 6799 error_at (OMP_CLAUSE_LOCATION (c), 6800 "%qD does not have a mappable type in %qs clause", t, 6801 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6802 remove = true; 6803 } 6804 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 6805 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_DEVICEPTR 6806 && !type_dependent_expression_p (t) 6807 && !POINTER_TYPE_P (TREE_TYPE (t))) 6808 { 6809 error ("%qD is not a pointer variable", t); 6810 remove = true; 6811 } 6812 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 6813 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) 6814 { 6815 if (bitmap_bit_p (&generic_head, DECL_UID (t)) 6816 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))) 6817 { 6818 error ("%qD appears more than once in data clauses", t); 6819 remove = true; 6820 } 6821 else if (bitmap_bit_p (&map_head, DECL_UID (t))) 6822 { 6823 if (ort == C_ORT_ACC) 6824 error ("%qD appears more than once in data clauses", t); 6825 else 6826 error ("%qD appears both in data and map clauses", t); 6827 remove = true; 6828 } 6829 else 6830 bitmap_set_bit (&generic_head, DECL_UID (t)); 6831 } 6832 else if (bitmap_bit_p (&map_head, DECL_UID (t))) 6833 { 6834 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) 6835 error ("%qD appears more than once in motion clauses", t); 6836 if (ort == C_ORT_ACC) 6837 error ("%qD appears more than once in data clauses", t); 6838 else 6839 error ("%qD appears more than once in map clauses", t); 6840 remove = true; 6841 } 6842 else if (bitmap_bit_p (&generic_head, DECL_UID (t)) 6843 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))) 6844 { 6845 if (ort == C_ORT_ACC) 6846 error ("%qD appears more than once in data clauses", t); 6847 else 6848 error ("%qD appears both in data and map clauses", t); 6849 remove = true; 6850 } 6851 else 6852 { 6853 bitmap_set_bit (&map_head, DECL_UID (t)); 6854 if (t != OMP_CLAUSE_DECL (c) 6855 && TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF) 6856 bitmap_set_bit 
(&map_field_head, DECL_UID (t)); 6857 } 6858 handle_map_references: 6859 if (!remove 6860 && !processing_template_decl 6861 && (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP 6862 && TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c))) == REFERENCE_TYPE) 6863 { 6864 t = OMP_CLAUSE_DECL (c); 6865 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) 6866 { 6867 OMP_CLAUSE_DECL (c) = build_simple_mem_ref (t); 6868 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 6869 OMP_CLAUSE_SIZE (c) 6870 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t))); 6871 } 6872 else if (OMP_CLAUSE_MAP_KIND (c) 6873 != GOMP_MAP_FIRSTPRIVATE_POINTER 6874 && (OMP_CLAUSE_MAP_KIND (c) 6875 != GOMP_MAP_FIRSTPRIVATE_REFERENCE) 6876 && (OMP_CLAUSE_MAP_KIND (c) 6877 != GOMP_MAP_ALWAYS_POINTER)) 6878 { 6879 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 6880 OMP_CLAUSE_MAP); 6881 if (TREE_CODE (t) == COMPONENT_REF) 6882 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER); 6883 else 6884 OMP_CLAUSE_SET_MAP_KIND (c2, 6885 GOMP_MAP_FIRSTPRIVATE_REFERENCE); 6886 OMP_CLAUSE_DECL (c2) = t; 6887 OMP_CLAUSE_SIZE (c2) = size_zero_node; 6888 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c); 6889 OMP_CLAUSE_CHAIN (c) = c2; 6890 OMP_CLAUSE_DECL (c) = build_simple_mem_ref (t); 6891 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 6892 OMP_CLAUSE_SIZE (c) 6893 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t))); 6894 c = c2; 6895 } 6896 } 6897 break; 6898 6899 case OMP_CLAUSE_TO_DECLARE: 6900 case OMP_CLAUSE_LINK: 6901 t = OMP_CLAUSE_DECL (c); 6902 if (TREE_CODE (t) == FUNCTION_DECL 6903 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO_DECLARE) 6904 ; 6905 else if (!VAR_P (t)) 6906 { 6907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO_DECLARE) 6908 { 6909 if (TREE_CODE (t) == TEMPLATE_ID_EXPR) 6910 error_at (OMP_CLAUSE_LOCATION (c), 6911 "template %qE in clause %qs", t, 6912 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6913 else if (really_overloaded_fn (t)) 6914 error_at (OMP_CLAUSE_LOCATION (c), 6915 "overloaded function name %qE in clause %qs", t, 6916 
omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6917 else 6918 error_at (OMP_CLAUSE_LOCATION (c), 6919 "%qE is neither a variable nor a function name " 6920 "in clause %qs", t, 6921 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6922 } 6923 else 6924 error_at (OMP_CLAUSE_LOCATION (c), 6925 "%qE is not a variable in clause %qs", t, 6926 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6927 remove = true; 6928 } 6929 else if (DECL_THREAD_LOCAL_P (t)) 6930 { 6931 error_at (OMP_CLAUSE_LOCATION (c), 6932 "%qD is threadprivate variable in %qs clause", t, 6933 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6934 remove = true; 6935 } 6936 else if (!cp_omp_mappable_type (TREE_TYPE (t))) 6937 { 6938 error_at (OMP_CLAUSE_LOCATION (c), 6939 "%qD does not have a mappable type in %qs clause", t, 6940 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 6941 remove = true; 6942 } 6943 if (remove) 6944 break; 6945 if (bitmap_bit_p (&generic_head, DECL_UID (t))) 6946 { 6947 error_at (OMP_CLAUSE_LOCATION (c), 6948 "%qE appears more than once on the same " 6949 "%<declare target%> directive", t); 6950 remove = true; 6951 } 6952 else 6953 bitmap_set_bit (&generic_head, DECL_UID (t)); 6954 break; 6955 6956 case OMP_CLAUSE_UNIFORM: 6957 t = OMP_CLAUSE_DECL (c); 6958 if (TREE_CODE (t) != PARM_DECL) 6959 { 6960 if (processing_template_decl) 6961 break; 6962 if (DECL_P (t)) 6963 error ("%qD is not an argument in %<uniform%> clause", t); 6964 else 6965 error ("%qE is not an argument in %<uniform%> clause", t); 6966 remove = true; 6967 break; 6968 } 6969 /* map_head bitmap is used as uniform_head if declare_simd. 
*/ 6970 bitmap_set_bit (&map_head, DECL_UID (t)); 6971 goto check_dup_generic; 6972 6973 case OMP_CLAUSE_GRAINSIZE: 6974 t = OMP_CLAUSE_GRAINSIZE_EXPR (c); 6975 if (t == error_mark_node) 6976 remove = true; 6977 else if (!type_dependent_expression_p (t) 6978 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 6979 { 6980 error ("%<grainsize%> expression must be integral"); 6981 remove = true; 6982 } 6983 else 6984 { 6985 t = mark_rvalue_use (t); 6986 if (!processing_template_decl) 6987 { 6988 t = maybe_constant_value (t); 6989 if (TREE_CODE (t) == INTEGER_CST 6990 && tree_int_cst_sgn (t) != 1) 6991 { 6992 warning_at (OMP_CLAUSE_LOCATION (c), 0, 6993 "%<grainsize%> value must be positive"); 6994 t = integer_one_node; 6995 } 6996 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 6997 } 6998 OMP_CLAUSE_GRAINSIZE_EXPR (c) = t; 6999 } 7000 break; 7001 7002 case OMP_CLAUSE_PRIORITY: 7003 t = OMP_CLAUSE_PRIORITY_EXPR (c); 7004 if (t == error_mark_node) 7005 remove = true; 7006 else if (!type_dependent_expression_p (t) 7007 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 7008 { 7009 error ("%<priority%> expression must be integral"); 7010 remove = true; 7011 } 7012 else 7013 { 7014 t = mark_rvalue_use (t); 7015 if (!processing_template_decl) 7016 { 7017 t = maybe_constant_value (t); 7018 if (TREE_CODE (t) == INTEGER_CST 7019 && tree_int_cst_sgn (t) == -1) 7020 { 7021 warning_at (OMP_CLAUSE_LOCATION (c), 0, 7022 "%<priority%> value must be non-negative"); 7023 t = integer_one_node; 7024 } 7025 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 7026 } 7027 OMP_CLAUSE_PRIORITY_EXPR (c) = t; 7028 } 7029 break; 7030 7031 case OMP_CLAUSE_HINT: 7032 t = OMP_CLAUSE_HINT_EXPR (c); 7033 if (t == error_mark_node) 7034 remove = true; 7035 else if (!type_dependent_expression_p (t) 7036 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 7037 { 7038 error ("%<num_tasks%> expression must be integral"); 7039 remove = true; 7040 } 7041 else 7042 { 7043 t = mark_rvalue_use (t); 7044 if (!processing_template_decl) 7045 { 
7046 t = maybe_constant_value (t); 7047 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 7048 } 7049 OMP_CLAUSE_HINT_EXPR (c) = t; 7050 } 7051 break; 7052 7053 case OMP_CLAUSE_IS_DEVICE_PTR: 7054 case OMP_CLAUSE_USE_DEVICE_PTR: 7055 field_ok = (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP; 7056 t = OMP_CLAUSE_DECL (c); 7057 if (!type_dependent_expression_p (t)) 7058 { 7059 tree type = TREE_TYPE (t); 7060 if (TREE_CODE (type) != POINTER_TYPE 7061 && TREE_CODE (type) != ARRAY_TYPE 7062 && (TREE_CODE (type) != REFERENCE_TYPE 7063 || (TREE_CODE (TREE_TYPE (type)) != POINTER_TYPE 7064 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))) 7065 { 7066 error_at (OMP_CLAUSE_LOCATION (c), 7067 "%qs variable is neither a pointer, nor an array " 7068 "nor reference to pointer or array", 7069 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 7070 remove = true; 7071 } 7072 } 7073 goto check_dup_generic; 7074 7075 case OMP_CLAUSE_NOWAIT: 7076 case OMP_CLAUSE_DEFAULT: 7077 case OMP_CLAUSE_UNTIED: 7078 case OMP_CLAUSE_COLLAPSE: 7079 case OMP_CLAUSE_MERGEABLE: 7080 case OMP_CLAUSE_PARALLEL: 7081 case OMP_CLAUSE_FOR: 7082 case OMP_CLAUSE_SECTIONS: 7083 case OMP_CLAUSE_TASKGROUP: 7084 case OMP_CLAUSE_PROC_BIND: 7085 case OMP_CLAUSE_NOGROUP: 7086 case OMP_CLAUSE_THREADS: 7087 case OMP_CLAUSE_SIMD: 7088 case OMP_CLAUSE_DEFAULTMAP: 7089 case OMP_CLAUSE_AUTO: 7090 case OMP_CLAUSE_INDEPENDENT: 7091 case OMP_CLAUSE_SEQ: 7092 break; 7093 7094 case OMP_CLAUSE_TILE: 7095 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list; 7096 list = TREE_CHAIN (list)) 7097 { 7098 t = TREE_VALUE (list); 7099 7100 if (t == error_mark_node) 7101 remove = true; 7102 else if (!type_dependent_expression_p (t) 7103 && !INTEGRAL_TYPE_P (TREE_TYPE (t))) 7104 { 7105 error_at (OMP_CLAUSE_LOCATION (c), 7106 "%<tile%> argument needs integral type"); 7107 remove = true; 7108 } 7109 else 7110 { 7111 t = mark_rvalue_use (t); 7112 if (!processing_template_decl) 7113 { 7114 /* Zero is used to indicate '*', we permit you 7115 
to get there via an ICE of value zero. */ 7116 t = maybe_constant_value (t); 7117 if (!tree_fits_shwi_p (t) 7118 || tree_to_shwi (t) < 0) 7119 { 7120 error_at (OMP_CLAUSE_LOCATION (c), 7121 "%<tile%> argument needs positive " 7122 "integral constant"); 7123 remove = true; 7124 } 7125 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); 7126 } 7127 } 7128 7129 /* Update list item. */ 7130 TREE_VALUE (list) = t; 7131 } 7132 break; 7133 7134 case OMP_CLAUSE_ORDERED: 7135 ordered_seen = true; 7136 break; 7137 7138 case OMP_CLAUSE_INBRANCH: 7139 case OMP_CLAUSE_NOTINBRANCH: 7140 if (branch_seen) 7141 { 7142 error ("%<inbranch%> clause is incompatible with " 7143 "%<notinbranch%>"); 7144 remove = true; 7145 } 7146 branch_seen = true; 7147 break; 7148 7149 default: 7150 gcc_unreachable (); 7151 } 7152 7153 if (remove) 7154 *pc = OMP_CLAUSE_CHAIN (c); 7155 else 7156 pc = &OMP_CLAUSE_CHAIN (c); 7157 } 7158 7159 for (pc = &clauses, c = clauses; c ; c = *pc) 7160 { 7161 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c); 7162 bool remove = false; 7163 bool need_complete_type = false; 7164 bool need_default_ctor = false; 7165 bool need_copy_ctor = false; 7166 bool need_copy_assignment = false; 7167 bool need_implicitly_determined = false; 7168 bool need_dtor = false; 7169 tree type, inner_type; 7170 7171 switch (c_kind) 7172 { 7173 case OMP_CLAUSE_SHARED: 7174 need_implicitly_determined = true; 7175 break; 7176 case OMP_CLAUSE_PRIVATE: 7177 need_complete_type = true; 7178 need_default_ctor = true; 7179 need_dtor = true; 7180 need_implicitly_determined = true; 7181 break; 7182 case OMP_CLAUSE_FIRSTPRIVATE: 7183 need_complete_type = true; 7184 need_copy_ctor = true; 7185 need_dtor = true; 7186 need_implicitly_determined = true; 7187 break; 7188 case OMP_CLAUSE_LASTPRIVATE: 7189 need_complete_type = true; 7190 need_copy_assignment = true; 7191 need_implicitly_determined = true; 7192 break; 7193 case OMP_CLAUSE_REDUCTION: 7194 need_implicitly_determined = true; 7195 break; 7196 
case OMP_CLAUSE_LINEAR: 7197 if (ort != C_ORT_OMP_DECLARE_SIMD) 7198 need_implicitly_determined = true; 7199 else if (OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) 7200 && !bitmap_bit_p (&map_head, 7201 DECL_UID (OMP_CLAUSE_LINEAR_STEP (c)))) 7202 { 7203 error_at (OMP_CLAUSE_LOCATION (c), 7204 "%<linear%> clause step is a parameter %qD not " 7205 "specified in %<uniform%> clause", 7206 OMP_CLAUSE_LINEAR_STEP (c)); 7207 *pc = OMP_CLAUSE_CHAIN (c); 7208 continue; 7209 } 7210 break; 7211 case OMP_CLAUSE_COPYPRIVATE: 7212 need_copy_assignment = true; 7213 break; 7214 case OMP_CLAUSE_COPYIN: 7215 need_copy_assignment = true; 7216 break; 7217 case OMP_CLAUSE_SIMDLEN: 7218 if (safelen 7219 && !processing_template_decl 7220 && tree_int_cst_lt (OMP_CLAUSE_SAFELEN_EXPR (safelen), 7221 OMP_CLAUSE_SIMDLEN_EXPR (c))) 7222 { 7223 error_at (OMP_CLAUSE_LOCATION (c), 7224 "%<simdlen%> clause value is bigger than " 7225 "%<safelen%> clause value"); 7226 OMP_CLAUSE_SIMDLEN_EXPR (c) 7227 = OMP_CLAUSE_SAFELEN_EXPR (safelen); 7228 } 7229 pc = &OMP_CLAUSE_CHAIN (c); 7230 continue; 7231 case OMP_CLAUSE_SCHEDULE: 7232 if (ordered_seen 7233 && (OMP_CLAUSE_SCHEDULE_KIND (c) 7234 & OMP_CLAUSE_SCHEDULE_NONMONOTONIC)) 7235 { 7236 error_at (OMP_CLAUSE_LOCATION (c), 7237 "%<nonmonotonic%> schedule modifier specified " 7238 "together with %<ordered%> clause"); 7239 OMP_CLAUSE_SCHEDULE_KIND (c) 7240 = (enum omp_clause_schedule_kind) 7241 (OMP_CLAUSE_SCHEDULE_KIND (c) 7242 & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC); 7243 } 7244 pc = &OMP_CLAUSE_CHAIN (c); 7245 continue; 7246 case OMP_CLAUSE_NOWAIT: 7247 if (copyprivate_seen) 7248 { 7249 error_at (OMP_CLAUSE_LOCATION (c), 7250 "%<nowait%> clause must not be used together " 7251 "with %<copyprivate%>"); 7252 *pc = OMP_CLAUSE_CHAIN (c); 7253 continue; 7254 } 7255 /* FALLTHRU */ 7256 default: 7257 pc = &OMP_CLAUSE_CHAIN (c); 7258 continue; 7259 } 7260 7261 t = OMP_CLAUSE_DECL (c); 7262 if (processing_template_decl 7263 && !VAR_P (t) && TREE_CODE (t) != PARM_DECL) 
7264 { 7265 pc = &OMP_CLAUSE_CHAIN (c); 7266 continue; 7267 } 7268 7269 switch (c_kind) 7270 { 7271 case OMP_CLAUSE_LASTPRIVATE: 7272 if (!bitmap_bit_p (&firstprivate_head, DECL_UID (t))) 7273 { 7274 need_default_ctor = true; 7275 need_dtor = true; 7276 } 7277 break; 7278 7279 case OMP_CLAUSE_REDUCTION: 7280 if (finish_omp_reduction_clause (c, &need_default_ctor, 7281 &need_dtor)) 7282 remove = true; 7283 else 7284 t = OMP_CLAUSE_DECL (c); 7285 break; 7286 7287 case OMP_CLAUSE_COPYIN: 7288 if (!VAR_P (t) || !CP_DECL_THREAD_LOCAL_P (t)) 7289 { 7290 error ("%qE must be %<threadprivate%> for %<copyin%>", t); 7291 remove = true; 7292 } 7293 break; 7294 7295 default: 7296 break; 7297 } 7298 7299 if (need_complete_type || need_copy_assignment) 7300 { 7301 t = require_complete_type (t); 7302 if (t == error_mark_node) 7303 remove = true; 7304 else if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE 7305 && !complete_type_or_else (TREE_TYPE (TREE_TYPE (t)), t)) 7306 remove = true; 7307 } 7308 if (need_implicitly_determined) 7309 { 7310 const char *share_name = NULL; 7311 7312 if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t)) 7313 share_name = "threadprivate"; 7314 else switch (cxx_omp_predetermined_sharing (t)) 7315 { 7316 case OMP_CLAUSE_DEFAULT_UNSPECIFIED: 7317 break; 7318 case OMP_CLAUSE_DEFAULT_SHARED: 7319 /* const vars may be specified in firstprivate clause. */ 7320 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE 7321 && cxx_omp_const_qual_no_mutable (t)) 7322 break; 7323 share_name = "shared"; 7324 break; 7325 case OMP_CLAUSE_DEFAULT_PRIVATE: 7326 share_name = "private"; 7327 break; 7328 default: 7329 gcc_unreachable (); 7330 } 7331 if (share_name) 7332 { 7333 error ("%qE is predetermined %qs for %qs", 7334 omp_clause_printable_decl (t), share_name, 7335 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 7336 remove = true; 7337 } 7338 } 7339 7340 /* We're interested in the base element, not arrays. 
 */
	  inner_type = type = TREE_TYPE (t);
	  /* Look through a reference only for the clause kinds that need
	     to inspect the referred-to object, then strip array
	     dimensions down to the element type.  */
	  if ((need_complete_type
	       || need_copy_assignment
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	      && TREE_CODE (inner_type) == REFERENCE_TYPE)
	    inner_type = TREE_TYPE (inner_type);
	  while (TREE_CODE (inner_type) == ARRAY_TYPE)
	    inner_type = TREE_TYPE (inner_type);

	  /* Check for special function availability by building a call to one.
	     Save the results, because later we won't be in the right context
	     for making these queries.  */
	  if (CLASS_TYPE_P (inner_type)
	      && COMPLETE_TYPE_P (inner_type)
	      && (need_default_ctor || need_copy_ctor
		  || need_copy_assignment || need_dtor)
	      && !type_dependent_expression_p (t)
	      && cxx_omp_create_clause_info (c, inner_type, need_default_ctor,
					     need_copy_ctor,
					     need_copy_assignment,
					     need_dtor))
	    remove = true;

	  if (!remove
	      && c_kind == OMP_CLAUSE_SHARED
	      && processing_template_decl)
	    {
	      t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
	      if (t)
		OMP_CLAUSE_DECL (c) = t;
	    }

	  /* Unlink an invalid clause from the chain, else advance.  */
	  if (remove)
	    *pc = OMP_CLAUSE_CHAIN (c);
	  else
	    pc = &OMP_CLAUSE_CHAIN (c);
	}

  bitmap_obstack_release (NULL);
  return clauses;
}

/* Start processing OpenMP clauses that can include any
   privatization clauses for non-static data members.

   IGNORE_NEXT tells the matching pop to be skipped for the next
   construct.  Returns the statement list to hand back to
   pop_omp_privatization_clauses (), or NULL_TREE when this push is
   itself being ignored.  error_mark_node pushed onto
   omp_private_member_vec marks the boundary of the outer,
   still-active remapping group.  */

tree
push_omp_privatization_clauses (bool ignore_next)
{
  if (omp_private_member_ignore_next)
    {
      omp_private_member_ignore_next = ignore_next;
      return NULL_TREE;
    }
  omp_private_member_ignore_next = ignore_next;
  if (omp_private_member_map)
    omp_private_member_vec.safe_push (error_mark_node);
  return push_stmt_list ();
}

/* Revert remapping of any non-static data members since
   the last push_omp_privatization_clauses () call.
 */

void
pop_omp_privatization_clauses (tree stmt)
{
  /* NULL_TREE means the corresponding push was ignored.  */
  if (stmt == NULL_TREE)
    return;
  stmt = pop_stmt_list (stmt);
  if (omp_private_member_map)
    {
      while (!omp_private_member_vec.is_empty ())
	{
	  tree t = omp_private_member_vec.pop ();
	  /* error_mark_node is the sentinel separating an outer group's
	     entries; leave those for the matching outer pop.  */
	  if (t == error_mark_node)
	    {
	      add_stmt (stmt);
	      return;
	    }
	  /* integer_zero_node precedes an entry whose remapped decl must
	     not get a DECL_EXPR emitted here.  */
	  bool no_decl_expr = t == integer_zero_node;
	  if (no_decl_expr)
	    t = omp_private_member_vec.pop ();
	  tree *v = omp_private_member_map->get (t);
	  gcc_assert (v);
	  if (!no_decl_expr)
	    add_decl_expr (*v);
	  omp_private_member_map->remove (t);
	}
      delete omp_private_member_map;
      omp_private_member_map = NULL;
    }
  add_stmt (stmt);
}

/* Remember OpenMP privatization clauses mapping and clear it.
   Used for lambdas.  SAVE receives, per entry: the remapped decl,
   the original decl, and optionally the integer_zero_node marker;
   a leading integer_one_node records the ignore_next flag.  */

void
save_omp_privatization_clauses (vec<tree> &save)
{
  save = vNULL;
  if (omp_private_member_ignore_next)
    save.safe_push (integer_one_node);
  omp_private_member_ignore_next = false;
  if (!omp_private_member_map)
    return;

  while (!omp_private_member_vec.is_empty ())
    {
      tree t = omp_private_member_vec.pop ();
      /* Group separators are copied through verbatim.  */
      if (t == error_mark_node)
	{
	  save.safe_push (t);
	  continue;
	}
      tree n = t;
      if (t == integer_zero_node)
	t = omp_private_member_vec.pop ();
      tree *v = omp_private_member_map->get (t);
      gcc_assert (v);
      save.safe_push (*v);
      save.safe_push (t);
      /* N != T only when the integer_zero_node marker was present.  */
      if (n != t)
	save.safe_push (n);
    }
  delete omp_private_member_map;
  omp_private_member_map = NULL;
}

/* Restore OpenMP privatization clauses mapping saved by the
   above function.
*/

void
restore_omp_privatization_clauses (vec<tree> &save)
{
  gcc_assert (omp_private_member_vec.is_empty ());
  omp_private_member_ignore_next = false;
  if (save.is_empty ())
    return;
  /* A lone integer_one_node only encodes the pending ignore flag.  */
  if (save.length () == 1 && save[0] == integer_one_node)
    {
      omp_private_member_ignore_next = true;
      save.release ();
      return;
    }

  omp_private_member_map = new hash_map <tree, tree>;
  /* Undo the encoding performed by save_omp_privatization_clauses ():
     pop [key, optional marker] back onto omp_private_member_vec and
     re-insert key -> value into the map.  */
  while (!save.is_empty ())
    {
      tree t = save.pop ();
      tree n = t;
      if (t != error_mark_node)
	{
	  if (t == integer_one_node)
	    {
	      omp_private_member_ignore_next = true;
	      gcc_assert (save.is_empty ());
	      break;
	    }
	  if (t == integer_zero_node)
	    t = save.pop ();
	  tree &v = omp_private_member_map->get_or_insert (t);
	  v = save.pop ();
	}
      omp_private_member_vec.safe_push (t);
      if (n != t)
	omp_private_member_vec.safe_push (n);
    }
  save.release ();
}

/* For all variables in the tree_list VARS, mark them as thread local.  */

void
finish_omp_threadprivate (tree vars)
{
  tree t;

  /* Mark every variable in VARS to be assigned thread local storage.  */
  for (t = vars; t; t = TREE_CHAIN (t))
    {
      tree v = TREE_PURPOSE (t);

      if (error_operand_p (v))
	;
      else if (!VAR_P (v))
	error ("%<threadprivate%> %qD is not file, namespace "
	       "or block scope variable", v);
      /* If V had already been marked threadprivate, it doesn't matter
	 whether it had been used prior to this point.  */
      else if (TREE_USED (v)
	       && (DECL_LANG_SPECIFIC (v) == NULL
		   || !CP_DECL_THREADPRIVATE_P (v)))
	error ("%qE declared %<threadprivate%> after first use", v);
      else if (! TREE_STATIC (v) && ! DECL_EXTERNAL (v))
	error ("automatic variable %qE cannot be %<threadprivate%>", v);
      else if (!
COMPLETE_TYPE_P (complete_type (TREE_TYPE (v))))
	error ("%<threadprivate%> %qE has incomplete type", v);
      else if (TREE_STATIC (v) && TYPE_P (CP_DECL_CONTEXT (v))
	       && CP_DECL_CONTEXT (v) != current_class_type)
	error ("%<threadprivate%> %qE directive not "
	       "in %qT definition", v, CP_DECL_CONTEXT (v));
      else
	{
	  /* Allocate a LANG_SPECIFIC structure for V, if needed.  */
	  if (DECL_LANG_SPECIFIC (v) == NULL)
	    {
	      retrofit_lang_decl (v);

	      /* Make sure that DECL_DISCRIMINATOR_P continues to be true
		 after the allocation of the lang_decl structure.  */
	      if (DECL_DISCRIMINATOR_P (v))
		DECL_LANG_SPECIFIC (v)->u.base.u2sel = 1;
	    }

	  if (! CP_DECL_THREAD_LOCAL_P (v))
	    {
	      CP_DECL_THREAD_LOCAL_P (v) = true;
	      set_decl_tls_model (v, decl_default_tls_model (v));
	      /* If rtl has been already set for this var, call
		 make_decl_rtl once again, so that encode_section_info
		 has a chance to look at the new decl flags.  */
	      if (DECL_RTL_SET_P (v))
		make_decl_rtl (v);
	    }
	  CP_DECL_THREADPRIVATE_P (v) = 1;
	}
    }
}

/* Build an OpenMP structured block.  */

tree
begin_omp_structured_block (void)
{
  return do_pushlevel (sk_omp);
}

/* Close an OpenMP structured block opened by
   begin_omp_structured_block.  */

tree
finish_omp_structured_block (tree block)
{
  return do_poplevel (block);
}

/* Similarly, except force the retention of the BLOCK.  */

tree
begin_omp_parallel (void)
{
  keep_next_level (true);
  return begin_omp_structured_block ();
}

/* Generate OACC_DATA, with CLAUSES and BLOCK as its compound
   statement.
*/

tree
finish_oacc_data (tree clauses, tree block)
{
  tree stmt;

  block = finish_omp_structured_block (block);

  stmt = make_node (OACC_DATA);
  TREE_TYPE (stmt) = void_type_node;
  OACC_DATA_CLAUSES (stmt) = clauses;
  OACC_DATA_BODY (stmt) = block;

  return add_stmt (stmt);
}

/* Generate OACC_HOST_DATA, with CLAUSES and BLOCK as its compound
   statement.  */

tree
finish_oacc_host_data (tree clauses, tree block)
{
  tree stmt;

  block = finish_omp_structured_block (block);

  stmt = make_node (OACC_HOST_DATA);
  TREE_TYPE (stmt) = void_type_node;
  OACC_HOST_DATA_CLAUSES (stmt) = clauses;
  OACC_HOST_DATA_BODY (stmt) = block;

  return add_stmt (stmt);
}

/* Generate OMP construct CODE, with BODY and CLAUSES as its compound
   statement.  */

tree
finish_omp_construct (enum tree_code code, tree body, tree clauses)
{
  body = finish_omp_structured_block (body);

  tree stmt = make_node (code);
  TREE_TYPE (stmt) = void_type_node;
  OMP_BODY (stmt) = body;
  OMP_CLAUSES (stmt) = clauses;

  return add_stmt (stmt);
}

/* Generate OMP_PARALLEL, with CLAUSES and BODY as its compound
   statement.  */

tree
finish_omp_parallel (tree clauses, tree body)
{
  tree stmt;

  body = finish_omp_structured_block (body);

  stmt = make_node (OMP_PARALLEL);
  TREE_TYPE (stmt) = void_type_node;
  OMP_PARALLEL_CLAUSES (stmt) = clauses;
  OMP_PARALLEL_BODY (stmt) = body;

  return add_stmt (stmt);
}

/* Begin an OpenMP task construct; like begin_omp_parallel, the BLOCK
   is retained.  */

tree
begin_omp_task (void)
{
  keep_next_level (true);
  return begin_omp_structured_block ();
}

/* Generate OMP_TASK, with CLAUSES and BODY as its compound
   statement.  */

tree
finish_omp_task (tree clauses, tree body)
{
  tree stmt;

  body = finish_omp_structured_block (body);

  stmt = make_node (OMP_TASK);
  TREE_TYPE (stmt) = void_type_node;
  OMP_TASK_CLAUSES (stmt) = clauses;
  OMP_TASK_BODY (stmt) = body;

  return add_stmt (stmt);
}

/* Helper function for finish_omp_for.  Convert Ith random access iterator
   into integral iterator.  Return FALSE if successful.

   The class-typed iterator ITER is replaced by an integral counter DECL
   counting from 0 to (cond-bound - iter), and statements are emitted in
   *PRE_BODY / *BODY to keep ITER in sync with DECL.  CLAUSES may be
   extended (taskloop firstprivate) which is why it is a reference.  */

static bool
handle_omp_for_class_iterator (int i, location_t locus, enum tree_code code,
			       tree declv, tree orig_declv, tree initv,
			       tree condv, tree incrv, tree *body,
			       tree *pre_body, tree &clauses, tree *lastp,
			       int collapse, int ordered)
{
  tree diff, iter_init, iter_incr = NULL, last;
  tree incr_var = NULL, orig_pre_body, orig_body, c;
  tree decl = TREE_VEC_ELT (declv, i);
  tree init = TREE_VEC_ELT (initv, i);
  tree cond = TREE_VEC_ELT (condv, i);
  tree incr = TREE_VEC_ELT (incrv, i);
  tree iter = decl;
  location_t elocus = locus;

  if (init && EXPR_HAS_LOCATION (init))
    elocus = EXPR_LOCATION (init);

  /* Canonicalize the condition so ITER is its first operand, and verify
     the comparison is well-formed for ITER's class type.  */
  cond = cp_fully_fold (cond);
  switch (TREE_CODE (cond))
    {
    case GT_EXPR:
    case GE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case NE_EXPR:
      if (TREE_OPERAND (cond, 1) == iter)
	cond = build2 (swap_tree_comparison (TREE_CODE (cond)),
		       TREE_TYPE (cond), iter, TREE_OPERAND (cond, 0));
      if (TREE_OPERAND (cond, 0) != iter)
	cond = error_mark_node;
      else
	{
	  tree tem = build_x_binary_op (EXPR_LOCATION (cond),
					TREE_CODE (cond),
					iter, ERROR_MARK,
					TREE_OPERAND (cond, 1), ERROR_MARK,
					NULL, tf_warning_or_error);
	  if (error_operand_p (tem))
	    return true;
	}
      break;
    default:
      cond = error_mark_node;
      break;
    }
  if (cond == error_mark_node)
    {
      error_at (elocus, "invalid controlling predicate");
      return true;
    }
  /* DIFF = bound - iter; must reduce to an integer (random access
     iterator difference).  */
  diff = build_x_binary_op (elocus, MINUS_EXPR, TREE_OPERAND (cond, 1),
			    ERROR_MARK, iter, ERROR_MARK, NULL,
			    tf_warning_or_error);
  diff = cp_fully_fold (diff);
  if (error_operand_p (diff))
    return true;
  if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE)
    {
      error_at (elocus, "difference between %qE and %qD does not have integer type",
		TREE_OPERAND (cond, 1), iter);
      return true;
    }
  if (!c_omp_check_loop_iv_exprs (locus, orig_declv,
				  TREE_VEC_ELT (declv, i), NULL_TREE,
				  cond, cp_walk_subtrees))
    return true;

  /* Validate the increment and reduce it to an integral step in INCR;
     ITER_INCR, when non-NULL, is the expression that advances ITER.  */
  switch (TREE_CODE (incr))
    {
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      if (TREE_OPERAND (incr, 0) != iter)
	{
	  incr = error_mark_node;
	  break;
	}
      iter_incr = build_x_unary_op (EXPR_LOCATION (incr),
				    TREE_CODE (incr), iter,
				    tf_warning_or_error);
      if (error_operand_p (iter_incr))
	return true;
      else if (TREE_CODE (incr) == PREINCREMENT_EXPR
	       || TREE_CODE (incr) == POSTINCREMENT_EXPR)
	incr = integer_one_node;
      else
	incr = integer_minus_one_node;
      break;
    case MODIFY_EXPR:
      if (TREE_OPERAND (incr, 0) != iter)
	incr = error_mark_node;
      else if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
	       || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR)
	{
	  tree rhs = TREE_OPERAND (incr, 1);
	  if (TREE_OPERAND (rhs, 0) == iter)
	    {
	      /* iter = iter +/- step.  */
	      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 1)))
		  != INTEGER_TYPE)
		incr = error_mark_node;
	      else
		{
		  iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
						   iter, TREE_CODE (rhs),
						   TREE_OPERAND (rhs, 1),
						   tf_warning_or_error);
		  if (error_operand_p (iter_incr))
		    return true;
		  incr = TREE_OPERAND (rhs, 1);
		  incr = cp_convert (TREE_TYPE (diff), incr,
				     tf_warning_or_error);
		  if (TREE_CODE (rhs) == MINUS_EXPR)
		    {
		      incr = build1 (NEGATE_EXPR, TREE_TYPE (diff), incr);
		      incr = fold_simple (incr);
		    }
		  /* Only keep ITER_INCR if the step is a constant;
		     otherwise it will be rebuilt below from a saved
		     step variable.  */
		  if (TREE_CODE (incr) != INTEGER_CST
		      && (TREE_CODE (incr) != NOP_EXPR
			  || (TREE_CODE (TREE_OPERAND (incr, 0))
			      != INTEGER_CST)))
		    iter_incr = NULL;
		}
	    }
	  else if (TREE_OPERAND (rhs, 1) == iter)
	    {
	      /* iter = step + iter (only PLUS is commutative).  */
	      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) != INTEGER_TYPE
		  || TREE_CODE (rhs) != PLUS_EXPR)
		incr = error_mark_node;
	      else
		{
		  iter_incr = build_x_binary_op (EXPR_LOCATION (rhs),
						 PLUS_EXPR,
						 TREE_OPERAND (rhs, 0),
						 ERROR_MARK, iter,
						 ERROR_MARK, NULL,
						 tf_warning_or_error);
		  if (error_operand_p (iter_incr))
		    return true;
		  iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
						   iter, NOP_EXPR,
						   iter_incr,
						   tf_warning_or_error);
		  if (error_operand_p (iter_incr))
		    return true;
		  incr = TREE_OPERAND (rhs, 0);
		  iter_incr = NULL;
		}
	    }
	  else
	    incr = error_mark_node;
	}
      else
	incr = error_mark_node;
      break;
    default:
      incr = error_mark_node;
      break;
    }

  if (incr == error_mark_node)
    {
      error_at (elocus, "invalid increment expression");
      return true;
    }

  incr = cp_convert (TREE_TYPE (diff), incr, tf_warning_or_error);
  /* After this scan, C is ITER's lastprivate clause, or NULL.  */
  bool taskloop_iv_seen = false;
  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_DECL (c) == iter)
      {
	if (code == OMP_TASKLOOP)
	  {
	    taskloop_iv_seen = true;
	    OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c) = 1;
	  }
	break;
      }
    else if (code == OMP_TASKLOOP
	     && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
	     && OMP_CLAUSE_DECL (c) == iter)
      {
	taskloop_iv_seen = true;
	OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c) = 1;
      }

  /* DECL is the new integral iteration variable; LAST remembers the
     counter value ITER was last synced to.  */
  decl = create_temporary_var (TREE_TYPE (diff));
  pushdecl (decl);
  add_decl_expr (decl);
  last = create_temporary_var (TREE_TYPE (diff));
  pushdecl (last);
  add_decl_expr (last);
  if (c && iter_incr == NULL && TREE_CODE (incr) != INTEGER_CST
      && (!ordered || (i < collapse && collapse > 1)))
    {
      /* Non-constant step with a lastprivate clause: save the step so
	 it can be re-applied in the lastprivate epilogue.  */
      incr_var = create_temporary_var (TREE_TYPE (diff));
      pushdecl (incr_var);
      add_decl_expr (incr_var);
    }
  gcc_assert (stmts_are_full_exprs_p ());
  tree diffvar = NULL_TREE;
  if (code == OMP_TASKLOOP)
    {
      /* For taskloop the iterator and LAST must be firstprivate so the
	 task body can reconstruct ITER from the counter.  */
      if (!taskloop_iv_seen)
	{
	  tree ivc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (ivc) = iter;
	  cxx_omp_finish_clause (ivc, NULL);
	  OMP_CLAUSE_CHAIN (ivc) = clauses;
	  clauses = ivc;
	}
      tree lvc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
      OMP_CLAUSE_DECL (lvc) = last;
      OMP_CLAUSE_CHAIN (lvc) = clauses;
      clauses = lvc;
      diffvar = create_temporary_var (TREE_TYPE (diff));
      pushdecl (diffvar);
      add_decl_expr (diffvar);
    }

  /* Emit the rewritten initialization into *PRE_BODY: run the original
     init on ITER, then start the counter DECL at 0.  */
  orig_pre_body = *pre_body;
  *pre_body = push_stmt_list ();
  if (orig_pre_body)
    add_stmt (orig_pre_body);
  if (init != NULL)
    finish_expr_stmt (build_x_modify_expr (elocus,
					   iter, NOP_EXPR, init,
					   tf_warning_or_error));
  init = build_int_cst (TREE_TYPE (diff), 0);
  if (c && iter_incr == NULL
      && (!ordered || (i < collapse && collapse > 1)))
    {
      if (incr_var)
	{
	  finish_expr_stmt (build_x_modify_expr (elocus,
						 incr_var, NOP_EXPR,
						 incr, tf_warning_or_error));
	  incr = incr_var;
	}
      iter_incr = build_x_modify_expr (elocus,
				       iter, PLUS_EXPR, incr,
				       tf_warning_or_error);
    }
  if (c && ordered && i < collapse && collapse > 1)
    iter_incr = incr;
  finish_expr_stmt (build_x_modify_expr (elocus,
					 last, NOP_EXPR, init,
					 tf_warning_or_error));
  if (diffvar)
    {
      finish_expr_stmt (build_x_modify_expr (elocus,
					     diffvar, NOP_EXPR,
					     diff, tf_warning_or_error));
      diff = diffvar;
    }
  *pre_body = pop_stmt_list (*pre_body);

  cond = cp_build_binary_op (elocus,
			     TREE_CODE (cond), decl, diff,
			     tf_warning_or_error);
  incr = build_modify_expr (elocus, decl, NULL_TREE, PLUS_EXPR,
			    elocus, incr, NULL_TREE);

  /* Prepend to *BODY code that advances ITER by (DECL - LAST) and
     updates LAST, keeping ITER in sync with the integral counter.  */
  orig_body = *body;
  *body = push_stmt_list ();
  iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), decl, last);
  iter_init = build_x_modify_expr (elocus,
				   iter, PLUS_EXPR, iter_init,
				   tf_warning_or_error);
  if (iter_init != error_mark_node)
    iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
  finish_expr_stmt (iter_init);
  finish_expr_stmt (build_x_modify_expr (elocus,
					 last, NOP_EXPR, decl,
					 tf_warning_or_error));
  add_stmt (orig_body);
  *body = pop_stmt_list (*body);

  if (c)
    {
      /* Build the lastprivate epilogue that leaves ITER at its final
	 value after the loop.  */
      OMP_CLAUSE_LASTPRIVATE_STMT (c) = push_stmt_list ();
      if (!ordered)
	finish_expr_stmt (iter_incr);
      else
	{
	  iter_init = decl;
	  if (i < collapse && collapse > 1 && !error_operand_p (iter_incr))
	    iter_init = build2 (PLUS_EXPR, TREE_TYPE (diff),
				iter_init, iter_incr);
	  iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), iter_init, last);
	  iter_init = build_x_modify_expr (elocus,
					   iter, PLUS_EXPR, iter_init,
					   tf_warning_or_error);
	  if (iter_init != error_mark_node)
	    iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
	  finish_expr_stmt (iter_init);
	}
      OMP_CLAUSE_LASTPRIVATE_STMT (c)
	= pop_stmt_list (OMP_CLAUSE_LASTPRIVATE_STMT (c));
    }

  TREE_VEC_ELT (declv, i) = decl;
  TREE_VEC_ELT (initv, i) = init;
  TREE_VEC_ELT (condv, i) = cond;
  TREE_VEC_ELT (incrv, i) = incr;
  *lastp = last;

  return false;
}

/* Build and validate an OMP_FOR statement.  CLAUSES, BODY, COND, INCR
   are directly for their associated operands in the statement.  DECL
   and INIT are a combo; if DECL is NULL then INIT ought to be a
   MODIFY_EXPR, and the DECL should be extracted.  PRE_BODY are
   optional statements that need to go before the loop into its
   sk_omp scope.
*/

tree
finish_omp_for (location_t locus, enum tree_code code, tree declv,
		tree orig_declv, tree initv, tree condv, tree incrv,
		tree body, tree pre_body, vec<tree> *orig_inits, tree clauses)
{
  tree omp_for = NULL, orig_incr = NULL;
  tree decl = NULL, init, cond, incr;
  tree last = NULL_TREE;
  location_t elocus;
  int i;
  int collapse = 1;
  int ordered = 0;

  /* All per-loop vectors must describe the same number of nested
     loops.  */
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
  if (TREE_VEC_LENGTH (declv) > 1)
    {
      tree c;

      c = omp_find_clause (clauses, OMP_CLAUSE_TILE);
      if (c)
	collapse = list_length (OMP_CLAUSE_TILE_LIST (c));
      else
	{
	  c = omp_find_clause (clauses, OMP_CLAUSE_COLLAPSE);
	  if (c)
	    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
	  /* More associated loops than collapsed ones implies an
	     ordered(n) doacross loop nest.  */
	  if (collapse != TREE_VEC_LENGTH (declv))
	    ordered = TREE_VEC_LENGTH (declv);
	}
    }
  /* First pass: extract DECL from INIT where needed and diagnose
     missing pieces.  */
  for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
    {
      decl = TREE_VEC_ELT (declv, i);
      init = TREE_VEC_ELT (initv, i);
      cond = TREE_VEC_ELT (condv, i);
      incr = TREE_VEC_ELT (incrv, i);
      elocus = locus;

      if (decl == NULL)
	{
	  if (init != NULL)
	    switch (TREE_CODE (init))
	      {
	      case MODIFY_EXPR:
		decl = TREE_OPERAND (init, 0);
		init = TREE_OPERAND (init, 1);
		break;
	      case MODOP_EXPR:
		if (TREE_CODE (TREE_OPERAND (init, 1)) == NOP_EXPR)
		  {
		    decl = TREE_OPERAND (init, 0);
		    init = TREE_OPERAND (init, 2);
		  }
		break;
	      default:
		break;
	      }

	  if (decl == NULL)
	    {
	      error_at (locus,
			"expected iteration declaration or initialization");
	      return NULL;
	    }
	}

      if (init && EXPR_HAS_LOCATION (init))
	elocus = EXPR_LOCATION (init);

      if (cond == NULL)
	{
	  error_at (elocus, "missing controlling predicate");
	  return NULL;
	}

      if (incr == NULL)
	{
	  error_at (elocus, "missing increment expression");
	  return NULL;
	}

      TREE_VEC_ELT (declv, i) = decl;
      TREE_VEC_ELT (initv, i) = init;
    }

  if (orig_inits)
    {
      bool fail = false;
      tree orig_init;
      FOR_EACH_VEC_ELT (*orig_inits, i, orig_init)
	if (orig_init
	    && !c_omp_check_loop_iv_exprs (locus, declv,
					   TREE_VEC_ELT (declv, i), orig_init,
					   NULL_TREE, cp_walk_subtrees))
	  fail = true;
      if (fail)
	return NULL;
    }

  if (dependent_omp_for_p (declv, initv, condv, incrv))
    {
      /* Type-dependent loop: defer all checking until instantiation.  */
      tree stmt;

      stmt = make_node (code);

      for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
	{
	  /* This is really just a place-holder.  We'll be decomposing this
	     again and going through the cp_build_modify_expr path below when
	     we instantiate the thing.  */
	  TREE_VEC_ELT (initv, i)
	    = build2 (MODIFY_EXPR, void_type_node, TREE_VEC_ELT (declv, i),
		      TREE_VEC_ELT (initv, i));
	}

      TREE_TYPE (stmt) = void_type_node;
      OMP_FOR_INIT (stmt) = initv;
      OMP_FOR_COND (stmt) = condv;
      OMP_FOR_INCR (stmt) = incrv;
      OMP_FOR_BODY (stmt) = body;
      OMP_FOR_PRE_BODY (stmt) = pre_body;
      OMP_FOR_CLAUSES (stmt) = clauses;

      SET_EXPR_LOCATION (stmt, locus);
      return add_stmt (stmt);
    }

  if (!orig_declv)
    orig_declv = copy_node (declv);

  if (processing_template_decl)
    orig_incr = make_tree_vec (TREE_VEC_LENGTH (incrv));

  /* Second pass: build real modify-expressions, wrap side effects in
     cleanup points, and rewrite class-typed iterators.  Note: I is
     only advanced at the bottom; handle_omp_for_class_iterator's
     `continue' re-processes via the rewritten vector entries.  */
  for (i = 0; i < TREE_VEC_LENGTH (declv); )
    {
      decl = TREE_VEC_ELT (declv, i);
      init = TREE_VEC_ELT (initv, i);
      cond = TREE_VEC_ELT (condv, i);
      incr = TREE_VEC_ELT (incrv, i);
      if (orig_incr)
	TREE_VEC_ELT (orig_incr, i) = incr;
      elocus = locus;

      if (init && EXPR_HAS_LOCATION (init))
	elocus = EXPR_LOCATION (init);

      if (!DECL_P (decl))
	{
	  error_at (elocus, "expected iteration declaration or initialization");
	  return NULL;
	}

      if (incr && TREE_CODE (incr) == MODOP_EXPR)
	{
	  if (orig_incr)
	    TREE_VEC_ELT (orig_incr, i) = incr;
	  incr = cp_build_modify_expr (elocus, TREE_OPERAND (incr, 0),
				       TREE_CODE (TREE_OPERAND (incr, 1)),
				       TREE_OPERAND (incr, 2),
				       tf_warning_or_error);
	}

      if (CLASS_TYPE_P (TREE_TYPE (decl)))
	{
	  if (code == OMP_SIMD)
	    {
	      error_at (elocus, "%<#pragma omp simd%> used with class "
			"iteration variable %qE", decl);
	      return NULL;
	    }
	  if (handle_omp_for_class_iterator (i, locus, code, declv, orig_declv,
					     initv, condv, incrv, &body,
					     &pre_body, clauses, &last,
					     collapse, ordered))
	    return NULL;
	  continue;
	}

      if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
	  && !TYPE_PTR_P (TREE_TYPE (decl)))
	{
	  error_at (elocus, "invalid type for iteration variable %qE", decl);
	  return NULL;
	}

      if (!processing_template_decl)
	{
	  init = fold_build_cleanup_point_expr (TREE_TYPE (init), init);
	  init = cp_build_modify_expr (elocus, decl, NOP_EXPR, init,
				       tf_warning_or_error);
	}
      else
	init = build2 (MODIFY_EXPR, void_type_node, decl, init);
      if (cond
	  && TREE_SIDE_EFFECTS (cond)
	  && COMPARISON_CLASS_P (cond)
	  && !processing_template_decl)
	{
	  /* Wrap side-effecting comparison operands (other than the
	     iteration variable itself) in cleanup points.  */
	  tree t = TREE_OPERAND (cond, 0);
	  if (TREE_SIDE_EFFECTS (t)
	      && t != decl
	      && (TREE_CODE (t) != NOP_EXPR
		  || TREE_OPERAND (t, 0) != decl))
	    TREE_OPERAND (cond, 0)
	      = fold_build_cleanup_point_expr (TREE_TYPE (t), t);

	  t = TREE_OPERAND (cond, 1);
	  if (TREE_SIDE_EFFECTS (t)
	      && t != decl
	      && (TREE_CODE (t) != NOP_EXPR
		  || TREE_OPERAND (t, 0) != decl))
	    TREE_OPERAND (cond, 1)
	      = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
	}
      if (decl == error_mark_node || init == error_mark_node)
	return NULL;

      TREE_VEC_ELT (declv, i) = decl;
      TREE_VEC_ELT (initv, i) = init;
      TREE_VEC_ELT (condv, i) = cond;
      TREE_VEC_ELT (incrv, i) = incr;
      i++;
    }

  if (IS_EMPTY_STMT (pre_body))
    pre_body = NULL;

  omp_for = c_finish_omp_for (locus, code, declv, orig_declv, initv, condv,
			      incrv, body, pre_body);

  /* Check for iterators appearing in lb, b or incr expressions.  */
  if (omp_for && !c_omp_check_loop_iv (omp_for, orig_declv, cp_walk_subtrees))
    omp_for = NULL_TREE;

  if (omp_for == NULL)
    {
      return NULL;
    }

  add_stmt (omp_for);

  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)); i++)
    {
      decl = TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), i), 0);
      incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i);

      if (TREE_CODE (incr) != MODIFY_EXPR)
	continue;

      if (TREE_SIDE_EFFECTS (TREE_OPERAND (incr, 1))
	  && BINARY_CLASS_P (TREE_OPERAND (incr, 1))
	  && !processing_template_decl)
	{
	  /* Same cleanup-point treatment for the increment's operands.  */
	  tree t = TREE_OPERAND (TREE_OPERAND (incr, 1), 0);
	  if (TREE_SIDE_EFFECTS (t)
	      && t != decl
	      && (TREE_CODE (t) != NOP_EXPR
		  || TREE_OPERAND (t, 0) != decl))
	    TREE_OPERAND (TREE_OPERAND (incr, 1), 0)
	      = fold_build_cleanup_point_expr (TREE_TYPE (t), t);

	  t = TREE_OPERAND (TREE_OPERAND (incr, 1), 1);
	  if (TREE_SIDE_EFFECTS (t)
	      && t != decl
	      && (TREE_CODE (t) != NOP_EXPR
		  || TREE_OPERAND (t, 0) != decl))
	    TREE_OPERAND (TREE_OPERAND (incr, 1), 1)
	      = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
	}

      /* In a template, keep the unprocessed increment for re-parsing
	 at instantiation time.  */
      if (orig_incr)
	TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i) = TREE_VEC_ELT (orig_incr, i);
    }
  OMP_FOR_CLAUSES (omp_for) = clauses;

  /* For simd loops with non-static data member iterators, we could have added
     OMP_CLAUSE_LINEAR clauses without OMP_CLAUSE_LINEAR_STEP.  As we know the
     step at this point, fill it in.  */
  if (code == OMP_SIMD && !processing_template_decl
      && TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)) == 1)
    for (tree c = omp_find_clause (clauses, OMP_CLAUSE_LINEAR); c;
	 c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_LINEAR))
      if (OMP_CLAUSE_LINEAR_STEP (c) == NULL_TREE)
	{
	  decl = TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), 0), 0);
	  gcc_assert (decl == OMP_CLAUSE_DECL (c));
	  incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), 0);
	  tree step, stept;
	  switch (TREE_CODE (incr))
	    {
	    case PREINCREMENT_EXPR:
	    case POSTINCREMENT_EXPR:
	      /* c_omp_for_incr_canonicalize_ptr() should have been
		 called to massage things appropriately.  */
	      gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	      OMP_CLAUSE_LINEAR_STEP (c) = build_int_cst (TREE_TYPE (decl), 1);
	      break;
	    case PREDECREMENT_EXPR:
	    case POSTDECREMENT_EXPR:
	      /* c_omp_for_incr_canonicalize_ptr() should have been
		 called to massage things appropriately.  */
	      gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	      OMP_CLAUSE_LINEAR_STEP (c)
		= build_int_cst (TREE_TYPE (decl), -1);
	      break;
	    case MODIFY_EXPR:
	      gcc_assert (TREE_OPERAND (incr, 0) == decl);
	      incr = TREE_OPERAND (incr, 1);
	      switch (TREE_CODE (incr))
		{
		case PLUS_EXPR:
		  if (TREE_OPERAND (incr, 1) == decl)
		    step = TREE_OPERAND (incr, 0);
		  else
		    step = TREE_OPERAND (incr, 1);
		  break;
		case MINUS_EXPR:
		case POINTER_PLUS_EXPR:
		  gcc_assert (TREE_OPERAND (incr, 0) == decl);
		  step = TREE_OPERAND (incr, 1);
		  break;
		default:
		  gcc_unreachable ();
		}
	      stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (incr) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

  return omp_for;
}

/* Finish a #pragma omp atomic construct.  CODE is the atomic kind
   (OMP_ATOMIC, OMP_ATOMIC_READ, OMP_ATOMIC_CAPTURE_{OLD,NEW}), OPCODE
   the update operation; V/LHS1/RHS1 are the optional capture operands
   and SEQ_CST the seq_cst clause.  The statement is appended to the
   current statement list.  */

void
finish_omp_atomic (enum tree_code code, enum tree_code opcode, tree lhs,
		   tree rhs, tree v, tree lhs1, tree rhs1, bool seq_cst)
{
  tree orig_lhs;
  tree orig_rhs;
  tree orig_v;
  tree orig_lhs1;
  tree orig_rhs1;
  bool dependent_p;
  tree stmt;

  /* Keep the unprocessed operands; in a template the statement is
     rebuilt from these below.  */
  orig_lhs = lhs;
  orig_rhs = rhs;
  orig_v = v;
  orig_lhs1 = lhs1;
  orig_rhs1 = rhs1;
  dependent_p = false;
  stmt = NULL_TREE;

  /* Even in a template, we can detect invalid uses of the atomic
     pragma if neither LHS nor RHS is type-dependent.
*/
  if (processing_template_decl)
    {
      dependent_p = (type_dependent_expression_p (lhs)
		     || (rhs && type_dependent_expression_p (rhs))
		     || (v && type_dependent_expression_p (v))
		     || (lhs1 && type_dependent_expression_p (lhs1))
		     || (rhs1 && type_dependent_expression_p (rhs1)));
      if (!dependent_p)
	{
	  lhs = build_non_dependent_expr (lhs);
	  if (rhs)
	    rhs = build_non_dependent_expr (rhs);
	  if (v)
	    v = build_non_dependent_expr (v);
	  if (lhs1)
	    lhs1 = build_non_dependent_expr (lhs1);
	  if (rhs1)
	    rhs1 = build_non_dependent_expr (rhs1);
	}
    }
  if (!dependent_p)
    {
      bool swapped = false;
      /* If the update reads LHS via RHS, put the other operand first;
	 remember whether that changed operand order for a
	 non-commutative OPCODE.  */
      if (rhs1 && cp_tree_equal (lhs, rhs))
	{
	  std::swap (rhs, rhs1);
	  swapped = !commutative_tree_code (opcode);
	}
      /* All memory references in the construct must name the same
	 location.  */
      if (rhs1 && !cp_tree_equal (lhs, rhs1))
	{
	  if (code == OMP_ATOMIC)
	    error ("%<#pragma omp atomic update%> uses two different "
		   "expressions for memory");
	  else
	    error ("%<#pragma omp atomic capture%> uses two different "
		   "expressions for memory");
	  return;
	}
      if (lhs1 && !cp_tree_equal (lhs, lhs1))
	{
	  if (code == OMP_ATOMIC)
	    error ("%<#pragma omp atomic update%> uses two different "
		   "expressions for memory");
	  else
	    error ("%<#pragma omp atomic capture%> uses two different "
		   "expressions for memory");
	  return;
	}
      stmt = c_finish_omp_atomic (input_location, code, opcode, lhs, rhs,
				  v, lhs1, rhs1, swapped, seq_cst,
				  processing_template_decl != 0);
      if (stmt == error_mark_node)
	return;
    }
  if (processing_template_decl)
    {
      /* Rebuild a minimal (non-type-checked) form from the original
	 operands for later instantiation.  */
      if (code == OMP_ATOMIC_READ)
	{
	  stmt = build_min_nt_loc (EXPR_LOCATION (orig_lhs),
				   OMP_ATOMIC_READ, orig_lhs);
	  OMP_ATOMIC_SEQ_CST (stmt) = seq_cst;
	  stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
	}
      else
	{
	  if (opcode == NOP_EXPR)
	    stmt = build2 (MODIFY_EXPR, void_type_node, orig_lhs, orig_rhs);
	  else
	    stmt = build2 (opcode, void_type_node, orig_lhs, orig_rhs);
	  if (orig_rhs1)
	    stmt = build_min_nt_loc (EXPR_LOCATION (orig_rhs1),
				     COMPOUND_EXPR, orig_rhs1, stmt);
	  if (code != OMP_ATOMIC)
	    {
	      stmt = build_min_nt_loc (EXPR_LOCATION (orig_lhs1),
				       code, orig_lhs1, stmt);
	      OMP_ATOMIC_SEQ_CST (stmt) = seq_cst;
	      stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
	    }
	}
      stmt = build2 (OMP_ATOMIC, void_type_node, integer_zero_node, stmt);
      OMP_ATOMIC_SEQ_CST (stmt) = seq_cst;
    }
  finish_expr_stmt (stmt);
}

/* Finish a #pragma omp barrier: emit a call to GOMP_barrier.  */

void
finish_omp_barrier (void)
{
  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
  vec<tree, va_gc> *vec = make_tree_vector ();
  tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
  release_tree_vector (vec);
  finish_expr_stmt (stmt);
}

/* Finish a #pragma omp flush: emit a __sync_synchronize call.  */

void
finish_omp_flush (void)
{
  tree fn = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
  vec<tree, va_gc> *vec = make_tree_vector ();
  tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
  release_tree_vector (vec);
  finish_expr_stmt (stmt);
}

/* Finish a #pragma omp taskwait: emit a call to GOMP_taskwait.  */

void
finish_omp_taskwait (void)
{
  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
  vec<tree, va_gc> *vec = make_tree_vector ();
  tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
  release_tree_vector (vec);
  finish_expr_stmt (stmt);
}

/* Finish a #pragma omp taskyield: emit a call to GOMP_taskyield.  */

void
finish_omp_taskyield (void)
{
  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
  vec<tree, va_gc> *vec = make_tree_vector ();
  tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
  release_tree_vector (vec);
  finish_expr_stmt (stmt);
}

/* Finish a #pragma omp cancel, with CLAUSES naming the enclosing
   construct kind (and an optional if clause): emit a call to
   GOMP_cancel.  */

void
finish_omp_cancel (tree clauses)
{
  tree fn = builtin_decl_explicit
    (BUILT_IN_GOMP_CANCEL);
  /* Encode the named construct as the bitmask the runtime expects.  */
  int mask = 0;
  if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL))
    mask = 1;
  else if (omp_find_clause (clauses, OMP_CLAUSE_FOR))
    mask = 2;
  else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS))
    mask = 4;
  else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP))
    mask = 8;
  else
    {
      error ("%<#pragma omp cancel%> must specify one of "
	     "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
      return;
    }
  vec<tree, va_gc> *vec = make_tree_vector ();
  /* An if clause becomes the second argument: EXPR != 0, defaulting
     to true when absent.  */
  tree ifc = omp_find_clause (clauses, OMP_CLAUSE_IF);
  if (ifc != NULL_TREE)
    {
      tree type = TREE_TYPE (OMP_CLAUSE_IF_EXPR (ifc));
      ifc = fold_build2_loc (OMP_CLAUSE_LOCATION (ifc), NE_EXPR,
			     boolean_type_node, OMP_CLAUSE_IF_EXPR (ifc),
			     build_zero_cst (type));
    }
  else
    ifc = boolean_true_node;
  vec->quick_push (build_int_cst (integer_type_node, mask));
  vec->quick_push (ifc);
  tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
  release_tree_vector (vec);
  finish_expr_stmt (stmt);
}

/* Finish a #pragma omp cancellation point, with CLAUSES naming the
   enclosing construct kind: emit a call to
   GOMP_cancellation_point.  */

void
finish_omp_cancellation_point (tree clauses)
{
  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
  /* Same construct bitmask encoding as finish_omp_cancel.  */
  int mask = 0;
  if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL))
    mask = 1;
  else if (omp_find_clause (clauses, OMP_CLAUSE_FOR))
    mask = 2;
  else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS))
    mask = 4;
  else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP))
    mask = 8;
  else
    {
      error ("%<#pragma omp cancellation point%> must specify one of "
	     "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
      return;
    }
  vec<tree, va_gc> *vec
    = make_tree_vector_single (build_int_cst (integer_type_node, mask));
  tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
  release_tree_vector (vec);
  finish_expr_stmt (stmt);
}

/* Begin a __transaction_atomic or __transaction_relaxed statement.
   If PCOMPOUND is non-null, this is for a function-transaction-block, and we
   should create an extra compound stmt.  */

tree
begin_transaction_stmt (location_t loc, tree *pcompound, int flags)
{
  tree r;

  if (pcompound)
    *pcompound = begin_compound_stmt (0);

  r = build_stmt (loc, TRANSACTION_EXPR, NULL_TREE);

  /* Only add the statement to the function if support enabled.  */
  if (flag_tm)
    add_stmt (r);
  else
    error_at (loc, ((flags & TM_STMT_ATTR_RELAXED) != 0
		    ? G_("%<__transaction_relaxed%> without "
			 "transactional memory support enabled")
		    : G_("%<__transaction_atomic%> without "
			 "transactional memory support enabled")));

  TRANSACTION_EXPR_BODY (r) = push_stmt_list ();
  TREE_SIDE_EFFECTS (r) = 1;
  return r;
}

/* End a __transaction_atomic or __transaction_relaxed statement.
   If COMPOUND_STMT is non-null, this is for a function-transaction-block,
   and we should end the compound.  If NOEX is non-NULL, we wrap the body in
   a MUST_NOT_THROW_EXPR with NOEX as condition.  */

void
finish_transaction_stmt (tree stmt, tree compound_stmt, int flags, tree noex)
{
  TRANSACTION_EXPR_BODY (stmt) = pop_stmt_list (TRANSACTION_EXPR_BODY (stmt));
  TRANSACTION_EXPR_OUTER (stmt) = (flags & TM_STMT_ATTR_OUTER) != 0;
  TRANSACTION_EXPR_RELAXED (stmt) = (flags & TM_STMT_ATTR_RELAXED) != 0;
  TRANSACTION_EXPR_IS_STMT (stmt) = 1;

  /* noexcept specifications are not allowed for function transactions.
*/
  gcc_assert (!(noex && compound_stmt));
  if (noex)
    {
      /* Wrap the collected body so an exception escaping it (when NOEX
	 evaluates true) terminates, per the noexcept semantics.  */
      tree body = build_must_not_throw_expr (TRANSACTION_EXPR_BODY (stmt),
					     noex);
      protected_set_expr_location
	(body, EXPR_LOCATION (TRANSACTION_EXPR_BODY (stmt)));
      TREE_SIDE_EFFECTS (body) = 1;
      TRANSACTION_EXPR_BODY (stmt) = body;
    }

  if (compound_stmt)
    finish_compound_stmt (compound_stmt);
}

/* Build a __transaction_atomic or __transaction_relaxed expression.  If
   NOEX is non-NULL, we wrap the body in a MUST_NOT_THROW_EXPR with NOEX as
   condition.  */

tree
build_transaction_expr (location_t loc, tree expr, int flags, tree noex)
{
  tree ret;
  if (noex)
    {
      expr = build_must_not_throw_expr (expr, noex);
      protected_set_expr_location (expr, loc);
      TREE_SIDE_EFFECTS (expr) = 1;
    }
  ret = build1 (TRANSACTION_EXPR, TREE_TYPE (expr), expr);
  if (flags & TM_STMT_ATTR_RELAXED)
    TRANSACTION_EXPR_RELAXED (ret) = 1;
  TREE_SIDE_EFFECTS (ret) = 1;
  SET_EXPR_LOCATION (ret, loc);
  return ret;
}

/* One-time initialization hook for this file; currently there is no
   state to set up.  */

void
init_cp_semantics (void)
{
}

/* Build a STATIC_ASSERT for a static assertion with the condition
   CONDITION and the message text MESSAGE.  LOCATION is the location
   of the static assertion in the source code.  When MEMBER_P, this
   static assertion is a member of a class.  */
void
finish_static_assert (tree condition, tree message, location_t location,
		      bool member_p)
{
  tsubst_flags_t complain = tf_warning_or_error;

  /* Bail out silently on earlier parse errors in either operand.  */
  if (message == NULL_TREE
      || message == error_mark_node
      || condition == NULL_TREE
      || condition == error_mark_node)
    return;

  if (check_for_bare_parameter_packs (condition))
    condition = error_mark_node;

  if (instantiation_dependent_expression_p (condition))
    {
      /* We're in a template; build a STATIC_ASSERT and put it in
	 the right place.  */
      tree assertion;

      assertion = make_node (STATIC_ASSERT);
      STATIC_ASSERT_CONDITION (assertion) = condition;
      STATIC_ASSERT_MESSAGE (assertion) = message;
      STATIC_ASSERT_SOURCE_LOCATION (assertion) = location;

      if (member_p)
	maybe_add_class_template_decl_list (current_class_type,
					    assertion,
					    /*friend_p=*/0);
      else
	add_stmt (assertion);

      return;
    }

  /* Fold the expression and convert it to a boolean value.  */
  condition = perform_implicit_conversion_flags (boolean_type_node, condition,
						 complain, LOOKUP_NORMAL);
  condition = fold_non_dependent_expr (condition);

  if (TREE_CODE (condition) == INTEGER_CST && !integer_zerop (condition))
    /* Do nothing; the condition is satisfied.  */
    ;
  else
    {
      location_t saved_loc = input_location;

      /* Point diagnostics at the assertion itself.  */
      input_location = location;
      if (TREE_CODE (condition) == INTEGER_CST
	  && integer_zerop (condition))
	{
	  /* Compute the character count of MESSAGE (excluding the
	     terminator) from its byte length and element size, so that
	     an empty message gets the short diagnostic.  */
	  int sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT
				     (TREE_TYPE (TREE_TYPE (message))));
	  int len = TREE_STRING_LENGTH (message) / sz - 1;
	  /* Report the error. */
	  if (len == 0)
	    error ("static assertion failed");
	  else
	    error ("static assertion failed: %s",
		   TREE_STRING_POINTER (message));
	}
      else if (condition && condition != error_mark_node)
	{
	  error ("non-constant condition for static assertion");
	  if (require_rvalue_constant_expression (condition))
	    cxx_constant_value (condition);
	}
      input_location = saved_loc;
    }
}

/* Implements the C++0x decltype keyword. Returns the type of EXPR,
   suitable for use as a type-specifier.

   ID_EXPRESSION_OR_MEMBER_ACCESS_P is true when EXPR was parsed as an
   id-expression or a class member access, FALSE when it was parsed as
   a full expression.
*/

tree
finish_decltype_type (tree expr, bool id_expression_or_member_access_p,
		      tsubst_flags_t complain)
{
  tree type = NULL_TREE;

  if (!expr || error_operand_p (expr))
    return error_mark_node;

  /* decltype takes an expression, not a type; a BIT_NOT_EXPR of a type
     is how a would-be destructor name of a non-class parses.  */
  if (TYPE_P (expr)
      || TREE_CODE (expr) == TYPE_DECL
      || (TREE_CODE (expr) == BIT_NOT_EXPR
	  && TYPE_P (TREE_OPERAND (expr, 0))))
    {
      if (complain & tf_error)
	error ("argument to decltype must be an expression");
      return error_mark_node;
    }

  /* Depending on the resolution of DR 1172, we may later need to distinguish
     instantiation-dependent but not type-dependent expressions so that, say,
     A<decltype(sizeof(T))>::U doesn't require 'typename'.  */
  if (instantiation_dependent_uneval_expression_p (expr))
    {
      /* Defer: wrap the expression in a DECLTYPE_TYPE to be resolved at
	 instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (type)
	= id_expression_or_member_access_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);

      return type;
    }

  /* The type denoted by decltype(e) is defined as follows:  */

  expr = resolve_nondeduced_context (expr, complain);

  if (invalid_nonstatic_memfn_p (input_location, expr, complain))
    return error_mark_node;

  if (type_unknown_p (expr))
    {
      if (complain & tf_error)
	error ("decltype cannot resolve address of overloaded function");
      return error_mark_node;
    }

  /* To get the size of a static data member declared as an array of
     unknown bound, we need to instantiate it.  */
  if (VAR_P (expr)
      && VAR_HAD_UNKNOWN_BOUND (expr)
      && DECL_TEMPLATE_INSTANTIATION (expr))
    instantiate_decl (expr, /*defer_ok*/true, /*expl_inst_mem*/false);

  if (id_expression_or_member_access_p)
    {
      /* If e is an id-expression or a class member access (5.2.5
	 [expr.ref]), decltype(e) is defined as the type of the entity
	 named by e.  If there is no such entity, or e names a set of
	 overloaded functions, the program is ill-formed.  */
      if (identifier_p (expr))
	expr = lookup_name (expr);

      if (INDIRECT_REF_P (expr))
	/* This can happen when the expression is, e.g., "a.b".  Just
	   look at the underlying operand.  */
	expr = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == OFFSET_REF
	  || TREE_CODE (expr) == MEMBER_REF
	  || TREE_CODE (expr) == SCOPE_REF)
	/* We're only interested in the field itself.  If it is a
	   BASELINK, we will need to see through it in the next
	   step.  */
	expr = TREE_OPERAND (expr, 1);

      if (BASELINK_P (expr))
	/* See through BASELINK nodes to the underlying function.  */
	expr = BASELINK_FUNCTIONS (expr);

      /* decltype of a decomposition name drops references in the tuple case
	 (unlike decltype of a normal variable) and keeps cv-qualifiers from
	 the containing object in the other cases (unlike decltype of a member
	 access expression).  */
      if (DECL_DECOMPOSITION_P (expr))
	{
	  if (DECL_HAS_VALUE_EXPR_P (expr))
	    /* Expr is an array or struct subobject proxy, handle
	       bit-fields properly.  */
	    return unlowered_expr_type (expr);
	  else
	    /* Expr is a reference variable for the tuple case.  */
	    return lookup_decomp_type (expr);
	}

      switch (TREE_CODE (expr))
	{
	case FIELD_DECL:
	  if (DECL_BIT_FIELD_TYPE (expr))
	    {
	      /* For a bit-field, use the declared type, not the lowered
		 storage type.  */
	      type = DECL_BIT_FIELD_TYPE (expr);
	      break;
	    }
	  /* Fall through for fields that aren't bitfields.  */
	  gcc_fallthrough ();

	case FUNCTION_DECL:
	case VAR_DECL:
	case CONST_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case TEMPLATE_PARM_INDEX:
	  expr = mark_type_use (expr);
	  type = TREE_TYPE (expr);
	  break;

	case ERROR_MARK:
	  type = error_mark_node;
	  break;

	case COMPONENT_REF:
	case COMPOUND_EXPR:
	  mark_type_use (expr);
	  type = is_bitfield_expr_with_lowered_type (expr);
	  if (!type)
	    type = TREE_TYPE (TREE_OPERAND (expr, 1));
	  break;

	case BIT_FIELD_REF:
	  gcc_unreachable ();

	case INTEGER_CST:
	case PTRMEM_CST:
	  /* We can get here when the id-expression refers to an
	     enumerator or non-type template parameter.  */
	  type = TREE_TYPE (expr);
	  break;

	default:
	  /* Handle instantiated template non-type arguments.  */
	  type = TREE_TYPE (expr);
	  break;
	}
    }
  else
    {
      /* Within a lambda-expression:

	 Every occurrence of decltype((x)) where x is a possibly
	 parenthesized id-expression that names an entity of
	 automatic storage duration is treated as if x were
	 transformed into an access to a corresponding data member
	 of the closure type that would have been declared if x
	 were a use of the denoted entity.  */
      if (outer_automatic_var_p (expr)
	  && current_function_decl
	  && LAMBDA_FUNCTION_P (current_function_decl))
	type = capture_decltype (expr);
      else if (error_operand_p (expr))
	type = error_mark_node;
      else if (expr == current_class_ptr)
	/* If the expression is just "this", we want the
	   cv-unqualified pointer for the "this" type.  */
	type = TYPE_MAIN_VARIANT (TREE_TYPE (expr));
      else
	{
	  /* Otherwise, where T is the type of e, if e is an lvalue,
	     decltype(e) is defined as T&; if an xvalue, T&&; otherwise, T.  */
	  cp_lvalue_kind clk = lvalue_kind (expr);
	  type = unlowered_expr_type (expr);
	  gcc_assert (TREE_CODE (type) != REFERENCE_TYPE);

	  /* For vector types, pick a non-opaque variant.  */
	  if (VECTOR_TYPE_P (type))
	    type = strip_typedefs (type);

	  /* Add the reference for lvalues/xvalues; class prvalues
	     (clk_class) stay non-reference.  */
	  if (clk != clk_none && !(clk & clk_class))
	    type = cp_build_reference_type (type, (clk & clk_rvalueref));
	}
    }

  return type;
}

/* Called from trait_expr_value to evaluate either __has_nothrow_assign or
   __has_nothrow_copy, depending on assign_p.  Returns true iff all
   the copy {ctor,assign} fns are nothrow.  */

static bool
classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p)
{
  tree fns = NULL_TREE;

  if (assign_p || TYPE_HAS_COPY_CTOR (type))
    fns = get_class_binding (type, assign_p ? assign_op_identifier
			     : ctor_identifier);

  /* Require at least one copy fn to have been seen; a single throwing
     one makes the whole trait false.  */
  bool saw_copy = false;
  for (ovl_iterator iter (fns); iter; ++iter)
    {
      tree fn = *iter;

      if (copy_fn_p (fn) > 0)
	{
	  saw_copy = true;
	  /* Resolve any deferred noexcept-specification first.  */
	  maybe_instantiate_noexcept (fn);
	  if (!TYPE_NOTHROW_P (TREE_TYPE (fn)))
	    return false;
	}
    }

  return saw_copy;
}

/* Actually evaluates the trait.  */

static bool
trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
{
  enum tree_code type_code1;
  tree t;

  type_code1 = TREE_CODE (type1);

  switch (kind)
    {
    case CPTK_HAS_NOTHROW_ASSIGN:
      type1 = strip_array_types (type1);
      return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
	      && (trait_expr_value (CPTK_HAS_TRIVIAL_ASSIGN, type1, type2)
		  || (CLASS_TYPE_P (type1)
		      && classtype_has_nothrow_assign_or_copy_p (type1,
								 true))));

    case CPTK_HAS_TRIVIAL_ASSIGN:
      /* ??? The standard seems to be missing the "or array of such a class
	 type" wording for this trait.
*/
      type1 = strip_array_types (type1);
      return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
	      && (trivial_type_p (type1)
		  || (CLASS_TYPE_P (type1)
		      && TYPE_HAS_TRIVIAL_COPY_ASSIGN (type1))));

    case CPTK_HAS_NOTHROW_CONSTRUCTOR:
      type1 = strip_array_types (type1);
      /* Nothrow if trivially default-constructible, or if the located
	 default ctor has a non-throwing exception specification (resolved
	 first in case it was deferred).  */
      return (trait_expr_value (CPTK_HAS_TRIVIAL_CONSTRUCTOR, type1, type2)
	      || (CLASS_TYPE_P (type1)
		  && (t = locate_ctor (type1))
		  && (maybe_instantiate_noexcept (t),
		      TYPE_NOTHROW_P (TREE_TYPE (t)))));

    case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
      type1 = strip_array_types (type1);
      return (trivial_type_p (type1)
	      || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DFLT (type1)));

    case CPTK_HAS_NOTHROW_COPY:
      type1 = strip_array_types (type1);
      return (trait_expr_value (CPTK_HAS_TRIVIAL_COPY, type1, type2)
	      || (CLASS_TYPE_P (type1)
		  && classtype_has_nothrow_assign_or_copy_p (type1, false)));

    case CPTK_HAS_TRIVIAL_COPY:
      /* ??? The standard seems to be missing the "or array of such a class
	 type" wording for this trait.  */
      type1 = strip_array_types (type1);
      return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
	      || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_COPY_CTOR (type1)));

    case CPTK_HAS_TRIVIAL_DESTRUCTOR:
      type1 = strip_array_types (type1);
      return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
	      || (CLASS_TYPE_P (type1)
		  && TYPE_HAS_TRIVIAL_DESTRUCTOR (type1)));

    case CPTK_HAS_VIRTUAL_DESTRUCTOR:
      return type_has_virtual_destructor (type1);

    case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
      return type_has_unique_obj_representations (type1);

    case CPTK_IS_ABSTRACT:
      return ABSTRACT_CLASS_TYPE_P (type1);

    case CPTK_IS_AGGREGATE:
      return CP_AGGREGATE_TYPE_P (type1);

    case CPTK_IS_BASE_OF:
      /* A class is considered a base of itself, ignoring cv-quals.  */
      return (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
	      && (same_type_ignoring_top_level_qualifiers_p (type1, type2)
		  || DERIVED_FROM_P (type1, type2)));

    case CPTK_IS_CLASS:
      return NON_UNION_CLASS_TYPE_P (type1);

    case CPTK_IS_EMPTY:
      return NON_UNION_CLASS_TYPE_P (type1) && CLASSTYPE_EMPTY_P (type1);

    case CPTK_IS_ENUM:
      return type_code1 == ENUMERAL_TYPE;

    case CPTK_IS_FINAL:
      return CLASS_TYPE_P (type1) && CLASSTYPE_FINAL (type1);

    case CPTK_IS_LITERAL_TYPE:
      return literal_type_p (type1);

    case CPTK_IS_POD:
      return pod_type_p (type1);

    case CPTK_IS_POLYMORPHIC:
      return CLASS_TYPE_P (type1) && TYPE_POLYMORPHIC_P (type1);

    case CPTK_IS_SAME_AS:
      return same_type_p (type1, type2);

    case CPTK_IS_STD_LAYOUT:
      return std_layout_type_p (type1);

    case CPTK_IS_TRIVIAL:
      return trivial_type_p (type1);

    case CPTK_IS_TRIVIALLY_ASSIGNABLE:
      return is_trivially_xible (MODIFY_EXPR, type1, type2);

    case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
      return is_trivially_xible (INIT_EXPR, type1, type2);

    case CPTK_IS_TRIVIALLY_COPYABLE:
      return trivially_copyable_p (type1);

    case CPTK_IS_UNION:
      return type_code1 == UNION_TYPE;

    case CPTK_IS_ASSIGNABLE:
      return is_xible (MODIFY_EXPR, type1, type2);

    case CPTK_IS_CONSTRUCTIBLE:
      return is_xible (INIT_EXPR, type1, type2);

    default:
      gcc_unreachable ();
      return false;
    }
}

/* If TYPE is an array of unknown bound, or (possibly cv-qualified)
   void, or a complete type, returns true, otherwise false.  */

static bool
check_trait_type (tree type)
{
  if (type == NULL_TREE)
    return true;

  /* A TREE_LIST holds a sequence of types; all of them must pass.  */
  if (TREE_CODE (type) == TREE_LIST)
    return (check_trait_type (TREE_VALUE (type))
	    && check_trait_type (TREE_CHAIN (type)));

  /* An array of unknown bound is acceptable if its element type is
     complete.  */
  if (TREE_CODE (type) == ARRAY_TYPE && !TYPE_DOMAIN (type)
      && COMPLETE_TYPE_P (TREE_TYPE (type)))
    return true;

  if (VOID_TYPE_P (type))
    return true;

  /* Otherwise the (element) type must be completable; this diagnoses
     incompleteness as a side effect.  */
  return !!complete_type_or_else (strip_array_types (type), NULL_TREE);
}

/* Process a trait expression.
*/

tree
finish_trait_expr (cp_trait_kind kind, tree type1, tree type2)
{
  if (type1 == error_mark_node
      || type2 == error_mark_node)
    return error_mark_node;

  if (processing_template_decl)
    {
      /* Defer evaluation: build a TRAIT_EXPR to be substituted later.  */
      tree trait_expr = make_node (TRAIT_EXPR);
      TREE_TYPE (trait_expr) = boolean_type_node;
      TRAIT_EXPR_TYPE1 (trait_expr) = type1;
      TRAIT_EXPR_TYPE2 (trait_expr) = type2;
      TRAIT_EXPR_KIND (trait_expr) = kind;
      return trait_expr;
    }

  /* Validate the operand types required by each trait before
     evaluating it.  */
  switch (kind)
    {
    case CPTK_HAS_NOTHROW_ASSIGN:
    case CPTK_HAS_TRIVIAL_ASSIGN:
    case CPTK_HAS_NOTHROW_CONSTRUCTOR:
    case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
    case CPTK_HAS_NOTHROW_COPY:
    case CPTK_HAS_TRIVIAL_COPY:
    case CPTK_HAS_TRIVIAL_DESTRUCTOR:
    case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
    case CPTK_HAS_VIRTUAL_DESTRUCTOR:
    case CPTK_IS_ABSTRACT:
    case CPTK_IS_AGGREGATE:
    case CPTK_IS_EMPTY:
    case CPTK_IS_FINAL:
    case CPTK_IS_LITERAL_TYPE:
    case CPTK_IS_POD:
    case CPTK_IS_POLYMORPHIC:
    case CPTK_IS_STD_LAYOUT:
    case CPTK_IS_TRIVIAL:
    case CPTK_IS_TRIVIALLY_COPYABLE:
      if (!check_trait_type (type1))
	return error_mark_node;
      break;

    case CPTK_IS_ASSIGNABLE:
    case CPTK_IS_CONSTRUCTIBLE:
      break;

    case CPTK_IS_TRIVIALLY_ASSIGNABLE:
    case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
      if (!check_trait_type (type1)
	  || !check_trait_type (type2))
	return error_mark_node;
      break;

    case CPTK_IS_BASE_OF:
      if (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
	  && !same_type_ignoring_top_level_qualifiers_p (type1, type2)
	  && !complete_type_or_else (type2, NULL_TREE))
	/* We already issued an error.  */
	return error_mark_node;
      break;

    case CPTK_IS_CLASS:
    case CPTK_IS_ENUM:
    case CPTK_IS_UNION:
    case CPTK_IS_SAME_AS:
      break;

    default:
      gcc_unreachable ();
    }

  return (trait_expr_value (kind, type1, type2)
	  ? boolean_true_node : boolean_false_node);
}

/* Do-nothing variants of functions to handle pragma FLOAT_CONST_DECIMAL64,
   which is ignored for C++.  */

void
set_float_const_decimal64 (void)
{
}

void
clear_float_const_decimal64 (void)
{
}

bool
float_const_decimal64_p (void)
{
  return 0;
}


/* Return true if T designates the implied `this' parameter.  */

bool
is_this_parameter (tree t)
{
  if (!DECL_P (t) || DECL_NAME (t) != this_identifier)
    return false;
  /* A "this" named entity must be a parameter, a lambda capture proxy,
     or (with an external binding oracle) a VAR_DECL.  */
  gcc_assert (TREE_CODE (t) == PARM_DECL || is_capture_proxy (t)
	      || (cp_binding_oracle && TREE_CODE (t) == VAR_DECL));
  return true;
}

/* Insert the deduced return type for an auto function.  */

void
apply_deduced_return_type (tree fco, tree return_type)
{
  tree result;

  if (return_type == error_mark_node)
    return;

  /* A conversion operator's name encodes its return type; rebuild it.  */
  if (DECL_CONV_FN_P (fco))
    DECL_NAME (fco) = make_conv_op_name (return_type);

  TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco));

  result = DECL_RESULT (fco);
  if (result == NULL_TREE)
    return;
  if (TREE_TYPE (result) == return_type)
    return;

  if (!processing_template_decl && !VOID_TYPE_P (return_type)
      && !complete_type_or_else (return_type, NULL_TREE))
    return;

  /* We already have a DECL_RESULT from start_preparsed_function.
     Now we need to redo the work it and allocate_struct_function
     did to reflect the new type.
*/
  gcc_assert (current_function_decl == fco);
  result = build_decl (input_location, RESULT_DECL, NULL_TREE,
		       TYPE_MAIN_VARIANT (return_type));
  DECL_ARTIFICIAL (result) = 1;
  DECL_IGNORED_P (result) = 1;
  cp_apply_type_quals_to_decl (cp_type_quals (return_type),
			       result);

  DECL_RESULT (fco) = result;

  if (!processing_template_decl)
    {
      /* Recompute whether the value is returned in memory, as
	 allocate_struct_function would have done.  */
      bool aggr = aggregate_value_p (result, fco);
#ifdef PCC_STATIC_STRUCT_RETURN
      cfun->returns_pcc_struct = aggr;
#endif
      cfun->returns_struct = aggr;
    }

}

/* DECL is a local variable or parameter from the surrounding scope of a
   lambda-expression.  Returns the decltype for a use of the capture field
   for DECL even if it hasn't been captured yet.  */

static tree
capture_decltype (tree decl)
{
  tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  /* FIXME do lookup instead of list walk? */
  tree cap = value_member (decl, LAMBDA_EXPR_CAPTURE_LIST (lam));
  tree type;

  if (cap)
    /* Already captured: use the capture field's type.  */
    type = TREE_TYPE (TREE_PURPOSE (cap));
  else
    /* Not yet captured: derive the type the capture field would get
       from the lambda's default capture mode.  */
    switch (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam))
      {
      case CPLD_NONE:
	error ("%qD is not captured", decl);
	return error_mark_node;

      case CPLD_COPY:
	type = TREE_TYPE (decl);
	/* Capture by copy strips the reference, except for references
	   to functions.  */
	if (TREE_CODE (type) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	  type = TREE_TYPE (type);
	break;

      case CPLD_REFERENCE:
	type = TREE_TYPE (decl);
	if (TREE_CODE (type) != REFERENCE_TYPE)
	  type = build_reference_type (TREE_TYPE (decl));
	break;

      default:
	gcc_unreachable ();
      }

  if (TREE_CODE (type) != REFERENCE_TYPE)
    {
      /* In a non-mutable lambda the members are const, and decltype of
	 the member access yields an lvalue reference.  */
      if (!LAMBDA_EXPR_MUTABLE_P (lam))
	type = cp_build_qualified_type (type, (cp_type_quals (type)
					       |TYPE_QUAL_CONST));
      type = build_reference_type (type);
    }
  return type;
}

/* Build a unary fold expression of EXPR over OP.  DIR selects the fold
   direction: UNARY_RIGHT_FOLD_EXPR for a right unary fold,
   UNARY_LEFT_FOLD_EXPR for a left unary fold.  A negative OP marks the
   compound-assignment form of the operator (see FOLD_EXPR_MODIFY_P).  */

static tree
finish_unary_fold_expr (tree expr, int op, tree_code dir)
{
  // Build a pack expansion (assuming expr has pack type).
  if (!uses_parameter_packs (expr))
    {
      error_at (location_of (expr), "operand of fold expression has no "
		"unexpanded parameter packs");
      return error_mark_node;
    }
  tree pack = make_pack_expansion (expr);

  // Build the fold expression.
  tree code = build_int_cstu (integer_type_node, abs (op));
  tree fold = build_min_nt_loc (UNKNOWN_LOCATION, dir, code, pack);
  FOLD_EXPR_MODIFY_P (fold) = (op < 0);
  return fold;
}

/* Build a left unary fold expression of EXPR over OP.  */

tree
finish_left_unary_fold_expr (tree expr, int op)
{
  return finish_unary_fold_expr (expr, op, UNARY_LEFT_FOLD_EXPR);
}

/* Build a right unary fold expression of EXPR over OP.  */

tree
finish_right_unary_fold_expr (tree expr, int op)
{
  return finish_unary_fold_expr (expr, op, UNARY_RIGHT_FOLD_EXPR);
}

/* Build a binary fold expression with pack operand PACK and initial
   term INIT over operator OP in direction DIR.  PACK is expanded here;
   a negative OP marks the compound-assignment form.  */

tree
finish_binary_fold_expr (tree pack, tree init, int op, tree_code dir)
{
  pack = make_pack_expansion (pack);
  tree code = build_int_cstu (integer_type_node, abs (op));
  tree fold = build_min_nt_loc (UNKNOWN_LOCATION, dir, code, pack, init);
  FOLD_EXPR_MODIFY_P (fold) = (op < 0);
  return fold;
}

/* Build a binary fold expression over EXPR1 and EXPR2.  The
   associativity of the fold is determined by EXPR1 and EXPR2 (whichever
   has an unexpanded parameter pack).  */

tree
finish_binary_fold_expr (tree expr1, tree expr2, int op)
{
  // Determine which expr has an unexpanded parameter pack and
  // set the pack and initial term.
  bool pack1 = uses_parameter_packs (expr1);
  bool pack2 = uses_parameter_packs (expr2);
  if (pack1 && !pack2)
    return finish_binary_fold_expr (expr1, expr2, op, BINARY_RIGHT_FOLD_EXPR);
  else if (pack2 && !pack1)
    return finish_binary_fold_expr (expr2, expr1, op, BINARY_LEFT_FOLD_EXPR);
  else
    {
      /* Either both operands or neither contains a pack; both are
	 ill-formed.  */
      if (pack1)
	error ("both arguments in binary fold have unexpanded parameter packs");
      else
	error ("no unexpanded parameter packs in binary fold");
    }
  return error_mark_node;
}

/* Finish __builtin_launder (arg).  */

tree
finish_builtin_launder (location_t loc, tree arg, tsubst_flags_t complain)
{
  tree orig_arg = arg;
  if (!type_dependent_expression_p (arg))
    arg = decay_conversion (arg, complain);
  if (error_operand_p (arg))
    return error_mark_node;
  if (!type_dependent_expression_p (arg)
      && TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
    {
      error_at (loc, "non-pointer argument to %<__builtin_launder%>");
      return error_mark_node;
    }
  /* In a template keep the undecayed argument for later substitution.  */
  if (processing_template_decl)
    arg = orig_arg;
  return build_call_expr_internal_loc (loc, IFN_LAUNDER,
				       TREE_TYPE (arg), 1, arg);
}

#include "gt-cp-semantics.h"