/* Lower complex number operations to scalar operations.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"


/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only imaginary parts.  */

enum
{
  UNINITIALIZED = 0,
  ONLY_REAL = 1,
  ONLY_IMAG = 2,
  VARYING = 3
};

/* The type complex_lattice_t holds combinations of the above
   constants.  */
typedef int complex_lattice_t;

/* Pack a pair of lattice values into a single constant, usable as a
   switch label.  */
#define PAIR(a, b)  ((a) << 2 | (b))

DEF_VEC_I(complex_lattice_t);
DEF_VEC_ALLOC_I(complex_lattice_t, heap);

static VEC(complex_lattice_t, heap) *complex_lattice_values;

/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  */
static htab_t complex_variable_components;

/* For each complex SSA_NAME, a pair of ssa names for the components.  */
static VEC(tree, heap) *complex_ssa_name_components;

/* Lookup UID in the complex_variable_components hashtable and return the
   associated tree.  */
static tree
cvc_lookup (unsigned int uid)
{
  struct int_tree_map *h, in;
  in.uid = uid;
  h = (struct int_tree_map *)
    htab_find_with_hash (complex_variable_components, &in, uid);
  return h ? h->to : NULL;
}

/* Insert the pair UID, TO into the complex_variable_components hashtable.  */

static void
cvc_insert (unsigned int uid, tree to)
{
  struct int_tree_map *h;
  void **loc;

  h = XNEW (struct int_tree_map);
  h->uid = uid;
  h->to = to;
  loc = htab_find_slot_with_hash (complex_variable_components, h,
                                  uid, INSERT);
  *(struct int_tree_map **) loc = h;
}

/* Return true if T is not a zero constant.  In the case of real values,
   we're only interested in +0.0.  */

static int
some_nonzerop (tree t)
{
  int zerop = false;

  /* Operations with real or imaginary part of a complex number zero
     cannot be treated the same as operations with a real or imaginary
     operand if we care about the signs of zeros in the result.  */
  if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
    zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
  else if (TREE_CODE (t) == FIXED_CST)
    zerop = fixed_zerop (t);
  else if (TREE_CODE (t) == INTEGER_CST)
    zerop = integer_zerop (t);

  return !zerop;
}
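
/* As a concrete illustration of the lattice: with default flags the constant
   3.0 + 0.0i maps to ONLY_REAL, 0.0 + 2.0i maps to ONLY_IMAG, and 1.0 + 2.0i
   maps to ONLY_REAL | ONLY_IMAG, i.e. VARYING.  Under -fsigned-zeros a
   literal 0.0 part is treated as nonzero by some_nonzerop above, since
   discarding it could change the sign of a zero in the result.  */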

/* Compute a lattice value from the components of a complex type REAL
   and IMAG.  */

static complex_lattice_t
find_lattice_value_parts (tree real, tree imag)
{
  int r, i;
  complex_lattice_t ret;

  r = some_nonzerop (real);
  i = some_nonzerop (imag);
  ret = r * ONLY_REAL + i * ONLY_IMAG;

  /* ??? On occasion we could do better than mapping 0+0i to real, but we
     certainly don't want to leave it UNINITIALIZED, which eventually gets
     mapped to VARYING.  */
  if (ret == UNINITIALIZED)
    ret = ONLY_REAL;

  return ret;
}


/* Compute a lattice value from gimple_val T.  */

static complex_lattice_t
find_lattice_value (tree t)
{
  tree real, imag;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      return VEC_index (complex_lattice_t, complex_lattice_values,
                        SSA_NAME_VERSION (t));

    case COMPLEX_CST:
      real = TREE_REALPART (t);
      imag = TREE_IMAGPART (t);
      break;

    default:
      gcc_unreachable ();
    }

  return find_lattice_value_parts (real, imag);
}

/* Determine if LHS is something for which we're interested in seeing
   simulation results.  */

static bool
is_complex_reg (tree lhs)
{
  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
}

/* Mark the incoming parameters to the function as VARYING.  */

static void
init_parameter_lattice_values (void)
{
  tree parm, ssa_name;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
    if (is_complex_reg (parm)
        && var_ann (parm) != NULL
        && (ssa_name = gimple_default_def (cfun, parm)) != NULL_TREE)
      VEC_replace (complex_lattice_t, complex_lattice_values,
                   SSA_NAME_VERSION (ssa_name), VARYING);
}

/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple phi;
  bool saw_a_complex_op = false;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          prop_set_simulate_again (phi,
                                   is_complex_reg (gimple_phi_result (phi)));
        }

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt;
          tree op0, op1;
          bool sim_again_p;

          stmt = gsi_stmt (gsi);
          op0 = op1 = NULL_TREE;

          /* Most control-altering statements must be initially
             simulated, else we won't cover the entire cfg.  */
          sim_again_p = stmt_ends_bb_p (stmt);

          switch (gimple_code (stmt))
            {
            case GIMPLE_CALL:
              if (gimple_call_lhs (stmt))
                sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
              break;

            case GIMPLE_ASSIGN:
              sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
              if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
                op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
              else
                op0 = gimple_assign_rhs1 (stmt);
              if (gimple_num_ops (stmt) > 2)
                op1 = gimple_assign_rhs2 (stmt);
              break;

            case GIMPLE_COND:
              op0 = gimple_cond_lhs (stmt);
              op1 = gimple_cond_rhs (stmt);
              break;

            default:
              break;
            }

          if (op0 || op1)
            switch (gimple_expr_code (stmt))
              {
              case EQ_EXPR:
              case NE_EXPR:
              case PLUS_EXPR:
              case MINUS_EXPR:
              case MULT_EXPR:
              case TRUNC_DIV_EXPR:
              case CEIL_DIV_EXPR:
              case FLOOR_DIV_EXPR:
              case ROUND_DIV_EXPR:
              case RDIV_EXPR:
                if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
                  saw_a_complex_op = true;
                break;

              case NEGATE_EXPR:
              case CONJ_EXPR:
                if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
                  saw_a_complex_op = true;
                break;

              case REALPART_EXPR:
              case IMAGPART_EXPR:
                /* The total store transformation performed during
                   gimplification creates such uninitialized loads
                   and we need to lower the statement to be able
                   to fix things up.  */
                if (TREE_CODE (op0) == SSA_NAME
                    && ssa_undefined_value_p (op0))
                  saw_a_complex_op = true;
                break;

              default:
                break;
              }

          prop_set_simulate_again (stmt, sim_again_p);
        }
    }

  return saw_a_complex_op;
}


/* Evaluate statement STMT against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
                    tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs)
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);

  switch (gimple_expr_code (stmt))
    {
    case SSA_NAME:
    case COMPLEX_CST:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    case COMPLEX_EXPR:
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
                                        gimple_assign_rhs2 (stmt));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
         models addition.  */
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
        new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
         their inputs.  */
      else if (op1_l == UNINITIALIZED)
        new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
        new_l = op1_l;
      else
        {
          /* At this point both numbers have only one component.  If the
             numbers are of opposite kind, the result is imaginary,
             otherwise the result is real.  The add/subtract translates
             the real/imag from/to 0/1; the ^ performs the comparison.  */
          new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;

          /* Don't allow the lattice value to flip-flop indefinitely.  */
          new_l |= old_l;
        }
      break;

    case NEGATE_EXPR:
    case CONJ_EXPR:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Evaluate a PHI node against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_phi (gimple phi)
{
  complex_lattice_t new_l, old_l;
  unsigned int ver;
  tree lhs;
  int i;

  lhs = gimple_phi_result (phi);

  /* This condition should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
  new_l = UNINITIALIZED;
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));

  ver = SSA_NAME_VERSION (lhs);
  old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);

  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Create one backing variable for a complex component of ORIG.  */

static tree
create_one_component_var (tree type, tree orig, const char *prefix,
                          const char *suffix, enum tree_code code)
{
  tree r = create_tmp_var (type, prefix);
  add_referenced_var (r);

  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
  DECL_ARTIFICIAL (r) = 1;

  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));

      DECL_NAME (r) = get_identifier (ACONCAT ((name, suffix, NULL)));

      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
      DECL_DEBUG_EXPR_IS_FROM (r) = 1;
      DECL_IGNORED_P (r) = 0;
      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
    }
  else
    {
      DECL_IGNORED_P (r) = 1;
      TREE_NO_WARNING (r) = 1;
    }

  return r;
}

/* Retrieve a value for a complex component of VAR.  */

static tree
get_component_var (tree var, bool imag_p)
{
  size_t decl_index = DECL_UID (var) * 2 + imag_p;
  tree ret = cvc_lookup (decl_index);

  if (ret == NULL)
    {
      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
                                      imag_p ? "CI" : "CR",
                                      imag_p ? "$imag" : "$real",
                                      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
    }

  return ret;
}

/* Retrieve a value for a complex component of SSA_NAME.  */

static tree
get_component_ssa_name (tree ssa_name, bool imag_p)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree ret;

  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    {
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
        return build_real (inner_type, dconst0);
      else
        return build_int_cst (inner_type, 0);
    }

  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
  if (ret == NULL)
    {
      ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
      ret = make_ssa_name (ret, NULL);

      /* Copy some properties from the original.  In particular, whether it
         is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL
          && gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
        {
          SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
          set_default_def (SSA_NAME_VAR (ret), ret);
        }

      VEC_replace (tree, complex_ssa_name_components, ssa_name_index, ret);
    }

  return ret;
}

/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  */

static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree comp;
  gimple last;
  gimple_seq list;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    return NULL;

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
  if (comp)
    ;

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
      return NULL;
    }
  else if (TREE_CODE (value) == SSA_NAME
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      /* Replace an anonymous base value with the variable from cvc_lookup.
         This should result in better debug info.  */
      if (DECL_IGNORED_P (SSA_NAME_VAR (value))
          && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
        {
          comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
          replace_ssa_name_symbol (value, comp);
        }

      VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
      return NULL;
    }

  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
  else
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  list = NULL;
  value = force_gimple_operand (value, &list, false, NULL);
  last = gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);

  return list;
}

/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before gsi.  */

static tree
extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
                   bool gimple_p)
{
  switch (TREE_CODE (t))
    {
    case COMPLEX_CST:
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

    case COMPLEX_EXPR:
      gcc_unreachable ();

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case VIEW_CONVERT_EXPR:
    case MEM_REF:
      {
        tree inner_type = TREE_TYPE (TREE_TYPE (t));

        t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
                    inner_type, unshare_expr (t));

        if (gimple_p)
          t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
                                        GSI_SAME_STMT);

        return t;
      }

    case SSA_NAME:
      return get_component_ssa_name (t, imagpart_p);

    default:
      gcc_unreachable ();
    }
}

/* Update the complex components of the ssa name on the lhs of STMT.  */

static void
update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
                           tree i)
{
  tree lhs;
  gimple_seq list;

  lhs = gimple_get_lhs (stmt);

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
}

static void
update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
{
  gimple_seq list;

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_on_edge (e, list);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_on_edge (e, list);
}


/* Update an assignment to a complex variable in place.  */

static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
  gimple_stmt_iterator orig_si = *gsi;
  gimple stmt;

  if (gimple_in_ssa_p (cfun))
    update_complex_components (gsi, gsi_stmt (*gsi), r, i);

  gimple_assign_set_rhs_with_ops (&orig_si, COMPLEX_EXPR, r, i);
  stmt = gsi_stmt (orig_si);
  update_stmt (stmt);
  if (maybe_clean_eh_stmt (stmt))
    gimple_purge_dead_eh_edges (gimple_bb (stmt));
}


/* Generate code at the entry point of the function to initialize the
   component variables for a complex parameter.  */

static void
update_parameter_components (void)
{
  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
  tree parm;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree ssa_name, r, i;

      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
        continue;

      type = TREE_TYPE (type);
      ssa_name = gimple_default_def (cfun, parm);
      if (!ssa_name)
        continue;

      r = build1 (REALPART_EXPR, type, ssa_name);
      i = build1 (IMAGPART_EXPR, type, ssa_name);
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
    }
}

/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  */

static void
update_phi_components (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      if (is_complex_reg (gimple_phi_result (phi)))
        {
          tree lr, li;
          gimple pr = NULL, pi = NULL;
          unsigned int i, n;

          lr = get_component_ssa_name (gimple_phi_result (phi), false);
          if (TREE_CODE (lr) == SSA_NAME)
            {
              pr = create_phi_node (lr, bb);
              SSA_NAME_DEF_STMT (lr) = pr;
            }

          li = get_component_ssa_name (gimple_phi_result (phi), true);
          if (TREE_CODE (li) == SSA_NAME)
            {
              pi = create_phi_node (li, bb);
              SSA_NAME_DEF_STMT (li) = pi;
            }

          for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
            {
              tree comp, arg = gimple_phi_arg_def (phi, i);
              if (pr)
                {
                  comp = extract_component (NULL, arg, false, false);
                  SET_PHI_ARG_DEF (pr, i, comp);
                }
              if (pi)
                {
                  comp = extract_component (NULL, arg, true, false);
                  SET_PHI_ARG_DEF (pi, i, comp);
                }
            }
        }
    }
}

/* Expand a complex move to scalars.  */

static void
expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i, lhs, rhs;
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
        rhs = gimple_assign_rhs1 (stmt);
      else
        rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
      rhs = NULL_TREE;
    }
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          /* The value is not assigned on the exception edges, so we need not
             concern ourselves there.  We do need to update on the fallthru
             edge.  Find it.  */
          e = find_fallthru_edge (gsi_bb (*gsi)->succs);
          if (!e)
            gcc_unreachable ();

          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components_on_edge (e, lhs, r, i);
        }
      else if (is_gimple_call (stmt)
               || gimple_has_side_effects (stmt)
               || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
        {
          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components (gsi, stmt, r, i);
        }
      else
        {
          if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
            {
              r = extract_component (gsi, rhs, 0, true);
              i = extract_component (gsi, rhs, 1, true);
            }
          else
            {
              r = gimple_assign_rhs1 (stmt);
              i = gimple_assign_rhs2 (stmt);
            }
          update_complex_assignment (gsi, r, i);
        }
    }
  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      tree x;
      gimple t;

      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          gimple_assign_set_lhs (stmt, x);
          gimple_assign_set_rhs1 (stmt, i);
        }
      else
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          t = gimple_build_assign (x, i);
          gsi_insert_before (gsi, t, GSI_SAME_STMT);

          stmt = gsi_stmt (*gsi);
          gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
          gimple_return_set_retval (stmt, lhs);
        }

      update_stmt (stmt);
    }
}

/* Expand complex addition to scalars:
        a + b = (ar + br) + i(ai + bi)
        a - b = (ar - br) + i(ai - bi)
*/

static void
expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ar;
      if (code == MINUS_EXPR)
        ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
      else
        ri = bi;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
        rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
      else
        rr = br;
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)
        goto general;
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = bi;
      break;

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)
        goto general;
      rr = br;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, VARYING):
    general:
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    default:
      gcc_unreachable ();
    }
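
  /* Whatever case was taken above, rr and ri now hold gimple values for the
     real and imaginary parts of the result; rewrite the original statement
     as a COMPLEX_EXPR of the two.  */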
  update_complex_assignment (gsi, rr, ri);
}

/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines.  */

static void
expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
                        tree br, tree bi, enum tree_code code)
{
  enum machine_mode mode;
  enum built_in_function bcode;
  tree fn, type, lhs;
  gimple old_stmt, stmt;

  old_stmt = gsi_stmt (*gsi);
  lhs = gimple_assign_lhs (old_stmt);
  type = TREE_TYPE (lhs);

  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

  if (code == MULT_EXPR)
    bcode = ((enum built_in_function)
             (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else if (code == RDIV_EXPR)
    bcode = ((enum built_in_function)
             (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else
    gcc_unreachable ();
  fn = builtin_decl_explicit (bcode);

  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
  gimple_call_set_lhs (stmt, lhs);
  update_stmt (stmt);
  gsi_replace (gsi, stmt, false);

  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    gimple_purge_dead_eh_edges (gsi_bb (*gsi));

  if (gimple_in_ssa_p (cfun))
    {
      type = TREE_TYPE (type);
      update_complex_components (gsi, stmt,
                                 build1 (REALPART_EXPR, type, lhs),
                                 build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;
    }
}

/* Expand complex multiplication to scalars:
        a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
*/

static void
expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
                               tree ar, tree ai, tree br, tree bi,
                               complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  if (al < bl)
    {
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      if (TREE_CODE (ai) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
        ri = br;
      else
        ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
        {
          expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
          return;
        }
      else
        {
          tree t1, t2, t3, t4;

          t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
          t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
          t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);

          /* Avoid expanding redundant multiplication for the common
             case of squaring a complex number.  */
          if (ar == br && ai == bi)
            t4 = t3;
          else
            t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

          rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
          ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
        }
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, straightforward algorithm.
        a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
            t = br*br + bi*bi
*/

static void
expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
                             tree ar, tree ai, tree br, tree bi,
                             enum tree_code code)
{
  tree rr, ri, div, t1, t2, t3;

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
  rr = gimplify_build2 (gsi, code, inner_type, t3, div);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
  ri = gimplify_build2 (gsi, code, inner_type, t3, div);

  update_complex_assignment (gsi, rr, ri);
}

/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  */

static void
expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;
  gimple stmt;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
  compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
                             LT_EXPR, boolean_type_node, t1, t2);
  STRIP_NOPS (compare);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  if (TREE_CODE (compare) != INTEGER_CST)
    {
      edge e;
      gimple stmt;
      tree cond, tmp;

      tmp = create_tmp_var (boolean_type_node, NULL);
      stmt = gimple_build_assign (tmp, compare);
      if (gimple_in_ssa_p (cfun))
        {
          tmp = make_ssa_name (tmp, stmt);
          gimple_assign_set_lhs (stmt, tmp);
        }

      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      cond = fold_build2_loc (gimple_location (stmt),
                              EQ_EXPR, boolean_type_node, tmp,
                              boolean_true_node);
      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (gsi_bb (*gsi), stmt);
      bb_cond = e->src;
      bb_join = e->dest;
      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      redirect_edge_succ (e, bb_true);
      make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      make_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_edge (bb_false, bb_join, EDGE_FALLTHRU);

      /* Update dominance info.  Note that bb_join's data was
         updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
        {
          set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
          set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
        }

      rr = make_rename_temp (inner_type, NULL);
      ri = make_rename_temp (inner_type, NULL);
    }

  /* In the TRUE branch, we compute
      ratio = br/bi;
      div = (br * ratio) + bi;
      tr = (ar * ratio) + ai;
      ti = (ai * ratio) - ar;
      tr = tr / div;
      ti = ti / div;  */
  if (bb_true || integer_nonzerop (compare))
    {
      if (bb_true)
        {
          *gsi = gsi_last_bb (bb_true);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }

      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_true)
        {
          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);
        }
    }

  /* In the FALSE branch, we compute
      ratio = bi/br;
      div = (bi * ratio) + br;
      tr = (ai * ratio) + ar;
      ti = ai - (ar * ratio);
      tr = tr / div;
      ti = ti / div;  */
  if (bb_false || integer_zerop (compare))
    {
      if (bb_false)
        {
          *gsi = gsi_last_bb (bb_false);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }

      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_false)
        {
          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);
        }
    }

  if (bb_join)
    *gsi = gsi_start_bb (bb_join);
  else
    rr = tr, ri = ti;

  update_complex_assignment (gsi, rr, ri);
}
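
/* Note on the expansion above: the |br| < |bi| test picks whichever of
   br/bi or bi/br has magnitude at most 1, so the scaled intermediate
   products stay in range even for inputs near the limits of the exponent
   range.  This is the usual scaled complex division, often attributed to
   Smith.  */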

/* Expand complex division to scalars.  */

static void
expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ai;
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_REAL, VARYING):
    case PAIR (ONLY_IMAG, VARYING):
    case PAIR (VARYING, VARYING):
      switch (flag_complex_method)
        {
        case 0:
          /* straightforward implementation of complex divide acceptable.  */
          expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
          break;

        case 2:
          if (SCALAR_FLOAT_TYPE_P (inner_type))
            {
              expand_complex_libcall (gsi, ar, ai, br, bi, code);
              break;
            }
          /* FALLTHRU */

        case 1:
          /* wide ranges of inputs must work for complex divide.  */
          expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
          break;

        default:
          gcc_unreachable ();
        }
      return;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex negation to scalars:
        -a = (-ar) + i(-ai)
*/

static void
expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai)
{
  tree rr, ri;

  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex conjugate to scalars:
        ~a = (ar) + i(-ai)
*/

static void
expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
                          tree ar, tree ai)
{
  tree ri;

  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, ar, ri);
}

/* Expand complex comparison (EQ or NE only).  */

static void
expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
                           tree br, tree bi, enum tree_code code)
{
  tree cr, ci, cc, type;
  gimple stmt;

  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
  cc = gimplify_build2 (gsi,
                        (code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
                        boolean_type_node, cr, ci);

  stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
      type = TREE_TYPE (gimple_return_retval (stmt));
      gimple_return_set_retval (stmt, fold_convert (type, cc));
      break;

    case GIMPLE_ASSIGN:
      type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
      stmt = gsi_stmt (*gsi);
      break;

    case GIMPLE_COND:
      gimple_cond_set_code (stmt, EQ_EXPR);
      gimple_cond_set_lhs (stmt, cc);
      gimple_cond_set_rhs (stmt, boolean_true_node);
      break;

    default:
      gcc_unreachable ();
    }

  update_stmt (stmt);
}


/* Process one statement.  If we identify a complex operation, expand it.  */

static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
    return;

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
        return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
         subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
        return;
      break;

    default:
      {
        tree rhs;

        /* GIMPLE_COND may also fallthru here, but we do not need to
           do anything with it.  */
        if (gimple_code (stmt) == GIMPLE_COND)
          return;

        if (TREE_CODE (type) == COMPLEX_TYPE)
          expand_complex_move (gsi, type);
        else if (is_gimple_assign (stmt)
                 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                     || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
                 && TREE_CODE (lhs) == SSA_NAME)
          {
            rhs = gimple_assign_rhs1 (stmt);
            rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
                                     gimple_assign_rhs_code (stmt)
                                       == IMAGPART_EXPR,
                                     false);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            stmt = gsi_stmt (*gsi);
            update_stmt (stmt);
          }
      }
      return;
    }

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
    {
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
    }
  /* GIMPLE_CALL cannot get here.  */
  else
    {
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);
    }

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

  if (ac == bc)
    br = ar, bi = ai;
  else if (bc)
    {
      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);
    }
  else
    br = bi = NULL_TREE;

  if (gimple_in_ssa_p (cfun))
    {
      al = find_lattice_value (ac);
      if (al == UNINITIALIZED)
        al = VARYING;

      if (TREE_CODE_CLASS (code) == tcc_unary)
        bl = UNINITIALIZED;
      else if (ac == bc)
        bl = al;
      else
        {
          bl = find_lattice_value (bc);
          if (bl == UNINITIALIZED)
            bl = VARYING;
        }
    }
  else
    al = bl = VARYING;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (gsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (gsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (gsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}


/* Entry point for complex operation lowering during optimization.  */

static unsigned int
tree_lower_complex (void)
{
  int old_last_basic_block;
  gimple_stmt_iterator gsi;
  basic_block bb;

  if (!init_dont_simulate_again ())
    return 0;

  complex_lattice_values = VEC_alloc (complex_lattice_t, heap, num_ssa_names);
  VEC_safe_grow_cleared (complex_lattice_t, heap,
                         complex_lattice_values, num_ssa_names);

  init_parameter_lattice_values ();
  ssa_propagate (complex_visit_stmt, complex_visit_phi);

  complex_variable_components = htab_create (10, int_tree_map_hash,
                                             int_tree_map_eq, free);

  complex_ssa_name_components = VEC_alloc (tree, heap, 2 * num_ssa_names);
  VEC_safe_grow_cleared (tree, heap, complex_ssa_name_components,
                         2 * num_ssa_names);

  update_parameter_components ();

  /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
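  /* Snapshot the block count before expanding: expand_complex_div_wide may
     split blocks and create new ones past this point, and those must not be
     visited again by the loop below.  */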
  old_last_basic_block = last_basic_block;
  FOR_EACH_BB (bb)
    {
      if (bb->index >= old_last_basic_block)
        continue;

      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        expand_complex_operations_1 (&gsi);
    }

  gsi_commit_edge_inserts ();

  htab_delete (complex_variable_components);
  VEC_free (tree, heap, complex_ssa_name_components);
  VEC_free (complex_lattice_t, heap, complex_lattice_values);
  return 0;
}

struct gimple_opt_pass pass_lower_complex =
{
 {
  GIMPLE_PASS,
  "cplxlower",                          /* name */
  0,                                    /* gate */
  tree_lower_complex,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa,                             /* properties_required */
  PROP_gimple_lcx,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect
    | TODO_update_ssa
    | TODO_verify_stmts                 /* todo_flags_finish */
 }
};


static bool
gate_no_optimization (void)
{
  /* With errors, normal optimization passes are not run.  If we don't
     lower complex operations at all, rtl expansion will abort.  */
  return !(cfun->curr_properties & PROP_gimple_lcx);
}

struct gimple_opt_pass pass_lower_complex_O0 =
{
 {
  GIMPLE_PASS,
  "cplxlower0",                         /* name */
  gate_no_optimization,                 /* gate */
  tree_lower_complex,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  PROP_gimple_lcx,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect
    | TODO_update_ssa
    | TODO_verify_stmts                 /* todo_flags_finish */
 }
};