/* Profile counter container type.
   Copyright (C) 2017-2018 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#ifndef GCC_PROFILE_COUNT_H
#define GCC_PROFILE_COUNT_H

struct function;
class profile_count;

/* Quality of the profile count.  Because gengtype does not support enums
   inside of classes, this is in global namespace.

   The enumerators are ordered from least to most trustworthy; code all
   over this file compares qualities with <= / >= and MIN, and the values
   are stored in 3-bit bit-fields and encoded into REG_BR_PROB notes, so
   neither the order nor the count (must fit in [0,7]) may change
   casually.  */
enum profile_quality {
  /* Uninitialized value.  */
  profile_uninitialized,
  /* Profile is based on static branch prediction heuristics and may
     or may not match reality.  It is local to function and can not be
     compared inter-procedurally.  Never used by probabilities (they are
     always local).  */
  profile_guessed_local,
  /* Profile was read by feedback and was 0, we used local heuristics to
     guess better.  This is the case of functions not run in profile
     feedback.  Never used by probabilities.  */
  profile_guessed_global0,

  /* Same as profile_guessed_global0 but global count is adjusted 0.  */
  profile_guessed_global0adjusted,

  /* Profile is based on static branch prediction heuristics.  It may or may
     not reflect the reality but it can be compared interprocedurally
     (for example, we inlined function w/o profile feedback into function
     with feedback and propagated from that).
     Never used by probabilities.  */
  profile_guessed,
  /* Profile was determined by autofdo.  */
  profile_afdo,
  /* Profile was originally based on feedback but it was adjusted
     by code duplicating optimization.  It may not precisely reflect the
     particular code path.  */
  profile_adjusted,
  /* Profile was read from profile feedback or determined by accurate static
     method.  */
  profile_precise
};

/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE  10000

/* Rounding division: X/Y rounded to nearest integer instead of
   truncated toward zero.  */
#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))

/* Out-of-line fallback for safe_scale_64bit, used when neither fast path
   below applies (defined in profile-count.c).  */
bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);

/* Compute RES=(a*b + c/2)/c capping and return false if overflow happened.  */

inline bool
safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
{
#if (GCC_VERSION >= 5000)
  uint64_t tmp;
  /* Fast path: overflow-checking builtins detect whether a*b + c/2 fits
     in 64 bits; when it does, the rounded division is exact.  */
  if (!__builtin_mul_overflow (a, b, &tmp)
      && !__builtin_add_overflow (tmp, c/2, &tmp))
    {
      *res = tmp / c;
      return true;
    }
  if (c == 1)
    {
      /* Overflow and dividing by 1 cannot shrink it: saturate to the
	 maximal value and report failure.  */
      *res = (uint64_t) -1;
      return false;
    }
#else
  /* Without the builtins: the product of two values below 2^31 plus half
     of a third value below 2^31 cannot overflow 64 bits.  */
  if (a < ((uint64_t)1 << 31)
      && b < ((uint64_t)1 << 31)
      && c < ((uint64_t)1 << 31))
    {
      *res = (a * b + (c / 2)) / c;
      return true;
    }
#endif
  return slow_safe_scale_64bit (a, b, c, res);
}

/* Data type to hold probabilities.  It implements fixed point arithmetics
   with capping so probability is always in range [0,1] and scaling requiring
   values greater than 1 needs to be represented otherwise.

   In addition to actual value the quality of profile is tracked and
   propagated through all operations.  Special value UNINITIALIZED is used
   for probabilities that have not been determined yet (for example because
   of -fno-guess-branch-probability)

   Typically probabilities are derived from profile feedback (via
   probability_in_gcov_type), autoFDO or guessed statically and then
   propagated through the compilation.
   Named probabilities are available:

     - never           (0 probability)
     - guessed_never
     - very_unlikely   (1/2000 probability)
     - unlikely        (1/5 probability)
     - even            (1/2 probability)
     - likely          (4/5 probability)
     - very_likely     (1999/2000 probability)
     - guessed_always
     - always

   Named probabilities except for never/always are assumed to be statically
   guessed and thus not necessarily accurate.  The difference between never
   and guessed_never is that the first one should be used only in case that
   well behaving program will very likely not execute the "never" path.
   For example if the path is going to abort () call or it exception handling.

   Always and guessed_always probabilities are symmetric.

   For legacy code we support conversion to/from REG_BR_PROB_BASE based
   fixpoint integer arithmetics.  Once the code is converted to branch
   probabilities, these conversions will probably go away because they are
   lossy.  */

class GTY((user)) profile_probability
{
  /* Total number of bits in the m_val:29 + m_quality:3 bit-fields below.  */
  static const int n_bits = 29;
  /* Fixed-point representation of 1.0.
     We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
     will lead to harder multiplication sequences.  */
  static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
  /* Sentinel m_val marking a probability that is not known.  */
  static const uint32_t uninitialized_probability
     = ((uint32_t) 1 << (n_bits - 1)) - 1;

  /* Value in [0, max_probability], or uninitialized_probability.  */
  uint32_t m_val : 29;
  /* Trustworthiness of the value; must fit in 3 bits (see the
     to_reg_br_prob_note encoding below).  */
  enum profile_quality m_quality : 3;

  friend class profile_count;
public:

  /* Named probabilities.  */

  /* Probability 0, known precisely (the path should never execute in a
     well behaved program).  */
  static profile_probability never ()
    {
      profile_probability ret;
      ret.m_val = 0;
      ret.m_quality = profile_precise;
      return ret;
    }
  /* Probability 0, by static guess only.  */
  static profile_probability guessed_never ()
    {
      profile_probability ret;
      ret.m_val = 0;
      ret.m_quality = profile_guessed;
      return ret;
    }
  /* Statically guessed probability of roughly 1/2000.  */
  static profile_probability very_unlikely ()
    {
      /* Be consistent with PROB_VERY_UNLIKELY in predict.h.  The decrement
	 keeps the value strictly below the rounded 1/2000 scale.  */
      profile_probability r
	 = profile_probability::guessed_always ().apply_scale (1, 2000);
      r.m_val--;
      return r;
    }
  /* Statically guessed probability of roughly 1/5.  */
  static profile_probability unlikely ()
    {
      /* Be consistent with PROB_UNLIKELY in predict.h.  */
      profile_probability r
	 = profile_probability::guessed_always ().apply_scale (1, 5);
      r.m_val--;
      return r;
    }
  /* Statically guessed probability of 1/2.  */
  static profile_probability even ()
    {
      return profile_probability::guessed_always ().apply_scale (1, 2);
    }
  /* Complement of very_unlikely (roughly 1999/2000).  */
  static profile_probability very_likely ()
    {
      return profile_probability::always () - very_unlikely ();
    }
  /* Complement of unlikely (roughly 4/5).  */
  static profile_probability likely ()
    {
      return profile_probability::always () - unlikely ();
    }
  /* Probability 1, by static guess.  */
  static profile_probability guessed_always ()
    {
      profile_probability ret;
      ret.m_val = max_probability;
      ret.m_quality = profile_guessed;
      return ret;
    }
  /* Probability 1, known precisely.  */
  static profile_probability always ()
    {
      profile_probability ret;
      ret.m_val = max_probability;
      ret.m_quality = profile_precise;
      return ret;
    }
  /* Probabilities which have not been initialized.  Either because
     initialization did not happen yet or because profile is unknown.  */
  static profile_probability uninitialized ()
    {
      profile_probability c;
      c.m_val = uninitialized_probability;
      c.m_quality = profile_guessed;
      return c;
    }


  /* Return true if value has been initialized.  */
  bool initialized_p () const
    {
      return m_val != uninitialized_probability;
    }
  /* Return true if value can be trusted.  */
  bool reliable_p () const
    {
      return m_quality >= profile_adjusted;
    }

  /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetics.
     This is mostly to support legacy code and should go away.  */
  static profile_probability from_reg_br_prob_base (int v)
    {
      profile_probability ret;
      gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE);
      ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE);
      ret.m_quality = profile_guessed;
      return ret;
    }
  int to_reg_br_prob_base () const
    {
      gcc_checking_assert (initialized_p ());
      return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability);
    }

  /* Conversion to and from RTL representation of profile probabilities.
     The note stores m_val in the upper bits and m_quality in the low 3
     bits of an int; the round-trip is asserted below.  */
  static profile_probability from_reg_br_prob_note (int v)
    {
      profile_probability ret;
      ret.m_val = ((unsigned int)v) / 8;
      ret.m_quality = (enum profile_quality)(v & 7);
      return ret;
    }
  int to_reg_br_prob_note () const
    {
      gcc_checking_assert (initialized_p ());
      int ret = m_val * 8 + m_quality;
      gcc_checking_assert (profile_probability::from_reg_br_prob_note (ret)
			   == *this);
      return ret;
    }

  /* Return VAL1/VAL2.  Capped at 1 when VAL1 > VAL2; result is always
     profile_precise quality.  */
  static profile_probability probability_in_gcov_type
		 (gcov_type val1, gcov_type val2)
    {
      profile_probability ret;
      gcc_checking_assert (val1 >= 0 && val2 > 0);
      if (val1 > val2)
	ret.m_val = max_probability;
      else
	{
	  uint64_t tmp;
	  safe_scale_64bit (val1, max_probability, val2, &tmp);
	  gcc_checking_assert (tmp <= max_probability);
	  ret.m_val = tmp;
	}
      ret.m_quality = profile_precise;
      return ret;
    }

  /* Basic operations.  Arithmetic saturates into [0, max_probability],
     propagates uninitialized, and takes the weaker (MIN) quality of the
     operands.  */
  bool operator== (const profile_probability &other) const
    {
      return m_val == other.m_val && m_quality == other.m_quality;
    }
  profile_probability operator+ (const profile_probability &other) const
    {
      if (other == profile_probability::never ())
	return *this;
      if (*this == profile_probability::never ())
	return other;
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();

      profile_probability ret;
      ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }
  profile_probability &operator+= (const profile_probability &other)
    {
      if (other == profile_probability::never ())
	return *this;
      if (*this == profile_probability::never ())
	{
	  *this = other;
	  return *this;
	}
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();
      else
	{
	  m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
	  m_quality = MIN (m_quality, other.m_quality);
	}
      return *this;
    }
  profile_probability operator- (const profile_probability &other) const
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      /* Saturating subtraction: never go below 0.  */
      ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
      ret.m_quality = MIN (m_quality, other.m_quality);
      return ret;
    }
  profile_probability &operator-= (const profile_probability &other)
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return *this;
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();
      else
	{
	  m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
	  m_quality = MIN (m_quality, other.m_quality);
	}
      return *this;
    }
  profile_probability operator* (const profile_probability &other) const
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
      /* A product is never better than an adjusted feedback value.  */
      ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
      return ret;
    }
  profile_probability &operator*= (const profile_probability &other)
    {
      if (*this == profile_probability::never ()
	  || other == profile_probability::never ())
	return *this = profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();
      else
	{
	  m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
	  m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
	}
      return *this;
    }
  profile_probability operator/ (const profile_probability &other) const
    {
      if (*this == profile_probability::never ())
	return profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      /* If we get probability above 1, mark it as unreliable and return 1.
	 NOTE(review): this caps when m_val >= other.m_val while operator/=
	 below caps only when m_val > other.m_val, so the equal-operands
	 case yields different quality (guessed here, adjusted there) —
	 verify this asymmetry is intentional.  */
      if (m_val >= other.m_val)
	{
	  ret.m_val = max_probability;
	  ret.m_quality = MIN (MIN (m_quality, other.m_quality),
			       profile_guessed);
	  return ret;
	}
      else if (!m_val)
	ret.m_val = 0;
      else
	{
	  gcc_checking_assert (other.m_val);
	  ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
				 other.m_val),
			   max_probability);
	}
      ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
      return ret;
    }
  profile_probability &operator/= (const profile_probability &other)
    {
      if (*this == profile_probability::never ())
	return *this = profile_probability::never ();
      if (!initialized_p () || !other.initialized_p ())
	return *this = profile_probability::uninitialized ();
      else
	{
	  /* If we get probability above 1, mark it as unreliable
	     and return 1.  */
	  if (m_val > other.m_val)
	    {
	      m_val = max_probability;
	      m_quality = MIN (MIN (m_quality, other.m_quality),
			       profile_guessed);
	      return *this;
	    }
	  else if (!m_val)
	    ;
	  else
	    {
	      gcc_checking_assert (other.m_val);
	      m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
				 other.m_val),
			   max_probability);
	    }
	  m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
	}
      return *this;
    }

  /* Split *THIS (ORIG) probability into 2 probabilities, such that
     the returned one (FIRST) is *THIS * CPROB and *THIS is
     adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
     == ORIG.  This is useful e.g. when splitting a conditional
     branch like:
     if (cond)
       goto lab; // ORIG probability
     into
     if (cond1)
       goto lab; // FIRST = ORIG * CPROB probability
     if (cond2)
       goto lab; // SECOND probability
     such that the overall probability of jumping to lab remains
     the same.  CPROB gives the relative probability between the
     branches.  */
  profile_probability split (const profile_probability &cprob)
    {
      profile_probability ret = *this * cprob;
      /* The following is equivalent to:
	 *this = cprob.invert () * *this / ret.invert ();  */
      *this = (*this - ret) / ret.invert ();
      return ret;
    }

  /* Scale VAL by this probability; an uninitialized probability acts as
     1/2 (the uninformed guess).  */
  gcov_type apply (gcov_type val) const
    {
      if (*this == profile_probability::uninitialized ())
	return val / 2;
      return RDIV (val * m_val, max_probability);
    }

  /* Return 1-*THIS.  */
  profile_probability invert () const
    {
      return profile_probability::always() - *this;
    }

  /* Return THIS with quality dropped to GUESSED.  */
  profile_probability guessed () const
    {
      profile_probability ret = *this;
      ret.m_quality = profile_guessed;
      return ret;
    }

  /* Return THIS with quality dropped to AFDO.  */
  profile_probability afdo () const
    {
      profile_probability ret = *this;
      ret.m_quality = profile_afdo;
      return ret;
    }

  /* Return *THIS * NUM / DEN, saturated at 1; quality drops to at most
     ADJUSTED.  */
  profile_probability apply_scale (int64_t num, int64_t den) const
    {
      if (*this == profile_probability::never ())
	return *this;
      if (!initialized_p ())
	return profile_probability::uninitialized ();
      profile_probability ret;
      uint64_t tmp;
      safe_scale_64bit (m_val, num, den, &tmp);
      ret.m_val = MIN (tmp, max_probability);
      ret.m_quality = MIN (m_quality, profile_adjusted);
      return ret;
    }

  /* Return true when the probability of edge is reliable.

     The profile guessing code is good at predicting branch outcome (ie.
     taken/not taken), that is predicted right slightly over 75% of time.
     It is however notoriously poor on predicting the probability itself.
     In general the profile appear a lot flatter (with probabilities closer
     to 50%) than the reality so it is bad idea to use it to drive
     optimization such as those disabling dynamic branch prediction for well
     predictable branches.

     There are two exceptions - edges leading to noreturn edges and edges
     predicted by number of iterations heuristics are predicted well.  This
     macro should be able to distinguish those, but at the moment it simply
     checks for noreturn heuristic that is only one giving probability over
     99% or below 1%.  In future we might want to propagate reliability
     information across the CFG if we find this information useful on
     multiple places.  */

  bool probably_reliable_p () const
    {
      if (m_quality >= profile_adjusted)
	return true;
      if (!initialized_p ())
	return false;
      return m_val < max_probability / 100
	     || m_val > max_probability - max_probability / 100;
    }

  /* Return false if profile_probability is bogus.  */
  bool verify () const
    {
      gcc_checking_assert (m_quality != profile_uninitialized);
      if (m_val == uninitialized_probability)
	return m_quality == profile_guessed;
      else if (m_quality < profile_guessed)
	return false;
      return m_val <= max_probability;
    }

  /* Comparisons are three-state and conservative.  False is returned if
     the inequality can not be decided.  */
  bool operator< (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val < other.m_val;
    }
  bool operator> (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val > other.m_val;
    }

  bool operator<= (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val <= other.m_val;
    }
  bool operator>= (const profile_probability &other) const
    {
      return initialized_p () && other.initialized_p () && m_val >= other.m_val;
    }

  /* Output THIS to F.  */
  void dump (FILE *f) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_probability other) const;
  /* Return if difference is greater than 50%.  */
  bool differs_lot_from_p (profile_probability other) const;
  /* COUNT1 times event happens with *THIS probability, COUNT2 times OTHER
     happens with COUNT2 probability.  Return probability that either *THIS
     or OTHER happens.  */
  profile_probability combine_with_count (profile_count count1,
					  profile_probability other,
					  profile_count count2) const;

  /* LTO streaming support.  */
  static profile_probability stream_in (struct lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
};

/* Main data type to hold profile counters in GCC.  Profile counts originate
   either from profile feedback, static profile estimation or both.  We do
   not perform whole program profile propagation and thus profile estimation
   counters are often local to function, while counters from profile feedback
   (or special cases of profile estimation) can be used inter-procedurally.

   There are 3 basic types
   1) local counters which are result of intra-procedural static profile
      estimation.
   2) ipa counters which are result of profile feedback or special case
      of static profile estimation (such as in function main).
   3) counters which counts as 0 inter-procedurally (because given function
      was never run in train feedback) but they hold local static profile
      estimate.

   Counters of type 1 and 3 can not be mixed with counters of different type
   within operation (because whole function should use one type of counter)
   with exception that global zero mix in most operations where outcome is
   well defined.

   To take local counter and use it inter-procedurally use ipa member
   function which strips information irrelevant at the inter-procedural
   level.
600 601 Counters are 61bit integers representing number of executions during the 602 train run or normalized frequency within the function. 603 604 As the profile is maintained during the compilation, many adjustments are 605 made. Not all transformations can be made precisely, most importantly 606 when code is being duplicated. It also may happen that part of CFG has 607 profile counts known while other do not - for example when LTO optimizing 608 partly profiled program or when profile was lost due to COMDAT merging. 609 610 For this reason profile_count tracks more information than 611 just unsigned integer and it is also ready for profile mismatches. 612 The API of this data type represent operations that are natural 613 on profile counts - sum, difference and operation with scales and 614 probabilities. All operations are safe by never getting negative counts 615 and they do end up in uninitialized scale if any of the parameters is 616 uninitialized. 617 618 All comparsions that are three state and handling of probabilities. Thus 619 a < b is not equal to !(a >= b). 620 621 The following pre-defined counts are available: 622 623 profile_count::zero () for code that is known to execute zero times at 624 runtime (this can be detected statically i.e. for paths leading to 625 abort (); 626 profile_count::one () for code that is known to execute once (such as 627 main () function 628 profile_count::uninitialized () for unknown execution count. 629 630 */ 631 632 class sreal; 633 634 class GTY(()) profile_count 635 { 636 public: 637 /* Use 62bit to hold basic block counters. Should be at least 638 64bit. Although a counter cannot be negative, we use a signed 639 type to hold various extra stages. 
*/ 640 641 static const int n_bits = 61; 642 private: 643 static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2; 644 static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1; 645 646 #if defined (__arm__) && (__GNUC__ >= 6 && __GNUC__ <= 8) 647 /* Work-around for PR88469. A bug in the gcc-6/7/8 PCS layout code 648 incorrectly detects the alignment of a structure where the only 649 64-bit aligned object is a bit-field. We force the alignment of 650 the entire field to mitigate this. */ 651 #define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned(8))) 652 #else 653 #define UINT64_BIT_FIELD_ALIGN 654 #endif 655 uint64_t UINT64_BIT_FIELD_ALIGN m_val : n_bits; 656 #undef UINT64_BIT_FIELD_ALIGN 657 enum profile_quality m_quality : 3; 658 659 /* Return true if both values can meaningfully appear in single function 660 body. We have either all counters in function local or global, otherwise 661 operations between them are not really defined well. */ 662 bool compatible_p (const profile_count other) const 663 { 664 if (!initialized_p () || !other.initialized_p ()) 665 return true; 666 if (*this == profile_count::zero () 667 || other == profile_count::zero ()) 668 return true; 669 return ipa_p () == other.ipa_p (); 670 } 671 public: 672 673 /* Used for counters which are expected to be never executed. */ 674 static profile_count zero () 675 { 676 return from_gcov_type (0); 677 } 678 static profile_count adjusted_zero () 679 { 680 profile_count c; 681 c.m_val = 0; 682 c.m_quality = profile_adjusted; 683 return c; 684 } 685 static profile_count guessed_zero () 686 { 687 profile_count c; 688 c.m_val = 0; 689 c.m_quality = profile_guessed; 690 return c; 691 } 692 static profile_count one () 693 { 694 return from_gcov_type (1); 695 } 696 /* Value of counters which has not been initialized. Either because 697 initialization did not happen yet or because profile is unknown. 
*/ 698 static profile_count uninitialized () 699 { 700 profile_count c; 701 c.m_val = uninitialized_count; 702 c.m_quality = profile_guessed_local; 703 return c; 704 } 705 706 /* Conversion to gcov_type is lossy. */ 707 gcov_type to_gcov_type () const 708 { 709 gcc_checking_assert (initialized_p ()); 710 return m_val; 711 } 712 713 /* Return true if value has been initialized. */ 714 bool initialized_p () const 715 { 716 return m_val != uninitialized_count; 717 } 718 /* Return true if value can be trusted. */ 719 bool reliable_p () const 720 { 721 return m_quality >= profile_adjusted; 722 } 723 /* Return true if vlaue can be operated inter-procedurally. */ 724 bool ipa_p () const 725 { 726 return !initialized_p () || m_quality >= profile_guessed_global0; 727 } 728 /* Return true if quality of profile is precise. */ 729 bool precise_p () const 730 { 731 return m_quality == profile_precise; 732 } 733 734 /* When merging basic blocks, the two different profile counts are unified. 735 Return true if this can be done without losing info about profile. 736 The only case we care about here is when first BB contains something 737 that makes it terminate in a way not visible in CFG. */ 738 bool ok_for_merging (profile_count other) const 739 { 740 if (m_quality < profile_adjusted 741 || other.m_quality < profile_adjusted) 742 return true; 743 return !(other < *this); 744 } 745 746 /* When merging two BBs with different counts, pick common count that looks 747 most representative. */ 748 profile_count merge (profile_count other) const 749 { 750 if (*this == other || !other.initialized_p () 751 || m_quality > other.m_quality) 752 return *this; 753 if (other.m_quality > m_quality 754 || other > *this) 755 return other; 756 return *this; 757 } 758 759 /* Basic operations. 
*/ 760 bool operator== (const profile_count &other) const 761 { 762 return m_val == other.m_val && m_quality == other.m_quality; 763 } 764 profile_count operator+ (const profile_count &other) const 765 { 766 if (other == profile_count::zero ()) 767 return *this; 768 if (*this == profile_count::zero ()) 769 return other; 770 if (!initialized_p () || !other.initialized_p ()) 771 return profile_count::uninitialized (); 772 773 profile_count ret; 774 gcc_checking_assert (compatible_p (other)); 775 ret.m_val = m_val + other.m_val; 776 ret.m_quality = MIN (m_quality, other.m_quality); 777 return ret; 778 } 779 profile_count &operator+= (const profile_count &other) 780 { 781 if (other == profile_count::zero ()) 782 return *this; 783 if (*this == profile_count::zero ()) 784 { 785 *this = other; 786 return *this; 787 } 788 if (!initialized_p () || !other.initialized_p ()) 789 return *this = profile_count::uninitialized (); 790 else 791 { 792 gcc_checking_assert (compatible_p (other)); 793 m_val += other.m_val; 794 m_quality = MIN (m_quality, other.m_quality); 795 } 796 return *this; 797 } 798 profile_count operator- (const profile_count &other) const 799 { 800 if (*this == profile_count::zero () || other == profile_count::zero ()) 801 return *this; 802 if (!initialized_p () || !other.initialized_p ()) 803 return profile_count::uninitialized (); 804 gcc_checking_assert (compatible_p (other)); 805 profile_count ret; 806 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0; 807 ret.m_quality = MIN (m_quality, other.m_quality); 808 return ret; 809 } 810 profile_count &operator-= (const profile_count &other) 811 { 812 if (*this == profile_count::zero () || other == profile_count::zero ()) 813 return *this; 814 if (!initialized_p () || !other.initialized_p ()) 815 return *this = profile_count::uninitialized (); 816 else 817 { 818 gcc_checking_assert (compatible_p (other)); 819 m_val = m_val >= other.m_val ? 
m_val - other.m_val: 0; 820 m_quality = MIN (m_quality, other.m_quality); 821 } 822 return *this; 823 } 824 825 /* Return false if profile_count is bogus. */ 826 bool verify () const 827 { 828 gcc_checking_assert (m_quality != profile_uninitialized); 829 return m_val != uninitialized_count || m_quality == profile_guessed_local; 830 } 831 832 /* Comparsions are three-state and conservative. False is returned if 833 the inequality can not be decided. */ 834 bool operator< (const profile_count &other) const 835 { 836 if (!initialized_p () || !other.initialized_p ()) 837 return false; 838 if (*this == profile_count::zero ()) 839 return !(other == profile_count::zero ()); 840 if (other == profile_count::zero ()) 841 return false; 842 gcc_checking_assert (compatible_p (other)); 843 return m_val < other.m_val; 844 } 845 bool operator> (const profile_count &other) const 846 { 847 if (!initialized_p () || !other.initialized_p ()) 848 return false; 849 if (*this == profile_count::zero ()) 850 return false; 851 if (other == profile_count::zero ()) 852 return !(*this == profile_count::zero ()); 853 gcc_checking_assert (compatible_p (other)); 854 return initialized_p () && other.initialized_p () && m_val > other.m_val; 855 } 856 bool operator< (const gcov_type other) const 857 { 858 gcc_checking_assert (ipa_p ()); 859 gcc_checking_assert (other >= 0); 860 return initialized_p () && m_val < (uint64_t) other; 861 } 862 bool operator> (const gcov_type other) const 863 { 864 gcc_checking_assert (ipa_p ()); 865 gcc_checking_assert (other >= 0); 866 return initialized_p () && m_val > (uint64_t) other; 867 } 868 869 bool operator<= (const profile_count &other) const 870 { 871 if (!initialized_p () || !other.initialized_p ()) 872 return false; 873 if (*this == profile_count::zero ()) 874 return true; 875 if (other == profile_count::zero ()) 876 return (*this == profile_count::zero ()); 877 gcc_checking_assert (compatible_p (other)); 878 return m_val <= other.m_val; 879 } 880 bool 
operator>= (const profile_count &other) const 881 { 882 if (!initialized_p () || !other.initialized_p ()) 883 return false; 884 if (other == profile_count::zero ()) 885 return true; 886 if (*this == profile_count::zero ()) 887 return !(other == profile_count::zero ()); 888 gcc_checking_assert (compatible_p (other)); 889 return m_val >= other.m_val; 890 } 891 bool operator<= (const gcov_type other) const 892 { 893 gcc_checking_assert (ipa_p ()); 894 gcc_checking_assert (other >= 0); 895 return initialized_p () && m_val <= (uint64_t) other; 896 } 897 bool operator>= (const gcov_type other) const 898 { 899 gcc_checking_assert (ipa_p ()); 900 gcc_checking_assert (other >= 0); 901 return initialized_p () && m_val >= (uint64_t) other; 902 } 903 /* Return true when value is not zero and can be used for scaling. 904 This is different from *this > 0 because that requires counter to 905 be IPA. */ 906 bool nonzero_p () const 907 { 908 return initialized_p () && m_val != 0; 909 } 910 911 /* Make counter forcingly nonzero. */ 912 profile_count force_nonzero () const 913 { 914 if (!initialized_p ()) 915 return *this; 916 profile_count ret = *this; 917 if (ret.m_val == 0) 918 { 919 ret.m_val = 1; 920 ret.m_quality = MIN (m_quality, profile_adjusted); 921 } 922 return ret; 923 } 924 925 profile_count max (profile_count other) const 926 { 927 if (!initialized_p ()) 928 return other; 929 if (!other.initialized_p ()) 930 return *this; 931 if (*this == profile_count::zero ()) 932 return other; 933 if (other == profile_count::zero ()) 934 return *this; 935 gcc_checking_assert (compatible_p (other)); 936 if (m_val < other.m_val || (m_val == other.m_val 937 && m_quality < other.m_quality)) 938 return other; 939 return *this; 940 } 941 942 /* PROB is a probability in scale 0...REG_BR_PROB_BASE. Scale counter 943 accordingly. 
*/ 944 profile_count apply_probability (int prob) const 945 { 946 gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE); 947 if (m_val == 0) 948 return *this; 949 if (!initialized_p ()) 950 return profile_count::uninitialized (); 951 profile_count ret; 952 ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE); 953 ret.m_quality = MIN (m_quality, profile_adjusted); 954 return ret; 955 } 956 957 /* Scale counter according to PROB. */ 958 profile_count apply_probability (profile_probability prob) const 959 { 960 if (*this == profile_count::zero ()) 961 return *this; 962 if (prob == profile_probability::never ()) 963 return profile_count::zero (); 964 if (!initialized_p ()) 965 return profile_count::uninitialized (); 966 profile_count ret; 967 uint64_t tmp; 968 safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability, 969 &tmp); 970 ret.m_val = tmp; 971 ret.m_quality = MIN (m_quality, prob.m_quality); 972 return ret; 973 } 974 /* Return *THIS * NUM / DEN. */ 975 profile_count apply_scale (int64_t num, int64_t den) const 976 { 977 if (m_val == 0) 978 return *this; 979 if (!initialized_p ()) 980 return profile_count::uninitialized (); 981 profile_count ret; 982 uint64_t tmp; 983 984 gcc_checking_assert (num >= 0 && den > 0); 985 safe_scale_64bit (m_val, num, den, &tmp); 986 ret.m_val = MIN (tmp, max_count); 987 ret.m_quality = MIN (m_quality, profile_adjusted); 988 return ret; 989 } 990 profile_count apply_scale (profile_count num, profile_count den) const 991 { 992 if (*this == profile_count::zero ()) 993 return *this; 994 if (num == profile_count::zero ()) 995 return num; 996 if (!initialized_p () || !num.initialized_p () || !den.initialized_p ()) 997 return profile_count::uninitialized (); 998 if (num == den) 999 return *this; 1000 gcc_checking_assert (den.m_val); 1001 1002 profile_count ret; 1003 uint64_t val; 1004 safe_scale_64bit (m_val, num.m_val, den.m_val, &val); 1005 ret.m_val = MIN (val, max_count); 1006 ret.m_quality = MIN (MIN (MIN 
(m_quality, profile_adjusted), 1007 num.m_quality), den.m_quality); 1008 if (num.ipa_p () && !ret.ipa_p ()) 1009 ret.m_quality = MIN (num.m_quality, profile_guessed); 1010 return ret; 1011 } 1012 1013 /* Return THIS with quality dropped to GUESSED_LOCAL. */ 1014 profile_count guessed_local () const 1015 { 1016 profile_count ret = *this; 1017 if (!initialized_p ()) 1018 return *this; 1019 ret.m_quality = profile_guessed_local; 1020 return ret; 1021 } 1022 1023 /* We know that profile is globally 0 but keep local profile if present. */ 1024 profile_count global0 () const 1025 { 1026 profile_count ret = *this; 1027 if (!initialized_p ()) 1028 return *this; 1029 ret.m_quality = profile_guessed_global0; 1030 return ret; 1031 } 1032 1033 /* We know that profile is globally adjusted 0 but keep local profile 1034 if present. */ 1035 profile_count global0adjusted () const 1036 { 1037 profile_count ret = *this; 1038 if (!initialized_p ()) 1039 return *this; 1040 ret.m_quality = profile_guessed_global0adjusted; 1041 return ret; 1042 } 1043 1044 /* Return THIS with quality dropped to GUESSED. */ 1045 profile_count guessed () const 1046 { 1047 profile_count ret = *this; 1048 ret.m_quality = MIN (ret.m_quality, profile_guessed); 1049 return ret; 1050 } 1051 1052 /* Return variant of profile counte which is always safe to compare 1053 acorss functions. */ 1054 profile_count ipa () const 1055 { 1056 if (m_quality > profile_guessed_global0adjusted) 1057 return *this; 1058 if (m_quality == profile_guessed_global0) 1059 return profile_count::zero (); 1060 if (m_quality == profile_guessed_global0adjusted) 1061 return profile_count::adjusted_zero (); 1062 return profile_count::uninitialized (); 1063 } 1064 1065 /* Return THIS with quality dropped to AFDO. */ 1066 profile_count afdo () const 1067 { 1068 profile_count ret = *this; 1069 ret.m_quality = profile_afdo; 1070 return ret; 1071 } 1072 1073 /* Return probability of event with counter THIS within event with counter 1074 OVERALL. 
*/
  profile_probability probability_in (const profile_count overall) const
    {
      if (*this == profile_count::zero ()
	  && !(overall == profile_count::zero ()))
	return profile_probability::never ();
      if (!initialized_p () || !overall.initialized_p ()
	  || !overall.m_val)
	return profile_probability::uninitialized ();
      /* Only a precise count equal to OVERALL is a guaranteed "always".  */
      if (*this == overall && m_quality == profile_precise)
	return profile_probability::always ();
      profile_probability ret;
      gcc_checking_assert (compatible_p (overall));

      /* THIS exceeding OVERALL indicates an inconsistent profile; cap the
	 probability at 1 and mark it as guessed.  */
      if (overall.m_val < m_val)
	{
	  ret.m_val = profile_probability::max_probability;
	  ret.m_quality = profile_guessed;
	  return ret;
	}
      else
	ret.m_val = RDIV (m_val * profile_probability::max_probability,
			  overall.m_val);
      /* Clamp quality into [guessed, adjusted]: a derived ratio is never
	 precise, and even guessed inputs yield a usable local ratio.  */
      ret.m_quality = MIN (MAX (MIN (m_quality, overall.m_quality),
				profile_guessed), profile_adjusted);
      return ret;
    }

  /* Conversions to the legacy frequency scales; defined out of line.  */
  int to_frequency (struct function *fun) const;
  int to_cgraph_frequency (profile_count entry_bb_count) const;
  sreal to_sreal_scale (profile_count in, bool *known = NULL) const;

  /* Output THIS to F.  */
  void dump (FILE *f) const;

  /* Print THIS to stderr.  */
  void debug () const;

  /* Return true if THIS is known to differ significantly from OTHER.  */
  bool differs_from_p (profile_count other) const;

  /* We want to scale profile across function boundary from NUM to DEN.
     Take care of the side case when NUM and DEN are zeros of incompatible
     kinds.  */
  static void adjust_for_ipa_scaling (profile_count *num, profile_count *den);

  /* THIS is a count of bb which is known to be executed IPA times.
     Combine this information into bb counter.  This means returning IPA
     if it is nonzero, not changing anything if IPA is uninitialized
     and if IPA is zero, turning THIS into corresponding local profile with
     global0.  */
  profile_count combine_with_ipa_count (profile_count ipa);

  /* The profiling runtime uses gcov_type, which is usually 64bit integer.
     Conversions back and forth are used to read the coverage and get it
     into internal representation.  */
  static profile_count from_gcov_type (gcov_type v);

  /* LTO streaming support.  */
  static profile_count stream_in (struct lto_input_block *);
  void stream_out (struct output_block *);
  void stream_out (struct lto_output_stream *);
};
#endif