/* Profile counter container type.
   Copyright (C) 2017-2018 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_PROFILE_COUNT_H
#define GCC_PROFILE_COUNT_H

struct function;
class profile_count;

/* Quality of the profile count.  Because gengtype does not support enums
   inside of classes, this is in global namespace.  The enumerators are
   ordered from least to most trustworthy; code compares qualities with
   <= / >= relying on that order.  */
enum profile_quality {
  /* Uninitialized value.  */
  profile_uninitialized,
  /* Profile is based on static branch prediction heuristics and may
     or may not match reality.  It is local to function and can not be
     compared inter-procedurally.  Never used by probabilities (they are
     always local).  */
  profile_guessed_local,
  /* Profile was read by feedback and was 0, we used local heuristics to guess
     better.  This is the case of functions not run in profile feedback.
     Never used by probabilities.  */
  profile_guessed_global0,

  /* Same as profile_guessed_global0 but global count is adjusted 0.  */
  profile_guessed_global0adjusted,

  /* Profile is based on static branch prediction heuristics.  It may or may
     not reflect the reality but it can be compared interprocedurally
     (for example, we inlined function w/o profile feedback into function
      with feedback and propagated from that).
     Never used by probabilities.  */
  profile_guessed,
  /* Profile was determined by autofdo.  */
  profile_afdo,
  /* Profile was originally based on feedback but it was adjusted
     by code duplicating optimization.  It may not precisely reflect the
     particular code path.  */
  profile_adjusted,
  /* Profile was read from profile feedback or determined by accurate static
     method.  */
  profile_precise
};

/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE 10000

/* Rounding division: (X + Y/2) / Y.  NOTE: Y is evaluated twice; do not
   pass expressions with side effects.  */
#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))

/* Out-of-line fallback for safe_scale_64bit; handles the cases the fast
   paths below cannot prove overflow-free.  */
bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);

/* Compute RES=(a*b + c/2)/c capping and return false if overflow happened.  */

inline bool
safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
{
#if (GCC_VERSION >= 5000)
  /* Fast path: use the overflow-checking builtins when the host compiler
     provides them.  */
  uint64_t tmp;
  if (!__builtin_mul_overflow (a, b, &tmp)
      && !__builtin_add_overflow (tmp, c/2, &tmp))
    {
      *res = tmp / c;
      return true;
    }
  if (c == 1)
    {
      /* Overflow with divisor 1 cannot be recovered; saturate.  */
      *res = (uint64_t) -1;
      return false;
    }
#else
  /* Without the builtins, operands below 2^31 cannot overflow a
     64-bit product/sum, so compute directly.  */
  if (a < ((uint64_t)1 << 31)
      && b < ((uint64_t)1 << 31)
      && c < ((uint64_t)1 << 31))
    {
      *res = (a * b + (c / 2)) / c;
      return true;
    }
#endif
  return slow_safe_scale_64bit (a, b, c, res);
}

/* Data type to hold probabilities.  It implements fixed point arithmetics
   with capping so probability is always in range [0,1] and scaling requiring
   values greater than 1 needs to be represented otherwise.

   In addition to actual value the quality of profile is tracked and
   propagated through all operations.  Special value UNINITIALIZED is used
   for probabilities that have not been determined yet (for example because
   of -fno-guess-branch-probability)

   Typically probabilities are derived from profile feedback (via
   probability_in_gcov_type), autoFDO or guessed statically and then
   propagated throughout the compilation.
111 112 Named probabilities are available: 113 - never (0 probability) 114 - guessed_never 115 - very_unlikely (1/2000 probability) 116 - unlikely (1/5 probablity) 117 - even (1/2 probability) 118 - likely (4/5 probability) 119 - very_likely (1999/2000 probability) 120 - guessed_always 121 - always 122 123 Named probabilities except for never/always are assumed to be statically 124 guessed and thus not necessarily accurate. The difference between never 125 and guessed_never is that the first one should be used only in case that 126 well behaving program will very likely not execute the "never" path. 127 For example if the path is going to abort () call or it exception handling. 128 129 Always and guessed_always probabilities are symmetric. 130 131 For legacy code we support conversion to/from REG_BR_PROB_BASE based fixpoint 132 integer arithmetics. Once the code is converted to branch probabilities, 133 these conversions will probably go away because they are lossy. 134 */ 135 136 class GTY((user)) profile_probability 137 { 138 static const int n_bits = 29; 139 /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that 140 will lead to harder multiplication sequences. */ 141 static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2); 142 static const uint32_t uninitialized_probability 143 = ((uint32_t) 1 << (n_bits - 1)) - 1; 144 145 uint32_t m_val : 29; 146 enum profile_quality m_quality : 3; 147 148 friend class profile_count; 149 public: 150 151 /* Named probabilities. */ 152 static profile_probability never () 153 { 154 profile_probability ret; 155 ret.m_val = 0; 156 ret.m_quality = profile_precise; 157 return ret; 158 } 159 static profile_probability guessed_never () 160 { 161 profile_probability ret; 162 ret.m_val = 0; 163 ret.m_quality = profile_guessed; 164 return ret; 165 } 166 static profile_probability very_unlikely () 167 { 168 /* Be consistent with PROB_VERY_UNLIKELY in predict.h. 
*/ 169 profile_probability r 170 = profile_probability::guessed_always ().apply_scale (1, 2000); 171 r.m_val--; 172 return r; 173 } 174 static profile_probability unlikely () 175 { 176 /* Be consistent with PROB_VERY_LIKELY in predict.h. */ 177 profile_probability r 178 = profile_probability::guessed_always ().apply_scale (1, 5); 179 r.m_val--; 180 return r; 181 } 182 static profile_probability even () 183 { 184 return profile_probability::guessed_always ().apply_scale (1, 2); 185 } 186 static profile_probability very_likely () 187 { 188 return profile_probability::always () - very_unlikely (); 189 } 190 static profile_probability likely () 191 { 192 return profile_probability::always () - unlikely (); 193 } 194 static profile_probability guessed_always () 195 { 196 profile_probability ret; 197 ret.m_val = max_probability; 198 ret.m_quality = profile_guessed; 199 return ret; 200 } 201 static profile_probability always () 202 { 203 profile_probability ret; 204 ret.m_val = max_probability; 205 ret.m_quality = profile_precise; 206 return ret; 207 } 208 /* Probabilities which has not been initialized. Either because 209 initialization did not happen yet or because profile is unknown. */ 210 static profile_probability uninitialized () 211 { 212 profile_probability c; 213 c.m_val = uninitialized_probability; 214 c.m_quality = profile_guessed; 215 return c; 216 } 217 218 219 /* Return true if value has been initialized. */ 220 bool initialized_p () const 221 { 222 return m_val != uninitialized_probability; 223 } 224 /* Return true if value can be trusted. */ 225 bool reliable_p () const 226 { 227 return m_quality >= profile_adjusted; 228 } 229 230 /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetics. 231 this is mostly to support legacy code and should go away. 
*/ 232 static profile_probability from_reg_br_prob_base (int v) 233 { 234 profile_probability ret; 235 gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE); 236 ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE); 237 ret.m_quality = profile_guessed; 238 return ret; 239 } 240 int to_reg_br_prob_base () const 241 { 242 gcc_checking_assert (initialized_p ()); 243 return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability); 244 } 245 246 /* Conversion to and from RTL representation of profile probabilities. */ 247 static profile_probability from_reg_br_prob_note (int v) 248 { 249 profile_probability ret; 250 ret.m_val = ((unsigned int)v) / 8; 251 ret.m_quality = (enum profile_quality)(v & 7); 252 return ret; 253 } 254 int to_reg_br_prob_note () const 255 { 256 gcc_checking_assert (initialized_p ()); 257 int ret = m_val * 8 + m_quality; 258 gcc_checking_assert (profile_probability::from_reg_br_prob_note (ret) 259 == *this); 260 return ret; 261 } 262 263 /* Return VAL1/VAL2. */ 264 static profile_probability probability_in_gcov_type 265 (gcov_type val1, gcov_type val2) 266 { 267 profile_probability ret; 268 gcc_checking_assert (val1 >= 0 && val2 > 0); 269 if (val1 > val2) 270 ret.m_val = max_probability; 271 else 272 { 273 uint64_t tmp; 274 safe_scale_64bit (val1, max_probability, val2, &tmp); 275 gcc_checking_assert (tmp <= max_probability); 276 ret.m_val = tmp; 277 } 278 ret.m_quality = profile_precise; 279 return ret; 280 } 281 282 /* Basic operations. 
*/ 283 bool operator== (const profile_probability &other) const 284 { 285 return m_val == other.m_val && m_quality == other.m_quality; 286 } 287 profile_probability operator+ (const profile_probability &other) const 288 { 289 if (other == profile_probability::never ()) 290 return *this; 291 if (*this == profile_probability::never ()) 292 return other; 293 if (!initialized_p () || !other.initialized_p ()) 294 return profile_probability::uninitialized (); 295 296 profile_probability ret; 297 ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability); 298 ret.m_quality = MIN (m_quality, other.m_quality); 299 return ret; 300 } 301 profile_probability &operator+= (const profile_probability &other) 302 { 303 if (other == profile_probability::never ()) 304 return *this; 305 if (*this == profile_probability::never ()) 306 { 307 *this = other; 308 return *this; 309 } 310 if (!initialized_p () || !other.initialized_p ()) 311 return *this = profile_probability::uninitialized (); 312 else 313 { 314 m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability); 315 m_quality = MIN (m_quality, other.m_quality); 316 } 317 return *this; 318 } 319 profile_probability operator- (const profile_probability &other) const 320 { 321 if (*this == profile_probability::never () 322 || other == profile_probability::never ()) 323 return *this; 324 if (!initialized_p () || !other.initialized_p ()) 325 return profile_probability::uninitialized (); 326 profile_probability ret; 327 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0; 328 ret.m_quality = MIN (m_quality, other.m_quality); 329 return ret; 330 } 331 profile_probability &operator-= (const profile_probability &other) 332 { 333 if (*this == profile_probability::never () 334 || other == profile_probability::never ()) 335 return *this; 336 if (!initialized_p () || !other.initialized_p ()) 337 return *this = profile_probability::uninitialized (); 338 else 339 { 340 m_val = m_val >= other.m_val ? 
m_val - other.m_val : 0; 341 m_quality = MIN (m_quality, other.m_quality); 342 } 343 return *this; 344 } 345 profile_probability operator* (const profile_probability &other) const 346 { 347 if (*this == profile_probability::never () 348 || other == profile_probability::never ()) 349 return profile_probability::never (); 350 if (!initialized_p () || !other.initialized_p ()) 351 return profile_probability::uninitialized (); 352 profile_probability ret; 353 ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability); 354 ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted); 355 return ret; 356 } 357 profile_probability &operator*= (const profile_probability &other) 358 { 359 if (*this == profile_probability::never () 360 || other == profile_probability::never ()) 361 return *this = profile_probability::never (); 362 if (!initialized_p () || !other.initialized_p ()) 363 return *this = profile_probability::uninitialized (); 364 else 365 { 366 m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability); 367 m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted); 368 } 369 return *this; 370 } 371 profile_probability operator/ (const profile_probability &other) const 372 { 373 if (*this == profile_probability::never ()) 374 return profile_probability::never (); 375 if (!initialized_p () || !other.initialized_p ()) 376 return profile_probability::uninitialized (); 377 profile_probability ret; 378 /* If we get probability above 1, mark it as unreliable and return 1. 
*/ 379 if (m_val >= other.m_val) 380 { 381 ret.m_val = max_probability; 382 ret.m_quality = MIN (MIN (m_quality, other.m_quality), 383 profile_guessed); 384 return ret; 385 } 386 else if (!m_val) 387 ret.m_val = 0; 388 else 389 { 390 gcc_checking_assert (other.m_val); 391 ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability, 392 other.m_val), 393 max_probability); 394 } 395 ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted); 396 return ret; 397 } 398 profile_probability &operator/= (const profile_probability &other) 399 { 400 if (*this == profile_probability::never ()) 401 return *this = profile_probability::never (); 402 if (!initialized_p () || !other.initialized_p ()) 403 return *this = profile_probability::uninitialized (); 404 else 405 { 406 /* If we get probability above 1, mark it as unreliable 407 and return 1. */ 408 if (m_val > other.m_val) 409 { 410 m_val = max_probability; 411 m_quality = MIN (MIN (m_quality, other.m_quality), 412 profile_guessed); 413 return *this; 414 } 415 else if (!m_val) 416 ; 417 else 418 { 419 gcc_checking_assert (other.m_val); 420 m_val = MIN (RDIV ((uint64_t)m_val * max_probability, 421 other.m_val), 422 max_probability); 423 } 424 m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted); 425 } 426 return *this; 427 } 428 429 /* Split *THIS (ORIG) probability into 2 probabilities, such that 430 the returned one (FIRST) is *THIS * CPROB and *THIS is 431 adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND 432 == ORIG. This is useful e.g. when splitting a conditional 433 branch like: 434 if (cond) 435 goto lab; // ORIG probability 436 into 437 if (cond1) 438 goto lab; // FIRST = ORIG * CPROB probability 439 if (cond2) 440 goto lab; // SECOND probability 441 such that the overall probability of jumping to lab remains 442 the same. CPROB gives the relative probability between the 443 branches. 
*/ 444 profile_probability split (const profile_probability &cprob) 445 { 446 profile_probability ret = *this * cprob; 447 /* The following is equivalent to: 448 *this = cprob.invert () * *this / ret.invert (); */ 449 *this = (*this - ret) / ret.invert (); 450 return ret; 451 } 452 453 gcov_type apply (gcov_type val) const 454 { 455 if (*this == profile_probability::uninitialized ()) 456 return val / 2; 457 return RDIV (val * m_val, max_probability); 458 } 459 460 /* Return 1-*THIS. */ 461 profile_probability invert () const 462 { 463 return profile_probability::always() - *this; 464 } 465 466 /* Return THIS with quality dropped to GUESSED. */ 467 profile_probability guessed () const 468 { 469 profile_probability ret = *this; 470 ret.m_quality = profile_guessed; 471 return ret; 472 } 473 474 /* Return THIS with quality dropped to AFDO. */ 475 profile_probability afdo () const 476 { 477 profile_probability ret = *this; 478 ret.m_quality = profile_afdo; 479 return ret; 480 } 481 482 /* Return *THIS * NUM / DEN. */ 483 profile_probability apply_scale (int64_t num, int64_t den) const 484 { 485 if (*this == profile_probability::never ()) 486 return *this; 487 if (!initialized_p ()) 488 return profile_probability::uninitialized (); 489 profile_probability ret; 490 uint64_t tmp; 491 safe_scale_64bit (m_val, num, den, &tmp); 492 ret.m_val = MIN (tmp, max_probability); 493 ret.m_quality = MIN (m_quality, profile_adjusted); 494 return ret; 495 } 496 497 /* Return true when the probability of edge is reliable. 498 499 The profile guessing code is good at predicting branch outcome (ie. 500 taken/not taken), that is predicted right slightly over 75% of time. 501 It is however notoriously poor on predicting the probability itself. 502 In general the profile appear a lot flatter (with probabilities closer 503 to 50%) than the reality so it is bad idea to use it to drive optimization 504 such as those disabling dynamic branch prediction for well predictable 505 branches. 
506 507 There are two exceptions - edges leading to noreturn edges and edges 508 predicted by number of iterations heuristics are predicted well. This macro 509 should be able to distinguish those, but at the moment it simply check for 510 noreturn heuristic that is only one giving probability over 99% or bellow 511 1%. In future we might want to propagate reliability information across the 512 CFG if we find this information useful on multiple places. */ 513 514 bool probably_reliable_p () const 515 { 516 if (m_quality >= profile_adjusted) 517 return true; 518 if (!initialized_p ()) 519 return false; 520 return m_val < max_probability / 100 521 || m_val > max_probability - max_probability / 100; 522 } 523 524 /* Return false if profile_probability is bogus. */ 525 bool verify () const 526 { 527 gcc_checking_assert (m_quality != profile_uninitialized); 528 if (m_val == uninitialized_probability) 529 return m_quality == profile_guessed; 530 else if (m_quality < profile_guessed) 531 return false; 532 return m_val <= max_probability; 533 } 534 535 /* Comparsions are three-state and conservative. False is returned if 536 the inequality can not be decided. */ 537 bool operator< (const profile_probability &other) const 538 { 539 return initialized_p () && other.initialized_p () && m_val < other.m_val; 540 } 541 bool operator> (const profile_probability &other) const 542 { 543 return initialized_p () && other.initialized_p () && m_val > other.m_val; 544 } 545 546 bool operator<= (const profile_probability &other) const 547 { 548 return initialized_p () && other.initialized_p () && m_val <= other.m_val; 549 } 550 bool operator>= (const profile_probability &other) const 551 { 552 return initialized_p () && other.initialized_p () && m_val >= other.m_val; 553 } 554 555 /* Output THIS to F. */ 556 void dump (FILE *f) const; 557 558 /* Print THIS to stderr. */ 559 void debug () const; 560 561 /* Return true if THIS is known to differ significantly from OTHER. 
*/ 562 bool differs_from_p (profile_probability other) const; 563 /* Return if difference is greater than 50%. */ 564 bool differs_lot_from_p (profile_probability other) const; 565 /* COUNT1 times event happens with *THIS probability, COUNT2 times OTHER 566 happens with COUNT2 probablity. Return probablity that either *THIS or 567 OTHER happens. */ 568 profile_probability combine_with_count (profile_count count1, 569 profile_probability other, 570 profile_count count2) const; 571 572 /* LTO streaming support. */ 573 static profile_probability stream_in (struct lto_input_block *); 574 void stream_out (struct output_block *); 575 void stream_out (struct lto_output_stream *); 576 }; 577 578 /* Main data type to hold profile counters in GCC. Profile counts originate 579 either from profile feedback, static profile estimation or both. We do not 580 perform whole program profile propagation and thus profile estimation 581 counters are often local to function, while counters from profile feedback 582 (or special cases of profile estimation) can be used inter-procedurally. 583 584 There are 3 basic types 585 1) local counters which are result of intra-procedural static profile 586 estimation. 587 2) ipa counters which are result of profile feedback or special case 588 of static profile estimation (such as in function main). 589 3) counters which counts as 0 inter-procedurally (beause given function 590 was never run in train feedback) but they hold local static profile 591 estimate. 592 593 Counters of type 1 and 3 can not be mixed with counters of different type 594 within operation (because whole function should use one type of counter) 595 with exception that global zero mix in most operations where outcome is 596 well defined. 597 598 To take local counter and use it inter-procedurally use ipa member function 599 which strips information irelevant at the inter-procedural level. 
600 601 Counters are 61bit integers representing number of executions during the 602 train run or normalized frequency within the function. 603 604 As the profile is maintained during the compilation, many adjustments are 605 made. Not all transformations can be made precisely, most importantly 606 when code is being duplicated. It also may happen that part of CFG has 607 profile counts known while other do not - for example when LTO optimizing 608 partly profiled program or when profile was lost due to COMDAT merging. 609 610 For this reason profile_count tracks more information than 611 just unsigned integer and it is also ready for profile mismatches. 612 The API of this data type represent operations that are natural 613 on profile counts - sum, difference and operation with scales and 614 probabilities. All operations are safe by never getting negative counts 615 and they do end up in uninitialized scale if any of the parameters is 616 uninitialized. 617 618 All comparsions that are three state and handling of probabilities. Thus 619 a < b is not equal to !(a >= b). 620 621 The following pre-defined counts are available: 622 623 profile_count::zero () for code that is known to execute zero times at 624 runtime (this can be detected statically i.e. for paths leading to 625 abort (); 626 profile_count::one () for code that is known to execute once (such as 627 main () function 628 profile_count::uninitialized () for unknown execution count. 629 630 */ 631 632 class sreal; 633 634 class GTY(()) profile_count 635 { 636 public: 637 /* Use 62bit to hold basic block counters. Should be at least 638 64bit. Although a counter cannot be negative, we use a signed 639 type to hold various extra stages. 
*/ 640 641 static const int n_bits = 61; 642 private: 643 static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2; 644 static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1; 645 646 uint64_t m_val : n_bits; 647 enum profile_quality m_quality : 3; 648 649 /* Return true if both values can meaningfully appear in single function 650 body. We have either all counters in function local or global, otherwise 651 operations between them are not really defined well. */ 652 bool compatible_p (const profile_count other) const 653 { 654 if (!initialized_p () || !other.initialized_p ()) 655 return true; 656 if (*this == profile_count::zero () 657 || other == profile_count::zero ()) 658 return true; 659 return ipa_p () == other.ipa_p (); 660 } 661 public: 662 663 /* Used for counters which are expected to be never executed. */ 664 static profile_count zero () 665 { 666 return from_gcov_type (0); 667 } 668 static profile_count adjusted_zero () 669 { 670 profile_count c; 671 c.m_val = 0; 672 c.m_quality = profile_adjusted; 673 return c; 674 } 675 static profile_count guessed_zero () 676 { 677 profile_count c; 678 c.m_val = 0; 679 c.m_quality = profile_guessed; 680 return c; 681 } 682 static profile_count one () 683 { 684 return from_gcov_type (1); 685 } 686 /* Value of counters which has not been initialized. Either because 687 initialization did not happen yet or because profile is unknown. */ 688 static profile_count uninitialized () 689 { 690 profile_count c; 691 c.m_val = uninitialized_count; 692 c.m_quality = profile_guessed_local; 693 return c; 694 } 695 696 /* Conversion to gcov_type is lossy. */ 697 gcov_type to_gcov_type () const 698 { 699 gcc_checking_assert (initialized_p ()); 700 return m_val; 701 } 702 703 /* Return true if value has been initialized. */ 704 bool initialized_p () const 705 { 706 return m_val != uninitialized_count; 707 } 708 /* Return true if value can be trusted. 
*/ 709 bool reliable_p () const 710 { 711 return m_quality >= profile_adjusted; 712 } 713 /* Return true if vlaue can be operated inter-procedurally. */ 714 bool ipa_p () const 715 { 716 return !initialized_p () || m_quality >= profile_guessed_global0; 717 } 718 /* Return true if quality of profile is precise. */ 719 bool precise_p () const 720 { 721 return m_quality == profile_precise; 722 } 723 724 /* When merging basic blocks, the two different profile counts are unified. 725 Return true if this can be done without losing info about profile. 726 The only case we care about here is when first BB contains something 727 that makes it terminate in a way not visible in CFG. */ 728 bool ok_for_merging (profile_count other) const 729 { 730 if (m_quality < profile_adjusted 731 || other.m_quality < profile_adjusted) 732 return true; 733 return !(other < *this); 734 } 735 736 /* When merging two BBs with different counts, pick common count that looks 737 most representative. */ 738 profile_count merge (profile_count other) const 739 { 740 if (*this == other || !other.initialized_p () 741 || m_quality > other.m_quality) 742 return *this; 743 if (other.m_quality > m_quality 744 || other > *this) 745 return other; 746 return *this; 747 } 748 749 /* Basic operations. 
*/ 750 bool operator== (const profile_count &other) const 751 { 752 return m_val == other.m_val && m_quality == other.m_quality; 753 } 754 profile_count operator+ (const profile_count &other) const 755 { 756 if (other == profile_count::zero ()) 757 return *this; 758 if (*this == profile_count::zero ()) 759 return other; 760 if (!initialized_p () || !other.initialized_p ()) 761 return profile_count::uninitialized (); 762 763 profile_count ret; 764 gcc_checking_assert (compatible_p (other)); 765 ret.m_val = m_val + other.m_val; 766 ret.m_quality = MIN (m_quality, other.m_quality); 767 return ret; 768 } 769 profile_count &operator+= (const profile_count &other) 770 { 771 if (other == profile_count::zero ()) 772 return *this; 773 if (*this == profile_count::zero ()) 774 { 775 *this = other; 776 return *this; 777 } 778 if (!initialized_p () || !other.initialized_p ()) 779 return *this = profile_count::uninitialized (); 780 else 781 { 782 gcc_checking_assert (compatible_p (other)); 783 m_val += other.m_val; 784 m_quality = MIN (m_quality, other.m_quality); 785 } 786 return *this; 787 } 788 profile_count operator- (const profile_count &other) const 789 { 790 if (*this == profile_count::zero () || other == profile_count::zero ()) 791 return *this; 792 if (!initialized_p () || !other.initialized_p ()) 793 return profile_count::uninitialized (); 794 gcc_checking_assert (compatible_p (other)); 795 profile_count ret; 796 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0; 797 ret.m_quality = MIN (m_quality, other.m_quality); 798 return ret; 799 } 800 profile_count &operator-= (const profile_count &other) 801 { 802 if (*this == profile_count::zero () || other == profile_count::zero ()) 803 return *this; 804 if (!initialized_p () || !other.initialized_p ()) 805 return *this = profile_count::uninitialized (); 806 else 807 { 808 gcc_checking_assert (compatible_p (other)); 809 m_val = m_val >= other.m_val ? 
m_val - other.m_val: 0; 810 m_quality = MIN (m_quality, other.m_quality); 811 } 812 return *this; 813 } 814 815 /* Return false if profile_count is bogus. */ 816 bool verify () const 817 { 818 gcc_checking_assert (m_quality != profile_uninitialized); 819 return m_val != uninitialized_count || m_quality == profile_guessed_local; 820 } 821 822 /* Comparsions are three-state and conservative. False is returned if 823 the inequality can not be decided. */ 824 bool operator< (const profile_count &other) const 825 { 826 if (!initialized_p () || !other.initialized_p ()) 827 return false; 828 if (*this == profile_count::zero ()) 829 return !(other == profile_count::zero ()); 830 if (other == profile_count::zero ()) 831 return false; 832 gcc_checking_assert (compatible_p (other)); 833 return m_val < other.m_val; 834 } 835 bool operator> (const profile_count &other) const 836 { 837 if (!initialized_p () || !other.initialized_p ()) 838 return false; 839 if (*this == profile_count::zero ()) 840 return false; 841 if (other == profile_count::zero ()) 842 return !(*this == profile_count::zero ()); 843 gcc_checking_assert (compatible_p (other)); 844 return initialized_p () && other.initialized_p () && m_val > other.m_val; 845 } 846 bool operator< (const gcov_type other) const 847 { 848 gcc_checking_assert (ipa_p ()); 849 gcc_checking_assert (other >= 0); 850 return initialized_p () && m_val < (uint64_t) other; 851 } 852 bool operator> (const gcov_type other) const 853 { 854 gcc_checking_assert (ipa_p ()); 855 gcc_checking_assert (other >= 0); 856 return initialized_p () && m_val > (uint64_t) other; 857 } 858 859 bool operator<= (const profile_count &other) const 860 { 861 if (!initialized_p () || !other.initialized_p ()) 862 return false; 863 if (*this == profile_count::zero ()) 864 return true; 865 if (other == profile_count::zero ()) 866 return (*this == profile_count::zero ()); 867 gcc_checking_assert (compatible_p (other)); 868 return m_val <= other.m_val; 869 } 870 bool 
operator>= (const profile_count &other) const 871 { 872 if (!initialized_p () || !other.initialized_p ()) 873 return false; 874 if (other == profile_count::zero ()) 875 return true; 876 if (*this == profile_count::zero ()) 877 return !(other == profile_count::zero ()); 878 gcc_checking_assert (compatible_p (other)); 879 return m_val >= other.m_val; 880 } 881 bool operator<= (const gcov_type other) const 882 { 883 gcc_checking_assert (ipa_p ()); 884 gcc_checking_assert (other >= 0); 885 return initialized_p () && m_val <= (uint64_t) other; 886 } 887 bool operator>= (const gcov_type other) const 888 { 889 gcc_checking_assert (ipa_p ()); 890 gcc_checking_assert (other >= 0); 891 return initialized_p () && m_val >= (uint64_t) other; 892 } 893 /* Return true when value is not zero and can be used for scaling. 894 This is different from *this > 0 because that requires counter to 895 be IPA. */ 896 bool nonzero_p () const 897 { 898 return initialized_p () && m_val != 0; 899 } 900 901 /* Make counter forcingly nonzero. */ 902 profile_count force_nonzero () const 903 { 904 if (!initialized_p ()) 905 return *this; 906 profile_count ret = *this; 907 if (ret.m_val == 0) 908 { 909 ret.m_val = 1; 910 ret.m_quality = MIN (m_quality, profile_adjusted); 911 } 912 return ret; 913 } 914 915 profile_count max (profile_count other) const 916 { 917 if (!initialized_p ()) 918 return other; 919 if (!other.initialized_p ()) 920 return *this; 921 if (*this == profile_count::zero ()) 922 return other; 923 if (other == profile_count::zero ()) 924 return *this; 925 gcc_checking_assert (compatible_p (other)); 926 if (m_val < other.m_val || (m_val == other.m_val 927 && m_quality < other.m_quality)) 928 return other; 929 return *this; 930 } 931 932 /* PROB is a probability in scale 0...REG_BR_PROB_BASE. Scale counter 933 accordingly. 
*/ 934 profile_count apply_probability (int prob) const 935 { 936 gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE); 937 if (m_val == 0) 938 return *this; 939 if (!initialized_p ()) 940 return profile_count::uninitialized (); 941 profile_count ret; 942 ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE); 943 ret.m_quality = MIN (m_quality, profile_adjusted); 944 return ret; 945 } 946 947 /* Scale counter according to PROB. */ 948 profile_count apply_probability (profile_probability prob) const 949 { 950 if (*this == profile_count::zero ()) 951 return *this; 952 if (prob == profile_probability::never ()) 953 return profile_count::zero (); 954 if (!initialized_p ()) 955 return profile_count::uninitialized (); 956 profile_count ret; 957 uint64_t tmp; 958 safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability, 959 &tmp); 960 ret.m_val = tmp; 961 ret.m_quality = MIN (m_quality, prob.m_quality); 962 return ret; 963 } 964 /* Return *THIS * NUM / DEN. */ 965 profile_count apply_scale (int64_t num, int64_t den) const 966 { 967 if (m_val == 0) 968 return *this; 969 if (!initialized_p ()) 970 return profile_count::uninitialized (); 971 profile_count ret; 972 uint64_t tmp; 973 974 gcc_checking_assert (num >= 0 && den > 0); 975 safe_scale_64bit (m_val, num, den, &tmp); 976 ret.m_val = MIN (tmp, max_count); 977 ret.m_quality = MIN (m_quality, profile_adjusted); 978 return ret; 979 } 980 profile_count apply_scale (profile_count num, profile_count den) const 981 { 982 if (*this == profile_count::zero ()) 983 return *this; 984 if (num == profile_count::zero ()) 985 return num; 986 if (!initialized_p () || !num.initialized_p () || !den.initialized_p ()) 987 return profile_count::uninitialized (); 988 if (num == den) 989 return *this; 990 gcc_checking_assert (den.m_val); 991 992 profile_count ret; 993 uint64_t val; 994 safe_scale_64bit (m_val, num.m_val, den.m_val, &val); 995 ret.m_val = MIN (val, max_count); 996 ret.m_quality = MIN (MIN (MIN (m_quality, 
profile_adjusted), 997 num.m_quality), den.m_quality); 998 if (num.ipa_p () && !ret.ipa_p ()) 999 ret.m_quality = MIN (num.m_quality, profile_guessed); 1000 return ret; 1001 } 1002 1003 /* Return THIS with quality dropped to GUESSED_LOCAL. */ 1004 profile_count guessed_local () const 1005 { 1006 profile_count ret = *this; 1007 if (!initialized_p ()) 1008 return *this; 1009 ret.m_quality = profile_guessed_local; 1010 return ret; 1011 } 1012 1013 /* We know that profile is globally 0 but keep local profile if present. */ 1014 profile_count global0 () const 1015 { 1016 profile_count ret = *this; 1017 if (!initialized_p ()) 1018 return *this; 1019 ret.m_quality = profile_guessed_global0; 1020 return ret; 1021 } 1022 1023 /* We know that profile is globally adjusted 0 but keep local profile 1024 if present. */ 1025 profile_count global0adjusted () const 1026 { 1027 profile_count ret = *this; 1028 if (!initialized_p ()) 1029 return *this; 1030 ret.m_quality = profile_guessed_global0adjusted; 1031 return ret; 1032 } 1033 1034 /* Return THIS with quality dropped to GUESSED. */ 1035 profile_count guessed () const 1036 { 1037 profile_count ret = *this; 1038 ret.m_quality = MIN (ret.m_quality, profile_guessed); 1039 return ret; 1040 } 1041 1042 /* Return variant of profile counte which is always safe to compare 1043 acorss functions. */ 1044 profile_count ipa () const 1045 { 1046 if (m_quality > profile_guessed_global0adjusted) 1047 return *this; 1048 if (m_quality == profile_guessed_global0) 1049 return profile_count::zero (); 1050 if (m_quality == profile_guessed_global0adjusted) 1051 return profile_count::adjusted_zero (); 1052 return profile_count::uninitialized (); 1053 } 1054 1055 /* Return THIS with quality dropped to AFDO. */ 1056 profile_count afdo () const 1057 { 1058 profile_count ret = *this; 1059 ret.m_quality = profile_afdo; 1060 return ret; 1061 } 1062 1063 /* Return probability of event with counter THIS within event with counter 1064 OVERALL. 
*/ 1065 profile_probability probability_in (const profile_count overall) const 1066 { 1067 if (*this == profile_count::zero () 1068 && !(overall == profile_count::zero ())) 1069 return profile_probability::never (); 1070 if (!initialized_p () || !overall.initialized_p () 1071 || !overall.m_val) 1072 return profile_probability::uninitialized (); 1073 if (*this == overall && m_quality == profile_precise) 1074 return profile_probability::always (); 1075 profile_probability ret; 1076 gcc_checking_assert (compatible_p (overall)); 1077 1078 if (overall.m_val < m_val) 1079 { 1080 ret.m_val = profile_probability::max_probability; 1081 ret.m_quality = profile_guessed; 1082 return ret; 1083 } 1084 else 1085 ret.m_val = RDIV (m_val * profile_probability::max_probability, 1086 overall.m_val); 1087 ret.m_quality = MIN (MAX (MIN (m_quality, overall.m_quality), 1088 profile_guessed), profile_adjusted); 1089 return ret; 1090 } 1091 1092 int to_frequency (struct function *fun) const; 1093 int to_cgraph_frequency (profile_count entry_bb_count) const; 1094 sreal to_sreal_scale (profile_count in, bool *known = NULL) const; 1095 1096 /* Output THIS to F. */ 1097 void dump (FILE *f) const; 1098 1099 /* Print THIS to stderr. */ 1100 void debug () const; 1101 1102 /* Return true if THIS is known to differ significantly from OTHER. */ 1103 bool differs_from_p (profile_count other) const; 1104 1105 /* We want to scale profile across function boundary from NUM to DEN. 1106 Take care of the side case when NUM and DEN are zeros of incompatible 1107 kinds. */ 1108 static void adjust_for_ipa_scaling (profile_count *num, profile_count *den); 1109 1110 /* THIS is a count of bb which is known to be executed IPA times. 1111 Combine this information into bb counter. This means returning IPA 1112 if it is nonzero, not changing anything if IPA is uninitialized 1113 and if IPA is zero, turning THIS into corresponding local profile with 1114 global0. 
*/ 1115 profile_count combine_with_ipa_count (profile_count ipa); 1116 1117 /* The profiling runtime uses gcov_type, which is usually 64bit integer. 1118 Conversions back and forth are used to read the coverage and get it 1119 into internal representation. */ 1120 static profile_count from_gcov_type (gcov_type v); 1121 1122 /* LTO streaming support. */ 1123 static profile_count stream_in (struct lto_input_block *); 1124 void stream_out (struct output_block *); 1125 void stream_out (struct lto_output_stream *); 1126 }; 1127 #endif 1128