1 /* Profile counter container type.
2 Copyright (C) 2017-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_PROFILE_COUNT_H
22 #define GCC_PROFILE_COUNT_H
23
24 struct function;
25 class profile_count;
26
/* Quality of the profile count.  Because gengtype does not support enums
   inside of classes, this is in the global namespace.  */
29 enum profile_quality {
30 /* Uninitialized value. */
31 profile_uninitialized,
  /* Profile is based on static branch prediction heuristics and may
     or may not match reality.  It is local to the function and cannot be
     compared inter-procedurally.  Never used by probabilities (they are
     always local).  */
  profile_guessed_local,
  /* Profile was read by feedback and was 0; we used local heuristics to guess
     better.  This is the case of functions not run in the profile feedback
     train run.  Never used by probabilities.  */
40 profile_guessed_global0,
41
  /* Same as profile_guessed_global0 but the global count is an adjusted 0.  */
43 profile_guessed_global0adjusted,
44
  /* Profile is based on static branch prediction heuristics.  It may or may
     not reflect the reality but it can be compared interprocedurally
     (for example, we inlined function w/o profile feedback into function
     with feedback and propagated from that).
     Never used by probabilities.  */
50 profile_guessed,
51 /* Profile was determined by autofdo. */
52 profile_afdo,
53 /* Profile was originally based on feedback but it was adjusted
54 by code duplicating optimization. It may not precisely reflect the
55 particular code path. */
56 profile_adjusted,
57 /* Profile was read from profile feedback or determined by accurate static
58 method. */
59 profile_precise
60 };
61
62 extern const char *profile_quality_as_string (enum profile_quality);
63
64 /* The base value for branch probability notes and edge probabilities. */
65 #define REG_BR_PROB_BASE 10000
66
67 #define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
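/* RDIV rounds to the nearest value rather than truncating; for example
   RDIV (7, 2) == (7 + 1) / 2 == 4, while plain integer division would
   give 3.  */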
68
69 bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);
70
71 /* Compute RES=(a*b + c/2)/c capping and return false if overflow happened. */
72
73 inline bool
safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
75 {
76 #if (GCC_VERSION >= 5000)
77 uint64_t tmp;
78 if (!__builtin_mul_overflow (a, b, &tmp)
79 && !__builtin_add_overflow (tmp, c/2, &tmp))
80 {
81 *res = tmp / c;
82 return true;
83 }
84 if (c == 1)
85 {
86 *res = (uint64_t) -1;
87 return false;
88 }
89 #else
90 if (a < ((uint64_t)1 << 31)
91 && b < ((uint64_t)1 << 31)
92 && c < ((uint64_t)1 << 31))
93 {
94 *res = (a * b + (c / 2)) / c;
95 return true;
96 }
97 #endif
98 return slow_safe_scale_64bit (a, b, c, res);
99 }
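
/* Usage sketch (illustrative only; COUNT, NUM, DEN and SCALED are
   hypothetical variables):

     uint64_t scaled;
     if (safe_scale_64bit (count, num, den, &scaled))
       ;  // scaled == (count * num + den / 2) / den, computed exactly
     else
       ;  // the exact result does not fit in 64 bits; scaled is capped

   The slow path (slow_safe_scale_64bit) is only entered when the fast
   64-bit computation might overflow.  */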
100
/* Data type to hold probabilities.  It implements fixed-point arithmetic
   with capping so the probability is always in the range [0,1]; scalings
   that would require values greater than 1 need to be represented
   otherwise.

   In addition to the actual value, the quality of the profile is tracked
   and propagated through all operations.  The special value UNINITIALIZED
   is used for probabilities that have not been determined yet (for example
   because of -fno-guess-branch-probability).

   Typically probabilities are derived from profile feedback (via
   probability_in_gcov_type), autoFDO or guessed statically and then
   propagated throughout the compilation.

   Named probabilities are available:
     - never (0 probability)
     - guessed_never
     - very_unlikely (1/2000 probability)
     - unlikely (1/5 probability)
     - even (1/2 probability)
     - likely (4/5 probability)
     - very_likely (1999/2000 probability)
     - guessed_always
     - always

   Named probabilities except for never/always are assumed to be statically
   guessed and thus not necessarily accurate.  The difference between never
   and guessed_never is that the former should be used only when a well
   behaving program will very likely not execute the "never" path, for
   example paths leading to an abort () call or to exception handling.

   Always and guessed_always probabilities are symmetric.

   For legacy code we support conversion to/from REG_BR_PROB_BASE based
   fixed-point integer arithmetic.  Once the code is converted to branch
   probabilities, these conversions will probably go away because they are
   lossy.  */
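
/* A worked example of the representation (illustrative only): with
   n_bits == 29 below, the value 1.0 is stored as max_probability == 1 << 27,
   so profile_probability::even () stores 1 << 26 and

     profile_probability p = profile_probability::even ();
     gcov_type n = p.apply (1000);   // n == 500

   Operations cap at [0, 1]; e.g. even () + even () yields the value of
   always () but with guessed quality, since even () itself is guessed.  */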
137
class GTY((user)) profile_probability
139 {
140 static const int n_bits = 29;
141 /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
142 will lead to harder multiplication sequences. */
143 static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
144 static const uint32_t uninitialized_probability
145 = ((uint32_t) 1 << (n_bits - 1)) - 1;
146
147 uint32_t m_val : 29;
148 enum profile_quality m_quality : 3;
149
150 friend class profile_count;
151 public:
152
153 /* Named probabilities. */
154 static profile_probability never ()
155 {
156 profile_probability ret;
157 ret.m_val = 0;
158 ret.m_quality = profile_precise;
159 return ret;
160 }
161 static profile_probability guessed_never ()
162 {
163 profile_probability ret;
164 ret.m_val = 0;
165 ret.m_quality = profile_guessed;
166 return ret;
167 }
168 static profile_probability very_unlikely ()
169 {
170 /* Be consistent with PROB_VERY_UNLIKELY in predict.h. */
171 profile_probability r
172 = profile_probability::guessed_always ().apply_scale (1, 2000);
173 r.m_val--;
174 return r;
175 }
176 static profile_probability unlikely ()
177 {
      /* Be consistent with PROB_UNLIKELY in predict.h.  */
179 profile_probability r
180 = profile_probability::guessed_always ().apply_scale (1, 5);
181 r.m_val--;
182 return r;
183 }
184 static profile_probability even ()
185 {
186 return profile_probability::guessed_always ().apply_scale (1, 2);
187 }
188 static profile_probability very_likely ()
189 {
190 return profile_probability::always () - very_unlikely ();
191 }
192 static profile_probability likely ()
193 {
194 return profile_probability::always () - unlikely ();
195 }
196 static profile_probability guessed_always ()
197 {
198 profile_probability ret;
199 ret.m_val = max_probability;
200 ret.m_quality = profile_guessed;
201 return ret;
202 }
203 static profile_probability always ()
204 {
205 profile_probability ret;
206 ret.m_val = max_probability;
207 ret.m_quality = profile_precise;
208 return ret;
209 }
  /* Probabilities which have not been initialized.  Either because
     initialization did not happen yet or because the profile is unknown.  */
212 static profile_probability uninitialized ()
213 {
214 profile_probability c;
215 c.m_val = uninitialized_probability;
216 c.m_quality = profile_guessed;
217 return c;
218 }
219
220
221 /* Return true if value has been initialized. */
222 bool initialized_p () const
223 {
224 return m_val != uninitialized_probability;
225 }
226 /* Return true if value can be trusted. */
227 bool reliable_p () const
228 {
229 return m_quality >= profile_adjusted;
230 }
231
  /* Conversion from and to REG_BR_PROB_BASE integer fixed-point arithmetic.
     This is mostly to support legacy code and should eventually go away.  */
234 static profile_probability from_reg_br_prob_base (int v)
235 {
236 profile_probability ret;
237 gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE);
238 ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE);
239 ret.m_quality = profile_guessed;
240 return ret;
241 }
242 int to_reg_br_prob_base () const
243 {
244 gcc_checking_assert (initialized_p ());
245 return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability);
246 }
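
  /* For instance (illustrative only), from_reg_br_prob_base (5000) yields
     roughly a 1/2 probability: 5000 * max_probability is divided by
     REG_BR_PROB_BASE (10000) with rounding, and to_reg_br_prob_base ()
     maps it back to 5000.  The round trip is exact only up to the rounding
     error of the two divisions.  */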
247
248 /* Conversion to and from RTL representation of profile probabilities. */
249 static profile_probability from_reg_br_prob_note (int v)
250 {
251 profile_probability ret;
252 ret.m_val = ((unsigned int)v) / 8;
253 ret.m_quality = (enum profile_quality)(v & 7);
254 return ret;
255 }
256 int to_reg_br_prob_note () const
257 {
258 gcc_checking_assert (initialized_p ());
259 int ret = m_val * 8 + m_quality;
260 gcc_checking_assert (profile_probability::from_reg_br_prob_note (ret)
261 == *this);
262 return ret;
263 }
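
  /* The REG_BR_PROB note encoding packs the 29-bit value and the 3-bit
     quality into a single int: NOTE == m_val * 8 + m_quality, so
     from_reg_br_prob_note (to_reg_br_prob_note ()) is an exact round trip
     (checked by the assert above).  */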
264
265 /* Return VAL1/VAL2. */
266 static profile_probability probability_in_gcov_type
267 (gcov_type val1, gcov_type val2)
268 {
269 profile_probability ret;
270 gcc_checking_assert (val1 >= 0 && val2 > 0);
271 if (val1 > val2)
272 ret.m_val = max_probability;
273 else
274 {
275 uint64_t tmp;
276 safe_scale_64bit (val1, max_probability, val2, &tmp);
277 gcc_checking_assert (tmp <= max_probability);
278 ret.m_val = tmp;
279 }
280 ret.m_quality = profile_precise;
281 return ret;
282 }
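
  /* Usage sketch (illustrative only; the counts are hypothetical):

       // An edge taken 300 times out of 1000 executions of its source block.
       profile_probability p
	 = profile_probability::probability_in_gcov_type (300, 1000);
       // p is roughly 0.3 and has profile_precise quality.

     Values of VAL1 larger than VAL2 are capped at 1.  */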
283
284 /* Basic operations. */
285 bool operator== (const profile_probability &other) const
286 {
287 return m_val == other.m_val && m_quality == other.m_quality;
288 }
289 profile_probability operator+ (const profile_probability &other) const
290 {
291 if (other == profile_probability::never ())
292 return *this;
293 if (*this == profile_probability::never ())
294 return other;
295 if (!initialized_p () || !other.initialized_p ())
296 return profile_probability::uninitialized ();
297
298 profile_probability ret;
299 ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
300 ret.m_quality = MIN (m_quality, other.m_quality);
301 return ret;
302 }
303 profile_probability &operator+= (const profile_probability &other)
304 {
305 if (other == profile_probability::never ())
306 return *this;
307 if (*this == profile_probability::never ())
308 {
309 *this = other;
310 return *this;
311 }
312 if (!initialized_p () || !other.initialized_p ())
313 return *this = profile_probability::uninitialized ();
314 else
315 {
316 m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
317 m_quality = MIN (m_quality, other.m_quality);
318 }
319 return *this;
320 }
321 profile_probability operator- (const profile_probability &other) const
322 {
323 if (*this == profile_probability::never ()
324 || other == profile_probability::never ())
325 return *this;
326 if (!initialized_p () || !other.initialized_p ())
327 return profile_probability::uninitialized ();
328 profile_probability ret;
329 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
330 ret.m_quality = MIN (m_quality, other.m_quality);
331 return ret;
332 }
333 profile_probability &operator-= (const profile_probability &other)
334 {
335 if (*this == profile_probability::never ()
336 || other == profile_probability::never ())
337 return *this;
338 if (!initialized_p () || !other.initialized_p ())
339 return *this = profile_probability::uninitialized ();
340 else
341 {
342 m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
343 m_quality = MIN (m_quality, other.m_quality);
344 }
345 return *this;
346 }
347 profile_probability operator* (const profile_probability &other) const
348 {
349 if (*this == profile_probability::never ()
350 || other == profile_probability::never ())
351 return profile_probability::never ();
352 if (!initialized_p () || !other.initialized_p ())
353 return profile_probability::uninitialized ();
354 profile_probability ret;
355 ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
356 ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
357 return ret;
358 }
359 profile_probability &operator*= (const profile_probability &other)
360 {
361 if (*this == profile_probability::never ()
362 || other == profile_probability::never ())
363 return *this = profile_probability::never ();
364 if (!initialized_p () || !other.initialized_p ())
365 return *this = profile_probability::uninitialized ();
366 else
367 {
368 m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
369 m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
370 }
371 return *this;
372 }
373 profile_probability operator/ (const profile_probability &other) const
374 {
375 if (*this == profile_probability::never ())
376 return profile_probability::never ();
377 if (!initialized_p () || !other.initialized_p ())
378 return profile_probability::uninitialized ();
379 profile_probability ret;
380 /* If we get probability above 1, mark it as unreliable and return 1. */
381 if (m_val >= other.m_val)
382 {
383 ret.m_val = max_probability;
384 ret.m_quality = MIN (MIN (m_quality, other.m_quality),
385 profile_guessed);
386 return ret;
387 }
388 else if (!m_val)
389 ret.m_val = 0;
390 else
391 {
392 gcc_checking_assert (other.m_val);
393 ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
394 other.m_val),
395 max_probability);
396 }
397 ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
398 return ret;
399 }
400 profile_probability &operator/= (const profile_probability &other)
401 {
402 if (*this == profile_probability::never ())
403 return *this = profile_probability::never ();
404 if (!initialized_p () || !other.initialized_p ())
405 return *this = profile_probability::uninitialized ();
406 else
407 {
408 /* If we get probability above 1, mark it as unreliable
409 and return 1. */
410 if (m_val > other.m_val)
411 {
412 m_val = max_probability;
413 m_quality = MIN (MIN (m_quality, other.m_quality),
414 profile_guessed);
415 return *this;
416 }
417 else if (!m_val)
418 ;
419 else
420 {
421 gcc_checking_assert (other.m_val);
422 m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
423 other.m_val),
424 max_probability);
425 }
426 m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
427 }
428 return *this;
429 }
430
431 /* Split *THIS (ORIG) probability into 2 probabilities, such that
432 the returned one (FIRST) is *THIS * CPROB and *THIS is
433 adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
434 == ORIG. This is useful e.g. when splitting a conditional
435 branch like:
436 if (cond)
437 goto lab; // ORIG probability
438 into
439 if (cond1)
440 goto lab; // FIRST = ORIG * CPROB probability
441 if (cond2)
442 goto lab; // SECOND probability
443 such that the overall probability of jumping to lab remains
444 the same. CPROB gives the relative probability between the
445 branches. */
446 profile_probability split (const profile_probability &cprob)
447 {
448 profile_probability ret = *this * cprob;
    /* The following is equivalent to:
       *this = cprob.invert () * *this / ret.invert ();
       Avoid scaling when the overall outcome is supposed to be always.
       Without knowing that one is the inverse of the other, the result
       would be conservative.  */
454 if (!(*this == profile_probability::always ()))
455 *this = (*this - ret) / ret.invert ();
456 return ret;
457 }
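
  /* Worked example (illustrative only): if the original branch probability
     is 0.3 and CPROB is 0.5, then FIRST == 0.3 * 0.5 == 0.15 and *THIS is
     updated to (0.3 - 0.15) / (1 - 0.15) ~= 0.176, so that
     FIRST + FIRST.invert () * SECOND == 0.15 + 0.85 * 0.176 ~= 0.3 again.  */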
458
459 gcov_type apply (gcov_type val) const
460 {
461 if (*this == profile_probability::uninitialized ())
462 return val / 2;
463 return RDIV (val * m_val, max_probability);
464 }
465
466 /* Return 1-*THIS. */
467 profile_probability invert () const
468 {
469 return profile_probability::always() - *this;
470 }
471
472 /* Return THIS with quality dropped to GUESSED. */
473 profile_probability guessed () const
474 {
475 profile_probability ret = *this;
476 ret.m_quality = profile_guessed;
477 return ret;
478 }
479
480 /* Return THIS with quality dropped to AFDO. */
481 profile_probability afdo () const
482 {
483 profile_probability ret = *this;
484 ret.m_quality = profile_afdo;
485 return ret;
486 }
487
488 /* Return *THIS * NUM / DEN. */
489 profile_probability apply_scale (int64_t num, int64_t den) const
490 {
491 if (*this == profile_probability::never ())
492 return *this;
493 if (!initialized_p ())
494 return profile_probability::uninitialized ();
495 profile_probability ret;
496 uint64_t tmp;
497 safe_scale_64bit (m_val, num, den, &tmp);
498 ret.m_val = MIN (tmp, max_probability);
499 ret.m_quality = MIN (m_quality, profile_adjusted);
500 return ret;
501 }
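
  /* Usage sketch (illustrative only; the unroll scenario and names are
     hypothetical):

       // Distribute an exit probability across 4 unrolled copies of a loop.
       profile_probability exit_prob = profile_probability::unlikely ();
       profile_probability per_copy = exit_prob.apply_scale (1, 4);
       // per_copy is roughly 1/20; its quality is at most profile_adjusted.

     The result is capped at max_probability, so scaling up cannot produce
     a probability above 1.  */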
502
  /* Return true when the probability of edge is reliable.

     The profile guessing code is good at predicting the branch outcome (ie.
     taken/not taken), that is, it predicts right slightly over 75% of the
     time.  It is however notoriously poor at predicting the probability
     itself.  In general the guessed profile appears a lot flatter (with
     probabilities closer to 50%) than the reality, so it is a bad idea to
     use it to drive optimizations such as those disabling dynamic branch
     prediction for well predictable branches.

     There are two exceptions - edges leading to noreturn edges and edges
     predicted by number of iterations heuristics are predicted well.  This
     macro should be able to distinguish those, but at the moment it simply
     checks for the noreturn heuristic, which is the only one giving
     probabilities over 99% or below 1%.  In the future we might want to
     propagate reliability information across the CFG if we find this
     information useful in multiple places.  */
519
520 bool probably_reliable_p () const
521 {
522 if (m_quality >= profile_adjusted)
523 return true;
524 if (!initialized_p ())
525 return false;
526 return m_val < max_probability / 100
527 || m_val > max_probability - max_probability / 100;
528 }
529
530 /* Return false if profile_probability is bogus. */
531 bool verify () const
532 {
533 gcc_checking_assert (m_quality != profile_uninitialized);
534 if (m_val == uninitialized_probability)
535 return m_quality == profile_guessed;
536 else if (m_quality < profile_guessed)
537 return false;
538 return m_val <= max_probability;
539 }
540
  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
543 bool operator< (const profile_probability &other) const
544 {
545 return initialized_p () && other.initialized_p () && m_val < other.m_val;
546 }
547 bool operator> (const profile_probability &other) const
548 {
549 return initialized_p () && other.initialized_p () && m_val > other.m_val;
550 }
551
552 bool operator<= (const profile_probability &other) const
553 {
554 return initialized_p () && other.initialized_p () && m_val <= other.m_val;
555 }
556 bool operator>= (const profile_probability &other) const
557 {
558 return initialized_p () && other.initialized_p () && m_val >= other.m_val;
559 }
560
561 /* Output THIS to F. */
562 void dump (FILE *f) const;
563
564 /* Print THIS to stderr. */
565 void debug () const;
566
567 /* Return true if THIS is known to differ significantly from OTHER. */
568 bool differs_from_p (profile_probability other) const;
  /* Return true if the difference is greater than 50%.  */
  bool differs_lot_from_p (profile_probability other) const;
  /* An event happens COUNT1 times with *THIS probability and COUNT2 times
     with OTHER probability.  Return the probability that either *THIS or
     OTHER happens.  */
574 profile_probability combine_with_count (profile_count count1,
575 profile_probability other,
576 profile_count count2) const;
577
578 /* LTO streaming support. */
579 static profile_probability stream_in (struct lto_input_block *);
580 void stream_out (struct output_block *);
581 void stream_out (struct lto_output_stream *);
582 };
583
/* Main data type to hold profile counters in GCC.  Profile counts originate
   either from profile feedback, static profile estimation or both.  We do not
   perform whole program profile propagation and thus profile estimation
   counters are often local to a function, while counters from profile
   feedback (or special cases of profile estimation) can be used
   inter-procedurally.

   There are 3 basic types:
   1) local counters which are the result of intra-procedural static profile
      estimation.
   2) ipa counters which are the result of profile feedback or a special case
      of static profile estimation (such as in function main).
   3) counters which count as 0 inter-procedurally (because the given function
      was never run in the train feedback) but hold a local static profile
      estimate.

   Counters of type 1 and 3 cannot be mixed with counters of a different type
   within an operation (because the whole function should use one type of
   counter), with the exception that global zero mixes in most operations
   where the outcome is well defined.

   To take a local counter and use it inter-procedurally, use the ipa member
   function, which strips information irrelevant at the inter-procedural
   level.

   Counters are 61bit integers representing number of executions during the
   train run or normalized frequency within the function.

   As the profile is maintained during the compilation, many adjustments are
   made.  Not all transformations can be made precisely, most importantly
   when code is being duplicated.  It also may happen that part of the CFG
   has profile counts known while other parts do not - for example when LTO
   optimizing a partly profiled program or when the profile was lost due to
   COMDAT merging.

   For this reason profile_count tracks more information than
   just an unsigned integer and it is also ready for profile mismatches.
   The API of this data type represents operations that are natural
   on profile counts - sum, difference and operations with scales and
   probabilities.  All operations are safe by never getting negative counts
   and they do end up in uninitialized scale if any of the parameters is
   uninitialized.

   All comparisons are three-state, as is the handling of probabilities.
   Thus a < b is not equal to !(a >= b).

   The following pre-defined counts are available:

   profile_count::zero () for code that is known to execute zero times at
      runtime (this can be detected statically i.e. for paths leading to
      abort ());
   profile_count::one () for code that is known to execute once (such as
      the main () function);
   profile_count::uninitialized () for unknown execution count.  */
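
/* A short usage sketch of the three kinds of counters (illustrative only;
   the numbers and variable names are made up):

     profile_count feedback = profile_count::from_gcov_type (1000); // type 2
     profile_count local = feedback.guessed_local ();  // type 1: local only
     profile_count g0 = local.global0 ();  // type 3: globally 0, local kept

   Here feedback.ipa_p () is true, local.ipa_p () is false, and g0.ipa ()
   compares equal to profile_count::zero () even though the local estimate
   is still available for intra-procedural decisions.  */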
637
638 class sreal;
639
class GTY(()) profile_count
641 {
642 public:
  /* Use 61 bits to hold basic block counters; ideally they would be at
     least 64 bits wide.  Although a counter cannot be negative, we use a
     signed type to hold various extra stages.  */
646
647 static const int n_bits = 61;
648 static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2;
649 private:
650 static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1;
651
652 #if defined (__arm__) && (__GNUC__ >= 6 && __GNUC__ <= 8)
653 /* Work-around for PR88469. A bug in the gcc-6/7/8 PCS layout code
654 incorrectly detects the alignment of a structure where the only
655 64-bit aligned object is a bit-field. We force the alignment of
656 the entire field to mitigate this. */
657 #define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned(8)))
658 #else
659 #define UINT64_BIT_FIELD_ALIGN
660 #endif
661 uint64_t UINT64_BIT_FIELD_ALIGN m_val : n_bits;
662 #undef UINT64_BIT_FIELD_ALIGN
663 enum profile_quality m_quality : 3;
664
  /* Return true if both values can meaningfully appear in a single function
     body.  Either all counters in a function are local or all are IPA;
     otherwise operations between them are not really well defined.  */
668 bool compatible_p (const profile_count other) const
669 {
670 if (!initialized_p () || !other.initialized_p ())
671 return true;
672 if (*this == profile_count::zero ()
673 || other == profile_count::zero ())
674 return true;
675 return ipa_p () == other.ipa_p ();
676 }
677 public:
678
679 /* Used for counters which are expected to be never executed. */
680 static profile_count zero ()
681 {
682 return from_gcov_type (0);
683 }
684 static profile_count adjusted_zero ()
685 {
686 profile_count c;
687 c.m_val = 0;
688 c.m_quality = profile_adjusted;
689 return c;
690 }
691 static profile_count guessed_zero ()
692 {
693 profile_count c;
694 c.m_val = 0;
695 c.m_quality = profile_guessed;
696 return c;
697 }
698 static profile_count one ()
699 {
700 return from_gcov_type (1);
701 }
  /* Value of counters which have not been initialized.  Either because
     initialization did not happen yet or because the profile is unknown.  */
704 static profile_count uninitialized ()
705 {
706 profile_count c;
707 c.m_val = uninitialized_count;
708 c.m_quality = profile_guessed_local;
709 return c;
710 }
711
712 /* Conversion to gcov_type is lossy. */
713 gcov_type to_gcov_type () const
714 {
715 gcc_checking_assert (initialized_p ());
716 return m_val;
717 }
718
719 /* Return true if value has been initialized. */
720 bool initialized_p () const
721 {
722 return m_val != uninitialized_count;
723 }
724 /* Return true if value can be trusted. */
725 bool reliable_p () const
726 {
727 return m_quality >= profile_adjusted;
728 }
  /* Return true if the value can be used inter-procedurally.  */
730 bool ipa_p () const
731 {
732 return !initialized_p () || m_quality >= profile_guessed_global0;
733 }
734 /* Return true if quality of profile is precise. */
735 bool precise_p () const
736 {
737 return m_quality == profile_precise;
738 }
739
740 /* Get the quality of the count. */
741 enum profile_quality quality () const { return m_quality; }
742
  /* When merging basic blocks, the two different profile counts are unified.
     Return true if this can be done without losing information about the
     profile.  The only case we care about here is when the first BB contains
     something that makes it terminate in a way not visible in the CFG.  */
747 bool ok_for_merging (profile_count other) const
748 {
749 if (m_quality < profile_adjusted
750 || other.m_quality < profile_adjusted)
751 return true;
752 return !(other < *this);
753 }
754
755 /* When merging two BBs with different counts, pick common count that looks
756 most representative. */
757 profile_count merge (profile_count other) const
758 {
759 if (*this == other || !other.initialized_p ()
760 || m_quality > other.m_quality)
761 return *this;
762 if (other.m_quality > m_quality
763 || other > *this)
764 return other;
765 return *this;
766 }
767
768 /* Basic operations. */
769 bool operator== (const profile_count &other) const
770 {
771 return m_val == other.m_val && m_quality == other.m_quality;
772 }
773 profile_count operator+ (const profile_count &other) const
774 {
775 if (other == profile_count::zero ())
776 return *this;
777 if (*this == profile_count::zero ())
778 return other;
779 if (!initialized_p () || !other.initialized_p ())
780 return profile_count::uninitialized ();
781
782 profile_count ret;
783 gcc_checking_assert (compatible_p (other));
784 ret.m_val = m_val + other.m_val;
785 ret.m_quality = MIN (m_quality, other.m_quality);
786 return ret;
787 }
788 profile_count &operator+= (const profile_count &other)
789 {
790 if (other == profile_count::zero ())
791 return *this;
792 if (*this == profile_count::zero ())
793 {
794 *this = other;
795 return *this;
796 }
797 if (!initialized_p () || !other.initialized_p ())
798 return *this = profile_count::uninitialized ();
799 else
800 {
801 gcc_checking_assert (compatible_p (other));
802 m_val += other.m_val;
803 m_quality = MIN (m_quality, other.m_quality);
804 }
805 return *this;
806 }
807 profile_count operator- (const profile_count &other) const
808 {
809 if (*this == profile_count::zero () || other == profile_count::zero ())
810 return *this;
811 if (!initialized_p () || !other.initialized_p ())
812 return profile_count::uninitialized ();
813 gcc_checking_assert (compatible_p (other));
814 profile_count ret;
815 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
816 ret.m_quality = MIN (m_quality, other.m_quality);
817 return ret;
818 }
819 profile_count &operator-= (const profile_count &other)
820 {
821 if (*this == profile_count::zero () || other == profile_count::zero ())
822 return *this;
823 if (!initialized_p () || !other.initialized_p ())
824 return *this = profile_count::uninitialized ();
825 else
826 {
827 gcc_checking_assert (compatible_p (other));
828 m_val = m_val >= other.m_val ? m_val - other.m_val: 0;
829 m_quality = MIN (m_quality, other.m_quality);
830 }
831 return *this;
832 }
833
834 /* Return false if profile_count is bogus. */
835 bool verify () const
836 {
837 gcc_checking_assert (m_quality != profile_uninitialized);
838 return m_val != uninitialized_count || m_quality == profile_guessed_local;
839 }
840
  /* Comparisons are three-state and conservative.  False is returned if
     the inequality cannot be decided.  */
843 bool operator< (const profile_count &other) const
844 {
845 if (!initialized_p () || !other.initialized_p ())
846 return false;
847 if (*this == profile_count::zero ())
848 return !(other == profile_count::zero ());
849 if (other == profile_count::zero ())
850 return false;
851 gcc_checking_assert (compatible_p (other));
852 return m_val < other.m_val;
853 }
854 bool operator> (const profile_count &other) const
855 {
856 if (!initialized_p () || !other.initialized_p ())
857 return false;
858 if (*this == profile_count::zero ())
859 return false;
860 if (other == profile_count::zero ())
861 return !(*this == profile_count::zero ());
862 gcc_checking_assert (compatible_p (other));
863 return initialized_p () && other.initialized_p () && m_val > other.m_val;
864 }
865 bool operator< (const gcov_type other) const
866 {
867 gcc_checking_assert (ipa_p ());
868 gcc_checking_assert (other >= 0);
869 return initialized_p () && m_val < (uint64_t) other;
870 }
871 bool operator> (const gcov_type other) const
872 {
873 gcc_checking_assert (ipa_p ());
874 gcc_checking_assert (other >= 0);
875 return initialized_p () && m_val > (uint64_t) other;
876 }
877
878 bool operator<= (const profile_count &other) const
879 {
880 if (!initialized_p () || !other.initialized_p ())
881 return false;
882 if (*this == profile_count::zero ())
883 return true;
884 if (other == profile_count::zero ())
885 return (*this == profile_count::zero ());
886 gcc_checking_assert (compatible_p (other));
887 return m_val <= other.m_val;
888 }
889 bool operator>= (const profile_count &other) const
890 {
891 if (!initialized_p () || !other.initialized_p ())
892 return false;
893 if (other == profile_count::zero ())
894 return true;
895 if (*this == profile_count::zero ())
896 return (other == profile_count::zero ());
897 gcc_checking_assert (compatible_p (other));
898 return m_val >= other.m_val;
899 }
900 bool operator<= (const gcov_type other) const
901 {
902 gcc_checking_assert (ipa_p ());
903 gcc_checking_assert (other >= 0);
904 return initialized_p () && m_val <= (uint64_t) other;
905 }
906 bool operator>= (const gcov_type other) const
907 {
908 gcc_checking_assert (ipa_p ());
909 gcc_checking_assert (other >= 0);
910 return initialized_p () && m_val >= (uint64_t) other;
911 }
912 /* Return true when value is not zero and can be used for scaling.
913 This is different from *this > 0 because that requires counter to
914 be IPA. */
915 bool nonzero_p () const
916 {
917 return initialized_p () && m_val != 0;
918 }
919
  /* Force the counter to be nonzero.  */
921 profile_count force_nonzero () const
922 {
923 if (!initialized_p ())
924 return *this;
925 profile_count ret = *this;
926 if (ret.m_val == 0)
927 {
928 ret.m_val = 1;
929 ret.m_quality = MIN (m_quality, profile_adjusted);
930 }
931 return ret;
932 }
933
934 profile_count max (profile_count other) const
935 {
936 if (!initialized_p ())
937 return other;
938 if (!other.initialized_p ())
939 return *this;
940 if (*this == profile_count::zero ())
941 return other;
942 if (other == profile_count::zero ())
943 return *this;
944 gcc_checking_assert (compatible_p (other));
945 if (m_val < other.m_val || (m_val == other.m_val
946 && m_quality < other.m_quality))
947 return other;
948 return *this;
949 }
950
951 /* PROB is a probability in scale 0...REG_BR_PROB_BASE. Scale counter
952 accordingly. */
953 profile_count apply_probability (int prob) const
954 {
955 gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
956 if (m_val == 0)
957 return *this;
958 if (!initialized_p ())
959 return profile_count::uninitialized ();
960 profile_count ret;
961 ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE);
962 ret.m_quality = MIN (m_quality, profile_adjusted);
963 return ret;
964 }
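
  /* For example (illustrative only), a block executed 1000 times whose
     outgoing edge has a REG_BR_PROB_BASE-scale probability of 2500 (25%)
     gives an edge count of RDIV (1000 * 2500, REG_BR_PROB_BASE) == 250.  */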
965
966 /* Scale counter according to PROB. */
967 profile_count apply_probability (profile_probability prob) const
968 {
969 if (*this == profile_count::zero ())
970 return *this;
971 if (prob == profile_probability::never ())
972 return profile_count::zero ();
973 if (!initialized_p ())
974 return profile_count::uninitialized ();
975 profile_count ret;
976 uint64_t tmp;
977 safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability,
978 &tmp);
979 ret.m_val = tmp;
980 ret.m_quality = MIN (m_quality, prob.m_quality);
981 return ret;
982 }
983 /* Return *THIS * NUM / DEN. */
984 profile_count apply_scale (int64_t num, int64_t den) const
985 {
986 if (m_val == 0)
987 return *this;
988 if (!initialized_p ())
989 return profile_count::uninitialized ();
990 profile_count ret;
991 uint64_t tmp;
992
993 gcc_checking_assert (num >= 0 && den > 0);
994 safe_scale_64bit (m_val, num, den, &tmp);
995 ret.m_val = MIN (tmp, max_count);
996 ret.m_quality = MIN (m_quality, profile_adjusted);
997 return ret;
998 }
999 profile_count apply_scale (profile_count num, profile_count den) const
1000 {
1001 if (*this == profile_count::zero ())
1002 return *this;
1003 if (num == profile_count::zero ())
1004 return num;
1005 if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
1006 return profile_count::uninitialized ();
1007 if (num == den)
1008 return *this;
1009 gcc_checking_assert (den.m_val);
1010
1011 profile_count ret;
1012 uint64_t val;
1013 safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
1014 ret.m_val = MIN (val, max_count);
1015 ret.m_quality = MIN (MIN (MIN (m_quality, profile_adjusted),
1016 num.m_quality), den.m_quality);
1017 if (num.ipa_p () && !ret.ipa_p ())
1018 ret.m_quality = MIN (num.m_quality, profile_guessed);
1019 return ret;
1020 }
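
  /* Usage sketch (illustrative only; the duplication scenario and names are
     hypothetical): when a region of the CFG is duplicated and the copy is
     entered new_entry times instead of old_entry times, each count in the
     copy can be rescaled as

       profile_count scaled = bb_count.apply_scale (new_entry, old_entry);

     The result never exceeds max_count and its quality accounts for both
     operands.  */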
1021
1022 /* Return THIS with quality dropped to GUESSED_LOCAL. */
1023 profile_count guessed_local () const
1024 {
1025 profile_count ret = *this;
1026 if (!initialized_p ())
1027 return *this;
1028 ret.m_quality = profile_guessed_local;
1029 return ret;
1030 }
1031
1032 /* We know that profile is globally 0 but keep local profile if present. */
1033 profile_count global0 () const
1034 {
1035 profile_count ret = *this;
1036 if (!initialized_p ())
1037 return *this;
1038 ret.m_quality = profile_guessed_global0;
1039 return ret;
1040 }
1041
1042 /* We know that profile is globally adjusted 0 but keep local profile
1043 if present. */
1044 profile_count global0adjusted () const
1045 {
1046 profile_count ret = *this;
1047 if (!initialized_p ())
1048 return *this;
1049 ret.m_quality = profile_guessed_global0adjusted;
1050 return ret;
1051 }
1052
1053 /* Return THIS with quality dropped to GUESSED. */
1054 profile_count guessed () const
1055 {
1056 profile_count ret = *this;
1057 ret.m_quality = MIN (ret.m_quality, profile_guessed);
1058 return ret;
1059 }
1060
  /* Return a variant of the profile count which is always safe to compare
     across functions.  */
1063 profile_count ipa () const
1064 {
1065 if (m_quality > profile_guessed_global0adjusted)
1066 return *this;
1067 if (m_quality == profile_guessed_global0)
1068 return profile_count::zero ();
1069 if (m_quality == profile_guessed_global0adjusted)
1070 return profile_count::adjusted_zero ();
1071 return profile_count::uninitialized ();
1072 }
1073
1074 /* Return THIS with quality dropped to AFDO. */
1075 profile_count afdo () const
1076 {
1077 profile_count ret = *this;
1078 ret.m_quality = profile_afdo;
1079 return ret;
1080 }
1081
1082 /* Return probability of event with counter THIS within event with counter
1083 OVERALL. */
1084 profile_probability probability_in (const profile_count overall) const
1085 {
1086 if (*this == profile_count::zero ()
1087 && !(overall == profile_count::zero ()))
1088 return profile_probability::never ();
1089 if (!initialized_p () || !overall.initialized_p ()
1090 || !overall.m_val)
1091 return profile_probability::uninitialized ();
1092 if (*this == overall && m_quality == profile_precise)
1093 return profile_probability::always ();
1094 profile_probability ret;
1095 gcc_checking_assert (compatible_p (overall));
1096
1097 if (overall.m_val < m_val)
1098 {
1099 ret.m_val = profile_probability::max_probability;
1100 ret.m_quality = profile_guessed;
1101 return ret;
1102 }
1103 else
1104 ret.m_val = RDIV (m_val * profile_probability::max_probability,
1105 overall.m_val);
1106 ret.m_quality = MIN (MAX (MIN (m_quality, overall.m_quality),
1107 profile_guessed), profile_adjusted);
1108 return ret;
1109 }
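
  /* For example (illustrative only; edge_count and bb_count are hypothetical
     counts), an edge executed 250 times out of a source block executed 1000
     times:

       profile_probability p = edge_count.probability_in (bb_count);

     yields roughly a 0.25 probability.  If the edge count exceeds the block
     count (a profile mismatch), the result is capped at 1 and marked
     guessed.  */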
1110
1111 int to_frequency (struct function *fun) const;
1112 int to_cgraph_frequency (profile_count entry_bb_count) const;
1113 sreal to_sreal_scale (profile_count in, bool *known = NULL) const;
1114
1115 /* Output THIS to F. */
1116 void dump (FILE *f) const;
1117
1118 /* Print THIS to stderr. */
1119 void debug () const;
1120
1121 /* Return true if THIS is known to differ significantly from OTHER. */
1122 bool differs_from_p (profile_count other) const;
1123
  /* We want to scale the profile across a function boundary from NUM to DEN.
     Take care of the corner case when NUM and DEN are zeros of incompatible
     kinds.  */
1127 static void adjust_for_ipa_scaling (profile_count *num, profile_count *den);
1128
  /* THIS is a count of a bb which is known to be executed IPA times.
     Combine this information into the bb counter.  This means returning IPA
     if it is nonzero, not changing anything if IPA is uninitialized,
     and if IPA is zero, turning THIS into the corresponding local profile
     with global0.  */
1134 profile_count combine_with_ipa_count (profile_count ipa);
1135
  /* The profiling runtime uses gcov_type, which is usually a 64bit integer.
     Conversions back and forth are used to read the coverage counters and
     get them into the internal representation.  */
1139 static profile_count from_gcov_type (gcov_type v);
1140
1141 /* LTO streaming support. */
1142 static profile_count stream_in (struct lto_input_block *);
1143 void stream_out (struct output_block *);
1144 void stream_out (struct lto_output_stream *);
1145 };
1146 #endif
1147