1 /* Profile counter container type.
2 Copyright (C) 2017-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_PROFILE_COUNT_H
22 #define GCC_PROFILE_COUNT_H
23
24 struct function;
25 struct profile_count;
26
27 /* Quality of the profile count. Because gengtype does not support enums
28 inside classes, this is in the global namespace. */
29 enum profile_quality {
30 /* Uninitialized value. */
31 UNINITIALIZED_PROFILE,
32
33 /* Profile is based on static branch prediction heuristics and may
34 or may not match reality. It is local to the function and cannot be
35 compared inter-procedurally. Never used by probabilities (they are
36 always local). */
37 GUESSED_LOCAL,
38
39 /* Profile was read from feedback and was 0; we used local heuristics to
40 guess better. This is the case for functions not run in the profile
41 feedback (training) run. Never used by probabilities. */
42 GUESSED_GLOBAL0,
43
44 /* Same as GUESSED_GLOBAL0 but the global count is an adjusted 0. */
45 GUESSED_GLOBAL0_ADJUSTED,
46
47 /* Profile is based on static branch prediction heuristics. It may or may
48 not reflect reality but it can be compared inter-procedurally
49 (for example, we inlined a function without profile feedback into a
50 function with feedback and propagated the profile from there).
51 Never used by probabilities. */
52 GUESSED,
53
54 /* Profile was determined by autofdo. */
55 AFDO,
56
57 /* Profile was originally based on feedback but it was adjusted
58 by code-duplicating optimizations. It may not precisely reflect the
59 particular code path. */
60 ADJUSTED,
61
62 /* Profile was read from profile feedback or determined by accurate static
63 method. */
64 PRECISE
65 };
66
67 extern const char *profile_quality_as_string (enum profile_quality);
68 extern bool parse_profile_quality (const char *value,
69 profile_quality *quality);
70
71 /* The base value for branch probability notes and edge probabilities. */
72 #define REG_BR_PROB_BASE 10000
73
74 #define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
75
76 bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);
77
78 /* Compute *RES = (A*B + C/2)/C, capping, and return false if an overflow happened. */
79
80 inline bool
81 safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
82 {
83 #if (GCC_VERSION >= 5000)
84 uint64_t tmp;
85 if (!__builtin_mul_overflow (a, b, &tmp)
86 && !__builtin_add_overflow (tmp, c/2, &tmp))
87 {
88 *res = tmp / c;
89 return true;
90 }
91 if (c == 1)
92 {
93 *res = (uint64_t) -1;
94 return false;
95 }
96 #else
97 if (a < ((uint64_t)1 << 31)
98 && b < ((uint64_t)1 << 31)
99 && c < ((uint64_t)1 << 31))
100 {
101 *res = (a * b + (c / 2)) / c;
102 return true;
103 }
104 #endif
105 return slow_safe_scale_64bit (a, b, c, res);
106 }
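/* Usage sketch (illustrative only; COUNT, NUM, DEN and CAP below are
   hypothetical locals, not part of the API). Callers typically ignore the
   return value and cap the result themselves:

     uint64_t tmp;
     safe_scale_64bit (count, num, den, &tmp);
     uint64_t scaled = MIN (tmp, cap);   // e.g. cap = profile_count::max_count

   This is the pattern used by the apply_scale members further down. */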
107
108 /* Data type to hold probabilities. It implements fixed-point arithmetic
109 with capping, so a probability is always in the range [0,1]; scaling that
110 requires values greater than 1 needs to be represented otherwise.
111
112 In addition to the actual value, the quality of the profile is tracked and
113 propagated through all operations. The special value UNINITIALIZED_PROFILE is
114 used for probabilities that have not been determined yet (for example
115 because of -fno-guess-branch-probability).
116
117 Typically probabilities are derived from profile feedback (via
118 probability_in_gcov_type), AutoFDO or guessed statically and then propagated
119 throughout the compilation.
120
121 Named probabilities are available:
122 - never (0 probability)
123 - guessed_never
124 - very_unlikely (1/2000 probability)
125 - unlikely (1/5 probability)
126 - even (1/2 probability)
127 - likely (4/5 probability)
128 - very_likely (1999/2000 probability)
129 - guessed_always
130 - always
131
132 Named probabilities except for never/always are assumed to be statically
133 guessed and thus not necessarily accurate. The difference between never
134 and guessed_never is that the former should be used only when a
135 well-behaved program will very likely not execute the "never" path,
136 for example if the path leads to an abort () call or to exception handling.
137
138 Always and guessed_always probabilities are symmetric.
139
140 For legacy code we support conversion to/from REG_BR_PROB_BASE based
141 fixed-point integer arithmetic. Once the code is converted to branch
142 probabilities, these conversions will probably go away because they are lossy.
143 */
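/* A short usage sketch (illustrative only; the local variables below are
   hypothetical, not part of the API):

     profile_probability p = profile_probability::even ();  // 1/2, GUESSED
     profile_probability q = p.apply_scale (3, 4);          // 3/8, still GUESSED
     profile_probability r = p * q;                         // capped fixed-point product
     if (r.initialized_p () && r.reliable_p ())
       ;                                                    // use r
     int note = p.to_reg_br_prob_note ();                   // RTL note encoding
     p = profile_probability::from_reg_br_prob_note (note); // round-trips exactly

   Operations generally propagate the weaker of the two qualities and never
   produce a value outside [0, 1]. */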
144
145 class GTY((user)) profile_probability
146 {
147 static const int n_bits = 29;
148 /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
149 will lead to harder multiplication sequences. */
150 static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
151 static const uint32_t uninitialized_probability
152 = ((uint32_t) 1 << (n_bits - 1)) - 1;
153
154 uint32_t m_val : 29;
155 enum profile_quality m_quality : 3;
156
157 friend struct profile_count;
158 public:
159 profile_probability (): m_val (uninitialized_probability),
160 m_quality (GUESSED)
161 {}
162
163 profile_probability (uint32_t val, profile_quality quality):
164 m_val (val), m_quality (quality)
165 {}
166
167 /* Named probabilities. */
168 static profile_probability never ()
169 {
170 profile_probability ret;
171 ret.m_val = 0;
172 ret.m_quality = PRECISE;
173 return ret;
174 }
175
176 static profile_probability guessed_never ()
177 {
178 profile_probability ret;
179 ret.m_val = 0;
180 ret.m_quality = GUESSED;
181 return ret;
182 }
183
184 static profile_probability very_unlikely ()
185 {
186 /* Be consistent with PROB_VERY_UNLIKELY in predict.h. */
187 profile_probability r = guessed_always ().apply_scale (1, 2000);
188 r.m_val--;
189 return r;
190 }
191
192 static profile_probability unlikely ()
193 {
194 /* Be consistent with PROB_UNLIKELY in predict.h. */
195 profile_probability r = guessed_always ().apply_scale (1, 5);
196 r.m_val--;
197 return r;
198 }
199
200 static profile_probability even ()
201 {
202 return guessed_always ().apply_scale (1, 2);
203 }
204
205 static profile_probability very_likely ()
206 {
207 return always () - very_unlikely ();
208 }
209
210 static profile_probability likely ()
211 {
212 return always () - unlikely ();
213 }
214
215 static profile_probability guessed_always ()
216 {
217 profile_probability ret;
218 ret.m_val = max_probability;
219 ret.m_quality = GUESSED;
220 return ret;
221 }
222
223 static profile_probability always ()
224 {
225 profile_probability ret;
226 ret.m_val = max_probability;
227 ret.m_quality = PRECISE;
228 return ret;
229 }
230
231 /* Probabilities which have not been initialized. Either because
232 initialization did not happen yet or because the profile is unknown. */
233 static profile_probability uninitialized ()
234 {
235 profile_probability c;
236 c.m_val = uninitialized_probability;
237 c.m_quality = GUESSED;
238 return c;
239 }
240
241 /* Return true if value has been initialized. */
242 bool initialized_p () const
243 {
244 return m_val != uninitialized_probability;
245 }
246
247 /* Return true if value can be trusted. */
248 bool reliable_p () const
249 {
250 return m_quality >= ADJUSTED;
251 }
252
253 /* Conversion from and to REG_BR_PROB_BASE integer fixed-point arithmetic.
254 This is mostly to support legacy code and should go away. */
255 static profile_probability from_reg_br_prob_base (int v)
256 {
257 profile_probability ret;
258 gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE);
259 ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE);
260 ret.m_quality = GUESSED;
261 return ret;
262 }
263
264 int to_reg_br_prob_base () const
265 {
266 gcc_checking_assert (initialized_p ());
267 return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability);
268 }
269
270 /* Conversion to and from RTL representation of profile probabilities. */
271 static profile_probability from_reg_br_prob_note (int v)
272 {
273 profile_probability ret;
274 ret.m_val = ((unsigned int)v) / 8;
275 ret.m_quality = (enum profile_quality)(v & 7);
276 return ret;
277 }
278
279 int to_reg_br_prob_note () const
280 {
281 gcc_checking_assert (initialized_p ());
282 int ret = m_val * 8 + m_quality;
283 gcc_checking_assert (from_reg_br_prob_note (ret) == *this);
284 return ret;
285 }
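/* Note on the encoding used above (a reading aid, not part of the API):
   the RTL note value packs the 29-bit probability value in the upper bits
   and the 3-bit quality in the low bits, i.e. NOTE == VAL * 8 + QUALITY,
   so from_reg_br_prob_note (to_reg_br_prob_note ()) round-trips exactly,
   as the assertion above checks. */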
286
287 /* Return VAL1/VAL2. */
288 static profile_probability probability_in_gcov_type
289 (gcov_type val1, gcov_type val2)
290 {
291 profile_probability ret;
292 gcc_checking_assert (val1 >= 0 && val2 > 0);
293 if (val1 > val2)
294 ret.m_val = max_probability;
295 else
296 {
297 uint64_t tmp;
298 safe_scale_64bit (val1, max_probability, val2, &tmp);
299 gcc_checking_assert (tmp <= max_probability);
300 ret.m_val = tmp;
301 }
302 ret.m_quality = PRECISE;
303 return ret;
304 }
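/* For instance (illustrative numbers), probability_in_gcov_type (300, 400)
   yields a PRECISE probability of roughly 3/4 (the rounded fixed-point value
   3/4 * max_probability), while passing VAL1 > VAL2 saturates to 1. */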
305
306 /* Basic operations. */
307 bool operator== (const profile_probability &other) const
308 {
309 return m_val == other.m_val && m_quality == other.m_quality;
310 }
311
312 profile_probability operator+ (const profile_probability &other) const
313 {
314 if (other == never ())
315 return *this;
316 if (*this == never ())
317 return other;
318 if (!initialized_p () || !other.initialized_p ())
319 return uninitialized ();
320
321 profile_probability ret;
322 ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
323 ret.m_quality = MIN (m_quality, other.m_quality);
324 return ret;
325 }
326
327 profile_probability &operator+= (const profile_probability &other)
328 {
329 if (other == never ())
330 return *this;
331 if (*this == never ())
332 {
333 *this = other;
334 return *this;
335 }
336 if (!initialized_p () || !other.initialized_p ())
337 return *this = uninitialized ();
338 else
339 {
340 m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
341 m_quality = MIN (m_quality, other.m_quality);
342 }
343 return *this;
344 }
345
346 profile_probability operator- (const profile_probability &other) const
347 {
348 if (*this == never ()
349 || other == never ())
350 return *this;
351 if (!initialized_p () || !other.initialized_p ())
352 return uninitialized ();
353 profile_probability ret;
354 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
355 ret.m_quality = MIN (m_quality, other.m_quality);
356 return ret;
357 }
358
359 profile_probability &operator-= (const profile_probability &other)
360 {
361 if (*this == never ()
362 || other == never ())
363 return *this;
364 if (!initialized_p () || !other.initialized_p ())
365 return *this = uninitialized ();
366 else
367 {
368 m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
369 m_quality = MIN (m_quality, other.m_quality);
370 }
371 return *this;
372 }
373
374 profile_probability operator* (const profile_probability &other) const
375 {
376 if (*this == never ()
377 || other == never ())
378 return never ();
379 if (!initialized_p () || !other.initialized_p ())
380 return uninitialized ();
381 profile_probability ret;
382 ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
383 ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
384 return ret;
385 }
386
387 profile_probability &operator*= (const profile_probability &other)
388 {
389 if (*this == never ()
390 || other == never ())
391 return *this = never ();
392 if (!initialized_p () || !other.initialized_p ())
393 return *this = uninitialized ();
394 else
395 {
396 m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
397 m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
398 }
399 return *this;
400 }
401
402 profile_probability operator/ (const profile_probability &other) const
403 {
404 if (*this == never ())
405 return never ();
406 if (!initialized_p () || !other.initialized_p ())
407 return uninitialized ();
408 profile_probability ret;
409 /* If we get probability above 1, mark it as unreliable and return 1. */
410 if (m_val >= other.m_val)
411 {
412 ret.m_val = max_probability;
413 ret.m_quality = MIN (MIN (m_quality, other.m_quality),
414 GUESSED);
415 return ret;
416 }
417 else if (!m_val)
418 ret.m_val = 0;
419 else
420 {
421 gcc_checking_assert (other.m_val);
422 ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
423 other.m_val),
424 max_probability);
425 }
426 ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
427 return ret;
428 }
429
430 profile_probability &operator/= (const profile_probability &other)
431 {
432 if (*this == never ())
433 return *this = never ();
434 if (!initialized_p () || !other.initialized_p ())
435 return *this = uninitialized ();
436 else
437 {
438 /* If we get probability above 1, mark it as unreliable
439 and return 1. */
440 if (m_val > other.m_val)
441 {
442 m_val = max_probability;
443 m_quality = MIN (MIN (m_quality, other.m_quality),
444 GUESSED);
445 return *this;
446 }
447 else if (!m_val)
448 ;
449 else
450 {
451 gcc_checking_assert (other.m_val);
452 m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
453 other.m_val),
454 max_probability);
455 }
456 m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED);
457 }
458 return *this;
459 }
460
461 /* Split *THIS (ORIG) probability into 2 probabilities, such that
462 the returned one (FIRST) is *THIS * CPROB and *THIS is
463 adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
464 == ORIG. This is useful e.g. when splitting a conditional
465 branch like:
466 if (cond)
467 goto lab; // ORIG probability
468 into
469 if (cond1)
470 goto lab; // FIRST = ORIG * CPROB probability
471 if (cond2)
472 goto lab; // SECOND probability
473 such that the overall probability of jumping to lab remains
474 the same. CPROB gives the relative probability between the
475 branches. */
476 profile_probability split (const profile_probability &cprob)
477 {
478 profile_probability ret = *this * cprob;
479 /* The following is equivalent to:
480 *this = cprob.invert () * *this / ret.invert ();
481 Avoid scaling when the overall outcome is supposed to be always.
482 Without knowing that one is the inverse of the other, the result
483 would be conservative. */
484 if (!(*this == always ()))
485 *this = (*this - ret) / ret.invert ();
486 return ret;
487 }
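/* A worked example (numbers are illustrative): if ORIG is 1/2 and CPROB is
   1/2, then FIRST = ORIG * CPROB = 1/4 and *THIS becomes
   (1/2 - 1/4) / (1 - 1/4) = 1/3, so that
   FIRST + FIRST.invert () * SECOND = 1/4 + 3/4 * 1/3 = 1/2 = ORIG. */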
488
489 gcov_type apply (gcov_type val) const
490 {
491 if (*this == uninitialized ())
492 return val / 2;
493 return RDIV (val * m_val, max_probability);
494 }
495
496 /* Return 1-*THIS. */
497 profile_probability invert () const
498 {
499 return always() - *this;
500 }
501
502 /* Return THIS with quality dropped to GUESSED. */
503 profile_probability guessed () const
504 {
505 profile_probability ret = *this;
506 ret.m_quality = GUESSED;
507 return ret;
508 }
509
510 /* Return THIS with quality dropped to AFDO. */
511 profile_probability afdo () const
512 {
513 profile_probability ret = *this;
514 ret.m_quality = AFDO;
515 return ret;
516 }
517
518 /* Return *THIS * NUM / DEN. */
519 profile_probability apply_scale (int64_t num, int64_t den) const
520 {
521 if (*this == never ())
522 return *this;
523 if (!initialized_p ())
524 return uninitialized ();
525 profile_probability ret;
526 uint64_t tmp;
527 safe_scale_64bit (m_val, num, den, &tmp);
528 ret.m_val = MIN (tmp, max_probability);
529 ret.m_quality = MIN (m_quality, ADJUSTED);
530 return ret;
531 }
532
533 /* Return true when the probability of the edge is reliable.
534
535 The profile guessing code is good at predicting the branch outcome (i.e.
536 taken/not taken), which is predicted correctly slightly over 75% of the
537 time. It is however notoriously poor at predicting the probability itself.
538 In general the profile appears a lot flatter (with probabilities closer
539 to 50%) than reality, so it is a bad idea to use it to drive optimizations
540 such as those disabling dynamic branch prediction for well-predictable
541 branches.
542
543 There are two exceptions - edges leading to noreturn calls and edges
544 predicted by the number-of-iterations heuristics are predicted well. This
545 function should be able to distinguish those, but at the moment it simply
546 checks for the noreturn heuristic, which is the only one giving a probability
547 over 99% or below 1%. In the future we might want to propagate reliability
548 information across the CFG if we find it useful in multiple places. */
549 bool probably_reliable_p () const
550 {
551 if (m_quality >= ADJUSTED)
552 return true;
553 if (!initialized_p ())
554 return false;
555 return m_val < max_probability / 100
556 || m_val > max_probability - max_probability / 100;
557 }
558
559 /* Return false if profile_probability is bogus. */
560 bool verify () const
561 {
562 gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE);
563 if (m_val == uninitialized_probability)
564 return m_quality == GUESSED;
565 else if (m_quality < GUESSED)
566 return false;
567 return m_val <= max_probability;
568 }
569
570 /* Comparisons are three-state and conservative. False is returned if
571 the inequality cannot be decided. */
572 bool operator< (const profile_probability &other) const
573 {
574 return initialized_p () && other.initialized_p () && m_val < other.m_val;
575 }
576
577 bool operator> (const profile_probability &other) const
578 {
579 return initialized_p () && other.initialized_p () && m_val > other.m_val;
580 }
581
582 bool operator<= (const profile_probability &other) const
583 {
584 return initialized_p () && other.initialized_p () && m_val <= other.m_val;
585 }
586
587 bool operator>= (const profile_probability &other) const
588 {
589 return initialized_p () && other.initialized_p () && m_val >= other.m_val;
590 }
591
592 /* Get the value of the probability. */
593 uint32_t value () const { return m_val; }
594
595 /* Get the quality of the probability. */
596 enum profile_quality quality () const { return m_quality; }
597
598 /* Output THIS to F. */
599 void dump (FILE *f) const;
600
601 /* Print THIS to stderr. */
602 void debug () const;
603
604 /* Return true if THIS is known to differ significantly from OTHER. */
605 bool differs_from_p (profile_probability other) const;
606
607 /* Return true if the difference is greater than 50%. */
608 bool differs_lot_from_p (profile_probability other) const;
609
610 /* An event happens COUNT1 times with *THIS probability and COUNT2 times
611 with OTHER probability. Return the probability that either *THIS or
612 OTHER happens. */
613 profile_probability combine_with_count (profile_count count1,
614 profile_probability other,
615 profile_count count2) const;
616
617 /* LTO streaming support. */
618 static profile_probability stream_in (class lto_input_block *);
619 void stream_out (struct output_block *);
620 void stream_out (struct lto_output_stream *);
621 };
622
623 /* Main data type to hold profile counters in GCC. Profile counts originate
624 either from profile feedback, static profile estimation, or both. We do not
625 perform whole-program profile propagation and thus profile estimation
626 counters are often local to a function, while counters from profile feedback
627 (or special cases of profile estimation) can be used inter-procedurally.
628
629 There are 3 basic types:
630 1) local counters, which are the result of intra-procedural static profile
631 estimation.
632 2) ipa counters, which are the result of profile feedback or a special case
633 of static profile estimation (such as in function main).
634 3) counters which count as 0 inter-procedurally (because the given function
635 was never run in the training run) but hold a local static profile
636 estimate.
637
638 Counters of types 1 and 3 cannot be mixed with counters of a different type
639 within an operation (because a whole function should use one type of counter),
640 with the exception that a global zero mixes in most operations where the
641 outcome is well defined.
642
643 To take a local counter and use it inter-procedurally, use the ipa () member
644 function, which strips information irrelevant at the inter-procedural level.
645
646 Counters are 61-bit integers representing the number of executions during
647 the training run or the normalized frequency within the function.
648
649 As the profile is maintained during the compilation, many adjustments are
650 made. Not all transformations can be made precisely, most importantly
651 when code is being duplicated. It may also happen that part of the CFG has
652 known profile counts while other parts do not - for example when
653 LTO-optimizing a partly profiled program or when the profile was lost due
654 to COMDAT merging.
655
656 For this reason profile_count tracks more information than just an
657 unsigned integer and it is also prepared for profile mismatches.
658 The API of this data type represents operations that are natural on
659 profile counts - sum, difference, and scaling by constants and probabilities.
660 All operations are safe in that counts never become negative and the
661 result is uninitialized if any of the parameters is uninitialized.
662
663 All comparisons are three-state and conservative. Thus
664 a < b is not equivalent to !(a >= b).
665
666 The following pre-defined counts are available:
667
668 profile_count::zero () for code that is known to execute zero times at
669 runtime (this can be detected statically, e.g. for paths leading to
670 abort ());
671 profile_count::one () for code that is known to execute once (such as
672 the main () function);
673 profile_count::uninitialized () for unknown execution count.
674
675 */
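/* A short usage sketch (illustrative only; the local variables below are
   hypothetical, not part of the API):

     profile_count n = profile_count::from_gcov_type (1000);         // PRECISE
     profile_count half = n.apply_probability (profile_probability::even ());
     profile_count rest = n - half;
     profile_probability p = half.probability_in (n);                // about 1/2
     if (n.initialized_p () && n.ipa_p ())
       ;                                                             // usable inter-procedurally

   Qualities are propagated: e.g. apply_probability keeps the weaker of the
   count's and the probability's quality, so HALF above is only GUESSED. */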
676
677 class sreal;
678
679 struct GTY(()) profile_count
680 {
681 public:
682 /* Use 61 bits to hold basic block counters; the remaining 3 bits of the
683 64-bit field hold the profile quality. Although a counter cannot be
684 negative, various conversions go through the signed gcov_type. */
685
686 static const int n_bits = 61;
687 static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2;
688 private:
689 static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1;
690
691 #if defined (__arm__) && (__GNUC__ >= 6 && __GNUC__ <= 8)
692 /* Work-around for PR88469. A bug in the gcc-6/7/8 PCS layout code
693 incorrectly detects the alignment of a structure where the only
694 64-bit aligned object is a bit-field. We force the alignment of
695 the entire field to mitigate this. */
696 #define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned(8)))
697 #else
698 #define UINT64_BIT_FIELD_ALIGN
699 #endif
700 uint64_t UINT64_BIT_FIELD_ALIGN m_val : n_bits;
701 #undef UINT64_BIT_FIELD_ALIGN
702 enum profile_quality m_quality : 3;
703 public:
704
705 /* Return true if both values can meaningfully appear in a single function
706 body. Either all counters in a function are local or all are global;
707 otherwise operations between them are not well defined. */
708 bool compatible_p (const profile_count other) const
709 {
710 if (!initialized_p () || !other.initialized_p ())
711 return true;
712 if (*this == zero ()
713 || other == zero ())
714 return true;
715 /* Do not allow nonzero global profile together with local guesses
716 that are globally0. */
717 if (ipa ().nonzero_p ()
718 && !(other.ipa () == other))
719 return false;
720 if (other.ipa ().nonzero_p ()
721 && !(ipa () == *this))
722 return false;
723
724 return ipa_p () == other.ipa_p ();
725 }
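/* For example, a nonzero PRECISE (IPA) count and a nonzero GUESSED_LOCAL
   estimate are not compatible, and adding them would trip the compatible_p
   assertion in operator+ below; a zero () count, by contrast, is compatible
   with anything. */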
726
727 /* Used for counters which are expected never to be executed. */
728 static profile_count zero ()
729 {
730 return from_gcov_type (0);
731 }
732
733 static profile_count adjusted_zero ()
734 {
735 profile_count c;
736 c.m_val = 0;
737 c.m_quality = ADJUSTED;
738 return c;
739 }
740
741 static profile_count guessed_zero ()
742 {
743 profile_count c;
744 c.m_val = 0;
745 c.m_quality = GUESSED;
746 return c;
747 }
748
749 static profile_count one ()
750 {
751 return from_gcov_type (1);
752 }
753
754 /* Value of counters which have not been initialized. Either because
755 initialization did not happen yet or because the profile is unknown. */
756 static profile_count uninitialized ()
757 {
758 profile_count c;
759 c.m_val = uninitialized_count;
760 c.m_quality = GUESSED_LOCAL;
761 return c;
762 }
763
764 /* Conversion to gcov_type is lossy. */
765 gcov_type to_gcov_type () const
766 {
767 gcc_checking_assert (initialized_p ());
768 return m_val;
769 }
770
771 /* Return true if value has been initialized. */
772 bool initialized_p () const
773 {
774 return m_val != uninitialized_count;
775 }
776
777 /* Return true if value can be trusted. */
778 bool reliable_p () const
779 {
780 return m_quality >= ADJUSTED;
781 }
782
783 /* Return true if the value can be used inter-procedurally. */
784 bool ipa_p () const
785 {
786 return !initialized_p () || m_quality >= GUESSED_GLOBAL0;
787 }
788
789 /* Return true if quality of profile is precise. */
790 bool precise_p () const
791 {
792 return m_quality == PRECISE;
793 }
794
795 /* Get the value of the count. */
796 uint64_t value () const { return m_val; }
797
798 /* Get the quality of the count. */
799 enum profile_quality quality () const { return m_quality; }
800
801 /* When merging basic blocks, the two different profile counts are unified.
802 Return true if this can be done without losing information about the
803 profile. The only case we care about here is when the first BB contains
804 something that makes it terminate in a way not visible in the CFG. */
805 bool ok_for_merging (profile_count other) const
806 {
807 if (m_quality < ADJUSTED
808 || other.m_quality < ADJUSTED)
809 return true;
810 return !(other < *this);
811 }
812
813 /* When merging two BBs with different counts, pick the common count that
814 looks most representative. */
815 profile_count merge (profile_count other) const
816 {
817 if (*this == other || !other.initialized_p ()
818 || m_quality > other.m_quality)
819 return *this;
820 if (other.m_quality > m_quality
821 || other > *this)
822 return other;
823 return *this;
824 }
825
826 /* Basic operations. */
827 bool operator== (const profile_count &other) const
828 {
829 return m_val == other.m_val && m_quality == other.m_quality;
830 }
831
832 profile_count operator+ (const profile_count &other) const
833 {
834 if (other == zero ())
835 return *this;
836 if (*this == zero ())
837 return other;
838 if (!initialized_p () || !other.initialized_p ())
839 return uninitialized ();
840
841 profile_count ret;
842 gcc_checking_assert (compatible_p (other));
843 ret.m_val = m_val + other.m_val;
844 ret.m_quality = MIN (m_quality, other.m_quality);
845 return ret;
846 }
847
848 profile_count &operator+= (const profile_count &other)
849 {
850 if (other == zero ())
851 return *this;
852 if (*this == zero ())
853 {
854 *this = other;
855 return *this;
856 }
857 if (!initialized_p () || !other.initialized_p ())
858 return *this = uninitialized ();
859 else
860 {
861 gcc_checking_assert (compatible_p (other));
862 m_val += other.m_val;
863 m_quality = MIN (m_quality, other.m_quality);
864 }
865 return *this;
866 }
867
868 profile_count operator- (const profile_count &other) const
869 {
870 if (*this == zero () || other == zero ())
871 return *this;
872 if (!initialized_p () || !other.initialized_p ())
873 return uninitialized ();
874 gcc_checking_assert (compatible_p (other));
875 profile_count ret;
876 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
877 ret.m_quality = MIN (m_quality, other.m_quality);
878 return ret;
879 }
880
881 profile_count &operator-= (const profile_count &other)
882 {
883 if (*this == zero () || other == zero ())
884 return *this;
885 if (!initialized_p () || !other.initialized_p ())
886 return *this = uninitialized ();
887 else
888 {
889 gcc_checking_assert (compatible_p (other));
890 m_val = m_val >= other.m_val ? m_val - other.m_val: 0;
891 m_quality = MIN (m_quality, other.m_quality);
892 }
893 return *this;
894 }
895
896 /* Return false if profile_count is bogus. */
897 bool verify () const
898 {
899 gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE);
900 return m_val != uninitialized_count || m_quality == GUESSED_LOCAL;
901 }
902
903 /* Comparisons are three-state and conservative. False is returned if
904 the inequality cannot be decided. */
905 bool operator< (const profile_count &other) const
906 {
907 if (!initialized_p () || !other.initialized_p ())
908 return false;
909 if (*this == zero ())
910 return !(other == zero ());
911 if (other == zero ())
912 return false;
913 gcc_checking_assert (compatible_p (other));
914 return m_val < other.m_val;
915 }
916
917 bool operator> (const profile_count &other) const
918 {
919 if (!initialized_p () || !other.initialized_p ())
920 return false;
921 if (*this == zero ())
922 return false;
923 if (other == zero ())
924 return !(*this == zero ());
925 gcc_checking_assert (compatible_p (other));
926 return m_val > other.m_val;
927 }
928
929 bool operator< (const gcov_type other) const
930 {
931 gcc_checking_assert (ipa_p ());
932 gcc_checking_assert (other >= 0);
933 return ipa ().initialized_p () && ipa ().m_val < (uint64_t) other;
934 }
935
936 bool operator> (const gcov_type other) const
937 {
938 gcc_checking_assert (ipa_p ());
939 gcc_checking_assert (other >= 0);
940 return ipa ().initialized_p () && ipa ().m_val > (uint64_t) other;
941 }
942
943 bool operator<= (const profile_count &other) const
944 {
945 if (!initialized_p () || !other.initialized_p ())
946 return false;
947 if (*this == zero ())
948 return true;
949 if (other == zero ())
950 return (*this == zero ());
951 gcc_checking_assert (compatible_p (other));
952 return m_val <= other.m_val;
953 }
954
955 bool operator>= (const profile_count &other) const
956 {
957 if (!initialized_p () || !other.initialized_p ())
958 return false;
959 if (other == zero ())
960 return true;
961 if (*this == zero ())
962 return (other == zero ());
963 gcc_checking_assert (compatible_p (other));
964 return m_val >= other.m_val;
965 }
966
967 bool operator<= (const gcov_type other) const
968 {
969 gcc_checking_assert (ipa_p ());
970 gcc_checking_assert (other >= 0);
971 return ipa ().initialized_p () && ipa ().m_val <= (uint64_t) other;
972 }
973
974 bool operator>= (const gcov_type other) const
975 {
976 gcc_checking_assert (ipa_p ());
977 gcc_checking_assert (other >= 0);
978 return ipa ().initialized_p () && ipa ().m_val >= (uint64_t) other;
979 }
980
981 /* Return true when the value is not zero and can be used for scaling.
982 This is different from *this > 0 because that requires the counter to
983 be IPA. */
984 bool nonzero_p () const
985 {
986 return initialized_p () && m_val != 0;
987 }
988
989 /* Make counter forcibly nonzero. */
990 profile_count force_nonzero () const
991 {
992 if (!initialized_p ())
993 return *this;
994 profile_count ret = *this;
995 if (ret.m_val == 0)
996 {
997 ret.m_val = 1;
998 ret.m_quality = MIN (m_quality, ADJUSTED);
999 }
1000 return ret;
1001 }
1002
1003 profile_count max (profile_count other) const
1004 {
1005 profile_count val = *this;
1006
1007 /* Always prefer nonzero IPA counts over local counts. */
1008 if (ipa ().nonzero_p () || other.ipa ().nonzero_p ())
1009 {
1010 val = ipa ();
1011 other = other.ipa ();
1012 }
1013 if (!initialized_p ())
1014 return other;
1015 if (!other.initialized_p ())
1016 return *this;
1017 if (*this == zero ())
1018 return other;
1019 if (other == zero ())
1020 return *this;
1021 gcc_checking_assert (compatible_p (other));
1022 if (val.m_val < other.m_val || (m_val == other.m_val
1023 && val.m_quality < other.m_quality))
1024 return other;
1025 return *this;
1026 }
1027
1028 /* PROB is a probability in the scale 0...REG_BR_PROB_BASE. Scale the
1029 counter accordingly. */
1030 profile_count apply_probability (int prob) const
1031 {
1032 gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
1033 if (m_val == 0)
1034 return *this;
1035 if (!initialized_p ())
1036 return uninitialized ();
1037 profile_count ret;
1038 ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE);
1039 ret.m_quality = MIN (m_quality, ADJUSTED);
1040 return ret;
1041 }
1042
1043 /* Scale counter according to PROB. */
1044 profile_count apply_probability (profile_probability prob) const
1045 {
1046 if (*this == zero ())
1047 return *this;
1048 if (prob == profile_probability::never ())
1049 return zero ();
1050 if (!initialized_p ())
1051 return uninitialized ();
1052 profile_count ret;
1053 uint64_t tmp;
1054 safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability,
1055 &tmp);
1056 ret.m_val = tmp;
1057 ret.m_quality = MIN (m_quality, prob.m_quality);
1058 return ret;
1059 }
1060
1061 /* Return *THIS * NUM / DEN. */
1062 profile_count apply_scale (int64_t num, int64_t den) const
1063 {
1064 if (m_val == 0)
1065 return *this;
1066 if (!initialized_p ())
1067 return uninitialized ();
1068 profile_count ret;
1069 uint64_t tmp;
1070
1071 gcc_checking_assert (num >= 0 && den > 0);
1072 safe_scale_64bit (m_val, num, den, &tmp);
1073 ret.m_val = MIN (tmp, max_count);
1074 ret.m_quality = MIN (m_quality, ADJUSTED);
1075 return ret;
1076 }
1077
1078 profile_count apply_scale (profile_count num, profile_count den) const
1079 {
1080 if (*this == zero ())
1081 return *this;
1082 if (num == zero ())
1083 return num;
1084 if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
1085 return uninitialized ();
1086 if (num == den)
1087 return *this;
1088 gcc_checking_assert (den.m_val);
1089
1090 profile_count ret;
1091 uint64_t val;
1092 safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
1093 ret.m_val = MIN (val, max_count);
1094 ret.m_quality = MIN (MIN (MIN (m_quality, ADJUSTED),
1095 num.m_quality), den.m_quality);
1096 /* Be sure that ret is not local if num is global.
1097 Also ensure that ret is not global0 when num is global. */
1098 if (num.ipa_p ())
1099 ret.m_quality = MAX (ret.m_quality,
1100 num == num.ipa () ? GUESSED : num.m_quality);
1101 return ret;
1102 }
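/* A typical use of the count/count overload above (an illustrative sketch,
   not a prescribed pattern): when a region of code is duplicated or moved,
   each block's count can be rescaled as
   count.apply_scale (new_entry_count, old_entry_count), so the region keeps
   its internal shape while matching its new entry count. */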
1103
1104 /* Return THIS with quality dropped to GUESSED_LOCAL. */
1105 profile_count guessed_local () const
1106 {
1107 profile_count ret = *this;
1108 if (!initialized_p ())
1109 return *this;
1110 ret.m_quality = GUESSED_LOCAL;
1111 return ret;
1112 }
1113
1114 /* We know that the profile is globally 0 but keep the local profile if present. */
1115 profile_count global0 () const
1116 {
1117 profile_count ret = *this;
1118 if (!initialized_p ())
1119 return *this;
1120 ret.m_quality = GUESSED_GLOBAL0;
1121 return ret;
1122 }
1123
1124 /* We know that the profile is globally an adjusted 0 but keep the local
1125 profile if present. */
1126 profile_count global0adjusted () const
1127 {
1128 profile_count ret = *this;
1129 if (!initialized_p ())
1130 return *this;
1131 ret.m_quality = GUESSED_GLOBAL0_ADJUSTED;
1132 return ret;
1133 }
1134
1135 /* Return THIS with quality dropped to GUESSED. */
1136 profile_count guessed () const
1137 {
1138 profile_count ret = *this;
1139 ret.m_quality = MIN (ret.m_quality, GUESSED);
1140 return ret;
1141 }
1142
1143 /* Return a variant of the profile count which is always safe to compare
1144 across functions. */
1145 profile_count ipa () const
1146 {
1147 if (m_quality > GUESSED_GLOBAL0_ADJUSTED)
1148 return *this;
1149 if (m_quality == GUESSED_GLOBAL0)
1150 return zero ();
1151 if (m_quality == GUESSED_GLOBAL0_ADJUSTED)
1152 return adjusted_zero ();
1153 return uninitialized ();
1154 }
1155
1156 /* Return THIS with quality dropped to AFDO. */
1157 profile_count afdo () const
1158 {
1159 profile_count ret = *this;
1160 ret.m_quality = AFDO;
1161 return ret;
1162 }
1163
1164 /* Return the probability of an event with counter *THIS within an event
1165 with counter OVERALL. */
1166 profile_probability probability_in (const profile_count overall) const
1167 {
1168 if (*this == zero ()
1169 && !(overall == zero ()))
1170 return profile_probability::never ();
1171 if (!initialized_p () || !overall.initialized_p ()
1172 || !overall.m_val)
1173 return profile_probability::uninitialized ();
1174 if (*this == overall && m_quality == PRECISE)
1175 return profile_probability::always ();
1176 profile_probability ret;
1177 gcc_checking_assert (compatible_p (overall));
1178
1179 if (overall.m_val < m_val)
1180 {
1181 ret.m_val = profile_probability::max_probability;
1182 ret.m_quality = GUESSED;
1183 return ret;
1184 }
1185 else
1186 ret.m_val = RDIV (m_val * profile_probability::max_probability,
1187 overall.m_val);
1188 ret.m_quality = MIN (MAX (MIN (m_quality, overall.m_quality),
1189 GUESSED), ADJUSTED);
1190 return ret;
1191 }
1192
1193 int to_frequency (struct function *fun) const;
1194 int to_cgraph_frequency (profile_count entry_bb_count) const;
1195 sreal to_sreal_scale (profile_count in, bool *known = NULL) const;
1196
1197 /* Output THIS to F. */
1198 void dump (FILE *f) const;
1199
1200 /* Print THIS to stderr. */
1201 void debug () const;
1202
1203 /* Return true if THIS is known to differ significantly from OTHER. */
1204 bool differs_from_p (profile_count other) const;
1205
1206 /* We want to scale the profile across a function boundary from NUM to DEN.
1207 Take care of the corner case when NUM and DEN are zeros of incompatible
1208 kinds. */
1209 static void adjust_for_ipa_scaling (profile_count *num, profile_count *den);
1210
1211 /* *THIS is the count of a BB which is known to be executed IPA times.
1212 Combine this information into the BB counter. This means returning IPA
1213 if it is nonzero, not changing anything if IPA is uninitialized,
1214 and if IPA is zero, turning *THIS into the corresponding local profile
1215 with global0. */
1216 profile_count combine_with_ipa_count (profile_count ipa);
1217
1218 /* Same as combine_with_ipa_count but inside function with count IPA2. */
1219 profile_count combine_with_ipa_count_within
1220 (profile_count ipa, profile_count ipa2);
1221
1222 /* The profiling runtime uses gcov_type, which is usually a 64-bit integer.
1223 Conversions back and forth are used to read the coverage and get it
1224 into the internal representation. */
1225 static profile_count from_gcov_type (gcov_type v,
1226 profile_quality quality = PRECISE);
1227
1228 /* LTO streaming support. */
1229 static profile_count stream_in (class lto_input_block *);
1230 void stream_out (struct output_block *);
1231 void stream_out (struct lto_output_stream *);
1232 };
1233 #endif