1 /* Profile counter container type.
2 Copyright (C) 2017-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_PROFILE_COUNT_H
22 #define GCC_PROFILE_COUNT_H
23
24 struct function;
25 class profile_count;
26
27 /* Quality of the profile count. Because gengtype does not support enums
28 inside of classes, this is in global namespace. */
29 enum profile_quality {
30 /* Uninitialized value. */
31 profile_uninitialized,
32 /* Profile is based on static branch prediction heuristics and may
33 or may not match reality. It is local to the function and cannot be compared
34 inter-procedurally. Never used by probabilities (they are always local).
35 */
36 profile_guessed_local,
37 /* Profile was read from feedback and was 0; we used local heuristics to guess
38 better. This is the case of functions not run during profile feedback.
39 Never used by probabilities. */
40 profile_guessed_global0,
41
42 /* Same as profile_guessed_global0 but the global count is an adjusted zero. */
43 profile_guessed_global0adjusted,
44
45 /* Profile is based on static branch prediction heuristics. It may or may
46 not reflect reality but it can be compared interprocedurally
47 (for example, we inlined a function without profile feedback into a
48 function with feedback and propagated the profile from that).
49 Never used by probabilities. */
50 profile_guessed,
51 /* Profile was determined by autofdo. */
52 profile_afdo,
53 /* Profile was originally based on feedback but it was adjusted
54 by code-duplicating optimizations. It may not precisely reflect the
55 particular code path. */
56 profile_adjusted,
57 /* Profile was read from profile feedback or determined by accurate static
58 method. */
59 profile_precise
60 };
61
62 extern const char *profile_quality_as_string (enum profile_quality);
63 extern bool parse_profile_quality (const char *value,
64 profile_quality *quality);
65
66 /* The base value for branch probability notes and edge probabilities. */
67 #define REG_BR_PROB_BASE 10000
68
69 #define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
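/* For example (rounding to nearest): RDIV (7, 2) == 4 and RDIV (2, 7) == 0.  */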
70
71 bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);
72
73 /* Compute RES = (A*B + C/2)/C, capping the result, and return false if an overflow happened. */
74
75 inline bool
76 safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
77 {
78 #if (GCC_VERSION >= 5000)
79 uint64_t tmp;
80 if (!__builtin_mul_overflow (a, b, &tmp)
81 && !__builtin_add_overflow (tmp, c/2, &tmp))
82 {
83 *res = tmp / c;
84 return true;
85 }
86 if (c == 1)
87 {
88 *res = (uint64_t) -1;
89 return false;
90 }
91 #else
92 if (a < ((uint64_t)1 << 31)
93 && b < ((uint64_t)1 << 31)
94 && c < ((uint64_t)1 << 31))
95 {
96 *res = (a * b + (c / 2)) / c;
97 return true;
98 }
99 #endif
100 return slow_safe_scale_64bit (a, b, c, res);
101 }
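/* A minimal usage sketch (illustrative only; count, num and den stand for
   hypothetical 64-bit values):

     uint64_t scaled;
     bool ok = safe_scale_64bit (count, num, den, &scaled);
     // If ok, scaled == (count * num + den / 2) / den; otherwise the
     // computation overflowed and scaled holds a capped value.  */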
102
103 /* Data type to hold probabilities. It implements fixed point arithmetic
104 with capping, so a probability is always in the range [0,1]; scalings that
105 would produce values greater than 1 need to be represented otherwise.
106
107 In addition to the actual value, the quality of the profile is tracked and
108 propagated through all operations. The special value UNINITIALIZED is used
109 for probabilities that have not been determined yet (for example because of
110 -fno-guess-branch-probability).
111
112 Typically probabilities are derived from profile feedback (via
113 probability_in_gcov_type), autoFDO or guessed statically and then propagated
114 throughout the compilation.
115
116 Named probabilities are available:
117 - never (0 probability)
118 - guessed_never
119 - very_unlikely (1/2000 probability)
120 - unlikely (1/5 probability)
121 - even (1/2 probability)
122 - likely (4/5 probability)
123 - very_likely (1999/2000 probability)
124 - guessed_always
125 - always
126
127 Named probabilities except for never/always are assumed to be statically
128 guessed and thus not necessarily accurate. The difference between never
129 and guessed_never is that the first one should be used only when a
130 well-behaving program will very likely not execute the "never" path,
131 for example a path leading to an abort () call or to exception handling.
132
133 Always and guessed_always probabilities are symmetric.
134
135 For legacy code we support conversion to/from REG_BR_PROB_BASE based fixpoint
136 integer arithmetic. Once the code is converted to branch probabilities,
137 these conversions will probably go away because they are lossy.
138 */
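/* A short usage sketch (illustrative only; it uses only members declared
   below, with hypothetical inputs):

     // An edge taken 300 times out of 1000 executions.
     profile_probability p
       = profile_probability::probability_in_gcov_type (300, 1000);
     profile_probability q = p.invert ();   // roughly 0.7
     gcov_type scaled = p.apply (1000000);  // roughly 300000  */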
139
140 class GTY((user)) profile_probability
141 {
142 static const int n_bits = 29;
143 /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
144 will lead to harder multiplication sequences. */
145 static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
146 static const uint32_t uninitialized_probability
147 = ((uint32_t) 1 << (n_bits - 1)) - 1;
148
149 uint32_t m_val : 29;
150 enum profile_quality m_quality : 3;
151
152 friend class profile_count;
153 public:
154 profile_probability (): m_val (uninitialized_probability),
155 m_quality (profile_guessed)
156 {}
157
158 profile_probability (uint32_t val, profile_quality quality):
159 m_val (val), m_quality (quality)
160 {}
161
162 /* Named probabilities. */
163 static profile_probability never ()
164 {
165 profile_probability ret;
166 ret.m_val = 0;
167 ret.m_quality = profile_precise;
168 return ret;
169 }
170 static profile_probability guessed_never ()
171 {
172 profile_probability ret;
173 ret.m_val = 0;
174 ret.m_quality = profile_guessed;
175 return ret;
176 }
177 static profile_probability very_unlikely ()
178 {
179 /* Be consistent with PROB_VERY_UNLIKELY in predict.h. */
180 profile_probability r
181 = profile_probability::guessed_always ().apply_scale (1, 2000);
182 r.m_val--;
183 return r;
184 }
185 static profile_probability unlikely ()
186 {
187 /* Be consistent with PROB_UNLIKELY in predict.h. */
188 profile_probability r
189 = profile_probability::guessed_always ().apply_scale (1, 5);
190 r.m_val--;
191 return r;
192 }
193 static profile_probability even ()
194 {
195 return profile_probability::guessed_always ().apply_scale (1, 2);
196 }
197 static profile_probability very_likely ()
198 {
199 return profile_probability::always () - very_unlikely ();
200 }
201 static profile_probability likely ()
202 {
203 return profile_probability::always () - unlikely ();
204 }
205 static profile_probability guessed_always ()
206 {
207 profile_probability ret;
208 ret.m_val = max_probability;
209 ret.m_quality = profile_guessed;
210 return ret;
211 }
212 static profile_probability always ()
213 {
214 profile_probability ret;
215 ret.m_val = max_probability;
216 ret.m_quality = profile_precise;
217 return ret;
218 }
219 /* Probabilities which have not been initialized. Either because
220 initialization did not happen yet or because the profile is unknown. */
221 static profile_probability uninitialized ()
222 {
223 profile_probability c;
224 c.m_val = uninitialized_probability;
225 c.m_quality = profile_guessed;
226 return c;
227 }
228
229
230 /* Return true if value has been initialized. */
231 bool initialized_p () const
232 {
233 return m_val != uninitialized_probability;
234 }
235 /* Return true if value can be trusted. */
236 bool reliable_p () const
237 {
238 return m_quality >= profile_adjusted;
239 }
240
241 /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetic.
242 This is mostly to support legacy code and should go away. */
243 static profile_probability from_reg_br_prob_base (int v)
244 {
245 profile_probability ret;
246 gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE);
247 ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE);
248 ret.m_quality = profile_guessed;
249 return ret;
250 }
251 int to_reg_br_prob_base () const
252 {
253 gcc_checking_assert (initialized_p ());
254 return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability);
255 }
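/* For example (illustrative only), a 50% probability round-trips exactly:

     profile_probability half
       = profile_probability::from_reg_br_prob_base (REG_BR_PROB_BASE / 2);
     // half.to_reg_br_prob_base () == REG_BR_PROB_BASE / 2  */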
256
257 /* Conversion to and from RTL representation of profile probabilities. */
258 static profile_probability from_reg_br_prob_note (int v)
259 {
260 profile_probability ret;
261 ret.m_val = ((unsigned int)v) / 8;
262 ret.m_quality = (enum profile_quality)(v & 7);
263 return ret;
264 }
265 int to_reg_br_prob_note () const
266 {
267 gcc_checking_assert (initialized_p ());
268 int ret = m_val * 8 + m_quality;
269 gcc_checking_assert (profile_probability::from_reg_br_prob_note (ret)
270 == *this);
271 return ret;
272 }
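/* The note value packs the probability value in the upper bits and the
   3-bit quality in the low three bits; for example (illustrative only):

     int note = profile_probability::always ().to_reg_br_prob_note ();
     // note == max_probability * 8 + profile_precise, and
     // profile_probability::from_reg_br_prob_note (note)
     //   == profile_probability::always ()  */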
273
274 /* Return VAL1/VAL2. */
275 static profile_probability probability_in_gcov_type
276 (gcov_type val1, gcov_type val2)
277 {
278 profile_probability ret;
279 gcc_checking_assert (val1 >= 0 && val2 > 0);
280 if (val1 > val2)
281 ret.m_val = max_probability;
282 else
283 {
284 uint64_t tmp;
285 safe_scale_64bit (val1, max_probability, val2, &tmp);
286 gcc_checking_assert (tmp <= max_probability);
287 ret.m_val = tmp;
288 }
289 ret.m_quality = profile_precise;
290 return ret;
291 }
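/* For example (illustrative only), a sample where VAL1 exceeds VAL2 is
   capped at certainty:

     profile_probability p
       = profile_probability::probability_in_gcov_type (120, 100);
     // p.to_reg_br_prob_base () == REG_BR_PROB_BASE, quality profile_precise  */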
292
293 /* Basic operations. */
294 bool operator== (const profile_probability &other) const
295 {
296 return m_val == other.m_val && m_quality == other.m_quality;
297 }
298 profile_probability operator+ (const profile_probability &other) const
299 {
300 if (other == profile_probability::never ())
301 return *this;
302 if (*this == profile_probability::never ())
303 return other;
304 if (!initialized_p () || !other.initialized_p ())
305 return profile_probability::uninitialized ();
306
307 profile_probability ret;
308 ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
309 ret.m_quality = MIN (m_quality, other.m_quality);
310 return ret;
311 }
312 profile_probability &operator+= (const profile_probability &other)
313 {
314 if (other == profile_probability::never ())
315 return *this;
316 if (*this == profile_probability::never ())
317 {
318 *this = other;
319 return *this;
320 }
321 if (!initialized_p () || !other.initialized_p ())
322 return *this = profile_probability::uninitialized ();
323 else
324 {
325 m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability);
326 m_quality = MIN (m_quality, other.m_quality);
327 }
328 return *this;
329 }
330 profile_probability operator- (const profile_probability &other) const
331 {
332 if (*this == profile_probability::never ()
333 || other == profile_probability::never ())
334 return *this;
335 if (!initialized_p () || !other.initialized_p ())
336 return profile_probability::uninitialized ();
337 profile_probability ret;
338 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
339 ret.m_quality = MIN (m_quality, other.m_quality);
340 return ret;
341 }
342 profile_probability &operator-= (const profile_probability &other)
343 {
344 if (*this == profile_probability::never ()
345 || other == profile_probability::never ())
346 return *this;
347 if (!initialized_p () || !other.initialized_p ())
348 return *this = profile_probability::uninitialized ();
349 else
350 {
351 m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
352 m_quality = MIN (m_quality, other.m_quality);
353 }
354 return *this;
355 }
356 profile_probability operator* (const profile_probability &other) const
357 {
358 if (*this == profile_probability::never ()
359 || other == profile_probability::never ())
360 return profile_probability::never ();
361 if (!initialized_p () || !other.initialized_p ())
362 return profile_probability::uninitialized ();
363 profile_probability ret;
364 ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
365 ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
366 return ret;
367 }
368 profile_probability &operator*= (const profile_probability &other)
369 {
370 if (*this == profile_probability::never ()
371 || other == profile_probability::never ())
372 return *this = profile_probability::never ();
373 if (!initialized_p () || !other.initialized_p ())
374 return *this = profile_probability::uninitialized ();
375 else
376 {
377 m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability);
378 m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
379 }
380 return *this;
381 }
382 profile_probability operator/ (const profile_probability &other) const
383 {
384 if (*this == profile_probability::never ())
385 return profile_probability::never ();
386 if (!initialized_p () || !other.initialized_p ())
387 return profile_probability::uninitialized ();
388 profile_probability ret;
389 /* If we get probability above 1, mark it as unreliable and return 1. */
390 if (m_val >= other.m_val)
391 {
392 ret.m_val = max_probability;
393 ret.m_quality = MIN (MIN (m_quality, other.m_quality),
394 profile_guessed);
395 return ret;
396 }
397 else if (!m_val)
398 ret.m_val = 0;
399 else
400 {
401 gcc_checking_assert (other.m_val);
402 ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
403 other.m_val),
404 max_probability);
405 }
406 ret.m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
407 return ret;
408 }
409 profile_probability &operator/= (const profile_probability &other)
410 {
411 if (*this == profile_probability::never ())
412 return *this = profile_probability::never ();
413 if (!initialized_p () || !other.initialized_p ())
414 return *this = profile_probability::uninitialized ();
415 else
416 {
417 /* If we get probability above 1, mark it as unreliable
418 and return 1. */
419 if (m_val > other.m_val)
420 {
421 m_val = max_probability;
422 m_quality = MIN (MIN (m_quality, other.m_quality),
423 profile_guessed);
424 return *this;
425 }
426 else if (!m_val)
427 ;
428 else
429 {
430 gcc_checking_assert (other.m_val);
431 m_val = MIN (RDIV ((uint64_t)m_val * max_probability,
432 other.m_val),
433 max_probability);
434 }
435 m_quality = MIN (MIN (m_quality, other.m_quality), profile_adjusted);
436 }
437 return *this;
438 }
439
440 /* Split *THIS (ORIG) probability into 2 probabilities, such that
441 the returned one (FIRST) is *THIS * CPROB and *THIS is
442 adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
443 == ORIG. This is useful e.g. when splitting a conditional
444 branch like:
445 if (cond)
446 goto lab; // ORIG probability
447 into
448 if (cond1)
449 goto lab; // FIRST = ORIG * CPROB probability
450 if (cond2)
451 goto lab; // SECOND probability
452 such that the overall probability of jumping to lab remains
453 the same. CPROB gives the relative probability between the
454 branches. */
455 profile_probability split (const profile_probability &cprob)
456 {
457 profile_probability ret = *this * cprob;
458 /* The following is equivalent to:
459 *this = cprob.invert () * *this / ret.invert ();
460 Avoid scaling when the overall outcome is supposed to be always.
461 Without knowing that one is the inverse of the other, the result would be
462 conservative. */
463 if (!(*this == profile_probability::always ()))
464 *this = (*this - ret) / ret.invert ();
465 return ret;
466 }
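/* For example (illustrative only; values are exact only up to fixed-point
   rounding):

     profile_probability orig = profile_probability::even ();  // ~1/2
     profile_probability first = orig.split (profile_probability::even ());
     // first is ~1/4 and orig is updated to ~1/3, so that
     // first + first.invert () * orig is ~1/2 again.  */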
467
468 gcov_type apply (gcov_type val) const
469 {
470 if (*this == profile_probability::uninitialized ())
471 return val / 2;
472 return RDIV (val * m_val, max_probability);
473 }
474
475 /* Return 1-*THIS. */
476 profile_probability invert () const
477 {
478 return profile_probability::always() - *this;
479 }
480
481 /* Return THIS with quality dropped to GUESSED. */
482 profile_probability guessed () const
483 {
484 profile_probability ret = *this;
485 ret.m_quality = profile_guessed;
486 return ret;
487 }
488
489 /* Return THIS with quality dropped to AFDO. */
490 profile_probability afdo () const
491 {
492 profile_probability ret = *this;
493 ret.m_quality = profile_afdo;
494 return ret;
495 }
496
497 /* Return *THIS * NUM / DEN. */
498 profile_probability apply_scale (int64_t num, int64_t den) const
499 {
500 if (*this == profile_probability::never ())
501 return *this;
502 if (!initialized_p ())
503 return profile_probability::uninitialized ();
504 profile_probability ret;
505 uint64_t tmp;
506 safe_scale_64bit (m_val, num, den, &tmp);
507 ret.m_val = MIN (tmp, max_probability);
508 ret.m_quality = MIN (m_quality, profile_adjusted);
509 return ret;
510 }
511
512 /* Return true when the probability of the edge is reliable.
513
514 The profile guessing code is good at predicting the branch outcome (i.e.
515 taken/not taken); it is right slightly over 75% of the time. It is,
516 however, notoriously poor at predicting the probability itself. In
517 general the guessed profile appears a lot flatter (with probabilities
518 closer to 50%) than reality, so it is a bad idea to use it to drive
519 optimizations such as those disabling dynamic branch prediction for well
520 predictable branches.
521
522 There are two exceptions - edges leading to noreturn calls and edges
523 predicted by the number-of-iterations heuristics are predicted well. This
524 predicate should be able to distinguish those, but at the moment it simply
525 checks for the noreturn heuristic, which is the only one giving a
526 probability over 99% or below 1%. In the future we might want to propagate
527 reliability information across the CFG if we find it useful in multiple places. */
528
529 bool probably_reliable_p () const
530 {
531 if (m_quality >= profile_adjusted)
532 return true;
533 if (!initialized_p ())
534 return false;
535 return m_val < max_probability / 100
536 || m_val > max_probability - max_probability / 100;
537 }
538
539 /* Return false if profile_probability is bogus. */
540 bool verify () const
541 {
542 gcc_checking_assert (m_quality != profile_uninitialized);
543 if (m_val == uninitialized_probability)
544 return m_quality == profile_guessed;
545 else if (m_quality < profile_guessed)
546 return false;
547 return m_val <= max_probability;
548 }
549
550 /* Comparisons are three-state and conservative. False is returned if
551 the inequality cannot be decided. */
552 bool operator< (const profile_probability &other) const
553 {
554 return initialized_p () && other.initialized_p () && m_val < other.m_val;
555 }
556 bool operator> (const profile_probability &other) const
557 {
558 return initialized_p () && other.initialized_p () && m_val > other.m_val;
559 }
560
561 bool operator<= (const profile_probability &other) const
562 {
563 return initialized_p () && other.initialized_p () && m_val <= other.m_val;
564 }
565 bool operator>= (const profile_probability &other) const
566 {
567 return initialized_p () && other.initialized_p () && m_val >= other.m_val;
568 }
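/* Consequently, with an uninitialized operand both of the following
   comparisons are false (illustrative only):

     profile_probability u = profile_probability::uninitialized ();
     // (u < profile_probability::even ()) == false
     // (u >= profile_probability::even ()) == false  */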
569
570 /* Get the value of the probability. */
571 uint32_t value () const { return m_val; }
572
573 /* Get the quality of the probability. */
574 enum profile_quality quality () const { return m_quality; }
575
576 /* Output THIS to F. */
577 void dump (FILE *f) const;
578
579 /* Print THIS to stderr. */
580 void debug () const;
581
582 /* Return true if THIS is known to differ significantly from OTHER. */
583 bool differs_from_p (profile_probability other) const;
584 /* Return true if the difference is greater than 50%. */
585 bool differs_lot_from_p (profile_probability other) const;
586 /* The event happens COUNT1 times with *THIS probability and COUNT2 times
587 with OTHER probability. Return the probability that either *THIS or
588 OTHER happens. */
589 profile_probability combine_with_count (profile_count count1,
590 profile_probability other,
591 profile_count count2) const;
592
593 /* LTO streaming support. */
594 static profile_probability stream_in (struct lto_input_block *);
595 void stream_out (struct output_block *);
596 void stream_out (struct lto_output_stream *);
597 };
598
599 /* Main data type to hold profile counters in GCC. Profile counts originate
600 either from profile feedback, static profile estimation or both. We do not
601 perform whole program profile propagation and thus profile estimation
602 counters are often local to a function, while counters from profile feedback
603 (or special cases of profile estimation) can be used inter-procedurally.
604
605 There are 3 basic types:
606 1) local counters, which are the result of intra-procedural static profile
607 estimation.
608 2) ipa counters, which are the result of profile feedback or a special case
609 of static profile estimation (such as in function main).
610 3) counters which count as 0 inter-procedurally (because the given function
611 was never run in the train feedback run) but which still hold a local
612 static profile estimate.
613
614 Counters of type 1 and 3 cannot be mixed with counters of a different type
615 within an operation (because a whole function should use one type of
616 counter), with the exception that a global zero mixes in most operations
617 where the outcome is well defined.
618
619 To take a local counter and use it inter-procedurally, use the ipa member
620 function, which strips information irrelevant at the inter-procedural level.
621
622 Counters are 61-bit integers representing the number of executions during
623 the train run or a normalized frequency within the function.
624
625 As the profile is maintained during the compilation, many adjustments are
626 made. Not all transformations can be made precisely, most importantly
627 when code is being duplicated. It also may happen that part of the CFG has
628 known profile counts while other parts do not - for example when LTO
629 optimizing a partly profiled program or when the profile was lost due to COMDAT merging.
630
631 For this reason profile_count tracks more information than
632 just an unsigned integer and it is also prepared for profile mismatches.
633 The API of this data type represents operations that are natural
634 on profile counts - sum, difference, and scaling by fractions and
635 probabilities. All operations are safe: they never produce negative counts
636 and they yield an uninitialized result if any of the parameters is
637 uninitialized.
638
639 All comparisons are three-state, as is the handling of probabilities. Thus
640 a < b is not equivalent to !(a >= b).
641
642 The following pre-defined counts are available:
643
644 profile_count::zero () for code that is known to execute zero times at
645 runtime (this can be detected statically, i.e. for paths leading to
646 abort ());
647 profile_count::one () for code that is known to execute once (such as
648 the main () function);
649 profile_count::uninitialized () for unknown execution count.
650
651 */
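/* A short usage sketch (illustrative only; it uses only members declared
   below, with hypothetical inputs):

     profile_count header = profile_count::from_gcov_type (1000);
     profile_count taken = header.apply_scale (9, 10);       // roughly 900
     profile_probability p = taken.probability_in (header);  // roughly 9/10
     // p's quality is capped at profile_adjusted because of the scaling.  */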
652
653 class sreal;
654
655 class GTY(()) profile_count
656 {
657 public:
658 /* Use 61 bits to hold basic block counters. The underlying type should be
659 at least 64 bits wide. Although a counter cannot be negative, the topmost
660 value of the range is reserved to mark an uninitialized counter. */
661
662 static const int n_bits = 61;
663 static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2;
664 private:
665 static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1;
666
667 #if defined (__arm__) && (__GNUC__ >= 6 && __GNUC__ <= 8)
668 /* Work-around for PR88469. A bug in the gcc-6/7/8 PCS layout code
669 incorrectly detects the alignment of a structure where the only
670 64-bit aligned object is a bit-field. We force the alignment of
671 the entire field to mitigate this. */
672 #define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned(8)))
673 #else
674 #define UINT64_BIT_FIELD_ALIGN
675 #endif
676 uint64_t UINT64_BIT_FIELD_ALIGN m_val : n_bits;
677 #undef UINT64_BIT_FIELD_ALIGN
678 enum profile_quality m_quality : 3;
679
680 /* Return true if both values can meaningfully appear in a single function
681 body. All counters in a function are either local or global; otherwise
682 operations between them are not well defined. */
683 bool compatible_p (const profile_count other) const
684 {
685 if (!initialized_p () || !other.initialized_p ())
686 return true;
687 if (*this == profile_count::zero ()
688 || other == profile_count::zero ())
689 return true;
690 return ipa_p () == other.ipa_p ();
691 }
692 public:
693 /* Used for counters which are expected to be never executed. */
694 static profile_count zero ()
695 {
696 return from_gcov_type (0);
697 }
698 static profile_count adjusted_zero ()
699 {
700 profile_count c;
701 c.m_val = 0;
702 c.m_quality = profile_adjusted;
703 return c;
704 }
705 static profile_count guessed_zero ()
706 {
707 profile_count c;
708 c.m_val = 0;
709 c.m_quality = profile_guessed;
710 return c;
711 }
712 static profile_count one ()
713 {
714 return from_gcov_type (1);
715 }
716 /* Value of counters which have not been initialized. Either because
717 initialization did not happen yet or because the profile is unknown. */
718 static profile_count uninitialized ()
719 {
720 profile_count c;
721 c.m_val = uninitialized_count;
722 c.m_quality = profile_guessed_local;
723 return c;
724 }
725
726 /* Conversion to gcov_type is lossy. */
727 gcov_type to_gcov_type () const
728 {
729 gcc_checking_assert (initialized_p ());
730 return m_val;
731 }
732
733 /* Return true if value has been initialized. */
734 bool initialized_p () const
735 {
736 return m_val != uninitialized_count;
737 }
738 /* Return true if value can be trusted. */
739 bool reliable_p () const
740 {
741 return m_quality >= profile_adjusted;
742 }
743 /* Return true if the value can be used inter-procedurally. */
744 bool ipa_p () const
745 {
746 return !initialized_p () || m_quality >= profile_guessed_global0;
747 }
748 /* Return true if quality of profile is precise. */
749 bool precise_p () const
750 {
751 return m_quality == profile_precise;
752 }
753
754 /* Get the value of the count. */
755 uint64_t value () const { return m_val; }
756
757 /* Get the quality of the count. */
758 enum profile_quality quality () const { return m_quality; }
759
760 /* When merging basic blocks, the two different profile counts are unified.
761 Return true if this can be done without losing information about the
762 profile. The only case we care about here is when the first BB contains
763 something that makes it terminate in a way not visible in the CFG. */
764 bool ok_for_merging (profile_count other) const
765 {
766 if (m_quality < profile_adjusted
767 || other.m_quality < profile_adjusted)
768 return true;
769 return !(other < *this);
770 }
771
772 /* When merging two BBs with different counts, pick the common count that
773 looks most representative. */
774 profile_count merge (profile_count other) const
775 {
776 if (*this == other || !other.initialized_p ()
777 || m_quality > other.m_quality)
778 return *this;
779 if (other.m_quality > m_quality
780 || other > *this)
781 return other;
782 return *this;
783 }
784
785 /* Basic operations. */
786 bool operator== (const profile_count &other) const
787 {
788 return m_val == other.m_val && m_quality == other.m_quality;
789 }
790 profile_count operator+ (const profile_count &other) const
791 {
792 if (other == profile_count::zero ())
793 return *this;
794 if (*this == profile_count::zero ())
795 return other;
796 if (!initialized_p () || !other.initialized_p ())
797 return profile_count::uninitialized ();
798
799 profile_count ret;
800 gcc_checking_assert (compatible_p (other));
801 ret.m_val = m_val + other.m_val;
802 ret.m_quality = MIN (m_quality, other.m_quality);
803 return ret;
804 }
805 profile_count &operator+= (const profile_count &other)
806 {
807 if (other == profile_count::zero ())
808 return *this;
809 if (*this == profile_count::zero ())
810 {
811 *this = other;
812 return *this;
813 }
814 if (!initialized_p () || !other.initialized_p ())
815 return *this = profile_count::uninitialized ();
816 else
817 {
818 gcc_checking_assert (compatible_p (other));
819 m_val += other.m_val;
820 m_quality = MIN (m_quality, other.m_quality);
821 }
822 return *this;
823 }
824 profile_count operator- (const profile_count &other) const
825 {
826 if (*this == profile_count::zero () || other == profile_count::zero ())
827 return *this;
828 if (!initialized_p () || !other.initialized_p ())
829 return profile_count::uninitialized ();
830 gcc_checking_assert (compatible_p (other));
831 profile_count ret;
832 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
833 ret.m_quality = MIN (m_quality, other.m_quality);
834 return ret;
835 }
836 profile_count &operator-= (const profile_count &other)
837 {
838 if (*this == profile_count::zero () || other == profile_count::zero ())
839 return *this;
840 if (!initialized_p () || !other.initialized_p ())
841 return *this = profile_count::uninitialized ();
842 else
843 {
844 gcc_checking_assert (compatible_p (other));
845 m_val = m_val >= other.m_val ? m_val - other.m_val: 0;
846 m_quality = MIN (m_quality, other.m_quality);
847 }
848 return *this;
849 }
850
851 /* Return false if profile_count is bogus. */
852 bool verify () const
853 {
854 gcc_checking_assert (m_quality != profile_uninitialized);
855 return m_val != uninitialized_count || m_quality == profile_guessed_local;
856 }
857
858 /* Comparisons are three-state and conservative. False is returned if
859 the inequality cannot be decided. */
860 bool operator< (const profile_count &other) const
861 {
862 if (!initialized_p () || !other.initialized_p ())
863 return false;
864 if (*this == profile_count::zero ())
865 return !(other == profile_count::zero ());
866 if (other == profile_count::zero ())
867 return false;
868 gcc_checking_assert (compatible_p (other));
869 return m_val < other.m_val;
870 }
871 bool operator> (const profile_count &other) const
872 {
873 if (!initialized_p () || !other.initialized_p ())
874 return false;
875 if (*this == profile_count::zero ())
876 return false;
877 if (other == profile_count::zero ())
878 return !(*this == profile_count::zero ());
879 gcc_checking_assert (compatible_p (other));
880 return m_val > other.m_val;
881 }
882 bool operator< (const gcov_type other) const
883 {
884 gcc_checking_assert (ipa_p ());
885 gcc_checking_assert (other >= 0);
886 return initialized_p () && m_val < (uint64_t) other;
887 }
888 bool operator> (const gcov_type other) const
889 {
890 gcc_checking_assert (ipa_p ());
891 gcc_checking_assert (other >= 0);
892 return initialized_p () && m_val > (uint64_t) other;
893 }
894
895 bool operator<= (const profile_count &other) const
896 {
897 if (!initialized_p () || !other.initialized_p ())
898 return false;
899 if (*this == profile_count::zero ())
900 return true;
901 if (other == profile_count::zero ())
902 return (*this == profile_count::zero ());
903 gcc_checking_assert (compatible_p (other));
904 return m_val <= other.m_val;
905 }
906 bool operator>= (const profile_count &other) const
907 {
908 if (!initialized_p () || !other.initialized_p ())
909 return false;
910 if (other == profile_count::zero ())
911 return true;
912 if (*this == profile_count::zero ())
913 return (other == profile_count::zero ());
914 gcc_checking_assert (compatible_p (other));
915 return m_val >= other.m_val;
916 }
917 bool operator<= (const gcov_type other) const
918 {
919 gcc_checking_assert (ipa_p ());
920 gcc_checking_assert (other >= 0);
921 return initialized_p () && m_val <= (uint64_t) other;
922 }
923 bool operator>= (const gcov_type other) const
924 {
925 gcc_checking_assert (ipa_p ());
926 gcc_checking_assert (other >= 0);
927 return initialized_p () && m_val >= (uint64_t) other;
928 }
929 /* Return true when value is not zero and can be used for scaling.
930 This is different from *this > 0 because that requires the counter to
931 be IPA. */
932 bool nonzero_p () const
933 {
934 return initialized_p () && m_val != 0;
935 }
936
937 /* Force the counter to be nonzero. */
938 profile_count force_nonzero () const
939 {
940 if (!initialized_p ())
941 return *this;
942 profile_count ret = *this;
943 if (ret.m_val == 0)
944 {
945 ret.m_val = 1;
946 ret.m_quality = MIN (m_quality, profile_adjusted);
947 }
948 return ret;
949 }
950
951 profile_count max (profile_count other) const
952 {
953 if (!initialized_p ())
954 return other;
955 if (!other.initialized_p ())
956 return *this;
957 if (*this == profile_count::zero ())
958 return other;
959 if (other == profile_count::zero ())
960 return *this;
961 gcc_checking_assert (compatible_p (other));
962 if (m_val < other.m_val || (m_val == other.m_val
963 && m_quality < other.m_quality))
964 return other;
965 return *this;
966 }
967
968 /* PROB is a probability in scale 0...REG_BR_PROB_BASE. Scale counter
969 accordingly. */
970 profile_count apply_probability (int prob) const
971 {
972 gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
973 if (m_val == 0)
974 return *this;
975 if (!initialized_p ())
976 return profile_count::uninitialized ();
977 profile_count ret;
978 ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE);
979 ret.m_quality = MIN (m_quality, profile_adjusted);
980 return ret;
981 }
982
983 /* Scale counter according to PROB. */
984 profile_count apply_probability (profile_probability prob) const
985 {
986 if (*this == profile_count::zero ())
987 return *this;
988 if (prob == profile_probability::never ())
989 return profile_count::zero ();
990 if (!initialized_p ())
991 return profile_count::uninitialized ();
992 profile_count ret;
993 uint64_t tmp;
994 safe_scale_64bit (m_val, prob.m_val, profile_probability::max_probability,
995 &tmp);
996 ret.m_val = tmp;
997 ret.m_quality = MIN (m_quality, prob.m_quality);
998 return ret;
999 }
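/* For example (illustrative only), scaling a block count by an edge
   probability:

     profile_count bb = profile_count::from_gcov_type (400);
     profile_count edge = bb.apply_probability (profile_probability::even ());
     // edge is roughly 200; its quality is the minimum of the two inputs.  */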
1000 /* Return *THIS * NUM / DEN. */
1001 profile_count apply_scale (int64_t num, int64_t den) const
1002 {
1003 if (m_val == 0)
1004 return *this;
1005 if (!initialized_p ())
1006 return profile_count::uninitialized ();
1007 profile_count ret;
1008 uint64_t tmp;
1009
1010 gcc_checking_assert (num >= 0 && den > 0);
1011 safe_scale_64bit (m_val, num, den, &tmp);
1012 ret.m_val = MIN (tmp, max_count);
1013 ret.m_quality = MIN (m_quality, profile_adjusted);
1014 return ret;
1015 }
1016 profile_count apply_scale (profile_count num, profile_count den) const
1017 {
1018 if (*this == profile_count::zero ())
1019 return *this;
1020 if (num == profile_count::zero ())
1021 return num;
1022 if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
1023 return profile_count::uninitialized ();
1024 if (num == den)
1025 return *this;
1026 gcc_checking_assert (den.m_val);
1027
1028 profile_count ret;
1029 uint64_t val;
1030 safe_scale_64bit (m_val, num.m_val, den.m_val, &val);
1031 ret.m_val = MIN (val, max_count);
1032 ret.m_quality = MIN (MIN (MIN (m_quality, profile_adjusted),
1033 num.m_quality), den.m_quality);
1034 if (num.ipa_p () && !ret.ipa_p ())
1035 ret.m_quality = MIN (num.m_quality, profile_guessed);
1036 return ret;
1037 }
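/* For example (illustrative only), when a duplicated region's entry count
   changes from old_entry to new_entry, each count in the region can be
   rescaled (bb_count, new_entry and old_entry are hypothetical counts):

     profile_count scaled = bb_count.apply_scale (new_entry, old_entry);
     // The result's quality is at most profile_adjusted.  */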
1038
1039 /* Return THIS with quality dropped to GUESSED_LOCAL. */
1040 profile_count guessed_local () const
1041 {
1042 profile_count ret = *this;
1043 if (!initialized_p ())
1044 return *this;
1045 ret.m_quality = profile_guessed_local;
1046 return ret;
1047 }
1048
1049 /* We know that profile is globally 0 but keep local profile if present. */
1050 profile_count global0 () const
1051 {
1052 profile_count ret = *this;
1053 if (!initialized_p ())
1054 return *this;
1055 ret.m_quality = profile_guessed_global0;
1056 return ret;
1057 }
1058
1059 /* We know that profile is globally adjusted 0 but keep local profile
1060 if present. */
1061 profile_count global0adjusted () const
1062 {
1063 profile_count ret = *this;
1064 if (!initialized_p ())
1065 return *this;
1066 ret.m_quality = profile_guessed_global0adjusted;
1067 return ret;
1068 }
1069
1070 /* Return THIS with quality dropped to GUESSED. */
1071 profile_count guessed () const
1072 {
1073 profile_count ret = *this;
1074 ret.m_quality = MIN (ret.m_quality, profile_guessed);
1075 return ret;
1076 }
1077
1078 /* Return a variant of the profile count which is always safe to compare
1079 across functions. */
1080 profile_count ipa () const
1081 {
1082 if (m_quality > profile_guessed_global0adjusted)
1083 return *this;
1084 if (m_quality == profile_guessed_global0)
1085 return profile_count::zero ();
1086 if (m_quality == profile_guessed_global0adjusted)
1087 return profile_count::adjusted_zero ();
1088 return profile_count::uninitialized ();
1089 }
1090
1091 /* Return THIS with quality dropped to AFDO. */
1092 profile_count afdo () const
1093 {
1094 profile_count ret = *this;
1095 ret.m_quality = profile_afdo;
1096 return ret;
1097 }
1098
1099 /* Return probability of event with counter THIS within event with counter
1100 OVERALL. */
1101 profile_probability probability_in (const profile_count overall) const
1102 {
1103 if (*this == profile_count::zero ()
1104 && !(overall == profile_count::zero ()))
1105 return profile_probability::never ();
1106 if (!initialized_p () || !overall.initialized_p ()
1107 || !overall.m_val)
1108 return profile_probability::uninitialized ();
1109 if (*this == overall && m_quality == profile_precise)
1110 return profile_probability::always ();
1111 profile_probability ret;
1112 gcc_checking_assert (compatible_p (overall));
1113
1114 if (overall.m_val < m_val)
1115 {
1116 ret.m_val = profile_probability::max_probability;
1117 ret.m_quality = profile_guessed;
1118 return ret;
1119 }
1120 else
1121 ret.m_val = RDIV (m_val * profile_probability::max_probability,
1122 overall.m_val);
1123 ret.m_quality = MIN (MAX (MIN (m_quality, overall.m_quality),
1124 profile_guessed), profile_adjusted);
1125 return ret;
1126 }
1127
1128 int to_frequency (struct function *fun) const;
1129 int to_cgraph_frequency (profile_count entry_bb_count) const;
1130 sreal to_sreal_scale (profile_count in, bool *known = NULL) const;
1131
1132 /* Output THIS to F. */
1133 void dump (FILE *f) const;
1134
1135 /* Print THIS to stderr. */
1136 void debug () const;
1137
1138 /* Return true if THIS is known to differ significantly from OTHER. */
1139 bool differs_from_p (profile_count other) const;
1140
1141 /* We want to scale the profile across a function boundary from NUM to DEN.
1142 Take care of the special case when NUM and DEN are zeros of
1143 incompatible kinds. */
1144 static void adjust_for_ipa_scaling (profile_count *num, profile_count *den);
1145
1146 /* THIS is a count of a bb which is known to be executed IPA times.
1147 Combine this information into the bb counter. This means returning IPA
1148 if it is nonzero, not changing anything if IPA is uninitialized,
1149 and, if IPA is zero, turning THIS into the corresponding local profile
1150 with global0. */
1151 profile_count combine_with_ipa_count (profile_count ipa);
1152
1153 /* The profiling runtime uses gcov_type, which is usually a 64-bit integer.
1154 Conversions back and forth are used to read the coverage and get it
1155 into the internal representation. */
1156 static profile_count from_gcov_type (gcov_type v,
1157 profile_quality quality = profile_precise);
1158
1159 /* LTO streaming support. */
1160 static profile_count stream_in (struct lto_input_block *);
1161 void stream_out (struct output_block *);
1162 void stream_out (struct lto_output_stream *);
1163 };
1164 #endif