59423b59 1/* Branch prediction routines for the GNU compiler.
d353bf18 2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
59423b59 3
e6751e9a 4This file is part of GCC.
59423b59 5
e6751e9a 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
e6751e9a 9version.
59423b59 10
e6751e9a 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
59423b59 15
e6751e9a 16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
59423b59 19
20/* References:
21
22 [1] "Branch Prediction for Free"
23 Ball and Larus; PLDI '93.
24 [2] "Static Branch Frequency and Program Profile Analysis"
25 Wu and Larus; MICRO-27.
26 [3] "Corpus-based Static Branch Prediction"
04641143 27 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
59423b59 28
29
30#include "config.h"
31#include "system.h"
805e22b2 32#include "coretypes.h"
9ef16211 33#include "backend.h"
7c29e30e 34#include "target.h"
35#include "rtl.h"
59423b59 36#include "tree.h"
9ef16211 37#include "gimple.h"
7c29e30e 38#include "cfghooks.h"
39#include "tree-pass.h"
40#include "tm_p.h"
9ef16211 41#include "ssa.h"
7c29e30e 42#include "expmed.h"
43#include "insn-config.h"
44#include "regs.h"
45#include "emit-rtl.h"
46#include "recog.h"
47#include "cgraph.h"
48#include "coverage.h"
49#include "diagnostic-core.h"
50#include "gimple-predict.h"
9ef16211 51#include "alias.h"
b20a8bb4 52#include "fold-const.h"
9ed99284 53#include "calls.h"
94ea8568 54#include "cfganal.h"
94ea8568 55#include "flags.h"
886c1262 56#include "profile.h"
59423b59 57#include "except.h"
d53441c8 58#include "dojump.h"
59#include "explow.h"
d53441c8 60#include "varasm.h"
61#include "stmt.h"
59423b59 62#include "expr.h"
e9d7220b 63#include "sreal.h"
429fa7fa 64#include "params.h"
862be747 65#include "cfgloop.h"
bc61cadb 66#include "internal-fn.h"
dcf1a1ec 67#include "gimple-iterator.h"
073c1fd5 68#include "tree-cfg.h"
05d9c18a 69#include "tree-ssa-loop-niter.h"
073c1fd5 70#include "tree-ssa-loop.h"
d27b0b64 71#include "tree-scalar-evolution.h"
56ff4880 72
2e3c56e8 73/* real constants: 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
 74 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */
8201d1f6 75static sreal real_almost_one, real_br_prob_base,
e9d7220b 76 real_inv_br_prob_base, real_one_half, real_bb_freq_max;
59423b59 77
ee5f6585 78static void combine_predictions_for_insn (rtx_insn *, basic_block);
4ee9c684 79static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
d704ea82 80static void predict_paths_leading_to (basic_block, enum br_predictor, enum prediction);
5707768a 81static void predict_paths_leading_to_edge (edge, enum br_predictor, enum prediction);
ee5f6585 82static bool can_predict_insn_p (const rtx_insn *);
5e96f51e 83
13488c51 84/* Information we hold about each branch predictor.
85 Filled using information from predict.def. */
e6751e9a 86
13488c51 87struct predictor_info
5e96f51e 88{
e99c3a1d 89 const char *const name; /* Name used in the debugging dumps. */
90 const int hitrate; /* Expected hitrate used by
91 predict_insn_def call. */
92 const int flags;
13488c51 93};
5e96f51e 94
eb429644 95/* Use the given predictor without Dempster-Shafer theory if it matches
96 using first_match heuristics. */
97#define PRED_FLAG_FIRST_MATCH 1
98
 99/* Convert a hitrate given in percent to our REG_BR_PROB_BASE representation. */
100
e6751e9a 101#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
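
/* For example (illustrative only, assuming REG_BR_PROB_BASE == 10000):
   HITRATE (75) == (75 * 10000 + 50) / 100 == 7500, i.e. a predictor that is
   right 75% of the time maps to 7500 in this fixed-point representation.  */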
eb429644 102
103#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
e6751e9a 104static const struct predictor_info predictor_info[]= {
13488c51 105#include "predict.def"
106
aa40f561 107 /* Upper bound on predictors. */
eb429644 108 {NULL, 0, 0}
13488c51 109};
110#undef DEF_PREDICTOR
429fa7fa 111
eb7df8c2 112/* Return TRUE if frequency FREQ is considered to be hot. */
f29b326e 113
114static inline bool
8d672d12 115maybe_hot_frequency_p (struct function *fun, int freq)
eb7df8c2 116{
415d1b9a 117 struct cgraph_node *node = cgraph_node::get (fun->decl);
69ad6a32 118 if (!profile_info
119 || !opt_for_fn (fun->decl, flag_branch_probabilities))
eb7df8c2 120 {
125b6d78 121 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
eb7df8c2 122 return false;
125b6d78 123 if (node->frequency == NODE_FREQUENCY_HOT)
eb7df8c2 124 return true;
125 }
3bedbae3 126 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
aa5f4f32 127 return true;
125b6d78 128 if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
34154e27 129 && freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3))
125b6d78 130 return false;
6040d650 131 if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0)
132 return false;
34154e27 133 if (freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency
8d672d12 134 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
eb7df8c2 135 return false;
136 return true;
137}
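
/* Illustrative example (the parameter value is only an assumption): with
   HOT_BB_FREQUENCY_FRACTION set to, say, 1000, a block whose estimated
   frequency is below one thousandth of the entry block's frequency is not
   considered hot; in a function marked as executed once, blocks below 2/3 of
   the entry frequency are also treated as cold.  */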
138
9e179a64 139static gcov_type min_count = -1;
140
141/* Determine the threshold for hot BB counts. */
142
143gcov_type
144get_hot_bb_threshold ()
145{
146 gcov_working_set_t *ws;
147 if (min_count == -1)
148 {
149 ws = find_working_set (PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE));
150 gcc_assert (ws);
151 min_count = ws->min_counter;
152 }
153 return min_count;
154}
155
156/* Set the threshold for hot BB counts. */
157
158void
159set_hot_bb_threshold (gcov_type min)
160{
161 min_count = min;
162}
163
f29b326e 164/* Return TRUE if profile count COUNT is considered to be hot. */
165
94bed7c3 166bool
8d672d12 167maybe_hot_count_p (struct function *fun, gcov_type count)
f29b326e 168{
3bedbae3 169 if (fun && profile_status_for_fn (fun) != PROFILE_READ)
f29b326e 170 return true;
171 /* Code executed at most once is not hot. */
172 if (profile_info->runs >= count)
173 return false;
9e179a64 174 return (count >= get_hot_bb_threshold ());
f29b326e 175}
176
429fa7fa 177/* Return true in case BB can be CPU intensive and should be optimized
41a6f238 178 for maximal performance. */
429fa7fa 179
180bool
8d672d12 181maybe_hot_bb_p (struct function *fun, const_basic_block bb)
429fa7fa 182{
8d672d12 183 gcc_checking_assert (fun);
3bedbae3 184 if (profile_status_for_fn (fun) == PROFILE_READ)
8d672d12 185 return maybe_hot_count_p (fun, bb->count);
186 return maybe_hot_frequency_p (fun, bb->frequency);
eb7df8c2 187}
188
 189/* Return true in case edge E can be CPU intensive and should be optimized
190 for maximal performance. */
191
192bool
193maybe_hot_edge_p (edge e)
194{
f26d8580 195 if (profile_status_for_fn (cfun) == PROFILE_READ)
8d672d12 196 return maybe_hot_count_p (cfun, e->count);
197 return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
429fa7fa 198}
199
dcc9b351 200/* Return true if profile COUNT and FREQUENCY, or the static node frequency
 201 of function FUN, indicate that the code is probably never executed. */
202
203static bool
204probably_never_executed (struct function *fun,
205 gcov_type count, int frequency)
429fa7fa 206{
8d672d12 207 gcc_checking_assert (fun);
69ad6a32 208 if (profile_status_for_fn (fun) == PROFILE_READ)
4befb9f4 209 {
c1acf60c 210 int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
211 if (count * unlikely_count_fraction >= profile_info->runs)
4befb9f4 212 return false;
dcc9b351 213 if (!frequency)
4befb9f4 214 return true;
69ad6a32 215 if (!ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency)
4befb9f4 216 return false;
69ad6a32 217 if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
4befb9f4 218 {
c1acf60c 219 gcov_type computed_count;
220 /* Check for possibility of overflow, in which case entry bb count
221 is large enough to do the division first without losing much
222 precision. */
69ad6a32 223 if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count < REG_BR_PROB_BASE *
34154e27 224 REG_BR_PROB_BASE)
c1acf60c 225 {
226 gcov_type scaled_count
69ad6a32 227 = frequency * ENTRY_BLOCK_PTR_FOR_FN (fun)->count *
34154e27 228 unlikely_count_fraction;
229 computed_count = RDIV (scaled_count,
69ad6a32 230 ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
c1acf60c 231 }
232 else
233 {
69ad6a32 234 computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (fun)->count,
235 ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
c1acf60c 236 computed_count *= frequency * unlikely_count_fraction;
237 }
238 if (computed_count >= profile_info->runs)
239 return false;
4befb9f4 240 }
241 return true;
242 }
69ad6a32 243 if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
415d1b9a 244 && (cgraph_node::get (fun->decl)->frequency
fd6a3c41 245 == NODE_FREQUENCY_UNLIKELY_EXECUTED))
5de92639 246 return true;
429fa7fa 247 return false;
248}
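
/* Worked sketch of the overflow guard above (illustrative only): with
   REG_BR_PROB_BASE == 10000 the threshold is 10^8.  When the entry block
   count is below 10^8, multiplying it by FREQUENCY (at most BB_FREQ_MAX,
   i.e. 10000) and by the small unlikely-count fraction stays far below the
   64-bit gcov_type range, so the multiplication can be done first; for
   larger counts the division is done first at a small cost in precision.  */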
249
80adc5a6 250
dcc9b351 251/* Return true in case BB is probably never executed. */
252
253bool
254probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
255{
256 return probably_never_executed (fun, bb->count, bb->frequency);
257}
258
259
80adc5a6 260/* Return true in case edge E is probably never executed. */
261
262bool
263probably_never_executed_edge_p (struct function *fun, edge e)
429fa7fa 264{
dcc9b351 265 return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
429fa7fa 266}
267
cf262be9 268/* Return true when current function should always be optimized for size. */
269
270bool
271optimize_function_for_size_p (struct function *fun)
272{
cf262be9 273 if (!fun || !fun->decl)
69ad6a32 274 return optimize_size;
415d1b9a 275 cgraph_node *n = cgraph_node::get (fun->decl);
276 return n && n->optimize_for_size_p ();
cf262be9 277}
278
533af0db 279/* Return true when current function should always be optimized for speed. */
280
281bool
282optimize_function_for_speed_p (struct function *fun)
283{
284 return !optimize_function_for_size_p (fun);
7dfb44a0 285}
286
287/* Return TRUE when BB should be optimized for size. */
288
289bool
94ba1cf1 290optimize_bb_for_size_p (const_basic_block bb)
7dfb44a0 291{
b9ea678c 292 return (optimize_function_for_size_p (cfun)
293 || (bb && !maybe_hot_bb_p (cfun, bb)));
7dfb44a0 294}
295
296/* Return TRUE when BB should be optimized for speed. */
297
298bool
94ba1cf1 299optimize_bb_for_speed_p (const_basic_block bb)
7dfb44a0 300{
301 return !optimize_bb_for_size_p (bb);
302}
303
 304/* Return TRUE when edge E should be optimized for size. */
305
306bool
307optimize_edge_for_size_p (edge e)
308{
533af0db 309 return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
7dfb44a0 310}
311
 312/* Return TRUE when edge E should be optimized for speed. */
313
314bool
315optimize_edge_for_speed_p (edge e)
316{
317 return !optimize_edge_for_size_p (e);
318}
319
 320/* Return TRUE when insns currently being expanded should be optimized for size. */
321
322bool
323optimize_insn_for_size_p (void)
324{
533af0db 325 return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
7dfb44a0 326}
327
 328/* Return TRUE when insns currently being expanded should be optimized for speed. */
329
330bool
331optimize_insn_for_speed_p (void)
332{
333 return !optimize_insn_for_size_p ();
334}
335
94ba1cf1 336/* Return TRUE when LOOP should be optimized for size. */
337
338bool
339optimize_loop_for_size_p (struct loop *loop)
340{
341 return optimize_bb_for_size_p (loop->header);
342}
343
344/* Return TRUE when LOOP should be optimized for speed. */
345
346bool
347optimize_loop_for_speed_p (struct loop *loop)
348{
349 return optimize_bb_for_speed_p (loop->header);
350}
351
0bfd8d5c 352/* Return TRUE when LOOP nest should be optimized for speed. */
353
354bool
355optimize_loop_nest_for_speed_p (struct loop *loop)
356{
357 struct loop *l = loop;
358 if (optimize_loop_for_speed_p (loop))
359 return true;
360 l = loop->inner;
53be41ae 361 while (l && l != loop)
0bfd8d5c 362 {
363 if (optimize_loop_for_speed_p (l))
364 return true;
365 if (l->inner)
366 l = l->inner;
367 else if (l->next)
368 l = l->next;
369 else
7baffbd3 370 {
371 while (l != loop && !l->next)
372 l = loop_outer (l);
373 if (l != loop)
374 l = l->next;
375 }
0bfd8d5c 376 }
377 return false;
378}
379
380/* Return TRUE when LOOP nest should be optimized for size. */
381
382bool
383optimize_loop_nest_for_size_p (struct loop *loop)
384{
385 return !optimize_loop_nest_for_speed_p (loop);
386}
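
/* Illustrative sketch only (not part of the original sources): how a pass
   would typically consult the predicates above before applying a transform
   that trades code size for speed.  The function name example_guard_sketch
   is hypothetical.  */

static inline bool
example_guard_sketch (struct loop *loop)
{
  /* Skip code-size-increasing transforms when the loop is cold or the
     function is optimized for size.  */
  if (optimize_loop_for_size_p (loop))
    return false;
  /* Otherwise the loop header is maybe hot; speed-oriented transforms may
     be applied.  */
  return optimize_loop_for_speed_p (loop);
}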
387
4a9d7ef7 388/* Return true when edge E is likely to be well predictable by branch
389 predictor. */
390
391bool
392predictable_edge_p (edge e)
393{
f26d8580 394 if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
4a9d7ef7 395 return false;
396 if ((e->probability
397 <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
398 || (REG_BR_PROB_BASE - e->probability
399 <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
400 return true;
401 return false;
402}
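
/* Example (illustrative; the parameter value is an assumption): if
   PARAM_PREDICTABLE_BRANCH_OUTCOME is 2, the threshold is
   2 * REG_BR_PROB_BASE / 100 == 200, so only edges taken with probability
   at most 2% or at least 98% are reported as well predictable.  */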
403
404
7dfb44a0 405/* Set RTL expansion for BB profile. */
406
407void
408rtl_profile_for_bb (basic_block bb)
409{
8d672d12 410 crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
7dfb44a0 411}
412
413/* Set RTL expansion for edge profile. */
414
415void
416rtl_profile_for_edge (edge e)
417{
418 crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
419}
420
421/* Set RTL expansion to default mode (i.e. when profile info is not known). */
422void
423default_rtl_profile (void)
424{
425 crtl->maybe_hot_insn_p = true;
426}
427
cd0fe062 428/* Return true if one of the outgoing edges is already predicted by
429 PREDICTOR. */
430
4ee9c684 431bool
5493cb9a 432rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
cd0fe062 433{
434 rtx note;
5496dbfc 435 if (!INSN_P (BB_END (bb)))
cd0fe062 436 return false;
5496dbfc 437 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
cd0fe062 438 if (REG_NOTE_KIND (note) == REG_BR_PRED
439 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
440 return true;
441 return false;
442}
5e96f51e 443
eeb030c4 444/* Structure representing predictions at the tree level. */
445
446struct edge_prediction {
447 struct edge_prediction *ep_next;
448 edge ep_edge;
449 enum br_predictor ep_predictor;
450 int ep_probability;
451};
452
06ecf488 453/* This map contains for a basic block the list of predictions for the
454 outgoing edges. */
455
456static hash_map<const_basic_block, edge_prediction *> *bb_predictions;
457
4ee9c684 458/* Return true if one of the outgoing edges is already predicted by
459 PREDICTOR. */
460
461bool
75a70cf9 462gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
4ee9c684 463{
7ea47fbd 464 struct edge_prediction *i;
06ecf488 465 edge_prediction **preds = bb_predictions->get (bb);
b3723726 466
467 if (!preds)
468 return false;
48e1416a 469
06ecf488 470 for (i = *preds; i; i = i->ep_next)
f45e9182 471 if (i->ep_predictor == predictor)
4ee9c684 472 return true;
473 return false;
474}
475
b41438e5 476/* Return true when the probability of edge is reliable.
48e1416a 477
b41438e5 478 The profile guessing code is good at predicting branch outcomes (i.e.
 479 taken/not taken), which are predicted right slightly over 75% of the time.
ab4f0a13 480 It is however notoriously poor at predicting the probability itself.
b41438e5 481 In general the guessed profile appears a lot flatter (with probabilities closer
 482 to 50%) than reality, so it is a bad idea to use it to drive optimizations
 483 such as those disabling dynamic branch prediction for well-predictable
 484 branches.
 485
 486 There are two exceptions - edges leading to noreturn edges and edges
 487 predicted by the number-of-iterations heuristics are predicted well. This function
 488 should be able to distinguish those, but at the moment it simply checks for the
 489 noreturn heuristic, which is the only one giving a probability over 99% or below
ab4f0a13 490 1%. In the future we might want to propagate reliability information across the
b41438e5 491 CFG if we find this information useful in multiple places. */
492static bool
493probability_reliable_p (int prob)
494{
f26d8580 495 return (profile_status_for_fn (cfun) == PROFILE_READ
496 || (profile_status_for_fn (cfun) == PROFILE_GUESSED
b41438e5 497 && (prob <= HITRATE (1) || prob >= HITRATE (99))));
498}
499
500/* Same predicate as above, working on edges. */
501bool
7ecb5bb2 502edge_probability_reliable_p (const_edge e)
b41438e5 503{
504 return probability_reliable_p (e->probability);
505}
506
507/* Same predicate as edge_probability_reliable_p, working on notes. */
508bool
7ecb5bb2 509br_prob_note_reliable_p (const_rtx note)
b41438e5 510{
511 gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
9eb946de 512 return probability_reliable_p (XINT (note, 0));
b41438e5 513}
514
aa157ca4 515static void
ee5f6585 516predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
13488c51 517{
876760f6 518 gcc_assert (any_condjump_p (insn));
b28bedce 519 if (!flag_guess_branch_prob)
520 return;
e6751e9a 521
a1ddb869 522 add_reg_note (insn, REG_BR_PRED,
523 gen_rtx_CONCAT (VOIDmode,
524 GEN_INT ((int) predictor),
525 GEN_INT ((int) probability)));
13488c51 526}
527
528/* Predict insn by given predictor. */
e6751e9a 529
13488c51 530void
ee5f6585 531predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
d598ad0d 532 enum prediction taken)
13488c51 533{
534 int probability = predictor_info[(int) predictor].hitrate;
e6751e9a 535
13488c51 536 if (taken != TAKEN)
537 probability = REG_BR_PROB_BASE - probability;
e6751e9a 538
13488c51 539 predict_insn (insn, predictor, probability);
5e96f51e 540}
541
542/* Predict edge E with given probability if possible. */
e6751e9a 543
13488c51 544void
4ee9c684 545rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
5e96f51e 546{
ee5f6585 547 rtx_insn *last_insn;
5496dbfc 548 last_insn = BB_END (e->src);
5e96f51e 549
550 /* We can store the branch prediction information only about
551 conditional jumps. */
552 if (!any_condjump_p (last_insn))
553 return;
554
555 /* We always store probability of branching. */
556 if (e->flags & EDGE_FALLTHRU)
557 probability = REG_BR_PROB_BASE - probability;
558
13488c51 559 predict_insn (last_insn, predictor, probability);
560}
561
4ee9c684 562/* Predict edge E with the given PROBABILITY. */
563void
75a70cf9 564gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
4ee9c684 565{
f26d8580 566 gcc_assert (profile_status_for_fn (cfun) != PROFILE_GUESSED);
34154e27 567 if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && EDGE_COUNT (e->src->succs) >
568 1)
d5043f32 569 && flag_guess_branch_prob && optimize)
ebd65d12 570 {
b3723726 571 struct edge_prediction *i = XNEW (struct edge_prediction);
06ecf488 572 edge_prediction *&preds = bb_predictions->get_or_insert (e->src);
4ee9c684 573
06ecf488 574 i->ep_next = preds;
575 preds = i;
f45e9182 576 i->ep_probability = probability;
577 i->ep_predictor = predictor;
578 i->ep_edge = e;
ebd65d12 579 }
4ee9c684 580}
581
631fa7de 582/* Remove all predictions on given basic block that are attached
583 to edge E. */
584void
585remove_predictions_associated_with_edge (edge e)
586{
b3723726 587 if (!bb_predictions)
588 return;
589
06ecf488 590 edge_prediction **preds = bb_predictions->get (e->src);
b3723726 591
592 if (preds)
631fa7de 593 {
06ecf488 594 struct edge_prediction **prediction = preds;
b3723726 595 struct edge_prediction *next;
596
631fa7de 597 while (*prediction)
598 {
f45e9182 599 if ((*prediction)->ep_edge == e)
b3723726 600 {
601 next = (*prediction)->ep_next;
602 free (*prediction);
603 *prediction = next;
604 }
631fa7de 605 else
f45e9182 606 prediction = &((*prediction)->ep_next);
631fa7de 607 }
608 }
609}
610
b3723726 611/* Clears the list of predictions stored for BB. */
612
613static void
614clear_bb_predictions (basic_block bb)
615{
06ecf488 616 edge_prediction **preds = bb_predictions->get (bb);
b3723726 617 struct edge_prediction *pred, *next;
618
619 if (!preds)
620 return;
621
06ecf488 622 for (pred = *preds; pred; pred = next)
b3723726 623 {
624 next = pred->ep_next;
625 free (pred);
626 }
627 *preds = NULL;
628}
629
1a12dac4 630/* Return true when we can store a prediction on insn INSN.
 631 At the moment we represent predictions only on conditional
 632 jumps, not on computed jumps or other complicated cases. */
633static bool
ee5f6585 634can_predict_insn_p (const rtx_insn *insn)
1a12dac4 635{
6d7dc5b9 636 return (JUMP_P (insn)
1a12dac4 637 && any_condjump_p (insn)
cd665a06 638 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
1a12dac4 639}
640
13488c51 641/* Predict edge E by given predictor if possible. */
e6751e9a 642
13488c51 643void
d598ad0d 644predict_edge_def (edge e, enum br_predictor predictor,
645 enum prediction taken)
13488c51 646{
647 int probability = predictor_info[(int) predictor].hitrate;
648
649 if (taken != TAKEN)
650 probability = REG_BR_PROB_BASE - probability;
e6751e9a 651
13488c51 652 predict_edge (e, predictor, probability);
653}
654
655/* Invert all branch predictions or probability notes in the INSN. This needs
656 to be done each time we invert the condition used by the jump. */
e6751e9a 657
13488c51 658void
d598ad0d 659invert_br_probabilities (rtx insn)
13488c51 660{
e6751e9a 661 rtx note;
662
663 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
664 if (REG_NOTE_KIND (note) == REG_BR_PROB)
9eb946de 665 XINT (note, 0) = REG_BR_PROB_BASE - XINT (note, 0);
e6751e9a 666 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
667 XEXP (XEXP (note, 0), 1)
668 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
13488c51 669}
670
671/* Dump information about the branch prediction to the output file. */
e6751e9a 672
13488c51 673static void
4ee9c684 674dump_prediction (FILE *file, enum br_predictor predictor, int probability,
d598ad0d 675 basic_block bb, int used)
13488c51 676{
cd665a06 677 edge e;
678 edge_iterator ei;
13488c51 679
4ee9c684 680 if (!file)
13488c51 681 return;
682
cd665a06 683 FOR_EACH_EDGE (e, ei, bb->succs)
684 if (! (e->flags & EDGE_FALLTHRU))
685 break;
13488c51 686
4ee9c684 687 fprintf (file, " %s heuristics%s: %.1f%%",
13488c51 688 predictor_info[predictor].name,
e6751e9a 689 used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);
13488c51 690
691 if (bb->count)
17a81216 692 {
f03df321 693 fprintf (file, " exec %" PRId64, bb->count);
12c94d25 694 if (e)
695 {
f03df321 696 fprintf (file, " hit %" PRId64, e->count);
4ee9c684 697 fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
12c94d25 698 }
17a81216 699 }
e6751e9a 700
4ee9c684 701 fprintf (file, "\n");
13488c51 702}
703
7edd21a5 704/* We cannot predict the probabilities of the outgoing edges of BB. Set them
83c8a977 705 evenly and hope for the best. */
706static void
707set_even_probabilities (basic_block bb)
708{
709 int nedges = 0;
710 edge e;
cd665a06 711 edge_iterator ei;
83c8a977 712
cd665a06 713 FOR_EACH_EDGE (e, ei, bb->succs)
83c8a977 714 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
715 nedges ++;
cd665a06 716 FOR_EACH_EDGE (e, ei, bb->succs)
83c8a977 717 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
718 e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
719 else
720 e->probability = 0;
721}
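
/* Example (illustrative, assuming REG_BR_PROB_BASE == 10000): a block with
   three non-EH, non-fake successors gets
   (REG_BR_PROB_BASE + nedges/2) / nedges == (10000 + 1) / 3 == 3333 on each
   such edge; the rounding term keeps the sum close to REG_BR_PROB_BASE.  */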
722
13488c51 723/* Combine all REG_BR_PRED notes into a single probability and attach REG_BR_PROB
724 note if not already present. Remove now useless REG_BR_PRED notes. */
e6751e9a 725
13488c51 726static void
ee5f6585 727combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
13488c51 728{
83c8a977 729 rtx prob_note;
730 rtx *pnote;
e6751e9a 731 rtx note;
13488c51 732 int best_probability = PROB_EVEN;
b9c74b4d 733 enum br_predictor best_predictor = END_PREDICTORS;
eb429644 734 int combined_probability = REG_BR_PROB_BASE / 2;
735 int d;
49d7c0db 736 bool first_match = false;
737 bool found = false;
13488c51 738
83c8a977 739 if (!can_predict_insn_p (insn))
740 {
741 set_even_probabilities (bb);
742 return;
743 }
744
745 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
746 pnote = &REG_NOTES (insn);
450d042a 747 if (dump_file)
748 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
b3d6de89 749 bb->index);
13488c51 750
751 /* We implement "first match" heuristics and use probability guessed
4ee9c684 752 by predictor with smallest index. */
e6751e9a 753 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
754 if (REG_NOTE_KIND (note) == REG_BR_PRED)
755 {
bc620c5c 756 enum br_predictor predictor = ((enum br_predictor)
757 INTVAL (XEXP (XEXP (note, 0), 0)));
e6751e9a 758 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
759
760 found = true;
761 if (best_predictor > predictor)
762 best_probability = probability, best_predictor = predictor;
763
764 d = (combined_probability * probability
765 + (REG_BR_PROB_BASE - combined_probability)
766 * (REG_BR_PROB_BASE - probability));
767
 768 /* Use FP math to avoid overflows of 32-bit integers. */
c4a616f2 769 if (d == 0)
770 /* If one probability is 0% and one 100%, avoid division by zero. */
771 combined_probability = REG_BR_PROB_BASE / 2;
772 else
773 combined_probability = (((double) combined_probability) * probability
774 * REG_BR_PROB_BASE / d + 0.5);
e6751e9a 775 }
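
  /* Worked example of the Dempster-Shafer combination above (illustrative,
     assuming REG_BR_PROB_BASE == 10000): starting from 5000 (50%), a note
     with probability 9000 gives d == 5000*9000 + 5000*1000 == 50000000 and a
     combined probability of 5000*9000*10000/50000000 == 9000.  A second note
     with probability 7000 then gives d == 9000*7000 + 1000*3000 == 66000000
     and a combined probability of about 9545, i.e. two agreeing predictors
     reinforce each other beyond either one alone.  */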
776
777 /* Decide which heuristic to use. In case we didn't match anything,
 778 use the no_prediction heuristic; in case we did match, use either
 779 first match or Dempster-Shafer theory depending on the flags. */
780
eb429644 781 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
49d7c0db 782 first_match = true;
783
784 if (!found)
4ee9c684 785 dump_prediction (dump_file, PRED_NO_PREDICTION,
786 combined_probability, bb, true);
49d7c0db 787 else
788 {
4ee9c684 789 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
790 bb, !first_match);
791 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
792 bb, first_match);
49d7c0db 793 }
794
795 if (first_match)
eb429644 796 combined_probability = best_probability;
4ee9c684 797 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
49d7c0db 798
799 while (*pnote)
800 {
801 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
802 {
bc620c5c 803 enum br_predictor predictor = ((enum br_predictor)
804 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
49d7c0db 805 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
806
4ee9c684 807 dump_prediction (dump_file, predictor, probability, bb,
49d7c0db 808 !first_match || best_predictor == predictor);
195731ad 809 *pnote = XEXP (*pnote, 1);
49d7c0db 810 }
811 else
195731ad 812 pnote = &XEXP (*pnote, 1);
49d7c0db 813 }
e6751e9a 814
13488c51 815 if (!prob_note)
816 {
9eb946de 817 add_int_reg_note (insn, REG_BR_PROB, combined_probability);
e6751e9a 818
eb429644 819 /* Save the prediction into the CFG in case we are seeing a non-degenerate
 820 conditional jump. */
ea091dfd 821 if (!single_succ_p (bb))
eb429644 822 {
823 BRANCH_EDGE (bb)->probability = combined_probability;
e6751e9a 824 FALLTHRU_EDGE (bb)->probability
825 = REG_BR_PROB_BASE - combined_probability;
eb429644 826 }
13488c51 827 }
ea091dfd 828 else if (!single_succ_p (bb))
d8c70625 829 {
9eb946de 830 int prob = XINT (prob_note, 0);
d8c70625 831
832 BRANCH_EDGE (bb)->probability = prob;
833 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
834 }
835 else
ea091dfd 836 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
5e96f51e 837}
838
4ee9c684 839/* Combine predictions into a single probability and store it into the CFG.
840 Remove now useless prediction entries. */
59423b59 841
4ee9c684 842static void
3f5be5f4 843combine_predictions_for_bb (basic_block bb)
59423b59 844{
4ee9c684 845 int best_probability = PROB_EVEN;
b9c74b4d 846 enum br_predictor best_predictor = END_PREDICTORS;
4ee9c684 847 int combined_probability = REG_BR_PROB_BASE / 2;
848 int d;
849 bool first_match = false;
850 bool found = false;
851 struct edge_prediction *pred;
852 int nedges = 0;
853 edge e, first = NULL, second = NULL;
cd665a06 854 edge_iterator ei;
59423b59 855
cd665a06 856 FOR_EACH_EDGE (e, ei, bb->succs)
4ee9c684 857 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
858 {
cd665a06 859 nedges ++;
4ee9c684 860 if (first && !second)
861 second = e;
862 if (!first)
863 first = e;
864 }
865
48e1416a 866 /* When there is no successor or only one choice, prediction is easy.
4ee9c684 867
868 We are lazy for now and predict only basic blocks with two outgoing
 869 edges. It is possible to predict the generic case too, but we have to
 870 ignore the first match heuristics and do more involved combining. Implement
871 this later. */
872 if (nedges != 2)
873 {
83c8a977 874 if (!bb->count)
875 set_even_probabilities (bb);
b3723726 876 clear_bb_predictions (bb);
3f5be5f4 877 if (dump_file)
878 fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
4ee9c684 879 nedges, bb->index);
880 return;
881 }
882
3f5be5f4 883 if (dump_file)
884 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
4ee9c684 885
06ecf488 886 edge_prediction **preds = bb_predictions->get (bb);
b3723726 887 if (preds)
4ee9c684 888 {
b3723726 889 /* We implement "first match" heuristics and use probability guessed
890 by predictor with smallest index. */
06ecf488 891 for (pred = *preds; pred; pred = pred->ep_next)
b3723726 892 {
b9c74b4d 893 enum br_predictor predictor = pred->ep_predictor;
b3723726 894 int probability = pred->ep_probability;
4ee9c684 895
b3723726 896 if (pred->ep_edge != first)
897 probability = REG_BR_PROB_BASE - probability;
4ee9c684 898
b3723726 899 found = true;
9f694a82 900 /* First match heuristics would be wildly confused if we predicted
901 both directions. */
b3723726 902 if (best_predictor > predictor)
9f694a82 903 {
904 struct edge_prediction *pred2;
905 int prob = probability;
906
c83059be 907 for (pred2 = (struct edge_prediction *) *preds;
908 pred2; pred2 = pred2->ep_next)
9f694a82 909 if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
910 {
911 int probability2 = pred->ep_probability;
912
913 if (pred2->ep_edge != first)
914 probability2 = REG_BR_PROB_BASE - probability2;
915
48e1416a 916 if ((probability < REG_BR_PROB_BASE / 2) !=
9f694a82 917 (probability2 < REG_BR_PROB_BASE / 2))
918 break;
919
 920 /* If the same predictor later gave a better result, go for it! */
921 if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
922 || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
923 prob = probability2;
924 }
925 if (!pred2)
926 best_probability = prob, best_predictor = predictor;
927 }
4ee9c684 928
b3723726 929 d = (combined_probability * probability
930 + (REG_BR_PROB_BASE - combined_probability)
931 * (REG_BR_PROB_BASE - probability));
4ee9c684 932
b3723726 933 /* Use FP math to avoid overflows of 32-bit integers. */
934 if (d == 0)
935 /* If one probability is 0% and one 100%, avoid division by zero. */
936 combined_probability = REG_BR_PROB_BASE / 2;
937 else
938 combined_probability = (((double) combined_probability)
939 * probability
940 * REG_BR_PROB_BASE / d + 0.5);
941 }
4ee9c684 942 }
943
944 /* Decide which heuristic to use. In case we didn't match anything,
 945 use the no_prediction heuristic; in case we did match, use either
 946 first match or Dempster-Shafer theory depending on the flags. */
947
948 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
949 first_match = true;
950
951 if (!found)
3f5be5f4 952 dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
4ee9c684 953 else
954 {
3f5be5f4 955 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
4ee9c684 956 !first_match);
3f5be5f4 957 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
4ee9c684 958 first_match);
959 }
960
961 if (first_match)
962 combined_probability = best_probability;
3f5be5f4 963 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
4ee9c684 964
b3723726 965 if (preds)
4ee9c684 966 {
4077bf7a 967 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
b3723726 968 {
b9c74b4d 969 enum br_predictor predictor = pred->ep_predictor;
b3723726 970 int probability = pred->ep_probability;
4ee9c684 971
b3723726 972 if (pred->ep_edge != EDGE_SUCC (bb, 0))
973 probability = REG_BR_PROB_BASE - probability;
974 dump_prediction (dump_file, predictor, probability, bb,
975 !first_match || best_predictor == predictor);
976 }
4ee9c684 977 }
b3723726 978 clear_bb_predictions (bb);
4ee9c684 979
83c8a977 980 if (!bb->count)
981 {
982 first->probability = combined_probability;
983 second->probability = REG_BR_PROB_BASE - combined_probability;
984 }
4ee9c684 985}
986
fd757b76 987/* Check if T1 and T2 satisfy the IV_COMPARE condition.
988 Return the SSA_NAME if the condition satisfies, NULL otherwise.
989
990 T1 and T2 should be one of the following cases:
991 1. T1 is SSA_NAME, T2 is NULL
992 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
993 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */
994
995static tree
996strips_small_constant (tree t1, tree t2)
997{
998 tree ret = NULL;
999 int value = 0;
1000
1001 if (!t1)
1002 return NULL;
1003 else if (TREE_CODE (t1) == SSA_NAME)
1004 ret = t1;
e913b5cd 1005 else if (tree_fits_shwi_p (t1))
1006 value = tree_to_shwi (t1);
fd757b76 1007 else
1008 return NULL;
1009
1010 if (!t2)
1011 return ret;
e913b5cd 1012 else if (tree_fits_shwi_p (t2))
1013 value = tree_to_shwi (t2);
fd757b76 1014 else if (TREE_CODE (t2) == SSA_NAME)
1015 {
1016 if (ret)
1017 return NULL;
1018 else
1019 ret = t2;
1020 }
1021
1022 if (value <= 4 && value >= -4)
1023 return ret;
1024 else
1025 return NULL;
1026}
1027
1028/* Return the SSA_NAME in T or T's operands.
1029 Return NULL if SSA_NAME cannot be found. */
1030
1031static tree
1032get_base_value (tree t)
1033{
1034 if (TREE_CODE (t) == SSA_NAME)
1035 return t;
1036
1037 if (!BINARY_CLASS_P (t))
1038 return NULL;
1039
1040 switch (TREE_OPERAND_LENGTH (t))
1041 {
1042 case 1:
1043 return strips_small_constant (TREE_OPERAND (t, 0), NULL);
1044 case 2:
1045 return strips_small_constant (TREE_OPERAND (t, 0),
1046 TREE_OPERAND (t, 1));
1047 default:
1048 return NULL;
1049 }
1050}
1051
1052/* Check the compare STMT in LOOP. If it compares an induction
1053 variable to a loop invariant, return true, and save
 1054 LOOP_INVARIANT, COMPARE_CODE, LOOP_STEP and LOOP_IV_BASE.
 1055 Otherwise return false and set LOOP_INVARIANT to NULL. */
1056
1057static bool
1a91d914 1058is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop,
fd757b76 1059 tree *loop_invariant,
1060 enum tree_code *compare_code,
b3269f54 1061 tree *loop_step,
fd757b76 1062 tree *loop_iv_base)
1063{
1064 tree op0, op1, bound, base;
1065 affine_iv iv0, iv1;
1066 enum tree_code code;
b3269f54 1067 tree step;
fd757b76 1068
1069 code = gimple_cond_code (stmt);
1070 *loop_invariant = NULL;
1071
1072 switch (code)
1073 {
1074 case GT_EXPR:
1075 case GE_EXPR:
1076 case NE_EXPR:
1077 case LT_EXPR:
1078 case LE_EXPR:
1079 case EQ_EXPR:
1080 break;
1081
1082 default:
1083 return false;
1084 }
1085
1086 op0 = gimple_cond_lhs (stmt);
1087 op1 = gimple_cond_rhs (stmt);
1088
1089 if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
1090 || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
1091 return false;
1092 if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
1093 return false;
1094 if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
1095 return false;
1096 if (TREE_CODE (iv0.step) != INTEGER_CST
1097 || TREE_CODE (iv1.step) != INTEGER_CST)
1098 return false;
1099 if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
1100 || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
1101 return false;
1102
1103 if (integer_zerop (iv0.step))
1104 {
1105 if (code != NE_EXPR && code != EQ_EXPR)
1106 code = invert_tree_comparison (code, false);
1107 bound = iv0.base;
1108 base = iv1.base;
e913b5cd 1109 if (tree_fits_shwi_p (iv1.step))
b3269f54 1110 step = iv1.step;
fd757b76 1111 else
1112 return false;
1113 }
1114 else
1115 {
1116 bound = iv1.base;
1117 base = iv0.base;
e913b5cd 1118 if (tree_fits_shwi_p (iv0.step))
b3269f54 1119 step = iv0.step;
fd757b76 1120 else
1121 return false;
1122 }
1123
1124 if (TREE_CODE (bound) != INTEGER_CST)
1125 bound = get_base_value (bound);
1126 if (!bound)
1127 return false;
1128 if (TREE_CODE (base) != INTEGER_CST)
1129 base = get_base_value (base);
1130 if (!base)
1131 return false;
1132
1133 *loop_invariant = bound;
1134 *compare_code = code;
1135 *loop_step = step;
1136 *loop_iv_base = base;
1137 return true;
1138}
1139
1140/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */
1141
1142static bool
1143expr_coherent_p (tree t1, tree t2)
1144{
42acab1c 1145 gimple *stmt;
fd757b76 1146 tree ssa_name_1 = NULL;
1147 tree ssa_name_2 = NULL;
1148
1149 gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
1150 gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);
1151
1152 if (t1 == t2)
1153 return true;
1154
1155 if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
1156 return true;
1157 if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
1158 return false;
1159
1160 /* Check to see if t1 is expressed/defined with t2. */
1161 stmt = SSA_NAME_DEF_STMT (t1);
1162 gcc_assert (stmt != NULL);
1163 if (is_gimple_assign (stmt))
1164 {
1165 ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1166 if (ssa_name_1 && ssa_name_1 == t2)
1167 return true;
1168 }
1169
1170 /* Check to see if t2 is expressed/defined with t1. */
1171 stmt = SSA_NAME_DEF_STMT (t2);
1172 gcc_assert (stmt != NULL);
1173 if (is_gimple_assign (stmt))
1174 {
1175 ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1176 if (ssa_name_2 && ssa_name_2 == t1)
1177 return true;
1178 }
1179
1180 /* Compare if t1 and t2's def_stmts are identical. */
1181 if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
1182 return true;
1183 else
1184 return false;
1185}
1186
1187/* Predict branch probability of BB when BB contains a branch that compares
1188 an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The
1189 loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.
1190
1191 E.g.
1192 for (int i = 0; i < bound; i++) {
1193 if (i < bound - 2)
1194 computation_1();
1195 else
1196 computation_2();
1197 }
1198
1199 In this loop, we will predict the branch inside the loop to be taken. */
1200
1201static void
1202predict_iv_comparison (struct loop *loop, basic_block bb,
1203 tree loop_bound_var,
1204 tree loop_iv_base_var,
1205 enum tree_code loop_bound_code,
1206 int loop_bound_step)
1207{
42acab1c 1208 gimple *stmt;
fd757b76 1209 tree compare_var, compare_base;
1210 enum tree_code compare_code;
b3269f54 1211 tree compare_step_var;
fd757b76 1212 edge then_edge;
1213 edge_iterator ei;
1214
1215 if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
1216 || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
1217 || predicted_by_p (bb, PRED_LOOP_EXIT))
1218 return;
1219
1220 stmt = last_stmt (bb);
1221 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
1222 return;
1a91d914 1223 if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
1224 loop, &compare_var,
fd757b76 1225 &compare_code,
b3269f54 1226 &compare_step_var,
fd757b76 1227 &compare_base))
1228 return;
1229
1230 /* Find the taken edge. */
1231 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1232 if (then_edge->flags & EDGE_TRUE_VALUE)
1233 break;
1234
1235 /* When comparing an IV to a loop invariant, NE is more likely to be
1236 taken while EQ is more likely to be not-taken. */
1237 if (compare_code == NE_EXPR)
1238 {
1239 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1240 return;
1241 }
1242 else if (compare_code == EQ_EXPR)
1243 {
1244 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1245 return;
1246 }
1247
1248 if (!expr_coherent_p (loop_iv_base_var, compare_base))
1249 return;
1250
1251 /* If loop bound, base and compare bound are all constants, we can
1252 calculate the probability directly. */
e913b5cd 1253 if (tree_fits_shwi_p (loop_bound_var)
1254 && tree_fits_shwi_p (compare_var)
1255 && tree_fits_shwi_p (compare_base))
fd757b76 1256 {
1257 int probability;
e913b5cd 1258 bool overflow, overall_overflow = false;
ab2c1de8 1259 widest_int compare_count, tem;
b3269f54 1260
b3269f54 1261 /* (loop_bound - base) / compare_step */
c311b856 1262 tem = wi::sub (wi::to_widest (loop_bound_var),
1263 wi::to_widest (compare_base), SIGNED, &overflow);
e913b5cd 1264 overall_overflow |= overflow;
c311b856 1265 widest_int loop_count = wi::div_trunc (tem,
1266 wi::to_widest (compare_step_var),
1267 SIGNED, &overflow);
e913b5cd 1268 overall_overflow |= overflow;
1269
c311b856 1270 if (!wi::neg_p (wi::to_widest (compare_step_var))
fd757b76 1271 ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
b3269f54 1272 {
1273 /* (loop_bound - compare_bound) / compare_step */
c311b856 1274 tem = wi::sub (wi::to_widest (loop_bound_var),
1275 wi::to_widest (compare_var), SIGNED, &overflow);
e913b5cd 1276 overall_overflow |= overflow;
c311b856 1277 compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
1278 SIGNED, &overflow);
e913b5cd 1279 overall_overflow |= overflow;
b3269f54 1280 }
fd757b76 1281 else
b3269f54 1282 {
1283 /* (compare_bound - base) / compare_step */
c311b856 1284 tem = wi::sub (wi::to_widest (compare_var),
1285 wi::to_widest (compare_base), SIGNED, &overflow);
e913b5cd 1286 overall_overflow |= overflow;
c311b856 1287 compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
1288 SIGNED, &overflow);
e913b5cd 1289 overall_overflow |= overflow;
b3269f54 1290 }
fd757b76 1291 if (compare_code == LE_EXPR || compare_code == GE_EXPR)
b3269f54 1292 ++compare_count;
fd757b76 1293 if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
b3269f54 1294 ++loop_count;
796b6678 1295 if (wi::neg_p (compare_count))
e913b5cd 1296 compare_count = 0;
796b6678 1297 if (wi::neg_p (loop_count))
e913b5cd 1298 loop_count = 0;
796b6678 1299 if (loop_count == 0)
fd757b76 1300 probability = 0;
796b6678 1301 else if (wi::cmps (compare_count, loop_count) == 1)
fd757b76 1302 probability = REG_BR_PROB_BASE;
1303 else
b3269f54 1304 {
e913b5cd 1305 tem = compare_count * REG_BR_PROB_BASE;
796b6678 1306 tem = wi::udiv_trunc (tem, loop_count);
b3269f54 1307 probability = tem.to_uhwi ();
1308 }
1309
e913b5cd 1310 if (!overall_overflow)
b3269f54 1311 predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);
1312
fd757b76 1313 return;
1314 }
1315
1316 if (expr_coherent_p (loop_bound_var, compare_var))
1317 {
1318 if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
1319 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1320 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1321 else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
1322 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1323 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1324 else if (loop_bound_code == NE_EXPR)
1325 {
1326 /* If the loop backedge condition is "(i != bound)", we do
1327 the comparison based on the step of IV:
1328 * step < 0 : backedge condition is like (i > bound)
1329 * step > 0 : backedge condition is like (i < bound) */
1330 gcc_assert (loop_bound_step != 0);
1331 if (loop_bound_step > 0
1332 && (compare_code == LT_EXPR
1333 || compare_code == LE_EXPR))
1334 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1335 else if (loop_bound_step < 0
1336 && (compare_code == GT_EXPR
1337 || compare_code == GE_EXPR))
1338 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1339 else
1340 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1341 }
1342 else
1343 /* The branch is predicted not-taken if loop_bound_code is
 1344 opposite to compare_code. */
1345 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1346 }
1347 else if (expr_coherent_p (loop_iv_base_var, compare_var))
1348 {
1349 /* For cases like:
1350 for (i = s; i < h; i++)
1351 if (i > s + 2) ....
1352 The branch should be predicted taken. */
1353 if (loop_bound_step > 0
1354 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1355 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1356 else if (loop_bound_step < 0
1357 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1358 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1359 else
1360 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1361 }
1362}
4ca17abf 1363
1364/* Predict for extra loop exits that will lead to EXIT_EDGE. The extra loop
 1365 exits result from short-circuit conditions that will generate an
1366 if_tmp. E.g.:
1367
1368 if (foo() || global > 10)
1369 break;
1370
1371 This will be translated into:
1372
1373 BB3:
1374 loop header...
1375 BB4:
1376 if foo() goto BB6 else goto BB5
1377 BB5:
1378 if global > 10 goto BB6 else goto BB7
1379 BB6:
1380 goto BB7
1381 BB7:
1382 iftmp = (PHI 0(BB5), 1(BB6))
1383 if iftmp == 1 goto BB8 else goto BB3
1384 BB8:
1385 outside of the loop...
1386
1387 The edge BB7->BB8 is loop exit because BB8 is outside of the loop.
1388 From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
1389 exits. This function takes BB7->BB8 as input, and finds out the extra loop
1390 exits to predict them using PRED_LOOP_EXIT. */
1391
1392static void
1393predict_extra_loop_exits (edge exit_edge)
1394{
1395 unsigned i;
1396 bool check_value_one;
42acab1c 1397 gimple *lhs_def_stmt;
1a91d914 1398 gphi *phi_stmt;
4ca17abf 1399 tree cmp_rhs, cmp_lhs;
42acab1c 1400 gimple *last;
1a91d914 1401 gcond *cmp_stmt;
4ca17abf 1402
1a91d914 1403 last = last_stmt (exit_edge->src);
1404 if (!last)
1405 return;
1406 cmp_stmt = dyn_cast <gcond *> (last);
1407 if (!cmp_stmt)
4ca17abf 1408 return;
1a91d914 1409
4ca17abf 1410 cmp_rhs = gimple_cond_rhs (cmp_stmt);
1411 cmp_lhs = gimple_cond_lhs (cmp_stmt);
1412 if (!TREE_CONSTANT (cmp_rhs)
1413 || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
1414 return;
1415 if (TREE_CODE (cmp_lhs) != SSA_NAME)
1416 return;
1417
1418 /* If check_value_one is true, only the phi_args with value '1' will lead
1419 to loop exit. Otherwise, only the phi_args with value '0' will lead to
1420 loop exit. */
1421 check_value_one = (((integer_onep (cmp_rhs))
1422 ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
1423 ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));
1424
1a91d914 1425 lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
1426 if (!lhs_def_stmt)
1427 return;
1428
1429 phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
1430 if (!phi_stmt)
4ca17abf 1431 return;
1432
1433 for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
1434 {
1435 edge e1;
1436 edge_iterator ei;
1437 tree val = gimple_phi_arg_def (phi_stmt, i);
1438 edge e = gimple_phi_arg_edge (phi_stmt, i);
1439
1440 if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
1441 continue;
1442 if ((check_value_one ^ integer_onep (val)) == 1)
1443 continue;
1444 if (EDGE_COUNT (e->src->succs) != 1)
1445 {
1446 predict_paths_leading_to_edge (e, PRED_LOOP_EXIT, NOT_TAKEN);
1447 continue;
1448 }
1449
1450 FOR_EACH_EDGE (e1, ei, e->src->preds)
1451 predict_paths_leading_to_edge (e1, PRED_LOOP_EXIT, NOT_TAKEN);
1452 }
1453}
1454
7194de72 1455/* Predict edge probabilities by exploiting loop structure. */
1456
4ee9c684 1457static void
7194de72 1458predict_loops (void)
4ee9c684 1459{
17519ba0 1460 struct loop *loop;
c12f2fcb 1461
7fcadf62 1462 /* Try to predict out blocks in a loop that are not part of a
1463 natural loop. */
f21d4d00 1464 FOR_EACH_LOOP (loop, 0)
59423b59 1465 {
7fb12188 1466 basic_block bb, *bbs;
749ea85f 1467 unsigned j, n_exits;
f1f41a6c 1468 vec<edge> exits;
3b0b2309 1469 struct tree_niter_desc niter_desc;
749ea85f 1470 edge ex;
fd757b76 1471 struct nb_iter_bound *nb_iter;
1472 enum tree_code loop_bound_code = ERROR_MARK;
b3269f54 1473 tree loop_bound_step = NULL;
fd757b76 1474 tree loop_bound_var = NULL;
1475 tree loop_iv_base = NULL;
1a91d914 1476 gcond *stmt = NULL;
59423b59 1477
749ea85f 1478 exits = get_loop_exit_edges (loop);
f1f41a6c 1479 n_exits = exits.length ();
5d865361 1480 if (!n_exits)
1481 {
f1f41a6c 1482 exits.release ();
5d865361 1483 continue;
1484 }
ba38e12b 1485
f1f41a6c 1486 FOR_EACH_VEC_ELT (exits, j, ex)
d27b0b64 1487 {
3b0b2309 1488 tree niter = NULL;
d500fef3 1489 HOST_WIDE_INT nitercst;
1490 int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
1491 int probability;
1492 enum br_predictor predictor;
d27b0b64 1493
4ca17abf 1494 predict_extra_loop_exits (ex);
1495
3f78e715 1496 if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
3b0b2309 1497 niter = niter_desc.niter;
1498 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
749ea85f 1499 niter = loop_niter_by_eval (loop, ex);
d27b0b64 1500
3b0b2309 1501 if (TREE_CODE (niter) == INTEGER_CST)
1502 {
e913b5cd 1503 if (tree_fits_uhwi_p (niter)
ed60f27f 1504 && max
1505 && compare_tree_int (niter, max - 1) == -1)
e913b5cd 1506 nitercst = tree_to_uhwi (niter) + 1;
3b0b2309 1507 else
d500fef3 1508 nitercst = max;
1509 predictor = PRED_LOOP_ITERATIONS;
1510 }
1511 /* If we have just one exit and we can derive some information about
1512 the number of iterations of the loop from the statements inside
1513 the loop, use it to predict this exit. */
1514 else if (n_exits == 1)
1515 {
fee017b3 1516 nitercst = estimated_stmt_executions_int (loop);
d500fef3 1517 if (nitercst < 0)
1518 continue;
1519 if (nitercst > max)
1520 nitercst = max;
d27b0b64 1521
d500fef3 1522 predictor = PRED_LOOP_ITERATIONS_GUESSED;
3b0b2309 1523 }
d500fef3 1524 else
1525 continue;
1526
ed60f27f 1527 /* If the prediction for number of iterations is zero, do not
1528 predict the exit edges. */
1529 if (nitercst == 0)
1530 continue;
1531
d500fef3 1532 probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
1533 predict_edge (ex, predictor, probability);
d27b0b64 1534 }
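      /* Example of the computation above (illustrative): if an exit is known
	 or estimated to be reached after about 10 iterations, nitercst == 10
	 and the exit edge gets (REG_BR_PROB_BASE + 5) / 10 == 1000, i.e. the
	 exit is predicted taken roughly once in ten executions of the test.  */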
f1f41a6c 1535 exits.release ();
862be747 1536
fd757b76 1537 /* Find information about loop bound variables. */
1538 for (nb_iter = loop->bounds; nb_iter;
1539 nb_iter = nb_iter->next)
1540 if (nb_iter->stmt
1541 && gimple_code (nb_iter->stmt) == GIMPLE_COND)
1542 {
1a91d914 1543 stmt = as_a <gcond *> (nb_iter->stmt);
fd757b76 1544 break;
1545 }
1546 if (!stmt && last_stmt (loop->header)
1547 && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
1a91d914 1548 stmt = as_a <gcond *> (last_stmt (loop->header));
fd757b76 1549 if (stmt)
1550 is_comparison_with_loop_invariant_p (stmt, loop,
1551 &loop_bound_var,
1552 &loop_bound_code,
1553 &loop_bound_step,
1554 &loop_iv_base);
1555
7fb12188 1556 bbs = get_loop_body (loop);
4ee9c684 1557
7fb12188 1558 for (j = 0; j < loop->num_nodes; j++)
1559 {
1560 int header_found = 0;
1561 edge e;
cd665a06 1562 edge_iterator ei;
7fb12188 1563
1564 bb = bbs[j];
e6751e9a 1565
cd0fe062 1566 /* Bypass loop heuristics on continue statements. These
 1567 statements construct loops via "non-loop" constructs
 1568 in the source language and are better handled
 1569 separately. */
3b0b2309 1570 if (predicted_by_p (bb, PRED_CONTINUE))
cd0fe062 1571 continue;
1572
7fb12188 1573 /* Loop branch heuristics - predict an edge back to a
1574 loop's head as taken. */
c6356c17 1575 if (bb == loop->latch)
1576 {
1577 e = find_edge (loop->latch, loop->header);
1578 if (e)
1579 {
1580 header_found = 1;
1581 predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
1582 }
1583 }
e6751e9a 1584
7fb12188 1585 /* Loop exit heuristics - predict an edge exiting the loop as not taken
41a6f238 1586 if the conditional has no successor that is the loop header. */
d500fef3 1587 if (!header_found
1588 /* If we already used more reliable loop exit predictors, do not
1589 bother with PRED_LOOP_EXIT. */
1590 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
1591 && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
b41438e5 1592 {
 1593 /* For a loop with many exits we don't want to predict all exits
 1594 with a pretty large probability, because if all exits are
 1595 considered in a row, the loop would be predicted to iterate
 1596 almost never. The code to divide the probability by the number of
 1597 exits is very rough. It should compute the number of exits
 1598 taken in each path through the function (not the overall number
 1599 of exits, which might be a lot higher for loops with wide switch
 1600 statements in them) and compute the n-th square root.
 1601
 1602 We limit the minimal probability to 2% to keep
 1603 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction,
 1604 as this was causing a regression in the perl benchmark containing
 1605 such a wide loop. */
48e1416a 1606
b41438e5 1607 int probability = ((REG_BR_PROB_BASE
1608 - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
1609 / n_exits);
1610 if (probability < HITRATE (2))
1611 probability = HITRATE (2);
1612 FOR_EACH_EDGE (e, ei, bb->succs)
1613 if (e->dest->index < NUM_FIXED_BLOCKS
1614 || !flow_bb_inside_loop_p (loop, e->dest))
1615 predict_edge (e, PRED_LOOP_EXIT, probability);
1616 }
fd757b76 1617 if (loop_bound_var)
1618 predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
1619 loop_bound_code,
e913b5cd 1620 tree_to_shwi (loop_bound_step));
7fb12188 1621 }
48e1416a 1622
21dda4ee 1623 /* Free basic blocks from get_loop_body. */
dcd8fd01 1624 free (bbs);
59423b59 1625 }
4ee9c684 1626}
1627
83c8a977 1628/* Attempt to predict probabilities of BB outgoing edges using local
1629 properties. */
1630static void
1631bb_estimate_probability_locally (basic_block bb)
1632{
ee5f6585 1633 rtx_insn *last_insn = BB_END (bb);
83c8a977 1634 rtx cond;
1635
1636 if (! can_predict_insn_p (last_insn))
1637 return;
1638 cond = get_condition (last_insn, NULL, false, false);
1639 if (! cond)
1640 return;
1641
1642 /* Try "pointer heuristic."
1643 A comparison ptr == 0 is predicted as false.
1644 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1645 if (COMPARISON_P (cond)
1646 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
1647 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
1648 {
1649 if (GET_CODE (cond) == EQ)
1650 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
1651 else if (GET_CODE (cond) == NE)
1652 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
1653 }
1654 else
1655
1656 /* Try "opcode heuristic."
1657 EQ tests are usually false and NE tests are usually true. Also,
1658 most quantities are positive, so we can make the appropriate guesses
1659 about signed comparisons against zero. */
1660 switch (GET_CODE (cond))
1661 {
1662 case CONST_INT:
1663 /* Unconditional branch. */
1664 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
1665 cond == const0_rtx ? NOT_TAKEN : TAKEN);
1666 break;
1667
1668 case EQ:
1669 case UNEQ:
 1670 /* Floating point comparisons appear to behave in a very
 1671 unpredictable way because of the special role of = tests in
1672 FP code. */
1673 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1674 ;
1675 /* Comparisons with 0 are often used for booleans and there is
1676 nothing useful to predict about them. */
1677 else if (XEXP (cond, 1) == const0_rtx
1678 || XEXP (cond, 0) == const0_rtx)
1679 ;
1680 else
1681 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
1682 break;
1683
1684 case NE:
1685 case LTGT:
 1686 /* Floating point comparisons appear to behave in a very
 1687 unpredictable way because of the special role of = tests in
1688 FP code. */
1689 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1690 ;
1691 /* Comparisons with 0 are often used for booleans and there is
1692 nothing useful to predict about them. */
1693 else if (XEXP (cond, 1) == const0_rtx
1694 || XEXP (cond, 0) == const0_rtx)
1695 ;
1696 else
1697 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
1698 break;
1699
1700 case ORDERED:
1701 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
1702 break;
1703
1704 case UNORDERED:
1705 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
1706 break;
1707
1708 case LE:
1709 case LT:
1710 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1711 || XEXP (cond, 1) == constm1_rtx)
1712 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
1713 break;
1714
1715 case GE:
1716 case GT:
1717 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1718 || XEXP (cond, 1) == constm1_rtx)
1719 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
1720 break;
1721
1722 default:
1723 break;
1724 }
1725}
1726
7edd21a5 1727/* Set edge->probability for each successor edge of BB. */
83c8a977 1728void
1729guess_outgoing_edge_probabilities (basic_block bb)
1730{
1731 bb_estimate_probability_locally (bb);
1732 combine_predictions_for_insn (BB_END (bb), bb);
1733}
4ee9c684 1734\f
c83059be 1735static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor);
75a70cf9 1736
1737/* Helper function for expr_expected_value. */
42975b1f 1738
1739static tree
2380e91e 1740expr_expected_value_1 (tree type, tree op0, enum tree_code code,
c83059be 1741 tree op1, bitmap visited, enum br_predictor *predictor)
42975b1f 1742{
42acab1c 1743 gimple *def;
75a70cf9 1744
c83059be 1745 if (predictor)
1746 *predictor = PRED_UNCONDITIONAL;
1747
75a70cf9 1748 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
42975b1f 1749 {
75a70cf9 1750 if (TREE_CONSTANT (op0))
1751 return op0;
1752
1753 if (code != SSA_NAME)
1754 return NULL_TREE;
1755
1756 def = SSA_NAME_DEF_STMT (op0);
42975b1f 1757
1758 /* If we were already here, break the infinite cycle. */
6ef9bbe0 1759 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
42975b1f 1760 return NULL;
42975b1f 1761
75a70cf9 1762 if (gimple_code (def) == GIMPLE_PHI)
42975b1f 1763 {
1764 /* All the arguments of the PHI node must have the same expected
1765 constant value. */
75a70cf9 1766 int i, n = gimple_phi_num_args (def);
42975b1f 1767 tree val = NULL, new_val;
4ee9c684 1768
75a70cf9 1769 for (i = 0; i < n; i++)
42975b1f 1770 {
1771 tree arg = PHI_ARG_DEF (def, i);
c83059be 1772 enum br_predictor predictor2;
42975b1f 1773
1774 /* If this PHI has itself as an argument, we cannot
1775 determine the expected value of this argument. However,
86481e89 1776 if we can find an expected constant value for the other
1777 PHI args then we can still be sure that this is
1778 likely a constant. So be optimistic and just
1779 continue with the next argument. */
1780 if (arg == PHI_RESULT (def))
1781 continue;
1782
c83059be 1783 new_val = expr_expected_value (arg, visited, &predictor2);
1784
1785 /* It is difficult to combine value predictors. Simply assume
1786 that the later predictor is weaker and take its prediction. */
1787 if (predictor && *predictor < predictor2)
1788 *predictor = predictor2;
42975b1f 1789 if (!new_val)
1790 return NULL;
1791 if (!val)
1792 val = new_val;
1793 else if (!operand_equal_p (val, new_val, false))
1794 return NULL;
1795 }
1796 return val;
1797 }
75a70cf9 1798 if (is_gimple_assign (def))
42975b1f 1799 {
75a70cf9 1800 if (gimple_assign_lhs (def) != op0)
1801 return NULL;
42975b1f 1802
75a70cf9 1803 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
1804 gimple_assign_rhs1 (def),
1805 gimple_assign_rhs_code (def),
1806 gimple_assign_rhs2 (def),
c83059be 1807 visited, predictor);
75a70cf9 1808 }
1809
1810 if (is_gimple_call (def))
1811 {
1812 tree decl = gimple_call_fndecl (def);
1813 if (!decl)
c83059be 1814 {
1815 if (gimple_call_internal_p (def)
1816 && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
1817 {
1818 gcc_assert (gimple_call_num_args (def) == 3);
1819 tree val = gimple_call_arg (def, 0);
1820 if (TREE_CONSTANT (val))
1821 return val;
1822 if (predictor)
1823 {
c83059be 1824 tree val2 = gimple_call_arg (def, 2);
1825 gcc_assert (TREE_CODE (val2) == INTEGER_CST
1826 && tree_fits_uhwi_p (val2)
1827 && tree_to_uhwi (val2) < END_PREDICTORS);
1828 *predictor = (enum br_predictor) tree_to_uhwi (val2);
1829 }
1830 return gimple_call_arg (def, 1);
1831 }
1832 return NULL;
1833 }
2380e91e 1834 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1835 switch (DECL_FUNCTION_CODE (decl))
1836 {
1837 case BUILT_IN_EXPECT:
1838 {
1839 tree val;
1840 if (gimple_call_num_args (def) != 2)
1841 return NULL;
1842 val = gimple_call_arg (def, 0);
1843 if (TREE_CONSTANT (val))
1844 return val;
c83059be 1845 if (predictor)
1846 *predictor = PRED_BUILTIN_EXPECT;
2380e91e 1847 return gimple_call_arg (def, 1);
1848 }
75a70cf9 1849
2380e91e 1850 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
1851 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1852 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1853 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1854 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1855 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1856 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
1857 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
1858 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1859 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1860 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1861 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1862 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1863 /* Assume that any given atomic operation has low contention,
1864 and thus the compare-and-swap operation succeeds. */
c83059be 1865 if (predictor)
1866 *predictor = PRED_COMPARE_AND_SWAP;
2380e91e 1867 return boolean_true_node;
5213d6c9 1868 default:
1869 break;
75a70cf9 1870 }
42975b1f 1871 }
75a70cf9 1872
1873 return NULL;
42975b1f 1874 }
75a70cf9 1875
1876 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
42975b1f 1877 {
75a70cf9 1878 tree res;
c83059be 1879 enum br_predictor predictor2;
1880 op0 = expr_expected_value (op0, visited, predictor);
42975b1f 1881 if (!op0)
1882 return NULL;
c83059be 1883 op1 = expr_expected_value (op1, visited, &predictor2);
1884 if (predictor && *predictor < predictor2)
1885 *predictor = predictor2;
42975b1f 1886 if (!op1)
1887 return NULL;
75a70cf9 1888 res = fold_build2 (code, type, op0, op1);
42975b1f 1889 if (TREE_CONSTANT (res))
1890 return res;
1891 return NULL;
1892 }
75a70cf9 1893 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
42975b1f 1894 {
75a70cf9 1895 tree res;
c83059be 1896 op0 = expr_expected_value (op0, visited, predictor);
42975b1f 1897 if (!op0)
1898 return NULL;
75a70cf9 1899 res = fold_build1 (code, type, op0);
42975b1f 1900 if (TREE_CONSTANT (res))
1901 return res;
1902 return NULL;
1903 }
1904 return NULL;
1905}
75a70cf9 1906
48e1416a 1907/* Return the constant that EXPR will likely have at execution time, or NULL
75a70cf9 1908 if unknown. The function is used by the builtin_expect branch predictor,
1909 so the evidence must come from this construct and possible additional
48e1416a 1910 constant folding.
75a70cf9 1911
1912 We may want to implement a more involved value guess (such as value range
1913 propagation based prediction), but such tricks should go into a new implementation. */
1914
1915static tree
c83059be 1916expr_expected_value (tree expr, bitmap visited,
1917 enum br_predictor *predictor)
75a70cf9 1918{
1919 enum tree_code code;
1920 tree op0, op1;
1921
1922 if (TREE_CONSTANT (expr))
c83059be 1923 {
1924 if (predictor)
1925 *predictor = PRED_UNCONDITIONAL;
1926 return expr;
1927 }
75a70cf9 1928
1929 extract_ops_from_tree (expr, &code, &op0, &op1);
1930 return expr_expected_value_1 (TREE_TYPE (expr),
c83059be 1931 op0, code, op1, visited, predictor);
75a70cf9 1932}
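
As a concrete, hypothetical example of where the expected value comes from, the sketch below (not part of predict.c) shows user code whose condition this machinery traces back to the constant supplied to __builtin_expect:

/* Illustration only; not part of predict.c.  */
extern int handle_rare_case (void);

int
example (long x)
{
  /* expr_expected_value derives the expected value 1 for the condition
     from the second argument of __builtin_expect.  */
  if (__builtin_expect (x > 0, 1))
    return 0;
  return handle_rare_case ();   /* treated as the unlikely path.  */
}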
42975b1f 1933\f
4ee9c684 1934/* Predict using opcode of the last statement in basic block. */
1935static void
1936tree_predict_by_opcode (basic_block bb)
1937{
42acab1c 1938 gimple *stmt = last_stmt (bb);
4ee9c684 1939 edge then_edge;
75a70cf9 1940 tree op0, op1;
4ee9c684 1941 tree type;
42975b1f 1942 tree val;
75a70cf9 1943 enum tree_code cmp;
42975b1f 1944 bitmap visited;
cd665a06 1945 edge_iterator ei;
c83059be 1946 enum br_predictor predictor;
4ee9c684 1947
75a70cf9 1948 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
4ee9c684 1949 return;
cd665a06 1950 FOR_EACH_EDGE (then_edge, ei, bb->succs)
4ee9c684 1951 if (then_edge->flags & EDGE_TRUE_VALUE)
cd665a06 1952 break;
75a70cf9 1953 op0 = gimple_cond_lhs (stmt);
1954 op1 = gimple_cond_rhs (stmt);
1955 cmp = gimple_cond_code (stmt);
4ee9c684 1956 type = TREE_TYPE (op0);
27335ffd 1957 visited = BITMAP_ALLOC (NULL);
c83059be 1958 val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited,
1959 &predictor);
27335ffd 1960 BITMAP_FREE (visited);
c83059be 1961 if (val && TREE_CODE (val) == INTEGER_CST)
42975b1f 1962 {
c83059be 1963 if (predictor == PRED_BUILTIN_EXPECT)
1964 {
1965 int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY);
21853731 1966
c83059be 1967 gcc_assert (percent >= 0 && percent <= 100);
1968 if (integer_zerop (val))
1969 percent = 100 - percent;
1970 predict_edge (then_edge, PRED_BUILTIN_EXPECT, HITRATE (percent));
1971 }
1972 else
1973 predict_edge (then_edge, predictor,
1974 integer_zerop (val) ? NOT_TAKEN : TAKEN);
42975b1f 1975 }
4ee9c684 1976 /* Try "pointer heuristic."
1977 A comparison ptr == 0 is predicted as false.
1978 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1979 if (POINTER_TYPE_P (type))
1980 {
75a70cf9 1981 if (cmp == EQ_EXPR)
4ee9c684 1982 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
75a70cf9 1983 else if (cmp == NE_EXPR)
4ee9c684 1984 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
1985 }
1986 else
1987
1988 /* Try "opcode heuristic."
1989 EQ tests are usually false and NE tests are usually true. Also,
1990 most quantities are positive, so we can make the appropriate guesses
1991 about signed comparisons against zero. */
75a70cf9 1992 switch (cmp)
4ee9c684 1993 {
1994 case EQ_EXPR:
1995 case UNEQ_EXPR:
1996 /* Floating point comparisons appear to behave in a very
1997 unpredictable way because of the special role of = tests in
1998 FP code. */
1999 if (FLOAT_TYPE_P (type))
2000 ;
2001 /* Comparisons with 0 are often used for booleans and there is
2002 nothing useful to predict about them. */
75a70cf9 2003 else if (integer_zerop (op0) || integer_zerop (op1))
4ee9c684 2004 ;
2005 else
2006 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
2007 break;
2008
2009 case NE_EXPR:
318a728f 2010 case LTGT_EXPR:
4ee9c684 2011 /* Floating point comparisons appear to behave in a very
2012 unpredictable way because of the special role of = tests in
2013 FP code. */
2014 if (FLOAT_TYPE_P (type))
2015 ;
2016 /* Comparisons with 0 are often used for booleans and there is
2017 nothing useful to predict about them. */
2018 else if (integer_zerop (op0)
75a70cf9 2019 || integer_zerop (op1))
4ee9c684 2020 ;
2021 else
2022 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
2023 break;
2024
2025 case ORDERED_EXPR:
2026 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
2027 break;
2028
2029 case UNORDERED_EXPR:
2030 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
2031 break;
2032
2033 case LE_EXPR:
2034 case LT_EXPR:
75a70cf9 2035 if (integer_zerop (op1)
2036 || integer_onep (op1)
2037 || integer_all_onesp (op1)
2038 || real_zerop (op1)
2039 || real_onep (op1)
2040 || real_minus_onep (op1))
4ee9c684 2041 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
2042 break;
2043
2044 case GE_EXPR:
2045 case GT_EXPR:
75a70cf9 2046 if (integer_zerop (op1)
2047 || integer_onep (op1)
2048 || integer_all_onesp (op1)
2049 || real_zerop (op1)
2050 || real_onep (op1)
2051 || real_minus_onep (op1))
4ee9c684 2052 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
2053 break;
2054
2055 default:
2056 break;
2057 }
2058}
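
A short worked instance of the PRED_BUILTIN_EXPECT case above, expressed as a comment; the default value 90 of --param builtin-expect-probability is an assumption here, not taken from this file:

/* Illustration only: with --param builtin-expect-probability=90 (assumed
   default), "if (__builtin_expect (cond, 0))" gives
      percent = 100 - 90 = 10
   so the THEN edge is predicted with HITRATE (10), i.e. taken roughly 10%
   of the time, while "if (__builtin_expect (cond, 1))" keeps percent = 90.  */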
2059
f816ec49 2060/* Try to guess whether the return value VAL means an error code. */
75a70cf9 2061
f816ec49 2062static enum br_predictor
2063return_prediction (tree val, enum prediction *prediction)
2064{
2065 /* VOID. */
2066 if (!val)
2067 return PRED_NO_PREDICTION;
2068 /* Different heuristics for pointers and scalars. */
2069 if (POINTER_TYPE_P (TREE_TYPE (val)))
2070 {
2071 /* NULL is usually not returned. */
2072 if (integer_zerop (val))
2073 {
2074 *prediction = NOT_TAKEN;
2075 return PRED_NULL_RETURN;
2076 }
2077 }
2078 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
2079 {
2080 /* Negative return values are often used to indicate
2081 errors. */
2082 if (TREE_CODE (val) == INTEGER_CST
2083 && tree_int_cst_sgn (val) < 0)
2084 {
2085 *prediction = NOT_TAKEN;
2086 return PRED_NEGATIVE_RETURN;
2087 }
2088 /* Constant return values seem to be commonly taken.
2089 Zero/one often represent booleans, so exclude them from the
2090 heuristics. */
2091 if (TREE_CONSTANT (val)
2092 && (!integer_zerop (val) && !integer_onep (val)))
2093 {
2094 *prediction = TAKEN;
4a4e4487 2095 return PRED_CONST_RETURN;
f816ec49 2096 }
2097 }
2098 return PRED_NO_PREDICTION;
2099}
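
The following self-contained C sketch (hypothetical names, not part of predict.c) shows the three return shapes classified above: NULL returns, negative returns, and other constant returns.

/* Illustration only; not part of predict.c.  */
extern void *table_find (int key);

void *
lookup (int key)
{
  if (key < 0)
    return 0;               /* PRED_NULL_RETURN: paths here predicted unlikely.  */
  return table_find (key);
}

int
parse_digit (char c)
{
  if (c < '0' || c > '9')
    return -1;              /* PRED_NEGATIVE_RETURN: predicted unlikely.  */
  return 42;                /* PRED_CONST_RETURN: predicted likely.  */
}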
2100
2101/* Find the basic block with the return expression and look up the possible
2102 return value, trying to apply the RETURN_PREDICTION heuristics. */
2103static void
d704ea82 2104apply_return_prediction (void)
f816ec49 2105{
1a91d914 2106 greturn *return_stmt = NULL;
f816ec49 2107 tree return_val;
2108 edge e;
1a91d914 2109 gphi *phi;
f816ec49 2110 int phi_num_args, i;
2111 enum br_predictor pred;
2112 enum prediction direction;
cd665a06 2113 edge_iterator ei;
f816ec49 2114
34154e27 2115 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
f816ec49 2116 {
42acab1c 2117 gimple *last = last_stmt (e->src);
1a91d914 2118 if (last
2119 && gimple_code (last) == GIMPLE_RETURN)
2120 {
2121 return_stmt = as_a <greturn *> (last);
2122 break;
2123 }
f816ec49 2124 }
2125 if (!e)
2126 return;
75a70cf9 2127 return_val = gimple_return_retval (return_stmt);
f816ec49 2128 if (!return_val)
2129 return;
f816ec49 2130 if (TREE_CODE (return_val) != SSA_NAME
2131 || !SSA_NAME_DEF_STMT (return_val)
75a70cf9 2132 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
f816ec49 2133 return;
1a91d914 2134 phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
75a70cf9 2135 phi_num_args = gimple_phi_num_args (phi);
f816ec49 2136 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
2137
2138 /* Avoid the degenerate case where all return values from the function
2139 belong to the same category (i.e. they are all positive constants),
2140 so we can hardly say anything about them. */
2141 for (i = 1; i < phi_num_args; i++)
2142 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
2143 break;
2144 if (i != phi_num_args)
2145 for (i = 0; i < phi_num_args; i++)
2146 {
2147 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
2148 if (pred != PRED_NO_PREDICTION)
5707768a 2149 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
2150 direction);
f816ec49 2151 }
2152}
2153
2154/* Look for basic blocks that contain unlikely-to-happen events
2155 (such as noreturn calls) and mark all paths leading to execution
2156 of these basic blocks as unlikely. */
2157
2158static void
2159tree_bb_level_predictions (void)
2160{
2161 basic_block bb;
9f694a82 2162 bool has_return_edges = false;
2163 edge e;
2164 edge_iterator ei;
2165
34154e27 2166 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
9f694a82 2167 if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
2168 {
2169 has_return_edges = true;
2170 break;
2171 }
f816ec49 2172
d704ea82 2173 apply_return_prediction ();
f816ec49 2174
fc00614f 2175 FOR_EACH_BB_FN (bb, cfun)
f816ec49 2176 {
75a70cf9 2177 gimple_stmt_iterator gsi;
f816ec49 2178
1add270f 2179 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
f816ec49 2180 {
42acab1c 2181 gimple *stmt = gsi_stmt (gsi);
5de92639 2182 tree decl;
3ed4a4a1 2183
75a70cf9 2184 if (is_gimple_call (stmt))
f816ec49 2185 {
9f694a82 2186 if ((gimple_call_flags (stmt) & ECF_NORETURN)
2187 && has_return_edges)
75a70cf9 2188 predict_paths_leading_to (bb, PRED_NORETURN,
2189 NOT_TAKEN);
2190 decl = gimple_call_fndecl (stmt);
2191 if (decl
2192 && lookup_attribute ("cold",
2193 DECL_ATTRIBUTES (decl)))
2194 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
2195 NOT_TAKEN);
f816ec49 2196 }
75a70cf9 2197 else if (gimple_code (stmt) == GIMPLE_PREDICT)
2198 {
2199 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
2200 gimple_predict_outcome (stmt));
1add270f 2201 /* Keep GIMPLE_PREDICT around so early inlining will propagate
2202 hints to callers. */
75a70cf9 2203 }
f816ec49 2204 }
2205 }
f816ec49 2206}
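
A minimal, self-contained sketch (hypothetical functions, not part of predict.c) of the two call-based hints handled above: a noreturn callee and a callee carrying the cold attribute.

/* Illustration only; not part of predict.c.  */
extern void fatal (const char *msg) __attribute__ ((noreturn));
extern void trace_slow_path (void) __attribute__ ((cold));

int
checked_div (int a, int b)
{
  if (b == 0)
    fatal ("division by zero");   /* noreturn call: paths here predicted not taken.  */
  if (a < 0)
    trace_slow_path ();           /* cold callee: paths here predicted not taken.  */
  return a / b;
}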
2207
06ecf488 2208/* Callback for hash_map::traverse, asserts that the pointer map is
b3723726 2209 empty. */
2210
06ecf488 2211bool
2212assert_is_empty (const_basic_block const &, edge_prediction *const &value,
2213 void *)
b3723726 2214{
06ecf488 2215 gcc_assert (!value);
b3723726 2216 return false;
2217}
b3723726 2218
675d86b2 2219/* Predict branch probabilities and estimate profile for basic block BB. */
2220
2221static void
2222tree_estimate_probability_bb (basic_block bb)
2223{
2224 edge e;
2225 edge_iterator ei;
42acab1c 2226 gimple *last;
675d86b2 2227
2228 FOR_EACH_EDGE (e, ei, bb->succs)
2229 {
758a38ab 2230 /* Predict edges to user labels with attributes. */
34154e27 2231 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
758a38ab 2232 {
2233 gimple_stmt_iterator gi;
2234 for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
2235 {
1a91d914 2236 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gi));
758a38ab 2237 tree decl;
2238
1a91d914 2239 if (!label_stmt)
758a38ab 2240 break;
1a91d914 2241 decl = gimple_label_label (label_stmt);
758a38ab 2242 if (DECL_ARTIFICIAL (decl))
2243 continue;
2244
2245 /* Finally, we have a user-defined label. */
2246 if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)))
2247 predict_edge_def (e, PRED_COLD_LABEL, NOT_TAKEN);
2248 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (decl)))
2249 predict_edge_def (e, PRED_HOT_LABEL, TAKEN);
2250 }
2251 }
2252
675d86b2 2253 /* Predict early returns to be probable, as we've already taken
2254 care of error returns, and other cases are often used for
2255 fast paths through the function.
2256
2257 Since we've already removed the return statements, we are
2258 looking for a CFG like:
2259
2260 if (conditional)
2261 {
2262 ..
2263 goto return_block
2264 }
2265 some other blocks
2266 return_block:
2267 return_stmt. */
2268 if (e->dest != bb->next_bb
34154e27 2269 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
675d86b2 2270 && single_succ_p (e->dest)
34154e27 2271 && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
675d86b2 2272 && (last = last_stmt (e->dest)) != NULL
2273 && gimple_code (last) == GIMPLE_RETURN)
2274 {
2275 edge e1;
2276 edge_iterator ei1;
2277
2278 if (single_succ_p (bb))
2279 {
2280 FOR_EACH_EDGE (e1, ei1, bb->preds)
2281 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
2282 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
2283 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
2284 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2285 }
2286 else
2287 if (!predicted_by_p (e->src, PRED_NULL_RETURN)
2288 && !predicted_by_p (e->src, PRED_CONST_RETURN)
2289 && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
2290 predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2291 }
2292
2293 /* Look for a block we are guarding (i.e. we dominate it,
2294 but it doesn't postdominate us). */
34154e27 2295 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
675d86b2 2296 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
2297 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
2298 {
2299 gimple_stmt_iterator bi;
2300
2301 /* The call heuristic claims that a guarded function call
2302 is improbable. This is because such calls are often used
2303 to signal exceptional situations such as printing error
2304 messages. */
2305 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
2306 gsi_next (&bi))
2307 {
42acab1c 2308 gimple *stmt = gsi_stmt (bi);
675d86b2 2309 if (is_gimple_call (stmt)
2310 /* Constant and pure calls are hardly used to signal
2311 something exceptional. */
2312 && gimple_has_side_effects (stmt))
2313 {
2314 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
2315 break;
2316 }
2317 }
2318 }
2319 }
2320 tree_predict_by_opcode (bb);
2321}
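
As an illustration of the user-label heuristic above, GNU C allows hot/cold attributes on labels; the sketch below (hypothetical code, not part of predict.c, using the documented label-attribute syntax) marks an error label cold so that edges to it are predicted not taken.

/* Illustration only; not part of predict.c.  */
int
process (int x)
{
  if (x < 0)
    goto error;
  return 2 * x;

 error:
  __attribute__ ((cold));   /* PRED_COLD_LABEL: edges to this label predicted not taken.  */
  return -1;
}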
2322
2323/* Predict branch probabilities and estimate profile of the tree CFG.
2324 This function can be called from the loop optimizers to recompute
2325 the profile information. */
2326
2327void
4ee9c684 2328tree_estimate_probability (void)
2329{
2330 basic_block bb;
4ee9c684 2331
f816ec49 2332 add_noreturn_fake_exit_edges ();
4ee9c684 2333 connect_infinite_loops_to_exit ();
d8a0d6b8 2334 /* We use loop_niter_by_eval, which requires that the loops have
2335 preheaders. */
2336 create_preheaders (CP_SIMPLE_PREHEADERS);
4ee9c684 2337 calculate_dominance_info (CDI_POST_DOMINATORS);
2338
06ecf488 2339 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
f816ec49 2340 tree_bb_level_predictions ();
d500fef3 2341 record_loop_exits ();
675d86b2 2342
41f75a99 2343 if (number_of_loops (cfun) > 1)
7194de72 2344 predict_loops ();
4ee9c684 2345
fc00614f 2346 FOR_EACH_BB_FN (bb, cfun)
675d86b2 2347 tree_estimate_probability_bb (bb);
4ee9c684 2348
fc00614f 2349 FOR_EACH_BB_FN (bb, cfun)
3f5be5f4 2350 combine_predictions_for_bb (bb);
f81d9f78 2351
382ecba7 2352 if (flag_checking)
2353 bb_predictions->traverse<void *, assert_is_empty> (NULL);
2354
06ecf488 2355 delete bb_predictions;
b3723726 2356 bb_predictions = NULL;
2357
5327650f 2358 estimate_bb_frequencies (false);
4ee9c684 2359 free_dominance_info (CDI_POST_DOMINATORS);
41d24834 2360 remove_fake_exit_edges ();
675d86b2 2361}
89cfe6e5 2362\f
f0b5f617 2363/* Predict edges to successors of CUR whose sources are not postdominated by
d704ea82 2364 BB using PRED, and recurse to all postdominators. */
f816ec49 2365
2366static void
d704ea82 2367predict_paths_for_bb (basic_block cur, basic_block bb,
2368 enum br_predictor pred,
d3443011 2369 enum prediction taken,
2370 bitmap visited)
f816ec49 2371{
2372 edge e;
cd665a06 2373 edge_iterator ei;
d704ea82 2374 basic_block son;
f816ec49 2375
d704ea82 2376 /* We are looking for all edges forming edge cut induced by
2377 set of all blocks postdominated by BB. */
2378 FOR_EACH_EDGE (e, ei, cur->preds)
2379 if (e->src->index >= NUM_FIXED_BLOCKS
2380 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
f816ec49 2381 {
f1d5a92b 2382 edge e2;
2383 edge_iterator ei2;
2384 bool found = false;
2385
5707768a 2386 /* Ignore fake edges and eh, we predict them as not taken anyway. */
2387 if (e->flags & (EDGE_EH | EDGE_FAKE))
f1d5a92b 2388 continue;
d704ea82 2389 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
f1d5a92b 2390
d3443011 2391 /* See if there is an edge from e->src that is not abnormal
f1d5a92b 2392 and does not lead to BB. */
2393 FOR_EACH_EDGE (e2, ei2, e->src->succs)
2394 if (e2 != e
5707768a 2395 && !(e2->flags & (EDGE_EH | EDGE_FAKE))
f1d5a92b 2396 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
2397 {
2398 found = true;
2399 break;
2400 }
2401
2402 /* If there is a non-abnormal path leaving e->src, predict the edge
2403 using the predictor. Otherwise we need to look for paths
d3443011 2404 leading to e->src.
2405
2406 The second case may lead to an infinite loop when we are predicting
2407 regions that are only reachable by abnormal edges. We simply
2408 prevent visiting a given BB twice. */
f1d5a92b 2409 if (found)
2410 predict_edge_def (e, pred, taken);
6e3803fb 2411 else if (bitmap_set_bit (visited, e->src->index))
d3443011 2412 predict_paths_for_bb (e->src, e->src, pred, taken, visited);
f816ec49 2413 }
d704ea82 2414 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
2415 son;
2416 son = next_dom_son (CDI_POST_DOMINATORS, son))
d3443011 2417 predict_paths_for_bb (son, bb, pred, taken, visited);
d704ea82 2418}
f816ec49 2419
d704ea82 2420/* Set branch probabilities of all paths leading to BB according to the
2421 PREDictor PRED and the prediction TAKEN. */
f816ec49 2422
d704ea82 2423static void
2424predict_paths_leading_to (basic_block bb, enum br_predictor pred,
2425 enum prediction taken)
2426{
d3443011 2427 bitmap visited = BITMAP_ALLOC (NULL);
2428 predict_paths_for_bb (bb, bb, pred, taken, visited);
2429 BITMAP_FREE (visited);
f816ec49 2430}
5707768a 2431
2432/* Like predict_paths_leading_to but take edge instead of basic block. */
2433
2434static void
2435predict_paths_leading_to_edge (edge e, enum br_predictor pred,
2436 enum prediction taken)
2437{
2438 bool has_nonloop_edge = false;
2439 edge_iterator ei;
2440 edge e2;
2441
2442 basic_block bb = e->src;
2443 FOR_EACH_EDGE (e2, ei, bb->succs)
2444 if (e2->dest != e->src && e2->dest != e->dest
2445 && !(e->flags & (EDGE_EH | EDGE_FAKE))
2446 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
2447 {
2448 has_nonloop_edge = true;
2449 break;
2450 }
2451 if (!has_nonloop_edge)
d3443011 2452 {
2453 bitmap visited = BITMAP_ALLOC (NULL);
2454 predict_paths_for_bb (bb, bb, pred, taken, visited);
2455 BITMAP_FREE (visited);
2456 }
5707768a 2457 else
2458 predict_edge_def (e, pred, taken);
2459}
cd0fe062 2460\f
e725f898 2461/* This is used to carry information about basic blocks. It is
f81d9f78 2462 attached to the AUX field of the standard CFG block. */
2463
9908fe4d 2464struct block_info
f81d9f78 2465{
2466 /* Estimated frequency of execution of basic_block. */
e9d7220b 2467 sreal frequency;
f81d9f78 2468
2469 /* To keep queue of basic blocks to process. */
2470 basic_block next;
2471
4a82352a 2472 /* Number of predecessors we need to visit first. */
4ad72a03 2473 int npredecessors;
9908fe4d 2474};
f81d9f78 2475
2476/* Similar information for edges. */
9908fe4d 2477struct edge_prob_info
f81d9f78 2478{
77aa6362 2479 /* In case the edge is a loopback edge, the probability that the edge will
f81d9f78 2480 be reached provided the header is. The estimated number of iterations of
56ff4880 2481 the loop can then be computed as 1 / (1 - back_edge_prob). */
e9d7220b 2482 sreal back_edge_prob;
77aa6362 2483 /* True if the edge is a loopback edge in the natural loop. */
74cbb553 2484 unsigned int back_edge:1;
9908fe4d 2485};
f81d9f78 2486
9908fe4d 2487#define BLOCK_INFO(B) ((block_info *) (B)->aux)
886c1262 2488#undef EDGE_INFO
9908fe4d 2489#define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)
f81d9f78 2490
2491/* Helper function for estimate_bb_frequencies.
88e6f696 2492 Propagate the frequencies in blocks marked in
2493 TOVISIT, starting in HEAD. */
e6751e9a 2494
f81d9f78 2495static void
88e6f696 2496propagate_freq (basic_block head, bitmap tovisit)
f81d9f78 2497{
4c26117a 2498 basic_block bb;
2499 basic_block last;
9ea83aa5 2500 unsigned i;
f81d9f78 2501 edge e;
2502 basic_block nextbb;
b1bb9b10 2503 bitmap_iterator bi;
312866af 2504
4a82352a 2505 /* For each basic block we need to visit, count the number of its
312866af 2506 predecessors that we need to visit first. */
b1bb9b10 2507 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
312866af 2508 {
b1bb9b10 2509 edge_iterator ei;
2510 int count = 0;
2511
f5a6b05f 2512 bb = BASIC_BLOCK_FOR_FN (cfun, i);
e6751e9a 2513
b1bb9b10 2514 FOR_EACH_EDGE (e, ei, bb->preds)
2515 {
2516 bool visit = bitmap_bit_p (tovisit, e->src->index);
2517
2518 if (visit && !(e->flags & EDGE_DFS_BACK))
2519 count++;
2520 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
2521 fprintf (dump_file,
2522 "Irreducible region hit, ignoring edge to %i->%i\n",
2523 e->src->index, bb->index);
312866af 2524 }
9ea83aa5 2525 BLOCK_INFO (bb)->npredecessors = count;
555e8b05 2526 /* When the function never returns, we will never process the exit block. */
34154e27 2527 if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
555e8b05 2528 bb->count = bb->frequency = 0;
312866af 2529 }
f81d9f78 2530
8201d1f6 2531 BLOCK_INFO (head)->frequency = 1;
4c26117a 2532 last = head;
2533 for (bb = head; bb; bb = nextbb)
f81d9f78 2534 {
cd665a06 2535 edge_iterator ei;
8201d1f6 2536 sreal cyclic_probability = 0;
2537 sreal frequency = 0;
f81d9f78 2538
2539 nextbb = BLOCK_INFO (bb)->next;
2540 BLOCK_INFO (bb)->next = NULL;
2541
2542 /* Compute frequency of basic block. */
2543 if (bb != head)
2544 {
382ecba7 2545 if (flag_checking)
2546 FOR_EACH_EDGE (e, ei, bb->preds)
2547 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
2548 || (e->flags & EDGE_DFS_BACK));
f81d9f78 2549
cd665a06 2550 FOR_EACH_EDGE (e, ei, bb->preds)
f81d9f78 2551 if (EDGE_INFO (e)->back_edge)
56ff4880 2552 {
23a92fc7 2553 cyclic_probability += EDGE_INFO (e)->back_edge_prob;
56ff4880 2554 }
312866af 2555 else if (!(e->flags & EDGE_DFS_BACK))
56ff4880 2556 {
56ff4880 2557 /* frequency += (e->probability
2558 * BLOCK_INFO (e->src)->frequency /
2559 REG_BR_PROB_BASE); */
2560
8201d1f6 2561 sreal tmp = e->probability;
23a92fc7 2562 tmp *= BLOCK_INFO (e->src)->frequency;
2563 tmp *= real_inv_br_prob_base;
2564 frequency += tmp;
56ff4880 2565 }
2566
8201d1f6 2567 if (cyclic_probability == 0)
e9d7220b 2568 {
23a92fc7 2569 BLOCK_INFO (bb)->frequency = frequency;
e9d7220b 2570 }
2e3c56e8 2571 else
2572 {
23a92fc7 2573 if (cyclic_probability > real_almost_one)
2574 cyclic_probability = real_almost_one;
f81d9f78 2575
d598ad0d 2576 /* BLOCK_INFO (bb)->frequency = frequency
e9d7220b 2577 / (1 - cyclic_probability) */
f81d9f78 2578
8201d1f6 2579 cyclic_probability = sreal (1) - cyclic_probability;
23a92fc7 2580 BLOCK_INFO (bb)->frequency = frequency / cyclic_probability;
2e3c56e8 2581 }
f81d9f78 2582 }
2583
b1bb9b10 2584 bitmap_clear_bit (tovisit, bb->index);
f81d9f78 2585
c6356c17 2586 e = find_edge (bb, head);
2587 if (e)
2588 {
c6356c17 2589 /* EDGE_INFO (e)->back_edge_prob
2590 = ((e->probability * BLOCK_INFO (bb)->frequency)
2591 / REG_BR_PROB_BASE); */
48e1416a 2592
8201d1f6 2593 sreal tmp = e->probability;
23a92fc7 2594 tmp *= BLOCK_INFO (bb)->frequency;
2595 EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base;
c6356c17 2596 }
f81d9f78 2597
e725f898 2598 /* Propagate to successor blocks. */
cd665a06 2599 FOR_EACH_EDGE (e, ei, bb->succs)
312866af 2600 if (!(e->flags & EDGE_DFS_BACK)
4ad72a03 2601 && BLOCK_INFO (e->dest)->npredecessors)
f81d9f78 2602 {
4ad72a03 2603 BLOCK_INFO (e->dest)->npredecessors--;
2604 if (!BLOCK_INFO (e->dest)->npredecessors)
312866af 2605 {
2606 if (!nextbb)
2607 nextbb = e->dest;
2608 else
2609 BLOCK_INFO (last)->next = e->dest;
48e1416a 2610
312866af 2611 last = e->dest;
2612 }
cd665a06 2613 }
f81d9f78 2614 }
2615}
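
A worked instance of the formula quoted in the comments above (the symbols are editor notation for a single back edge with scaled probability p_back and incoming frequency f_in):

\[ f_{\text{header}} \;=\; \frac{f_{\text{in}}}{1 - p_{\text{back}}}, \qquad p_{\text{back}} = 0.9 \;\Rightarrow\; f_{\text{header}} = 10\, f_{\text{in}} \]

i.e. a loop whose back edge is taken with probability 0.9 is estimated to execute its header about 10 times per entry, matching the 1 / (1 - back_edge_prob) note on back_edge_prob.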
2616
5327650f 2617/* Estimate frequencies in loops at the same nest level. */
e6751e9a 2618
f81d9f78 2619static void
88e6f696 2620estimate_loops_at_level (struct loop *first_loop)
f81d9f78 2621{
7fb12188 2622 struct loop *loop;
f81d9f78 2623
2624 for (loop = first_loop; loop; loop = loop->next)
2625 {
f81d9f78 2626 edge e;
7fb12188 2627 basic_block *bbs;
862be747 2628 unsigned i;
88e6f696 2629 bitmap tovisit = BITMAP_ALLOC (NULL);
f81d9f78 2630
88e6f696 2631 estimate_loops_at_level (loop->inner);
d598ad0d 2632
88e6f696 2633 /* Find current loop back edge and mark it. */
2634 e = loop_latch_edge (loop);
2635 EDGE_INFO (e)->back_edge = 1;
7fb12188 2636
2637 bbs = get_loop_body (loop);
2638 for (i = 0; i < loop->num_nodes; i++)
b1bb9b10 2639 bitmap_set_bit (tovisit, bbs[i]->index);
7fb12188 2640 free (bbs);
88e6f696 2641 propagate_freq (loop->header, tovisit);
2642 BITMAP_FREE (tovisit);
f81d9f78 2643 }
2644}
2645
fa7637bd 2646/* Propagate frequencies through the structure of loops. */
88e6f696 2647
2648static void
7194de72 2649estimate_loops (void)
88e6f696 2650{
2651 bitmap tovisit = BITMAP_ALLOC (NULL);
2652 basic_block bb;
2653
2654 /* Start by estimating the frequencies in the loops. */
41f75a99 2655 if (number_of_loops (cfun) > 1)
7194de72 2656 estimate_loops_at_level (current_loops->tree_root->inner);
88e6f696 2657
2658 /* Now propagate the frequencies through all the blocks. */
ed7d889a 2659 FOR_ALL_BB_FN (bb, cfun)
88e6f696 2660 {
2661 bitmap_set_bit (tovisit, bb->index);
2662 }
34154e27 2663 propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
88e6f696 2664 BITMAP_FREE (tovisit);
2665}
2666
38a65d4e 2667/* Drop the profile for NODE to guessed, and update its frequency based on
901d3ddc 2668 whether it is expected to be hot given the CALL_COUNT. */
38a65d4e 2669
2670static void
901d3ddc 2671drop_profile (struct cgraph_node *node, gcov_type call_count)
38a65d4e 2672{
2673 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
901d3ddc 2674 /* In the case where this was called by another function with a
2675 dropped profile, call_count will be 0. Since there are no
2676 non-zero call counts to this function, we don't know for sure
2677 whether it is hot, and therefore it will be marked normal below. */
2678 bool hot = maybe_hot_count_p (NULL, call_count);
38a65d4e 2679
2680 if (dump_file)
2681 fprintf (dump_file,
2682 "Dropping 0 profile for %s/%i. %s based on calls.\n",
f1c8b4d7 2683 node->name (), node->order,
38a65d4e 2684 hot ? "Function is hot" : "Function is normal");
2685 /* We only expect to miss profiles for functions that are reached
2686 via non-zero call edges in cases where the function may have
2687 been linked from another module or library (COMDATs and extern
901d3ddc 2688 templates). See the comments below for handle_missing_profiles.
2689 Also, only warn in cases where the missing counts exceed the
2690 number of training runs. In certain cases with an execv followed
2691 by a no-return call the profile for the no-return call is not
2692 dumped and there can be a mismatch. */
2693 if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
2694 && call_count > profile_info->runs)
38a65d4e 2695 {
2696 if (flag_profile_correction)
2697 {
2698 if (dump_file)
2699 fprintf (dump_file,
2700 "Missing counts for called function %s/%i\n",
f1c8b4d7 2701 node->name (), node->order);
38a65d4e 2702 }
2703 else
901d3ddc 2704 warning (0, "Missing counts for called function %s/%i",
f1c8b4d7 2705 node->name (), node->order);
38a65d4e 2706 }
2707
3bedbae3 2708 profile_status_for_fn (fn)
38a65d4e 2709 = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
2710 node->frequency
2711 = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
2712}
2713
2714/* In the case of COMDAT routines, multiple object files will contain the same
2715 function and the linker will select one for the binary. In that case
2716 all the other copies from the profile instrument binary will be missing
2717 profile counts. Look for cases where this happened, due to non-zero
2718 call counts going to 0-count functions, and drop the profile to guessed
2719 so that we can use the estimated probabilities and avoid optimizing only
2720 for size.
2721
2722 The other case where the profile may be missing is when the routine
2723 is not going to be emitted to the object file, e.g. for "extern template"
2724 class methods. Those will be marked DECL_EXTERNAL. Emit a warning in
2725 all other cases of non-zero calls to 0-count functions. */
2726
2727void
2728handle_missing_profiles (void)
2729{
2730 struct cgraph_node *node;
2731 int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
2732 vec<struct cgraph_node *> worklist;
2733 worklist.create (64);
2734
2735 /* See if a 0-count function has non-0-count callers. In this case we
2736 lost some profile. Drop its function profile to PROFILE_GUESSED. */
2737 FOR_EACH_DEFINED_FUNCTION (node)
2738 {
2739 struct cgraph_edge *e;
2740 gcov_type call_count = 0;
af48f0b1 2741 gcov_type max_tp_first_run = 0;
38a65d4e 2742 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2743
2744 if (node->count)
2745 continue;
2746 for (e = node->callers; e; e = e->next_caller)
af48f0b1 2747 {
38a65d4e 2748 call_count += e->count;
af48f0b1 2749
2750 if (e->caller->tp_first_run > max_tp_first_run)
2751 max_tp_first_run = e->caller->tp_first_run;
2752 }
2753
2754 /* If the time profile is missing, assign the maximum that comes from the
2755 caller functions. */
2756 if (!node->tp_first_run && max_tp_first_run)
2757 node->tp_first_run = max_tp_first_run + 1;
2758
38a65d4e 2759 if (call_count
2760 && fn && fn->cfg
2761 && (call_count * unlikely_count_fraction >= profile_info->runs))
2762 {
901d3ddc 2763 drop_profile (node, call_count);
38a65d4e 2764 worklist.safe_push (node);
2765 }
2766 }
2767
2768 /* Propagate the profile dropping to other 0-count COMDATs that are
2769 potentially called by COMDATs we already dropped the profile on. */
2770 while (worklist.length () > 0)
2771 {
2772 struct cgraph_edge *e;
2773
2774 node = worklist.pop ();
2775 for (e = node->callees; e; e = e->next_callee)
2776 {
2777 struct cgraph_node *callee = e->callee;
2778 struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);
2779
2780 if (callee->count > 0)
2781 continue;
2782 if (DECL_COMDAT (callee->decl) && fn && fn->cfg
3bedbae3 2783 && profile_status_for_fn (fn) == PROFILE_READ)
38a65d4e 2784 {
901d3ddc 2785 drop_profile (node, 0);
38a65d4e 2786 worklist.safe_push (callee);
2787 }
2788 }
2789 }
2790 worklist.release ();
2791}
2792
3f18719c 2793/* Convert counts measured by profile driven feedback to frequencies.
2794 Return nonzero iff there was any nonzero execution count. */
e6751e9a 2795
ffedd254 2796int
d598ad0d 2797counts_to_freqs (void)
f81d9f78 2798{
3f18719c 2799 gcov_type count_max, true_count_max = 0;
4c26117a 2800 basic_block bb;
b3d6de89 2801
38a65d4e 2802 /* Don't overwrite the estimated frequencies when the profile for
2803 the function is missing. We may drop this function to PROFILE_GUESSED
2804 later in drop_profile (). */
94bed7c3 2805 if (!flag_auto_profile && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
38a65d4e 2806 return 0;
2807
34154e27 2808 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3f18719c 2809 true_count_max = MAX (bb->count, true_count_max);
f81d9f78 2810
3f18719c 2811 count_max = MAX (true_count_max, 1);
34154e27 2812 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
4c26117a 2813 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
167b550b 2814
3f18719c 2815 return true_count_max;
f81d9f78 2816}
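
A short worked instance of the scaling above; BB_FREQ_MAX is assumed to be 10000 here, and the counts are made-up values:

\[ \text{frequency} \;=\; \frac{\text{count} \cdot \text{BB\_FREQ\_MAX} + \text{count\_max}/2}{\text{count\_max}}, \qquad \frac{250 \cdot 10000 + 500}{1000} = 2500 \]

so with count_max = 1000, a block executed 250 times gets frequency 2500, and the hottest block gets frequency BB_FREQ_MAX.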
2817
e6751e9a 2818/* Return true if the function is likely to be expensive, so there is no point
2819 in optimizing the prologue or epilogue or doing inlining at the expense
41a6f238 2820 of code size growth. THRESHOLD is the limit on the number of instructions
e6751e9a 2821 the function can execute on average and still be considered not expensive. */
2822
f4c0c1a2 2823bool
d598ad0d 2824expensive_function_p (int threshold)
f4c0c1a2 2825{
2826 unsigned int sum = 0;
4c26117a 2827 basic_block bb;
27d0c333 2828 unsigned int limit;
f4c0c1a2 2829
2830 /* We cannot compute accurately for large thresholds due to scaled
2831 frequencies. */
876760f6 2832 gcc_assert (threshold <= BB_FREQ_MAX);
f4c0c1a2 2833
4a82352a 2834 /* Frequencies are out of range. This either means that the function
f4c0c1a2 2835 contains an internal loop executing more than BB_FREQ_MAX times, or that
2836 profile feedback is available and the function has not been executed at all. */
34154e27 2837 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0)
f4c0c1a2 2838 return true;
195731ad 2839
f4c0c1a2 2840 /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
34154e27 2841 limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
fc00614f 2842 FOR_EACH_BB_FN (bb, cfun)
f4c0c1a2 2843 {
ee5f6585 2844 rtx_insn *insn;
f4c0c1a2 2845
91f71fa3 2846 FOR_BB_INSNS (bb, insn)
e6751e9a 2847 if (active_insn_p (insn))
2848 {
2849 sum += bb->frequency;
2850 if (sum > limit)
2851 return true;
f4c0c1a2 2852 }
2853 }
e6751e9a 2854
f4c0c1a2 2855 return false;
2856}
2857
5327650f 2858/* Estimate and propagate basic block frequencies using the given branch
2859 probabilities. If FORCE is true, the frequencies are used to estimate
2860 the counts even when there are already non-zero profile counts. */
e6751e9a 2861
4ae20857 2862void
5327650f 2863estimate_bb_frequencies (bool force)
f81d9f78 2864{
4c26117a 2865 basic_block bb;
e9d7220b 2866 sreal freq_max;
56ff4880 2867
f26d8580 2868 if (force || profile_status_for_fn (cfun) != PROFILE_READ || !counts_to_freqs ())
429fa7fa 2869 {
31e4010e 2870 static int real_values_initialized = 0;
2871
2872 if (!real_values_initialized)
2873 {
fc22704f 2874 real_values_initialized = 1;
8201d1f6 2875 real_br_prob_base = REG_BR_PROB_BASE;
2876 real_bb_freq_max = BB_FREQ_MAX;
23a92fc7 2877 real_one_half = sreal (1, -1);
8201d1f6 2878 real_inv_br_prob_base = sreal (1) / real_br_prob_base;
2879 real_almost_one = sreal (1) - real_inv_br_prob_base;
31e4010e 2880 }
f81d9f78 2881
429fa7fa 2882 mark_dfs_back_edges ();
429fa7fa 2883
34154e27 2884 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
2885 REG_BR_PROB_BASE;
429fa7fa 2886
2887 /* Set up block info for each basic block. */
9908fe4d 2888 alloc_aux_for_blocks (sizeof (block_info));
2889 alloc_aux_for_edges (sizeof (edge_prob_info));
34154e27 2890 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
f81d9f78 2891 {
f81d9f78 2892 edge e;
cd665a06 2893 edge_iterator ei;
429fa7fa 2894
cd665a06 2895 FOR_EACH_EDGE (e, ei, bb->succs)
f81d9f78 2896 {
8201d1f6 2897 EDGE_INFO (e)->back_edge_prob = e->probability;
23a92fc7 2898 EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base;
f81d9f78 2899 }
f81d9f78 2900 }
e6751e9a 2901
5327650f 2902 /* First compute frequencies locally for each loop from innermost
2903 to outermost to examine frequencies for back edges. */
7194de72 2904 estimate_loops ();
f81d9f78 2905
8201d1f6 2906 freq_max = 0;
fc00614f 2907 FOR_EACH_BB_FN (bb, cfun)
23a92fc7 2908 if (freq_max < BLOCK_INFO (bb)->frequency)
2909 freq_max = BLOCK_INFO (bb)->frequency;
2e3c56e8 2910
23a92fc7 2911 freq_max = real_bb_freq_max / freq_max;
34154e27 2912 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
56ff4880 2913 {
23a92fc7 2914 sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half;
2915 bb->frequency = tmp.to_int ();
429fa7fa 2916 }
e6751e9a 2917
429fa7fa 2918 free_aux_for_blocks ();
2919 free_aux_for_edges ();
2920 }
2921 compute_function_frequency ();
429fa7fa 2922}
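
A worked instance of the final scaling above (editor notation; BB_FREQ_MAX assumed 10000, the sample values are made up): freq_max is first set to the largest propagated sreal frequency and then inverted against BB_FREQ_MAX, so each block is scaled into the 0..BB_FREQ_MAX range with rounding.

\[ \text{bb->frequency} \;=\; \Big\lfloor f_{bb} \cdot \tfrac{\text{BB\_FREQ\_MAX}}{f_{\max}} + \tfrac{1}{2} \Big\rfloor, \qquad f_{\max} = 40,\; f_{bb} = 2.5 \;\Rightarrow\; 2.5 \cdot 250 + 0.5 = 625.5 \rightarrow 625 \]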
f81d9f78 2923
429fa7fa 2924/* Decide whether function is hot, cold or unlikely executed. */
63aab97d 2925void
d598ad0d 2926compute_function_frequency (void)
429fa7fa 2927{
4c26117a 2928 basic_block bb;
415d1b9a 2929 struct cgraph_node *node = cgraph_node::get (current_function_decl);
e27f29dd 2930
0f9fb931 2931 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2932 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
2933 node->only_called_at_startup = true;
2934 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
2935 node->only_called_at_exit = true;
4c26117a 2936
f26d8580 2937 if (profile_status_for_fn (cfun) != PROFILE_READ)
5de92639 2938 {
125b6d78 2939 int flags = flags_from_decl_or_type (current_function_decl);
5de92639 2940 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
2941 != NULL)
125b6d78 2942 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
5de92639 2943 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
2944 != NULL)
125b6d78 2945 node->frequency = NODE_FREQUENCY_HOT;
2946 else if (flags & ECF_NORETURN)
2947 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2948 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
2949 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2950 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2951 || DECL_STATIC_DESTRUCTOR (current_function_decl))
2952 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
5de92639 2953 return;
2954 }
e27f29dd 2955
2956 /* Only the first time try to drop the function into unlikely executed.
2957 After inlining, the roundoff errors may confuse us.
2958 The ipa-profile pass will drop functions only called from unlikely
2959 functions to unlikely, and that is most of what we care about. */
2960 if (!cfun->after_inlining)
2961 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
fc00614f 2962 FOR_EACH_BB_FN (bb, cfun)
f81d9f78 2963 {
8d672d12 2964 if (maybe_hot_bb_p (cfun, bb))
429fa7fa 2965 {
125b6d78 2966 node->frequency = NODE_FREQUENCY_HOT;
429fa7fa 2967 return;
2968 }
8d672d12 2969 if (!probably_never_executed_bb_p (cfun, bb))
125b6d78 2970 node->frequency = NODE_FREQUENCY_NORMAL;
f81d9f78 2971 }
429fa7fa 2972}
f81d9f78 2973
4a1849e3 2974/* Build PREDICT_EXPR. */
2975tree
2976build_predict_expr (enum br_predictor predictor, enum prediction taken)
2977{
08f62b1b 2978 tree t = build1 (PREDICT_EXPR, void_type_node,
b3d480fb 2979 build_int_cst (integer_type_node, predictor));
b9c74b4d 2980 SET_PREDICT_EXPR_OUTCOME (t, taken);
4a1849e3 2981 return t;
2982}
2983
2984const char *
2985predictor_name (enum br_predictor predictor)
2986{
2987 return predictor_info[predictor].name;
2988}
2989
65b0537f 2990/* Predict branch probabilities and estimate profile of the tree CFG. */
2991
cbe8bda8 2992namespace {
2993
2994const pass_data pass_data_profile =
2995{
2996 GIMPLE_PASS, /* type */
2997 "profile_estimate", /* name */
2998 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 2999 TV_BRANCH_PROB, /* tv_id */
3000 PROP_cfg, /* properties_required */
3001 0, /* properties_provided */
3002 0, /* properties_destroyed */
3003 0, /* todo_flags_start */
8b88439e 3004 0, /* todo_flags_finish */
4ee9c684 3005};
1add270f 3006
cbe8bda8 3007class pass_profile : public gimple_opt_pass
3008{
3009public:
9af5ce0c 3010 pass_profile (gcc::context *ctxt)
3011 : gimple_opt_pass (pass_data_profile, ctxt)
cbe8bda8 3012 {}
3013
3014 /* opt_pass methods: */
31315c24 3015 virtual bool gate (function *) { return flag_guess_branch_prob; }
65b0537f 3016 virtual unsigned int execute (function *);
cbe8bda8 3017
3018}; // class pass_profile
3019
65b0537f 3020unsigned int
3021pass_profile::execute (function *fun)
3022{
3023 unsigned nb_loops;
3024
3a9f48e7 3025 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
3026 return 0;
3027
65b0537f 3028 loop_optimizer_init (LOOPS_NORMAL);
3029 if (dump_file && (dump_flags & TDF_DETAILS))
3030 flow_loops_dump (dump_file, NULL, 0);
3031
3032 mark_irreducible_loops ();
3033
3034 nb_loops = number_of_loops (fun);
3035 if (nb_loops > 1)
3036 scev_initialize ();
3037
3038 tree_estimate_probability ();
3039
3040 if (nb_loops > 1)
3041 scev_finalize ();
3042
3043 loop_optimizer_finalize ();
3044 if (dump_file && (dump_flags & TDF_DETAILS))
3045 gimple_dump_cfg (dump_file, dump_flags);
3046 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
3047 profile_status_for_fn (fun) = PROFILE_GUESSED;
3048 return 0;
3049}
3050
cbe8bda8 3051} // anon namespace
3052
3053gimple_opt_pass *
3054make_pass_profile (gcc::context *ctxt)
3055{
3056 return new pass_profile (ctxt);
3057}
3058
3059namespace {
3060
3061const pass_data pass_data_strip_predict_hints =
3062{
3063 GIMPLE_PASS, /* type */
3064 "*strip_predict_hints", /* name */
3065 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 3066 TV_BRANCH_PROB, /* tv_id */
3067 PROP_cfg, /* properties_required */
3068 0, /* properties_provided */
3069 0, /* properties_destroyed */
3070 0, /* todo_flags_start */
8b88439e 3071 0, /* todo_flags_finish */
1add270f 3072};
555e8b05 3073
cbe8bda8 3074class pass_strip_predict_hints : public gimple_opt_pass
3075{
3076public:
9af5ce0c 3077 pass_strip_predict_hints (gcc::context *ctxt)
3078 : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
cbe8bda8 3079 {}
3080
3081 /* opt_pass methods: */
ae84f584 3082 opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
65b0537f 3083 virtual unsigned int execute (function *);
cbe8bda8 3084
3085}; // class pass_strip_predict_hints
3086
65b0537f 3087/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
3088 we no longer need. */
3089unsigned int
3090pass_strip_predict_hints::execute (function *fun)
3091{
3092 basic_block bb;
42acab1c 3093 gimple *ass_stmt;
65b0537f 3094 tree var;
3095
3096 FOR_EACH_BB_FN (bb, fun)
3097 {
3098 gimple_stmt_iterator bi;
3099 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
3100 {
42acab1c 3101 gimple *stmt = gsi_stmt (bi);
65b0537f 3102
3103 if (gimple_code (stmt) == GIMPLE_PREDICT)
3104 {
3105 gsi_remove (&bi, true);
3106 continue;
3107 }
3108 else if (is_gimple_call (stmt))
3109 {
3110 tree fndecl = gimple_call_fndecl (stmt);
3111
3112 if ((fndecl
3113 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3114 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
3115 && gimple_call_num_args (stmt) == 2)
3116 || (gimple_call_internal_p (stmt)
3117 && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT))
3118 {
3119 var = gimple_call_lhs (stmt);
3120 if (var)
3121 {
3122 ass_stmt
3123 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
3124 gsi_replace (&bi, ass_stmt, true);
3125 }
3126 else
3127 {
3128 gsi_remove (&bi, true);
3129 continue;
3130 }
3131 }
3132 }
3133 gsi_next (&bi);
3134 }
3135 }
3136 return 0;
3137}
3138
cbe8bda8 3139} // anon namespace
3140
3141gimple_opt_pass *
3142make_pass_strip_predict_hints (gcc::context *ctxt)
3143{
3144 return new pass_strip_predict_hints (ctxt);
3145}
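
A minimal sketch (hypothetical code, not part of predict.c) of what the pass above rewrites once the probabilities have been recorded: the __builtin_expect call is replaced by a plain copy of its first argument, so later passes no longer see the hint.

/* Illustration only; not part of predict.c.  */
long
example (long x)
{
  long t = __builtin_expect (x != 0, 1);  /* rewritten to a copy of the first argument:  t = (x != 0);  */
  if (t)
    return x;
  return 0;
}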
3146
555e8b05 3147/* Rebuild function frequencies. Passes are in general expected to
3148 maintain the profile by hand; however, in some cases this is not possible:
3149 for example, when inlining several functions with loops, frequencies might
3150 run out of scale and thus need to be recomputed. */
3151
3152void
3153rebuild_frequencies (void)
3154{
4b366dd3 3155 timevar_push (TV_REBUILD_FREQUENCIES);
5327650f 3156
3157 /* When the max bb count in the function is small, there is a higher
3158 chance that there were truncation errors in the integer scaling
3159 of counts by inlining and other optimizations. This could lead
3160 to incorrect classification of code as being cold when it isn't.
3161 In that case, force the estimation of bb counts/frequencies from the
3162 branch probabilities, rather than computing frequencies from counts,
3163 which may also lead to frequencies incorrectly reduced to 0. There
3164 is less precision in the probabilities, so we only do this for small
3165 max counts. */
3166 gcov_type count_max = 0;
3167 basic_block bb;
34154e27 3168 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
5327650f 3169 count_max = MAX (bb->count, count_max);
3170
f26d8580 3171 if (profile_status_for_fn (cfun) == PROFILE_GUESSED
94bed7c3 3172 || (!flag_auto_profile && profile_status_for_fn (cfun) == PROFILE_READ
3173 && count_max < REG_BR_PROB_BASE/10))
555e8b05 3174 {
3175 loop_optimizer_init (0);
3176 add_noreturn_fake_exit_edges ();
3177 mark_irreducible_loops ();
3178 connect_infinite_loops_to_exit ();
5327650f 3179 estimate_bb_frequencies (true);
555e8b05 3180 remove_fake_exit_edges ();
3181 loop_optimizer_finalize ();
3182 }
f26d8580 3183 else if (profile_status_for_fn (cfun) == PROFILE_READ)
555e8b05 3184 counts_to_freqs ();
3185 else
3186 gcc_unreachable ();
4b366dd3 3187 timevar_pop (TV_REBUILD_FREQUENCIES);
555e8b05 3188}