/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "coverage.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "flags.h"
#include "profile.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "sreal.h"
#include "params.h"
#include "cfgloop.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"

/* Real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_almost_one, real_br_prob_base,
	     real_inv_br_prob_base, real_one_half, real_bb_freq_max;

static void combine_predictions_for_insn (rtx_insn *, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor,
				      enum prediction);
static void predict_paths_leading_to_edge (edge, enum br_predictor,
					   enum prediction);
static bool can_predict_insn_p (const rtx_insn *);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;	/* Name used in the debugging dumps.  */
  const int hitrate;		/* Expected hitrate used by
				   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Recompute hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

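/* For illustration: with the usual REG_BR_PROB_BASE of 10000,
   HITRATE (75) evaluates to ((75 * 10000 + 50) / 100) = 7500, i.e. a
   75% hitrate in the fixed-point probability representation used
   throughout this file.  */
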
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
  struct cgraph_node *node = cgraph_node::get (fun->decl);
  if (!profile_info
      || !opt_for_fn (fun->decl, flag_branch_probabilities))
    {
      if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
	return false;
      if (node->frequency == NODE_FREQUENCY_HOT)
	return true;
    }
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    return true;
  if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3))
    return false;
  if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0)
    return false;
  if (freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency
	      / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
    return false;
  return true;
}

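/* A rough intuition for the final test above (assuming the default
   value of the hot-bb-frequency-fraction parameter, 1000): a block
   counts as hot only if it executes at least 1/1000th as often as the
   function's entry block.  */
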
static gcov_type min_count = -1;

/* Determine the threshold for hot BB counts.  */

gcov_type
get_hot_bb_threshold ()
{
  gcov_working_set_t *ws;
  if (min_count == -1)
    {
      ws = find_working_set (PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE));
      gcc_assert (ws);
      min_count = ws->min_counter;
    }
  return min_count;
}

/* Set the threshold for hot BB counts.  */

void
set_hot_bb_threshold (gcov_type min)
{
  min_count = min;
}

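/* The threshold is derived from the profile's working set: with the
   default hot-bb-count-ws-permille of 999, min_count becomes the
   smallest counter among the blocks that together account for 99.9% of
   all profiled execution, so any count at or above it is "hot" (a
   sketch of the intent; the permille parameter is tunable).  */
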
/* Return TRUE if profile count COUNT is considered to be hot.  */

bool
maybe_hot_count_p (struct function *fun, gcov_type count)
{
  if (fun && profile_status_for_fn (fun) != PROFILE_READ)
    return true;
  /* Code executed at most once is not hot.  */
  if (profile_info->runs >= count)
    return false;
  return (count >= get_hot_bb_threshold ());
}

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (struct function *fun, const_basic_block bb)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (fun) == PROFILE_READ)
    return maybe_hot_count_p (fun, bb->count);
  return maybe_hot_frequency_p (fun, bb->frequency);
}

188
189/* Return true in case BB can be CPU intensive and should be optimized
190 for maximal performance. */
191
192bool
193maybe_hot_edge_p (edge e)
194{
0a6a6ac9 195 if (profile_status_for_fn (cfun) == PROFILE_READ)
2eb712b4
MJ
196 return maybe_hot_count_p (cfun, e->count);
197 return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
194734e9
JH
198}
199
/* Return true if profile COUNT and FREQUENCY, or function FUN's static
   node frequency, reflect that the code is never executed.  */

static bool
probably_never_executed (struct function *fun,
			 gcov_type count, int frequency)
{
  gcc_checking_assert (fun);
  if (profile_status_for_fn (fun) == PROFILE_READ)
    {
      int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
      if (count * unlikely_count_fraction >= profile_info->runs)
	return false;
      if (!frequency)
	return true;
      if (!ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency)
	return false;
      if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
	{
	  gcov_type computed_count;
	  /* Check for possibility of overflow, in which case entry bb count
	     is large enough to do the division first without losing much
	     precision.  */
	  if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count < REG_BR_PROB_BASE *
	      REG_BR_PROB_BASE)
	    {
	      gcov_type scaled_count
		= frequency * ENTRY_BLOCK_PTR_FOR_FN (fun)->count *
		  unlikely_count_fraction;
	      computed_count = RDIV (scaled_count,
				     ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
	    }
	  else
	    {
	      computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (fun)->count,
				     ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
	      computed_count *= frequency * unlikely_count_fraction;
	    }
	  if (computed_count >= profile_info->runs)
	    return false;
	}
      return true;
    }
  if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
      && (cgraph_node::get (fun->decl)->frequency
	  == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}

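/* In other words (assuming the default unlikely-bb-count-fraction of
   20): a block whose profiled count reaches at least runs/20, i.e. one
   execution per 20 training runs, is not treated as never executed.  */
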
/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
{
  return probably_never_executed (fun, bb->count, bb->frequency);
}

/* Return true in case edge E is probably never executed.  */

bool
probably_never_executed_edge_p (struct function *fun, edge e)
{
  return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
}

/* Return true when current function should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (!fun || !fun->decl)
    return optimize_size;
  cgraph_node *n = cgraph_node::get (fun->decl);
  return n && n->optimize_for_size_p ();
}

/* Return true when current function should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}

/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return (optimize_function_for_size_p (cfun)
	  || (bb && !maybe_hot_bb_p (cfun, bb)));
}

/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}

/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return TRUE when the current insn should be optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

/* Return TRUE when the current insn should be optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}

/* Return TRUE when LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (struct loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}

/* Return TRUE when LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (struct loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}

/* Return TRUE when LOOP nest should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (struct loop *loop)
{
  struct loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
	return true;
      if (l->inner)
	l = l->inner;
      else if (l->next)
	l = l->next;
      else
	{
	  while (l != loop && !l->next)
	    l = loop_outer (l);
	  if (l != loop)
	    l = l->next;
	}
    }
  return false;
}

/* Return TRUE when LOOP nest should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (struct loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}

/* Return true when edge E is likely to be well predictable by branch
   predictor.  */

bool
predictable_edge_p (edge e)
{
  if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
    return false;
  if ((e->probability
       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability
	  <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}

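/* E.g. with the default predictable-branch-outcome of 2, an edge is
   considered predictable when its probability is at most 2% or at
   least 98%, i.e. the branch almost always goes the same way.  */
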
/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}

/* Structure representing predictions at the tree level.  */

struct edge_prediction {
  struct edge_prediction *ep_next;
  edge ep_edge;
  enum br_predictor ep_predictor;
  int ep_probability;
};

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static hash_map<const_basic_block, edge_prediction *> *bb_predictions;

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  edge_prediction **preds = bb_predictions->get (bb);

  if (!preds)
    return false;

  for (i = *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (ie.
   taken/not taken), that is predicted right slightly over 75% of time.
   It is however notoriously poor on predicting the probability itself.
   In general the profile appears a lot flatter (with probabilities closer
   to 50%) than the reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn edges and edges
   predicted by number of iterations heuristics are predicted well.  This
   predicate should be able to distinguish those, but at the moment it only
   checks for the noreturn heuristic, which is the only one giving a
   probability over 99% or below 1%.  In the future we might want to
   propagate reliability information across the CFG if we find this
   information useful in multiple places.  */
static bool
probability_reliable_p (int prob)
{
  return (profile_status_for_fn (cfun) == PROFILE_READ
	  || (profile_status_for_fn (cfun) == PROFILE_GUESSED
	      && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}

/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (XINT (note, 0));
}

static void
predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
		gen_rtx_CONCAT (VOIDmode,
				GEN_INT ((int) predictor),
				GEN_INT ((int) probability)));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (insn, predictor, probability);
}

4db384c9 544void
6de9cd9a 545rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
ee92cb46 546{
9f215bf5 547 rtx_insn *last_insn;
a813c111 548 last_insn = BB_END (e->src);
ee92cb46
JH
549
550 /* We can store the branch prediction information only about
551 conditional jumps. */
552 if (!any_condjump_p (last_insn))
553 return;
554
555 /* We always store probability of branching. */
556 if (e->flags & EDGE_FALLTHRU)
557 probability = REG_BR_PROB_BASE - probability;
558
4db384c9
JH
559 predict_insn (last_insn, predictor, probability);
560}
561
/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status_for_fn (cfun) != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
       && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      edge_prediction *&preds = bb_predictions->get_or_insert (e->src);

      i->ep_next = preds;
      preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (!bb_predictions)
    return;

  edge_prediction **preds = bb_predictions->get (e->src);

  if (preds)
    {
      struct edge_prediction **prediction = preds;
      struct edge_prediction *next;

      while (*prediction)
	{
	  if ((*prediction)->ep_edge == e)
	    {
	      next = (*prediction)->ep_next;
	      free (*prediction);
	      *prediction = next;
	    }
	  else
	    prediction = &((*prediction)->ep_next);
	}
    }
}

/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not at computed jump or other complicated cases.  */
static bool
can_predict_insn_p (const rtx_insn *insn)
{
  return (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
		  enum prediction taken)
{
  int probability = predictor_info[(int) predictor].hitrate;

  if (taken != TAKEN)
    probability = REG_BR_PROB_BASE - probability;

  predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XINT (note, 0) = REG_BR_PROB_BASE - XINT (note, 0);
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

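/* For example, a REG_BR_PROB note of 3000 (a 30% branch-taken
   probability with REG_BR_PROB_BASE of 10000) becomes 7000 once the
   jump condition is inverted.  */
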
/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
		 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
	   predictor_info[predictor].name,
	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec %" PRId64, bb->count);
      if (e)
	{
	  fprintf (file, " hit %" PRId64, e->count);
	  fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
	}
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}

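/* E.g. a block with three non-EH successors gets
   (10000 + 1) / 3 = 3333 on each edge (assuming REG_BR_PROB_BASE of
   10000); the nedges / 2 rounding term keeps the edge probabilities
   summing as close to REG_BR_PROB_BASE as integer division allows.  */
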
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	enum br_predictor predictor = ((enum br_predictor)
				       INTVAL (XEXP (XEXP (note, 0), 0)));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32-bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }
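
  /* A worked instance of the combination above: two agreeing predictors
     of 60% and 70% (6000 and 7000) give d = 0.6*0.7 + 0.4*0.3 = 0.54
     and a combined probability of 0.6*0.7 / 0.54 ~= 77.8% (7778), i.e.
     independent agreeing predictions reinforce each other under the
     Dempster-Shafer combination rule.  */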

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
		     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
		       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
		       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  enum br_predictor predictor = ((enum br_predictor)
					 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (dump_file, predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      add_int_reg_note (insn, REG_BR_PROB, combined_probability);

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (!single_succ_p (bb))
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
  else if (!single_succ_p (bb))
    {
      int prob = XINT (prob_note, 0);

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}

838
6de9cd9a
DN
839/* Combine predictions into single probability and store them into CFG.
840 Remove now useless prediction entries. */
f1ebdfc5 841
6de9cd9a 842static void
10d22567 843combine_predictions_for_bb (basic_block bb)
f1ebdfc5 844{
6de9cd9a 845 int best_probability = PROB_EVEN;
bbbbb16a 846 enum br_predictor best_predictor = END_PREDICTORS;
6de9cd9a
DN
847 int combined_probability = REG_BR_PROB_BASE / 2;
848 int d;
849 bool first_match = false;
850 bool found = false;
851 struct edge_prediction *pred;
852 int nedges = 0;
853 edge e, first = NULL, second = NULL;
628f6a4e 854 edge_iterator ei;
f1ebdfc5 855
628f6a4e 856 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
857 if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
858 {
628f6a4e 859 nedges ++;
6de9cd9a
DN
860 if (first && !second)
861 second = e;
862 if (!first)
863 first = e;
864 }
865
b8698a0f 866 /* When there is no successor or only one choice, prediction is easy.
6de9cd9a
DN
867
868 We are lazy for now and predict only basic blocks with two outgoing
869 edges. It is possible to predict generic case too, but we have to
870 ignore first match heuristics and do more involved combining. Implement
871 this later. */
872 if (nedges != 2)
873 {
87022a6b
JH
874 if (!bb->count)
875 set_even_probabilities (bb);
f06b0a10 876 clear_bb_predictions (bb);
10d22567
ZD
877 if (dump_file)
878 fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
6de9cd9a
DN
879 nedges, bb->index);
880 return;
881 }
882
10d22567
ZD
883 if (dump_file)
884 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
6de9cd9a 885
b787e7a2 886 edge_prediction **preds = bb_predictions->get (bb);
f06b0a10 887 if (preds)
6de9cd9a 888 {
f06b0a10
ZD
889 /* We implement "first match" heuristics and use probability guessed
890 by predictor with smallest index. */
b787e7a2 891 for (pred = *preds; pred; pred = pred->ep_next)
f06b0a10 892 {
bbbbb16a 893 enum br_predictor predictor = pred->ep_predictor;
f06b0a10 894 int probability = pred->ep_probability;
6de9cd9a 895
f06b0a10
ZD
896 if (pred->ep_edge != first)
897 probability = REG_BR_PROB_BASE - probability;
6de9cd9a 898
f06b0a10 899 found = true;
c0ee0021
JH
900 /* First match heuristics would be widly confused if we predicted
901 both directions. */
f06b0a10 902 if (best_predictor > predictor)
c0ee0021
JH
903 {
904 struct edge_prediction *pred2;
905 int prob = probability;
906
ed9c79e1
JJ
907 for (pred2 = (struct edge_prediction *) *preds;
908 pred2; pred2 = pred2->ep_next)
c0ee0021
JH
909 if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
910 {
911 int probability2 = pred->ep_probability;
912
913 if (pred2->ep_edge != first)
914 probability2 = REG_BR_PROB_BASE - probability2;
915
b8698a0f 916 if ((probability < REG_BR_PROB_BASE / 2) !=
c0ee0021
JH
917 (probability2 < REG_BR_PROB_BASE / 2))
918 break;
919
920 /* If the same predictor later gave better result, go for it! */
921 if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
922 || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
923 prob = probability2;
924 }
925 if (!pred2)
926 best_probability = prob, best_predictor = predictor;
927 }
6de9cd9a 928
f06b0a10
ZD
929 d = (combined_probability * probability
930 + (REG_BR_PROB_BASE - combined_probability)
931 * (REG_BR_PROB_BASE - probability));
6de9cd9a 932
f06b0a10
ZD
933 /* Use FP math to avoid overflows of 32bit integers. */
934 if (d == 0)
935 /* If one probability is 0% and one 100%, avoid division by zero. */
936 combined_probability = REG_BR_PROB_BASE / 2;
937 else
938 combined_probability = (((double) combined_probability)
939 * probability
940 * REG_BR_PROB_BASE / d + 0.5);
941 }
6de9cd9a
DN
942 }
943
944 /* Decide which heuristic to use. In case we didn't match anything,
945 use no_prediction heuristic, in case we did match, use either
946 first match or Dempster-Shaffer theory depending on the flags. */
947
948 if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
949 first_match = true;
950
951 if (!found)
10d22567 952 dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
6de9cd9a
DN
953 else
954 {
10d22567 955 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
6de9cd9a 956 !first_match);
10d22567 957 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
6de9cd9a
DN
958 first_match);
959 }
960
961 if (first_match)
962 combined_probability = best_probability;
10d22567 963 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);
6de9cd9a 964
f06b0a10 965 if (preds)
6de9cd9a 966 {
d3bfe4de 967 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
f06b0a10 968 {
bbbbb16a 969 enum br_predictor predictor = pred->ep_predictor;
f06b0a10 970 int probability = pred->ep_probability;
6de9cd9a 971
f06b0a10
ZD
972 if (pred->ep_edge != EDGE_SUCC (bb, 0))
973 probability = REG_BR_PROB_BASE - probability;
974 dump_prediction (dump_file, predictor, probability, bb,
975 !first_match || best_predictor == predictor);
976 }
6de9cd9a 977 }
f06b0a10 978 clear_bb_predictions (bb);
6de9cd9a 979
87022a6b
JH
980 if (!bb->count)
981 {
982 first->probability = combined_probability;
983 second->probability = REG_BR_PROB_BASE - combined_probability;
984 }
6de9cd9a
DN
985}
986
/* Check if T1 and T2 satisfy the IV_COMPARE condition.
   Return the SSA_NAME if the condition satisfies, NULL otherwise.

   T1 and T2 should be one of the following cases:
     1. T1 is SSA_NAME, T2 is NULL
     2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
     3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4]  */

static tree
strips_small_constant (tree t1, tree t2)
{
  tree ret = NULL;
  int value = 0;

  if (!t1)
    return NULL;
  else if (TREE_CODE (t1) == SSA_NAME)
    ret = t1;
  else if (tree_fits_shwi_p (t1))
    value = tree_to_shwi (t1);
  else
    return NULL;

  if (!t2)
    return ret;
  else if (tree_fits_shwi_p (t2))
    value = tree_to_shwi (t2);
  else if (TREE_CODE (t2) == SSA_NAME)
    {
      if (ret)
	return NULL;
      else
	ret = t2;
    }

  if (value <= 4 && value >= -4)
    return ret;
  else
    return NULL;
}

/* Return the SSA_NAME in T or T's operands.
   Return NULL if SSA_NAME cannot be found.  */

static tree
get_base_value (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return t;

  if (!BINARY_CLASS_P (t))
    return NULL;

  switch (TREE_OPERAND_LENGTH (t))
    {
    case 1:
      return strips_small_constant (TREE_OPERAND (t, 0), NULL);
    case 2:
      return strips_small_constant (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
    default:
      return NULL;
    }
}

/* Check the compare STMT in LOOP.  If it compares an induction
   variable to a loop invariant, return true, and save
   LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
   Otherwise return false and set LOOP_INVARIANT to NULL.  */

static bool
is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop,
				     tree *loop_invariant,
				     enum tree_code *compare_code,
				     tree *loop_step,
				     tree *loop_iv_base)
{
  tree op0, op1, bound, base;
  affine_iv iv0, iv1;
  enum tree_code code;
  tree step;

  code = gimple_cond_code (stmt);
  *loop_invariant = NULL;

  switch (code)
    {
    case GT_EXPR:
    case GE_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case EQ_EXPR:
      break;

    default:
      return false;
    }

  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);

  if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
      || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
    return false;
  if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
    return false;
  if (TREE_CODE (iv0.step) != INTEGER_CST
      || TREE_CODE (iv1.step) != INTEGER_CST)
    return false;
  if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
      || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
    return false;

  if (integer_zerop (iv0.step))
    {
      if (code != NE_EXPR && code != EQ_EXPR)
	code = invert_tree_comparison (code, false);
      bound = iv0.base;
      base = iv1.base;
      if (tree_fits_shwi_p (iv1.step))
	step = iv1.step;
      else
	return false;
    }
  else
    {
      bound = iv1.base;
      base = iv0.base;
      if (tree_fits_shwi_p (iv0.step))
	step = iv0.step;
      else
	return false;
    }

  if (TREE_CODE (bound) != INTEGER_CST)
    bound = get_base_value (bound);
  if (!bound)
    return false;
  if (TREE_CODE (base) != INTEGER_CST)
    base = get_base_value (base);
  if (!base)
    return false;

  *loop_invariant = bound;
  *compare_code = code;
  *loop_step = step;
  *loop_iv_base = base;
  return true;
}

/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent.  */

static bool
expr_coherent_p (tree t1, tree t2)
{
  gimple *stmt;
  tree ssa_name_1 = NULL;
  tree ssa_name_2 = NULL;

  gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
  gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);

  if (t1 == t2)
    return true;

  if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
    return true;
  if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
    return false;

  /* Check to see if t1 is expressed/defined with t2.  */
  stmt = SSA_NAME_DEF_STMT (t1);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_1 && ssa_name_1 == t2)
	return true;
    }

  /* Check to see if t2 is expressed/defined with t1.  */
  stmt = SSA_NAME_DEF_STMT (t2);
  gcc_assert (stmt != NULL);
  if (is_gimple_assign (stmt))
    {
      ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
      if (ssa_name_2 && ssa_name_2 == t1)
	return true;
    }

  /* Compare if t1 and t2's def_stmts are identical.  */
  if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
    return true;
  else
    return false;
}

/* Predict branch probability of BB when BB contains a branch that compares
   an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR.  The
   loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.

   E.g.
     for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
	 computation_1();
       else
	 computation_2();
     }

   In this loop, we will predict the branch inside the loop to be taken.  */

static void
predict_iv_comparison (struct loop *loop, basic_block bb,
		       tree loop_bound_var,
		       tree loop_iv_base_var,
		       enum tree_code loop_bound_code,
		       int loop_bound_step)
{
  gimple *stmt;
  tree compare_var, compare_base;
  enum tree_code compare_code;
  tree compare_step_var;
  edge then_edge;
  edge_iterator ei;

  if (predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
      || predicted_by_p (bb, PRED_LOOP_ITERATIONS)
      || predicted_by_p (bb, PRED_LOOP_EXIT))
    return;

  stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
					    loop, &compare_var,
					    &compare_code,
					    &compare_step_var,
					    &compare_base))
    return;

  /* Find the taken edge.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;

  /* When comparing an IV to a loop invariant, NE is more likely to be
     taken while EQ is more likely to be not-taken.  */
  if (compare_code == NE_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      return;
    }
  else if (compare_code == EQ_EXPR)
    {
      predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
      return;
    }

  if (!expr_coherent_p (loop_iv_base_var, compare_base))
    return;

  /* If loop bound, base and compare bound are all constants, we can
     calculate the probability directly.  */
  if (tree_fits_shwi_p (loop_bound_var)
      && tree_fits_shwi_p (compare_var)
      && tree_fits_shwi_p (compare_base))
    {
      int probability;
      bool overflow, overall_overflow = false;
      widest_int compare_count, tem;

      /* (loop_bound - base) / compare_step */
      tem = wi::sub (wi::to_widest (loop_bound_var),
		     wi::to_widest (compare_base), SIGNED, &overflow);
      overall_overflow |= overflow;
      widest_int loop_count = wi::div_trunc (tem,
					     wi::to_widest (compare_step_var),
					     SIGNED, &overflow);
      overall_overflow |= overflow;

      if (!wi::neg_p (wi::to_widest (compare_step_var))
	  ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
	{
	  /* (loop_bound - compare_bound) / compare_step */
	  tem = wi::sub (wi::to_widest (loop_bound_var),
			 wi::to_widest (compare_var), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      else
	{
	  /* (compare_bound - base) / compare_step */
	  tem = wi::sub (wi::to_widest (compare_var),
			 wi::to_widest (compare_base), SIGNED, &overflow);
	  overall_overflow |= overflow;
	  compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
					 SIGNED, &overflow);
	  overall_overflow |= overflow;
	}
      if (compare_code == LE_EXPR || compare_code == GE_EXPR)
	++compare_count;
      if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
	++loop_count;
      if (wi::neg_p (compare_count))
	compare_count = 0;
      if (wi::neg_p (loop_count))
	loop_count = 0;
      if (loop_count == 0)
	probability = 0;
      else if (wi::cmps (compare_count, loop_count) == 1)
	probability = REG_BR_PROB_BASE;
      else
	{
	  tem = compare_count * REG_BR_PROB_BASE;
	  tem = wi::udiv_trunc (tem, loop_count);
	  probability = tem.to_uhwi ();
	}

      if (!overall_overflow)
	predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);

      return;
    }

  if (expr_coherent_p (loop_bound_var, compare_var))
    {
      if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
	  && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
	       && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_code == NE_EXPR)
	{
	  /* If the loop backedge condition is "(i != bound)", we do
	     the comparison based on the step of IV:
	       * step < 0 : backedge condition is like (i > bound)
	       * step > 0 : backedge condition is like (i < bound)  */
	  gcc_assert (loop_bound_step != 0);
	  if (loop_bound_step > 0
	      && (compare_code == LT_EXPR
		  || compare_code == LE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else if (loop_bound_step < 0
		   && (compare_code == GT_EXPR
		       || compare_code == GE_EXPR))
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
	  else
	    predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
	}
      else
	/* The branch is predicted not-taken if loop_bound_code is
	   opposite with compare_code.  */
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
  else if (expr_coherent_p (loop_iv_base_var, compare_var))
    {
      /* For cases like:
	   for (i = s; i < h; i++)
	     if (i > s + 2) ....
	 The branch should be predicted taken.  */
      if (loop_bound_step > 0
	  && (compare_code == GT_EXPR || compare_code == GE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else if (loop_bound_step < 0
	       && (compare_code == LT_EXPR || compare_code == LE_EXPR))
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
      else
	predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
    }
}

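/* A concrete instance of the all-constant case above: for
     for (i = 0; i < 100; i++)
       if (i < 30) ...
   loop_count is (100 - 0) / 1 = 100 and compare_count is
   (30 - 0) / 1 = 30, so the then-edge gets probability
   30/100 * REG_BR_PROB_BASE, i.e. roughly 30%.  */
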
/* Predict for extra loop exits that will lead to EXIT_EDGE.  The extra loop
   exits result from short-circuit conditions that will generate an
   if_tmp.  E.g.:

   if (foo() || global > 10)
     break;

   This will be translated into:

   BB3:
     loop header...
   BB4:
     if foo() goto BB6 else goto BB5
   BB5:
     if global > 10 goto BB6 else goto BB7
   BB6:
     goto BB7
   BB7:
     iftmp = (PHI 0(BB5), 1(BB6))
     if iftmp == 1 goto BB8 else goto BB3
   BB8:
     outside of the loop...

   The edge BB7->BB8 is loop exit because BB8 is outside of the loop.
   From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
   exits.  This function takes BB7->BB8 as input, and finds out the extra loop
   exits to predict them using PRED_LOOP_EXIT.  */

static void
predict_extra_loop_exits (edge exit_edge)
{
  unsigned i;
  bool check_value_one;
  gimple *lhs_def_stmt;
  gphi *phi_stmt;
  tree cmp_rhs, cmp_lhs;
  gimple *last;
  gcond *cmp_stmt;

  last = last_stmt (exit_edge->src);
  if (!last)
    return;
  cmp_stmt = dyn_cast <gcond *> (last);
  if (!cmp_stmt)
    return;

  cmp_rhs = gimple_cond_rhs (cmp_stmt);
  cmp_lhs = gimple_cond_lhs (cmp_stmt);
  if (!TREE_CONSTANT (cmp_rhs)
      || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
    return;
  if (TREE_CODE (cmp_lhs) != SSA_NAME)
    return;

  /* If check_value_one is true, only the phi_args with value '1' will lead
     to loop exit.  Otherwise, only the phi_args with value '0' will lead to
     loop exit.  */
  check_value_one = (((integer_onep (cmp_rhs))
		      ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
		     ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));

  lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
  if (!lhs_def_stmt)
    return;

  phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
  if (!phi_stmt)
    return;

  for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
    {
      edge e1;
      edge_iterator ei;
      tree val = gimple_phi_arg_def (phi_stmt, i);
      edge e = gimple_phi_arg_edge (phi_stmt, i);

      if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
	continue;
      if ((check_value_one ^ integer_onep (val)) == 1)
	continue;
      if (EDGE_COUNT (e->src->succs) != 1)
	{
	  predict_paths_leading_to_edge (e, PRED_LOOP_EXIT, NOT_TAKEN);
	  continue;
	}

      FOR_EACH_EDGE (e1, ei, e->src->preds)
	predict_paths_leading_to_edge (e1, PRED_LOOP_EXIT, NOT_TAKEN);
    }
}

/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  struct loop *loop;

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  FOR_EACH_LOOP (loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      vec<edge> exits;
      struct tree_niter_desc niter_desc;
      edge ex;
      struct nb_iter_bound *nb_iter;
      enum tree_code loop_bound_code = ERROR_MARK;
      tree loop_bound_step = NULL;
      tree loop_bound_var = NULL;
      tree loop_iv_base = NULL;
      gcond *stmt = NULL;

      exits = get_loop_exit_edges (loop);
      n_exits = exits.length ();
      if (!n_exits)
	{
	  exits.release ();
	  continue;
	}

      FOR_EACH_VEC_ELT (exits, j, ex)
	{
	  tree niter = NULL;
	  HOST_WIDE_INT nitercst;
	  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
	  int probability;
	  enum br_predictor predictor;

	  predict_extra_loop_exits (ex);

	  if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
	    niter = niter_desc.niter;
	  if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
	    niter = loop_niter_by_eval (loop, ex);

	  if (TREE_CODE (niter) == INTEGER_CST)
	    {
	      if (tree_fits_uhwi_p (niter)
		  && max
		  && compare_tree_int (niter, max - 1) == -1)
		nitercst = tree_to_uhwi (niter) + 1;
	      else
		nitercst = max;
	      predictor = PRED_LOOP_ITERATIONS;
	    }
	  /* If we have just one exit and we can derive some information about
	     the number of iterations of the loop from the statements inside
	     the loop, use it to predict this exit.  */
	  else if (n_exits == 1)
	    {
	      nitercst = estimated_stmt_executions_int (loop);
	      if (nitercst < 0)
		continue;
	      if (nitercst > max)
		nitercst = max;

	      predictor = PRED_LOOP_ITERATIONS_GUESSED;
	    }
	  else
	    continue;

	  /* If the prediction for number of iterations is zero, do not
	     predict the exit edges.  */
	  if (nitercst == 0)
	    continue;

	  probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
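	  /* E.g. a loop expected to iterate 10 times gets an exit
	     probability of about REG_BR_PROB_BASE / 10 on this edge,
	     i.e. roughly a 10% chance of leaving the loop on any given
	     iteration.  */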
	  predict_edge (ex, predictor, probability);
	}
      exits.release ();

      /* Find information about loop bound variables.  */
      for (nb_iter = loop->bounds; nb_iter;
	   nb_iter = nb_iter->next)
	if (nb_iter->stmt
	    && gimple_code (nb_iter->stmt) == GIMPLE_COND)
	  {
	    stmt = as_a <gcond *> (nb_iter->stmt);
	    break;
	  }
      if (!stmt && last_stmt (loop->header)
	  && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
	stmt = as_a <gcond *> (last_stmt (loop->header));
      if (stmt)
	is_comparison_with_loop_invariant_p (stmt, loop,
					     &loop_bound_var,
					     &loop_bound_code,
					     &loop_bound_step,
					     &loop_iv_base);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
	{
	  int header_found = 0;
	  edge e;
	  edge_iterator ei;

	  bb = bbs[j];

	  /* Bypass loop heuristics on continue statement.  These
	     statements construct loops via "non-loop" constructs
	     in the source language and are better handled
	     separately.  */
	  if (predicted_by_p (bb, PRED_CONTINUE))
	    continue;

	  /* Loop branch heuristics - predict an edge back to a
	     loop's head as taken.  */
	  if (bb == loop->latch)
	    {
	      e = find_edge (loop->latch, loop->header);
	      if (e)
		{
		  header_found = 1;
		  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
		}
	    }

	  /* Loop exit heuristics - predict an edge exiting the loop if the
	     conditional has no loop header successors as not taken.  */
	  if (!header_found
	      /* If we already used more reliable loop exit predictors, do not
		 bother with PRED_LOOP_EXIT.  */
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
	      && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
	    {
	      /* For a loop with many exits we don't want to predict all exits
		 with a pretty large probability, because if all exits are
		 considered in a row, the loop would be predicted to iterate
		 almost never.  The code to divide probability by number of
		 exits is very rough.  It should compute the number of exits
		 taken in each path through the function (not the overall
		 number of exits that might be a lot higher for loops with
		 wide switch statements in them) and compute the n-th square
		 root.

		 We limit the minimal probability by 2% to avoid
		 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
		 as this was causing regression in perl benchmark containing such
		 a wide loop.  */

	      int probability = ((REG_BR_PROB_BASE
				  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
				 / n_exits);
	      if (probability < HITRATE (2))
		probability = HITRATE (2);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest->index < NUM_FIXED_BLOCKS
		    || !flow_bb_inside_loop_p (loop, e->dest))
		  predict_edge (e, PRED_LOOP_EXIT, probability);
	    }
	  if (loop_bound_var)
	    predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
				   loop_bound_code,
				   tree_to_shwi (loop_bound_step));
	}

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }
}

87022a6b
JH
1628/* Attempt to predict probabilities of BB outgoing edges using local
1629 properties. */
1630static void
1631bb_estimate_probability_locally (basic_block bb)
1632{
9f215bf5 1633 rtx_insn *last_insn = BB_END (bb);
87022a6b
JH
1634 rtx cond;
1635
1636 if (! can_predict_insn_p (last_insn))
1637 return;
1638 cond = get_condition (last_insn, NULL, false, false);
1639 if (! cond)
1640 return;
1641
1642 /* Try "pointer heuristic."
1643 A comparison ptr == 0 is predicted as false.
1644 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1645 if (COMPARISON_P (cond)
1646 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
1647 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
1648 {
1649 if (GET_CODE (cond) == EQ)
1650 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
1651 else if (GET_CODE (cond) == NE)
1652 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
1653 }
1654 else
1655
1656 /* Try "opcode heuristic."
1657 EQ tests are usually false and NE tests are usually true. Also,
1658 most quantities are positive, so we can make the appropriate guesses
1659 about signed comparisons against zero. */
1660 switch (GET_CODE (cond))
1661 {
1662 case CONST_INT:
1663 /* Unconditional branch. */
1664 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
1665 cond == const0_rtx ? NOT_TAKEN : TAKEN);
1666 break;
1667
1668 case EQ:
1669 case UNEQ:
1670 /* Floating point comparisons appears to behave in a very
1671 unpredictable way because of special role of = tests in
1672 FP code. */
1673 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1674 ;
1675 /* Comparisons with 0 are often used for booleans and there is
1676 nothing useful to predict about them. */
1677 else if (XEXP (cond, 1) == const0_rtx
1678 || XEXP (cond, 0) == const0_rtx)
1679 ;
1680 else
1681 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
1682 break;
1683
1684 case NE:
1685 case LTGT:
1686 /* Floating point comparisons appears to behave in a very
1687 unpredictable way because of special role of = tests in
1688 FP code. */
1689 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
1690 ;
1691 /* Comparisons with 0 are often used for booleans and there is
1692 nothing useful to predict about them. */
1693 else if (XEXP (cond, 1) == const0_rtx
1694 || XEXP (cond, 0) == const0_rtx)
1695 ;
1696 else
1697 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
1698 break;
1699
1700 case ORDERED:
1701 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
1702 break;
1703
1704 case UNORDERED:
1705 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
1706 break;
1707
1708 case LE:
1709 case LT:
1710 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1711 || XEXP (cond, 1) == constm1_rtx)
1712 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
1713 break;
1714
1715 case GE:
1716 case GT:
1717 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
1718 || XEXP (cond, 1) == constm1_rtx)
1719 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
1720 break;
1721
1722 default:
1723 break;
1724 }
1725}
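/* Editorial example (not part of predict.c; all names are hypothetical):
   the kind of user code the two heuristics above target.

     int
     consume (struct node *n, int len)
     {
       if (n == 0)      /* pointer heuristic: EQ on a pointer, not taken  */
         return -1;
       if (len <= 0)    /* opcode heuristic: LE against zero, not taken  */
         return -1;
       return len;
     }

   Both early exits are guessed unlikely, so the fall-through path is
   treated as the hot one.  */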
1726
229031d0 1727/* Set edge->probability for each successor edge of BB. */
87022a6b
JH
1728void
1729guess_outgoing_edge_probabilities (basic_block bb)
1730{
1731 bb_estimate_probability_locally (bb);
1732 combine_predictions_for_insn (BB_END (bb), bb);
1733}
6de9cd9a 1734\f
ed9c79e1 1735static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor);
726a989a
RB
1736
1737/* Helper function for expr_expected_value. */
42f97fd2
JH
1738
1739static tree
c08472ea 1740expr_expected_value_1 (tree type, tree op0, enum tree_code code,
ed9c79e1 1741 tree op1, bitmap visited, enum br_predictor *predictor)
42f97fd2 1742{
355fe088 1743 gimple *def;
726a989a 1744
ed9c79e1
JJ
1745 if (predictor)
1746 *predictor = PRED_UNCONDITIONAL;
1747
726a989a 1748 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
42f97fd2 1749 {
726a989a
RB
1750 if (TREE_CONSTANT (op0))
1751 return op0;
1752
1753 if (code != SSA_NAME)
1754 return NULL_TREE;
1755
1756 def = SSA_NAME_DEF_STMT (op0);
42f97fd2
JH
1757
1758 /* If we were already here, break the infinite cycle. */
fcaa4ca4 1759 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
42f97fd2 1760 return NULL;
42f97fd2 1761
726a989a 1762 if (gimple_code (def) == GIMPLE_PHI)
42f97fd2
JH
1763 {
1764 /* All the arguments of the PHI node must have the same expected
 1765 constant value. */
726a989a 1766 int i, n = gimple_phi_num_args (def);
42f97fd2 1767 tree val = NULL, new_val;
6de9cd9a 1768
726a989a 1769 for (i = 0; i < n; i++)
42f97fd2
JH
1770 {
1771 tree arg = PHI_ARG_DEF (def, i);
ed9c79e1 1772 enum br_predictor predictor2;
42f97fd2
JH
1773
1774 /* If this PHI has itself as an argument, we cannot
 1775 determine the expected value of this argument. However,
1f838355 1776 if we can find an expected constant value for the other
42f97fd2
JH
1777 PHI args then we can still be sure that this is
1778 likely a constant. So be optimistic and just
1779 continue with the next argument. */
1780 if (arg == PHI_RESULT (def))
1781 continue;
1782
ed9c79e1
JJ
1783 new_val = expr_expected_value (arg, visited, &predictor2);
1784
1785 /* It is difficult to combine value predictors. Simply assume
1786 that the later predictor is weaker and take its prediction. */
1787 if (predictor && *predictor < predictor2)
1788 *predictor = predictor2;
42f97fd2
JH
1789 if (!new_val)
1790 return NULL;
1791 if (!val)
1792 val = new_val;
1793 else if (!operand_equal_p (val, new_val, false))
1794 return NULL;
1795 }
1796 return val;
1797 }
726a989a 1798 if (is_gimple_assign (def))
42f97fd2 1799 {
726a989a
RB
1800 if (gimple_assign_lhs (def) != op0)
1801 return NULL;
42f97fd2 1802
726a989a
RB
1803 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
1804 gimple_assign_rhs1 (def),
1805 gimple_assign_rhs_code (def),
1806 gimple_assign_rhs2 (def),
ed9c79e1 1807 visited, predictor);
726a989a
RB
1808 }
1809
1810 if (is_gimple_call (def))
1811 {
1812 tree decl = gimple_call_fndecl (def);
1813 if (!decl)
ed9c79e1
JJ
1814 {
1815 if (gimple_call_internal_p (def)
1816 && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
1817 {
1818 gcc_assert (gimple_call_num_args (def) == 3);
1819 tree val = gimple_call_arg (def, 0);
1820 if (TREE_CONSTANT (val))
1821 return val;
1822 if (predictor)
1823 {
ed9c79e1
JJ
1824 tree val2 = gimple_call_arg (def, 2);
1825 gcc_assert (TREE_CODE (val2) == INTEGER_CST
1826 && tree_fits_uhwi_p (val2)
1827 && tree_to_uhwi (val2) < END_PREDICTORS);
1828 *predictor = (enum br_predictor) tree_to_uhwi (val2);
1829 }
1830 return gimple_call_arg (def, 1);
1831 }
1832 return NULL;
1833 }
c08472ea
RH
1834 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1835 switch (DECL_FUNCTION_CODE (decl))
1836 {
1837 case BUILT_IN_EXPECT:
1838 {
1839 tree val;
1840 if (gimple_call_num_args (def) != 2)
1841 return NULL;
1842 val = gimple_call_arg (def, 0);
1843 if (TREE_CONSTANT (val))
1844 return val;
ed9c79e1
JJ
1845 if (predictor)
1846 *predictor = PRED_BUILTIN_EXPECT;
c08472ea
RH
1847 return gimple_call_arg (def, 1);
1848 }
726a989a 1849
c08472ea
RH
1850 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
1851 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1852 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1853 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1854 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1855 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1856 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
1857 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
1858 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1859 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1860 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1861 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1862 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1863 /* Assume that any given atomic operation has low contention,
1864 and thus the compare-and-swap operation succeeds. */
ed9c79e1
JJ
1865 if (predictor)
1866 *predictor = PRED_COMPARE_AND_SWAP;
c08472ea 1867 return boolean_true_node;
083e891e
MP
1868 default:
1869 break;
726a989a 1870 }
42f97fd2 1871 }
726a989a
RB
1872
1873 return NULL;
42f97fd2 1874 }
726a989a
RB
1875
1876 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
42f97fd2 1877 {
726a989a 1878 tree res;
ed9c79e1
JJ
1879 enum br_predictor predictor2;
1880 op0 = expr_expected_value (op0, visited, predictor);
42f97fd2
JH
1881 if (!op0)
1882 return NULL;
ed9c79e1
JJ
1883 op1 = expr_expected_value (op1, visited, &predictor2);
1884 if (predictor && *predictor < predictor2)
1885 *predictor = predictor2;
42f97fd2
JH
1886 if (!op1)
1887 return NULL;
726a989a 1888 res = fold_build2 (code, type, op0, op1);
42f97fd2
JH
1889 if (TREE_CONSTANT (res))
1890 return res;
1891 return NULL;
1892 }
726a989a 1893 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
42f97fd2 1894 {
726a989a 1895 tree res;
ed9c79e1 1896 op0 = expr_expected_value (op0, visited, predictor);
42f97fd2
JH
1897 if (!op0)
1898 return NULL;
726a989a 1899 res = fold_build1 (code, type, op0);
42f97fd2
JH
1900 if (TREE_CONSTANT (res))
1901 return res;
1902 return NULL;
1903 }
1904 return NULL;
1905}
726a989a 1906
b8698a0f 1907/* Return the constant that EXPR is likely to have at execution time, or NULL if unknown.
726a989a
RB
1908 The function is used by the builtin_expect branch predictor, so the evidence
 1909 must come from that construct and additional possible constant folding.
b8698a0f 1910
726a989a
RB
1911 We may want to implement a more involved value guess (such as prediction
 1912 based on value range propagation), but such tricks should go into a new
 1913 implementation. */
1914
1915static tree
ed9c79e1
JJ
1916expr_expected_value (tree expr, bitmap visited,
1917 enum br_predictor *predictor)
726a989a
RB
1918{
1919 enum tree_code code;
1920 tree op0, op1;
1921
1922 if (TREE_CONSTANT (expr))
ed9c79e1
JJ
1923 {
1924 if (predictor)
1925 *predictor = PRED_UNCONDITIONAL;
1926 return expr;
1927 }
726a989a
RB
1928
1929 extract_ops_from_tree (expr, &code, &op0, &op1);
1930 return expr_expected_value_1 (TREE_TYPE (expr),
ed9c79e1 1931 op0, code, op1, visited, predictor);
726a989a 1932}
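/* Editorial example (not part of predict.c; names are hypothetical): how
   the walk above folds a __builtin_expect hint through arithmetic.  For

     long v = __builtin_expect (x, 42);
     if (v + 1 == 43)
       hot_path ();

   the SSA def of V is the builtin call, so V's expected value is 42 with
   *PREDICTOR set to PRED_BUILTIN_EXPECT; the binary case then folds
   42 + 1 == 43 to a true constant, and the branch is predicted taken.  */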
42f97fd2 1933\f
6de9cd9a
DN
1934/* Predict using opcode of the last statement in basic block. */
1935static void
1936tree_predict_by_opcode (basic_block bb)
1937{
355fe088 1938 gimple *stmt = last_stmt (bb);
6de9cd9a 1939 edge then_edge;
726a989a 1940 tree op0, op1;
6de9cd9a 1941 tree type;
42f97fd2 1942 tree val;
726a989a 1943 enum tree_code cmp;
42f97fd2 1944 bitmap visited;
628f6a4e 1945 edge_iterator ei;
ed9c79e1 1946 enum br_predictor predictor;
6de9cd9a 1947
726a989a 1948 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
6de9cd9a 1949 return;
628f6a4e 1950 FOR_EACH_EDGE (then_edge, ei, bb->succs)
6de9cd9a 1951 if (then_edge->flags & EDGE_TRUE_VALUE)
628f6a4e 1952 break;
726a989a
RB
1953 op0 = gimple_cond_lhs (stmt);
1954 op1 = gimple_cond_rhs (stmt);
1955 cmp = gimple_cond_code (stmt);
6de9cd9a 1956 type = TREE_TYPE (op0);
8bdbfff5 1957 visited = BITMAP_ALLOC (NULL);
ed9c79e1
JJ
1958 val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited,
1959 &predictor);
8bdbfff5 1960 BITMAP_FREE (visited);
ed9c79e1 1961 if (val && TREE_CODE (val) == INTEGER_CST)
42f97fd2 1962 {
ed9c79e1
JJ
1963 if (predictor == PRED_BUILTIN_EXPECT)
1964 {
1965 int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY);
942df739 1966
ed9c79e1
JJ
1967 gcc_assert (percent >= 0 && percent <= 100);
1968 if (integer_zerop (val))
1969 percent = 100 - percent;
1970 predict_edge (then_edge, PRED_BUILTIN_EXPECT, HITRATE (percent));
1971 }
1972 else
1973 predict_edge (then_edge, predictor,
1974 integer_zerop (val) ? NOT_TAKEN : TAKEN);
42f97fd2 1975 }
6de9cd9a
DN
1976 /* Try "pointer heuristic."
1977 A comparison ptr == 0 is predicted as false.
1978 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
1979 if (POINTER_TYPE_P (type))
1980 {
726a989a 1981 if (cmp == EQ_EXPR)
6de9cd9a 1982 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
726a989a 1983 else if (cmp == NE_EXPR)
6de9cd9a
DN
1984 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
1985 }
1986 else
1987
1988 /* Try "opcode heuristic."
1989 EQ tests are usually false and NE tests are usually true. Also,
1990 most quantities are positive, so we can make the appropriate guesses
1991 about signed comparisons against zero. */
726a989a 1992 switch (cmp)
6de9cd9a
DN
1993 {
1994 case EQ_EXPR:
1995 case UNEQ_EXPR:
1996 /* Floating point comparisons appear to behave in a very
 1997 unpredictable way because of the special role of equality tests in
 1998 FP code. */
1999 if (FLOAT_TYPE_P (type))
2000 ;
2001 /* Comparisons with 0 are often used for booleans and there is
2002 nothing useful to predict about them. */
726a989a 2003 else if (integer_zerop (op0) || integer_zerop (op1))
6de9cd9a
DN
2004 ;
2005 else
2006 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
2007 break;
2008
2009 case NE_EXPR:
d1a7edaf 2010 case LTGT_EXPR:
6de9cd9a
DN
2011 /* Floating point comparisons appear to behave in a very
 2012 unpredictable way because of the special role of equality tests in
 2013 FP code. */
2014 if (FLOAT_TYPE_P (type))
2015 ;
2016 /* Comparisons with 0 are often used for booleans and there is
2017 nothing useful to predict about them. */
2018 else if (integer_zerop (op0)
726a989a 2019 || integer_zerop (op1))
6de9cd9a
DN
2020 ;
2021 else
2022 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
2023 break;
2024
2025 case ORDERED_EXPR:
2026 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
2027 break;
2028
2029 case UNORDERED_EXPR:
2030 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
2031 break;
2032
2033 case LE_EXPR:
2034 case LT_EXPR:
726a989a
RB
2035 if (integer_zerop (op1)
2036 || integer_onep (op1)
2037 || integer_all_onesp (op1)
2038 || real_zerop (op1)
2039 || real_onep (op1)
2040 || real_minus_onep (op1))
6de9cd9a
DN
2041 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
2042 break;
2043
2044 case GE_EXPR:
2045 case GT_EXPR:
726a989a
RB
2046 if (integer_zerop (op1)
2047 || integer_onep (op1)
2048 || integer_all_onesp (op1)
2049 || real_zerop (op1)
2050 || real_onep (op1)
2051 || real_minus_onep (op1))
6de9cd9a
DN
2052 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
2053 break;
2054
2055 default:
2056 break;
2057 }
2058}
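/* Editorial example (not part of predict.c): the probability attached to
   a __builtin_expect hit above.  With the default
   --param builtin-expect-probability=90, user code such as

     if (__builtin_expect (err, 0))
       bail_out ();   /* hypothetical error path */

   folds VAL to 0, so percent becomes 100 - 90 = 10 and the THEN edge
   gets HITRATE (10); expecting a nonzero value would leave it at
   HITRATE (90).  */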
2059
bb033fd8 2060/* Try to guess whether the return value means an error code. */
726a989a 2061
bb033fd8
JH
2062static enum br_predictor
2063return_prediction (tree val, enum prediction *prediction)
2064{
2065 /* VOID. */
2066 if (!val)
2067 return PRED_NO_PREDICTION;
2068 /* Different heuristics for pointers and scalars. */
2069 if (POINTER_TYPE_P (TREE_TYPE (val)))
2070 {
2071 /* NULL is usually not returned. */
2072 if (integer_zerop (val))
2073 {
2074 *prediction = NOT_TAKEN;
2075 return PRED_NULL_RETURN;
2076 }
2077 }
2078 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
2079 {
2080 /* Negative return values are often used to indicate
2081 errors. */
2082 if (TREE_CODE (val) == INTEGER_CST
2083 && tree_int_cst_sgn (val) < 0)
2084 {
2085 *prediction = NOT_TAKEN;
2086 return PRED_NEGATIVE_RETURN;
2087 }
2088 /* Constant return values seem to be commonly taken.
 2089 Zero/one often represent booleans, so exclude them from the
2090 heuristics. */
2091 if (TREE_CONSTANT (val)
2092 && (!integer_zerop (val) && !integer_onep (val)))
2093 {
2094 *prediction = TAKEN;
75b6bb62 2095 return PRED_CONST_RETURN;
bb033fd8
JH
2096 }
2097 }
2098 return PRED_NO_PREDICTION;
2099}
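/* Editorial example (not part of predict.c; functions are hypothetical):
   return values the heuristics above classify.

     void *get_buf (void)  { ... return NULL; }  - PRED_NULL_RETURN, NOT_TAKEN
     int   open_dev (void) { ... return -1;   }  - PRED_NEGATIVE_RETURN, NOT_TAKEN
     int   state (void)    { ... return 7;    }  - PRED_CONST_RETURN, TAKEN

   Returning 0 or 1 predicts nothing, since those usually encode booleans.  */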
2100
2101/* Find the basic block with the return expression and look for a possible
 2102 return value, trying to apply RETURN_PREDICTION heuristics. */
2103static void
3e4b9ad0 2104apply_return_prediction (void)
bb033fd8 2105{
538dd0b7 2106 greturn *return_stmt = NULL;
bb033fd8
JH
2107 tree return_val;
2108 edge e;
538dd0b7 2109 gphi *phi;
bb033fd8
JH
2110 int phi_num_args, i;
2111 enum br_predictor pred;
2112 enum prediction direction;
628f6a4e 2113 edge_iterator ei;
bb033fd8 2114
fefa31b5 2115 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
bb033fd8 2116 {
355fe088 2117 gimple *last = last_stmt (e->src);
538dd0b7
DM
2118 if (last
2119 && gimple_code (last) == GIMPLE_RETURN)
2120 {
2121 return_stmt = as_a <greturn *> (last);
2122 break;
2123 }
bb033fd8
JH
2124 }
2125 if (!e)
2126 return;
726a989a 2127 return_val = gimple_return_retval (return_stmt);
bb033fd8
JH
2128 if (!return_val)
2129 return;
bb033fd8
JH
2130 if (TREE_CODE (return_val) != SSA_NAME
2131 || !SSA_NAME_DEF_STMT (return_val)
726a989a 2132 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
bb033fd8 2133 return;
538dd0b7 2134 phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
726a989a 2135 phi_num_args = gimple_phi_num_args (phi);
bb033fd8
JH
2136 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
2137
2138 /* Avoid the degenerate case where all return values from the function
 2139 belong to the same category (i.e. they are all positive constants),
 2140 in which case we can hardly say anything about them. */
2141 for (i = 1; i < phi_num_args; i++)
2142 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
2143 break;
2144 if (i != phi_num_args)
2145 for (i = 0; i < phi_num_args; i++)
2146 {
2147 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
2148 if (pred != PRED_NO_PREDICTION)
5210bbc5
JH
2149 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
2150 direction);
bb033fd8
JH
2151 }
2152}
2153
2154/* Look for basic blocks that contain unlikely-to-happen events
 2155 (such as noreturn calls) and mark all paths leading to execution
 2156 of such basic blocks as unlikely. */
2157
2158static void
2159tree_bb_level_predictions (void)
2160{
2161 basic_block bb;
c0ee0021
JH
2162 bool has_return_edges = false;
2163 edge e;
2164 edge_iterator ei;
2165
fefa31b5 2166 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
c0ee0021
JH
2167 if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
2168 {
2169 has_return_edges = true;
2170 break;
2171 }
bb033fd8 2172
3e4b9ad0 2173 apply_return_prediction ();
bb033fd8 2174
11cd3bed 2175 FOR_EACH_BB_FN (bb, cfun)
bb033fd8 2176 {
726a989a 2177 gimple_stmt_iterator gsi;
bb033fd8 2178
7299cb99 2179 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
bb033fd8 2180 {
355fe088 2181 gimple *stmt = gsi_stmt (gsi);
52bf96d2 2182 tree decl;
daac0317 2183
726a989a 2184 if (is_gimple_call (stmt))
bb033fd8 2185 {
c0ee0021
JH
2186 if ((gimple_call_flags (stmt) & ECF_NORETURN)
2187 && has_return_edges)
726a989a
RB
2188 predict_paths_leading_to (bb, PRED_NORETURN,
2189 NOT_TAKEN);
2190 decl = gimple_call_fndecl (stmt);
2191 if (decl
2192 && lookup_attribute ("cold",
2193 DECL_ATTRIBUTES (decl)))
2194 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
2195 NOT_TAKEN);
bb033fd8 2196 }
726a989a
RB
2197 else if (gimple_code (stmt) == GIMPLE_PREDICT)
2198 {
2199 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
2200 gimple_predict_outcome (stmt));
7299cb99
JH
2201 /* Keep GIMPLE_PREDICT around so early inlining will propagate
2202 hints to callers. */
726a989a 2203 }
bb033fd8
JH
2204 }
2205 }
bb033fd8
JH
2206}
2207
b787e7a2 2208/* Callback for hash_map::traverse, asserts that the pointer map is
f06b0a10
ZD
2209 empty. */
2210
b787e7a2
TS
2211bool
2212assert_is_empty (const_basic_block const &, edge_prediction *const &value,
2213 void *)
f06b0a10 2214{
b787e7a2 2215 gcc_assert (!value);
f06b0a10
ZD
2216 return false;
2217}
f06b0a10 2218
8e88f9fd
SP
2219/* Predict branch probabilities and estimate profile for basic block BB. */
2220
2221static void
2222tree_estimate_probability_bb (basic_block bb)
2223{
2224 edge e;
2225 edge_iterator ei;
355fe088 2226 gimple *last;
8e88f9fd
SP
2227
2228 FOR_EACH_EDGE (e, ei, bb->succs)
2229 {
e45abe1f 2230 /* Predict edges to user labels with attributes. */
fefa31b5 2231 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
e45abe1f
RH
2232 {
2233 gimple_stmt_iterator gi;
2234 for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
2235 {
538dd0b7 2236 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gi));
e45abe1f
RH
2237 tree decl;
2238
538dd0b7 2239 if (!label_stmt)
e45abe1f 2240 break;
538dd0b7 2241 decl = gimple_label_label (label_stmt);
e45abe1f
RH
2242 if (DECL_ARTIFICIAL (decl))
2243 continue;
2244
2245 /* Finally, we have a user-defined label. */
2246 if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)))
2247 predict_edge_def (e, PRED_COLD_LABEL, NOT_TAKEN);
2248 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (decl)))
2249 predict_edge_def (e, PRED_HOT_LABEL, TAKEN);
2250 }
2251 }
2252
8e88f9fd
SP
2253 /* Predict early returns to be probable, as we've already taken
 2254 care of error returns, and other cases are often used for
 2255 fast paths through the function.
2256
2257 Since we've already removed the return statements, we are
2258 looking for CFG like:
2259
2260 if (conditional)
2261 {
2262 ..
2263 goto return_block
2264 }
2265 some other blocks
2266 return_block:
2267 return_stmt. */
2268 if (e->dest != bb->next_bb
fefa31b5 2269 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
8e88f9fd 2270 && single_succ_p (e->dest)
fefa31b5 2271 && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
8e88f9fd
SP
2272 && (last = last_stmt (e->dest)) != NULL
2273 && gimple_code (last) == GIMPLE_RETURN)
2274 {
2275 edge e1;
2276 edge_iterator ei1;
2277
2278 if (single_succ_p (bb))
2279 {
2280 FOR_EACH_EDGE (e1, ei1, bb->preds)
2281 if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
2282 && !predicted_by_p (e1->src, PRED_CONST_RETURN)
2283 && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
2284 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2285 }
2286 else
2287 if (!predicted_by_p (e->src, PRED_NULL_RETURN)
2288 && !predicted_by_p (e->src, PRED_CONST_RETURN)
2289 && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
2290 predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
2291 }
2292
2293 /* Look for a block we are guarding (i.e. we dominate it,
 2294 but it doesn't postdominate us). */
fefa31b5 2295 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
8e88f9fd
SP
2296 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
2297 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
2298 {
2299 gimple_stmt_iterator bi;
2300
2301 /* The call heuristic claims that a guarded function call
2302 is improbable. This is because such calls are often used
2303 to signal exceptional situations such as printing error
2304 messages. */
2305 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
2306 gsi_next (&bi))
2307 {
355fe088 2308 gimple *stmt = gsi_stmt (bi);
8e88f9fd
SP
2309 if (is_gimple_call (stmt)
2310 /* Constant and pure calls are rarely used to signal
 2311 something exceptional. */
2312 && gimple_has_side_effects (stmt))
2313 {
2314 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
2315 break;
2316 }
2317 }
2318 }
2319 }
2320 tree_predict_by_opcode (bb);
2321}
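/* Editorial example (not part of predict.c; names are hypothetical): the
   shape the call heuristic above penalizes.  In

     if (status != OK)
       log_failure (status);   /* guarded call with side effects */

   the guarded block contains a non-pure call, so the edge into it gets
   PRED_CALL, NOT_TAKEN: such calls typically report exceptional events.  */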
2322
2323/* Predict branch probabilities and estimate profile of the tree CFG.
2324 This function can be called from the loop optimizers to recompute
2325 the profile information. */
2326
2327void
6de9cd9a
DN
2328tree_estimate_probability (void)
2329{
2330 basic_block bb;
6de9cd9a 2331
bb033fd8 2332 add_noreturn_fake_exit_edges ();
6de9cd9a 2333 connect_infinite_loops_to_exit ();
c7b852c8
ZD
2334 /* We use loop_niter_by_eval, which requires that the loops have
2335 preheaders. */
2336 create_preheaders (CP_SIMPLE_PREHEADERS);
6de9cd9a
DN
2337 calculate_dominance_info (CDI_POST_DOMINATORS);
2338
b787e7a2 2339 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
bb033fd8 2340 tree_bb_level_predictions ();
4839cb59 2341 record_loop_exits ();
8e88f9fd 2342
0fc822d0 2343 if (number_of_loops (cfun) > 1)
d73be268 2344 predict_loops ();
6de9cd9a 2345
11cd3bed 2346 FOR_EACH_BB_FN (bb, cfun)
8e88f9fd 2347 tree_estimate_probability_bb (bb);
6de9cd9a 2348
11cd3bed 2349 FOR_EACH_BB_FN (bb, cfun)
10d22567 2350 combine_predictions_for_bb (bb);
861f9cd0 2351
b2b29377
MM
2352 if (flag_checking)
2353 bb_predictions->traverse<void *, assert_is_empty> (NULL);
2354
b787e7a2 2355 delete bb_predictions;
f06b0a10
ZD
2356 bb_predictions = NULL;
2357
67fa7880 2358 estimate_bb_frequencies (false);
6de9cd9a 2359 free_dominance_info (CDI_POST_DOMINATORS);
6809cbf9 2360 remove_fake_exit_edges ();
8e88f9fd 2361}
994a57cd 2362\f
fa10beec 2363/* Predict, using PRED, the edges to successors of CUR whose sources are not
3e4b9ad0 2364 postdominated by BB, and recurse to all postdominators. */
bb033fd8
JH
2365
2366static void
3e4b9ad0
JH
2367predict_paths_for_bb (basic_block cur, basic_block bb,
2368 enum br_predictor pred,
0f3b7e9a
JH
2369 enum prediction taken,
2370 bitmap visited)
bb033fd8
JH
2371{
2372 edge e;
628f6a4e 2373 edge_iterator ei;
3e4b9ad0 2374 basic_block son;
bb033fd8 2375
3e4b9ad0
JH
2376 /* We are looking for all edges forming the edge cut induced by
 2377 the set of all blocks postdominated by BB. */
2378 FOR_EACH_EDGE (e, ei, cur->preds)
2379 if (e->src->index >= NUM_FIXED_BLOCKS
2380 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
bb033fd8 2381 {
450997ef
JH
2382 edge e2;
2383 edge_iterator ei2;
2384 bool found = false;
2385
5210bbc5
JH
2386 /* Ignore fake and EH edges; we predict them as not taken anyway. */
2387 if (e->flags & (EDGE_EH | EDGE_FAKE))
450997ef 2388 continue;
3e4b9ad0 2389 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
450997ef 2390
0f3b7e9a 2391 /* See if there is an edge from e->src that is not abnormal
450997ef
JH
2392 and does not lead to BB. */
2393 FOR_EACH_EDGE (e2, ei2, e->src->succs)
2394 if (e2 != e
5210bbc5 2395 && !(e2->flags & (EDGE_EH | EDGE_FAKE))
450997ef
JH
2396 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
2397 {
2398 found = true;
2399 break;
2400 }
2401
2402 /* If there is a non-abnormal path leaving e->src, predict the edge
 2403 using the predictor. Otherwise we need to look for paths
0f3b7e9a
JH
 2404 leading to e->src.
 2405
 2406 The second case may lead to an infinite loop if we are predicting
 2407 regions that are only reachable by abnormal edges. We simply
 2408 prevent visiting a given BB twice. */
450997ef
JH
2409 if (found)
2410 predict_edge_def (e, pred, taken);
993716bd 2411 else if (bitmap_set_bit (visited, e->src->index))
0f3b7e9a 2412 predict_paths_for_bb (e->src, e->src, pred, taken, visited);
bb033fd8 2413 }
3e4b9ad0
JH
2414 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
2415 son;
2416 son = next_dom_son (CDI_POST_DOMINATORS, son))
0f3b7e9a 2417 predict_paths_for_bb (son, bb, pred, taken, visited);
3e4b9ad0 2418}
bb033fd8 2419
3e4b9ad0
JH
2420/* Set branch probabilities on the paths leading to BB according to PRED
 2421 and TAKEN. */
bb033fd8 2422
3e4b9ad0
JH
2423static void
2424predict_paths_leading_to (basic_block bb, enum br_predictor pred,
2425 enum prediction taken)
2426{
0f3b7e9a
JH
2427 bitmap visited = BITMAP_ALLOC (NULL);
2428 predict_paths_for_bb (bb, bb, pred, taken, visited);
2429 BITMAP_FREE (visited);
bb033fd8 2430}
5210bbc5
JH
2431
2432/* Like predict_paths_leading_to but takes an edge instead of a basic block. */
2433
2434static void
2435predict_paths_leading_to_edge (edge e, enum br_predictor pred,
2436 enum prediction taken)
2437{
2438 bool has_nonloop_edge = false;
2439 edge_iterator ei;
2440 edge e2;
2441
2442 basic_block bb = e->src;
2443 FOR_EACH_EDGE (e2, ei, bb->succs)
2444 if (e2->dest != e->src && e2->dest != e->dest
2445 && !(e->flags & (EDGE_EH | EDGE_FAKE))
2446 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
2447 {
2448 has_nonloop_edge = true;
2449 break;
2450 }
2451 if (!has_nonloop_edge)
0f3b7e9a
JH
2452 {
2453 bitmap visited = BITMAP_ALLOC (NULL);
2454 predict_paths_for_bb (bb, bb, pred, taken, visited);
2455 BITMAP_FREE (visited);
2456 }
5210bbc5
JH
2457 else
2458 predict_edge_def (e, pred, taken);
2459}
969d70ca 2460\f
57cb6d52 2461/* This is used to carry information about basic blocks. It is
861f9cd0
JH
2462 attached to the AUX field of the standard CFG block. */
2463
11478306 2464struct block_info
861f9cd0
JH
2465{
2466 /* Estimated frequency of execution of basic_block. */
ac5e69da 2467 sreal frequency;
861f9cd0
JH
2468
2469 /* To keep queue of basic blocks to process. */
2470 basic_block next;
2471
eaec9b3d 2472 /* Number of predecessors we need to visit first. */
754d9299 2473 int npredecessors;
11478306 2474};
861f9cd0
JH
2475
2476/* Similar information for edges. */
11478306 2477struct edge_prob_info
861f9cd0 2478{
569b7f6a 2479 /* In case the edge is a loopback edge, the probability that the edge will
861f9cd0 2480 be reached provided that the header is. The estimated number of iterations
8aa18a7d 2481 of the loop can then be computed as 1 / (1 - back_edge_prob). */
ac5e69da 2482 sreal back_edge_prob;
569b7f6a 2483 /* True if the edge is a loopback edge in the natural loop. */
2c45a16a 2484 unsigned int back_edge:1;
11478306 2485};
861f9cd0 2486
11478306 2487#define BLOCK_INFO(B) ((block_info *) (B)->aux)
59f2e9d8 2488#undef EDGE_INFO
11478306 2489#define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)
861f9cd0
JH
2490
2491/* Helper function for estimate_bb_frequencies.
598ec7bd
ZD
2492 Propagate the frequencies in blocks marked in
2493 TOVISIT, starting in HEAD. */
bfdade77 2494
861f9cd0 2495static void
598ec7bd 2496propagate_freq (basic_block head, bitmap tovisit)
861f9cd0 2497{
e0082a72
ZD
2498 basic_block bb;
2499 basic_block last;
b9af0016 2500 unsigned i;
861f9cd0
JH
2501 edge e;
2502 basic_block nextbb;
8a998e0c 2503 bitmap_iterator bi;
247a370b 2504
eaec9b3d 2505 /* For each basic block we need to visit, count the number of its
247a370b 2506 predecessors that we need to visit first. */
8a998e0c 2507 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
247a370b 2508 {
8a998e0c
JL
2509 edge_iterator ei;
2510 int count = 0;
2511
06e28de2 2512 bb = BASIC_BLOCK_FOR_FN (cfun, i);
bfdade77 2513
8a998e0c
JL
2514 FOR_EACH_EDGE (e, ei, bb->preds)
2515 {
2516 bool visit = bitmap_bit_p (tovisit, e->src->index);
2517
2518 if (visit && !(e->flags & EDGE_DFS_BACK))
2519 count++;
2520 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
2521 fprintf (dump_file,
2522 "Irreducible region hit, ignoring edge to %i->%i\n",
2523 e->src->index, bb->index);
247a370b 2524 }
b9af0016 2525 BLOCK_INFO (bb)->npredecessors = count;
b35366ce 2526 /* When the function never returns, we will never process the exit block. */
fefa31b5 2527 if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
b35366ce 2528 bb->count = bb->frequency = 0;
247a370b 2529 }
861f9cd0 2530
fd27ffab 2531 BLOCK_INFO (head)->frequency = 1;
e0082a72
ZD
2532 last = head;
2533 for (bb = head; bb; bb = nextbb)
861f9cd0 2534 {
628f6a4e 2535 edge_iterator ei;
fd27ffab
ML
2536 sreal cyclic_probability = 0;
2537 sreal frequency = 0;
861f9cd0
JH
2538
2539 nextbb = BLOCK_INFO (bb)->next;
2540 BLOCK_INFO (bb)->next = NULL;
2541
2542 /* Compute frequency of basic block. */
2543 if (bb != head)
2544 {
b2b29377
MM
2545 if (flag_checking)
2546 FOR_EACH_EDGE (e, ei, bb->preds)
2547 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
2548 || (e->flags & EDGE_DFS_BACK));
861f9cd0 2549
628f6a4e 2550 FOR_EACH_EDGE (e, ei, bb->preds)
861f9cd0 2551 if (EDGE_INFO (e)->back_edge)
8aa18a7d 2552 {
618b7f29 2553 cyclic_probability += EDGE_INFO (e)->back_edge_prob;
8aa18a7d 2554 }
247a370b 2555 else if (!(e->flags & EDGE_DFS_BACK))
8aa18a7d 2556 {
8aa18a7d
JH
2557 /* frequency += (e->probability
2558 * BLOCK_INFO (e->src)->frequency /
2559 REG_BR_PROB_BASE); */
2560
fd27ffab 2561 sreal tmp = e->probability;
618b7f29
TS
2562 tmp *= BLOCK_INFO (e->src)->frequency;
2563 tmp *= real_inv_br_prob_base;
2564 frequency += tmp;
8aa18a7d
JH
2565 }
2566
fd27ffab 2567 if (cyclic_probability == 0)
ac5e69da 2568 {
618b7f29 2569 BLOCK_INFO (bb)->frequency = frequency;
ac5e69da 2570 }
fbe3b30b
SB
2571 else
2572 {
618b7f29
TS
2573 if (cyclic_probability > real_almost_one)
2574 cyclic_probability = real_almost_one;
861f9cd0 2575
79a490a9 2576 /* BLOCK_INFO (bb)->frequency = frequency
ac5e69da 2577 / (1 - cyclic_probability) */
861f9cd0 2578
fd27ffab 2579 cyclic_probability = sreal (1) - cyclic_probability;
618b7f29 2580 BLOCK_INFO (bb)->frequency = frequency / cyclic_probability;
fbe3b30b 2581 }
861f9cd0
JH
2582 }
2583
8a998e0c 2584 bitmap_clear_bit (tovisit, bb->index);
861f9cd0 2585
9ff3d2de
JL
2586 e = find_edge (bb, head);
2587 if (e)
2588 {
9ff3d2de
JL
2589 /* EDGE_INFO (e)->back_edge_prob
2590 = ((e->probability * BLOCK_INFO (bb)->frequency)
2591 / REG_BR_PROB_BASE); */
b8698a0f 2592
fd27ffab 2593 sreal tmp = e->probability;
618b7f29
TS
2594 tmp *= BLOCK_INFO (bb)->frequency;
2595 EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base;
9ff3d2de 2596 }
861f9cd0 2597
57cb6d52 2598 /* Propagate to successor blocks. */
628f6a4e 2599 FOR_EACH_EDGE (e, ei, bb->succs)
247a370b 2600 if (!(e->flags & EDGE_DFS_BACK)
754d9299 2601 && BLOCK_INFO (e->dest)->npredecessors)
861f9cd0 2602 {
754d9299
JM
2603 BLOCK_INFO (e->dest)->npredecessors--;
2604 if (!BLOCK_INFO (e->dest)->npredecessors)
247a370b
JH
2605 {
2606 if (!nextbb)
2607 nextbb = e->dest;
2608 else
2609 BLOCK_INFO (last)->next = e->dest;
b8698a0f 2610
247a370b
JH
2611 last = e->dest;
2612 }
628f6a4e 2613 }
861f9cd0
JH
2614 }
2615}
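/* Editorial example (not part of predict.c): the cyclic-probability
   division above on a simple loop.  If a loop header receives frequency 1
   from outside and its back edge contributes back_edge_prob = 0.9, then

     cyclic_probability = 0.9
     BLOCK_INFO (header)->frequency = 1 / (1 - 0.9) = 10

   i.e. the body is expected to execute ten times per entry, matching the
   1 / (1 - back_edge_prob) iteration estimate noted at edge_prob_info.  */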
2616
67fa7880 2617/* Estimate frequencies in loops at the same nest level. */
bfdade77 2618
861f9cd0 2619static void
598ec7bd 2620estimate_loops_at_level (struct loop *first_loop)
861f9cd0 2621{
2ecfd709 2622 struct loop *loop;
861f9cd0
JH
2623
2624 for (loop = first_loop; loop; loop = loop->next)
2625 {
861f9cd0 2626 edge e;
2ecfd709 2627 basic_block *bbs;
3d436d2a 2628 unsigned i;
598ec7bd 2629 bitmap tovisit = BITMAP_ALLOC (NULL);
861f9cd0 2630
598ec7bd 2631 estimate_loops_at_level (loop->inner);
79a490a9 2632
598ec7bd
ZD
2633 /* Find current loop back edge and mark it. */
2634 e = loop_latch_edge (loop);
2635 EDGE_INFO (e)->back_edge = 1;
2ecfd709
ZD
2636
2637 bbs = get_loop_body (loop);
2638 for (i = 0; i < loop->num_nodes; i++)
8a998e0c 2639 bitmap_set_bit (tovisit, bbs[i]->index);
2ecfd709 2640 free (bbs);
598ec7bd
ZD
2641 propagate_freq (loop->header, tovisit);
2642 BITMAP_FREE (tovisit);
861f9cd0
JH
2643 }
2644}
2645
2f8e468b 2646/* Propagates frequencies through the structure of loops. */
598ec7bd
ZD
2647
2648static void
d73be268 2649estimate_loops (void)
598ec7bd
ZD
2650{
2651 bitmap tovisit = BITMAP_ALLOC (NULL);
2652 basic_block bb;
2653
2654 /* Start by estimating the frequencies in the loops. */
0fc822d0 2655 if (number_of_loops (cfun) > 1)
d73be268 2656 estimate_loops_at_level (current_loops->tree_root->inner);
598ec7bd
ZD
2657
2658 /* Now propagate the frequencies through all the blocks. */
04a90bec 2659 FOR_ALL_BB_FN (bb, cfun)
598ec7bd
ZD
2660 {
2661 bitmap_set_bit (tovisit, bb->index);
2662 }
fefa31b5 2663 propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
598ec7bd
ZD
2664 BITMAP_FREE (tovisit);
2665}
2666
eb4b92c1 2667/* Drop the profile for NODE to guessed, and update its frequency based on
4c7d0777 2668 whether it is expected to be hot given the CALL_COUNT. */
eb4b92c1
TJ
2669
2670static void
4c7d0777 2671drop_profile (struct cgraph_node *node, gcov_type call_count)
eb4b92c1
TJ
2672{
2673 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
4c7d0777
TJ
2674 /* In the case where this was called by another function with a
2675 dropped profile, call_count will be 0. Since there are no
2676 non-zero call counts to this function, we don't know for sure
2677 whether it is hot, and therefore it will be marked normal below. */
2678 bool hot = maybe_hot_count_p (NULL, call_count);
eb4b92c1
TJ
2679
2680 if (dump_file)
2681 fprintf (dump_file,
2682 "Dropping 0 profile for %s/%i. %s based on calls.\n",
fec39fa6 2683 node->name (), node->order,
eb4b92c1
TJ
2684 hot ? "Function is hot" : "Function is normal");
2685 /* We only expect to miss profiles for functions that are reached
2686 via non-zero call edges in cases where the function may have
2687 been linked from another module or library (COMDATs and extern
4c7d0777
TJ
2688 templates). See the comments below for handle_missing_profiles.
2689 Also, only warn in cases where the missing counts exceed the
2690 number of training runs. In certain cases with an execv followed
2691 by a no-return call the profile for the no-return call is not
2692 dumped and there can be a mismatch. */
2693 if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
2694 && call_count > profile_info->runs)
eb4b92c1
TJ
2695 {
2696 if (flag_profile_correction)
2697 {
2698 if (dump_file)
2699 fprintf (dump_file,
2700 "Missing counts for called function %s/%i\n",
fec39fa6 2701 node->name (), node->order);
eb4b92c1
TJ
2702 }
2703 else
4c7d0777 2704 warning (0, "Missing counts for called function %s/%i",
fec39fa6 2705 node->name (), node->order);
eb4b92c1
TJ
2706 }
2707
ea19eb9f 2708 profile_status_for_fn (fn)
eb4b92c1
TJ
2709 = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
2710 node->frequency
2711 = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
2712}
2713
2714/* In the case of COMDAT routines, multiple object files will contain the same
2715 function and the linker will select one for the binary. In that case
2716 all the other copies from the profile instrument binary will be missing
2717 profile counts. Look for cases where this happened, due to non-zero
2718 call counts going to 0-count functions, and drop the profile to guessed
2719 so that we can use the estimated probabilities and avoid optimizing only
2720 for size.
2721
2722 The other case where the profile may be missing is when the routine
2723 is not going to be emitted to the object file, e.g. for "extern template"
2724 class methods. Those will be marked DECL_EXTERNAL. Emit a warning in
2725 all other cases of non-zero calls to 0-count functions. */
2726
2727void
2728handle_missing_profiles (void)
2729{
2730 struct cgraph_node *node;
2731 int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
2732 vec<struct cgraph_node *> worklist;
2733 worklist.create (64);
2734
2735 /* See if 0 count function has non-0 count callers. In this case we
2736 lost some profile. Drop its function profile to PROFILE_GUESSED. */
2737 FOR_EACH_DEFINED_FUNCTION (node)
2738 {
2739 struct cgraph_edge *e;
2740 gcov_type call_count = 0;
9cec31f4 2741 gcov_type max_tp_first_run = 0;
eb4b92c1
TJ
2742 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2743
2744 if (node->count)
2745 continue;
2746 for (e = node->callers; e; e = e->next_caller)
9cec31f4 2747 {
eb4b92c1 2748 call_count += e->count;
9cec31f4
ML
2749
2750 if (e->caller->tp_first_run > max_tp_first_run)
2751 max_tp_first_run = e->caller->tp_first_run;
2752 }
2753
2754 /* If the time profile is missing, assign the maximum that comes from
 2755 the caller functions. */
2756 if (!node->tp_first_run && max_tp_first_run)
2757 node->tp_first_run = max_tp_first_run + 1;
2758
eb4b92c1
TJ
2759 if (call_count
2760 && fn && fn->cfg
2761 && (call_count * unlikely_count_fraction >= profile_info->runs))
2762 {
4c7d0777 2763 drop_profile (node, call_count);
eb4b92c1
TJ
2764 worklist.safe_push (node);
2765 }
2766 }
2767
2768 /* Propagate the profile dropping to other 0-count COMDATs that are
2769 potentially called by COMDATs we already dropped the profile on. */
2770 while (worklist.length () > 0)
2771 {
2772 struct cgraph_edge *e;
2773
2774 node = worklist.pop ();
2775 for (e = node->callees; e; e = e->next_caller)
2776 {
2777 struct cgraph_node *callee = e->callee;
2778 struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);
2779
2780 if (callee->count > 0)
2781 continue;
2782 if (DECL_COMDAT (callee->decl) && fn && fn->cfg
ea19eb9f 2783 && profile_status_for_fn (fn) == PROFILE_READ)
eb4b92c1 2784 {
4c7d0777 2785 drop_profile (node, 0);
eb4b92c1
TJ
2786 worklist.safe_push (callee);
2787 }
2788 }
2789 }
2790 worklist.release ();
2791}
2792
02307675
R
2793/* Convert counts measured by profile driven feedback to frequencies.
2794 Return nonzero iff there was any nonzero execution count. */
bfdade77 2795
bbd236a1 2796int
79a490a9 2797counts_to_freqs (void)
861f9cd0 2798{
02307675 2799 gcov_type count_max, true_count_max = 0;
e0082a72 2800 basic_block bb;
0b17ab2f 2801
eb4b92c1
TJ
2802 /* Don't overwrite the estimated frequencies when the profile for
2803 the function is missing. We may drop this function to PROFILE_GUESSED
2804 later in drop_profile (). */
be3c16c4 2805 if (!flag_auto_profile && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
eb4b92c1
TJ
2806 return 0;
2807
fefa31b5 2808 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
02307675 2809 true_count_max = MAX (bb->count, true_count_max);
861f9cd0 2810
02307675 2811 count_max = MAX (true_count_max, 1);
fefa31b5 2812 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
e0082a72 2813 bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
6bad2617 2814
02307675 2815 return true_count_max;
861f9cd0
JH
2816}
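/* Editorial example (not part of predict.c): the scaling above rounds to
   the nearest frequency rather than truncating.  With count_max = 3 and
   BB_FREQ_MAX = 10000, a block with count 2 maps to

     (2 * 10000 + 3 / 2) / 3 = 20001 / 3 = 6667

   whereas plain truncation would give 6666.  */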
2817
bfdade77
RK
2818/* Return true if the function is likely to be expensive, so there is no point
 2819 in optimizing the performance of the prologue and epilogue or doing inlining
 2820 at the expense of code size growth. THRESHOLD is the limit on the number of
 2821 instructions the function can execute on average and still be considered not expensive. */
RK
2821 function can execute at average to be still considered not expensive. */
2822
6ab16dd9 2823bool
79a490a9 2824expensive_function_p (int threshold)
6ab16dd9
JH
2825{
2826 unsigned int sum = 0;
e0082a72 2827 basic_block bb;
5197bd50 2828 unsigned int limit;
6ab16dd9
JH
2829
2830 /* We cannot compute accurately for large thresholds due to scaled
 2831 frequencies. */
e16acfcd 2832 gcc_assert (threshold <= BB_FREQ_MAX);
6ab16dd9 2833
eaec9b3d 2834 /* Frequencies are out of range. This either means that the function contains
6ab16dd9
JH
 2835 an internal loop executing more than BB_FREQ_MAX times or profile feedback
 2836 is available and the function has not been executed at all. */
fefa31b5 2837 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0)
6ab16dd9 2838 return true;
6a4d6760 2839
6ab16dd9 2840 /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
fefa31b5 2841 limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
11cd3bed 2842 FOR_EACH_BB_FN (bb, cfun)
6ab16dd9 2843 {
9f215bf5 2844 rtx_insn *insn;
6ab16dd9 2845
39718607 2846 FOR_BB_INSNS (bb, insn)
bfdade77
RK
2847 if (active_insn_p (insn))
2848 {
2849 sum += bb->frequency;
2850 if (sum > limit)
2851 return true;
6ab16dd9
JH
2852 }
2853 }
bfdade77 2854
6ab16dd9
JH
2855 return false;
2856}
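/* Editorial example (not part of predict.c): the accounting above.  With
   THRESHOLD = 100 and an entry-block frequency of 1000, LIMIT is 100000;
   the function is deemed expensive once the block frequencies of its
   active insns sum past that, e.g. a single block of frequency 1000
   holding more than 100 active insns already crosses the limit.  */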
2857
67fa7880
TJ
2858/* Estimate and propagate basic block frequencies using the given branch
2859 probabilities. If FORCE is true, the frequencies are used to estimate
2860 the counts even when there are already non-zero profile counts. */
bfdade77 2861
45a80bb9 2862void
67fa7880 2863estimate_bb_frequencies (bool force)
861f9cd0 2864{
e0082a72 2865 basic_block bb;
ac5e69da 2866 sreal freq_max;
8aa18a7d 2867
0a6a6ac9 2868 if (force || profile_status_for_fn (cfun) != PROFILE_READ || !counts_to_freqs ())
194734e9 2869 {
c4f6b78e
RE
2870 static int real_values_initialized = 0;
2871
2872 if (!real_values_initialized)
2873 {
85bb9c2a 2874 real_values_initialized = 1;
fd27ffab
ML
2875 real_br_prob_base = REG_BR_PROB_BASE;
2876 real_bb_freq_max = BB_FREQ_MAX;
618b7f29 2877 real_one_half = sreal (1, -1);
fd27ffab
ML
2878 real_inv_br_prob_base = sreal (1) / real_br_prob_base;
2879 real_almost_one = sreal (1) - real_inv_br_prob_base;
c4f6b78e 2880 }
861f9cd0 2881
194734e9 2882 mark_dfs_back_edges ();
194734e9 2883
fefa31b5
DM
2884 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
2885 REG_BR_PROB_BASE;
194734e9
JH
2886
2887 /* Set up block info for each basic block. */
11478306
JH
2888 alloc_aux_for_blocks (sizeof (block_info));
2889 alloc_aux_for_edges (sizeof (edge_prob_info));
fefa31b5 2890 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
861f9cd0 2891 {
861f9cd0 2892 edge e;
628f6a4e 2893 edge_iterator ei;
194734e9 2894
628f6a4e 2895 FOR_EACH_EDGE (e, ei, bb->succs)
861f9cd0 2896 {
fd27ffab 2897 EDGE_INFO (e)->back_edge_prob = e->probability;
618b7f29 2898 EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base;
861f9cd0 2899 }
861f9cd0 2900 }
bfdade77 2901
67fa7880
TJ
2902 /* First compute frequencies locally for each loop from innermost
2903 to outermost to examine frequencies for back edges. */
d73be268 2904 estimate_loops ();
861f9cd0 2905
fd27ffab 2906 freq_max = 0;
11cd3bed 2907 FOR_EACH_BB_FN (bb, cfun)
618b7f29
TS
2908 if (freq_max < BLOCK_INFO (bb)->frequency)
2909 freq_max = BLOCK_INFO (bb)->frequency;
fbe3b30b 2910
618b7f29 2911 freq_max = real_bb_freq_max / freq_max;
fefa31b5 2912 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
8aa18a7d 2913 {
618b7f29
TS
2914 sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half;
2915 bb->frequency = tmp.to_int ();
194734e9 2916 }
bfdade77 2917
194734e9
JH
2918 free_aux_for_blocks ();
2919 free_aux_for_edges ();
2920 }
2921 compute_function_frequency ();
194734e9 2922}
861f9cd0 2923
194734e9 2924/* Decide whether function is hot, cold or unlikely executed. */
965b98d0 2925void
79a490a9 2926compute_function_frequency (void)
194734e9 2927{
e0082a72 2928 basic_block bb;
d52f5295 2929 struct cgraph_node *node = cgraph_node::get (current_function_decl);
daf5c770 2930
844db5d0
JH
2931 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2932 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
2933 node->only_called_at_startup = true;
2934 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
2935 node->only_called_at_exit = true;
e0082a72 2936
0a6a6ac9 2937 if (profile_status_for_fn (cfun) != PROFILE_READ)
52bf96d2 2938 {
5fefcf92 2939 int flags = flags_from_decl_or_type (current_function_decl);
52bf96d2
JH
2940 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
2941 != NULL)
5fefcf92 2942 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
52bf96d2
JH
2943 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
2944 != NULL)
5fefcf92
JH
2945 node->frequency = NODE_FREQUENCY_HOT;
2946 else if (flags & ECF_NORETURN)
2947 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2948 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
2949 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2950 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
2951 || DECL_STATIC_DESTRUCTOR (current_function_decl))
2952 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
52bf96d2
JH
2953 return;
2954 }
daf5c770
JH
2955
2956 /* Only first time try to drop function into unlikely executed.
2957 After inlining the roundoff errors may confuse us.
2958 Ipa-profile pass will drop functions only called from unlikely
2959 functions to unlikely and that is most of what we care about. */
2960 if (!cfun->after_inlining)
2961 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
11cd3bed 2962 FOR_EACH_BB_FN (bb, cfun)
861f9cd0 2963 {
2eb712b4 2964 if (maybe_hot_bb_p (cfun, bb))
194734e9 2965 {
5fefcf92 2966 node->frequency = NODE_FREQUENCY_HOT;
194734e9
JH
2967 return;
2968 }
2eb712b4 2969 if (!probably_never_executed_bb_p (cfun, bb))
5fefcf92 2970 node->frequency = NODE_FREQUENCY_NORMAL;
861f9cd0 2971 }
194734e9 2972}
861f9cd0 2973
2e28e797
JH
2974/* Build PREDICT_EXPR. */
2975tree
2976build_predict_expr (enum br_predictor predictor, enum prediction taken)
2977{
9d7e5c4d 2978 tree t = build1 (PREDICT_EXPR, void_type_node,
9f616812 2979 build_int_cst (integer_type_node, predictor));
bbbbb16a 2980 SET_PREDICT_EXPR_OUTCOME (t, taken);
2e28e797
JH
2981 return t;
2982}
2983
2984const char *
2985predictor_name (enum br_predictor predictor)
2986{
2987 return predictor_info[predictor].name;
2988}
2989
be55bfe6
TS
2990/* Predict branch probabilities and estimate profile of the tree CFG. */
2991
27a4cd48
DM
2992namespace {
2993
2994const pass_data pass_data_profile =
2995{
2996 GIMPLE_PASS, /* type */
2997 "profile_estimate", /* name */
2998 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
2999 TV_BRANCH_PROB, /* tv_id */
3000 PROP_cfg, /* properties_required */
3001 0, /* properties_provided */
3002 0, /* properties_destroyed */
3003 0, /* todo_flags_start */
3bea341f 3004 0, /* todo_flags_finish */
6de9cd9a 3005};
7299cb99 3006
27a4cd48
DM
3007class pass_profile : public gimple_opt_pass
3008{
3009public:
c3284718
RS
3010 pass_profile (gcc::context *ctxt)
3011 : gimple_opt_pass (pass_data_profile, ctxt)
27a4cd48
DM
3012 {}
3013
3014 /* opt_pass methods: */
1a3d085c 3015 virtual bool gate (function *) { return flag_guess_branch_prob; }
be55bfe6 3016 virtual unsigned int execute (function *);
27a4cd48
DM
3017
3018}; // class pass_profile
3019
be55bfe6
TS
3020unsigned int
3021pass_profile::execute (function *fun)
3022{
3023 unsigned nb_loops;
3024
10881cff
JH
3025 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
3026 return 0;
3027
be55bfe6
TS
3028 loop_optimizer_init (LOOPS_NORMAL);
3029 if (dump_file && (dump_flags & TDF_DETAILS))
3030 flow_loops_dump (dump_file, NULL, 0);
3031
3032 mark_irreducible_loops ();
3033
3034 nb_loops = number_of_loops (fun);
3035 if (nb_loops > 1)
3036 scev_initialize ();
3037
3038 tree_estimate_probability ();
3039
3040 if (nb_loops > 1)
3041 scev_finalize ();
3042
3043 loop_optimizer_finalize ();
3044 if (dump_file && (dump_flags & TDF_DETAILS))
3045 gimple_dump_cfg (dump_file, dump_flags);
3046 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
3047 profile_status_for_fn (fun) = PROFILE_GUESSED;
3048 return 0;
3049}
3050
27a4cd48
DM
3051} // anon namespace
3052
3053gimple_opt_pass *
3054make_pass_profile (gcc::context *ctxt)
3055{
3056 return new pass_profile (ctxt);
3057}
3058
3059namespace {
3060
3061const pass_data pass_data_strip_predict_hints =
3062{
3063 GIMPLE_PASS, /* type */
3064 "*strip_predict_hints", /* name */
3065 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3066 TV_BRANCH_PROB, /* tv_id */
3067 PROP_cfg, /* properties_required */
3068 0, /* properties_provided */
3069 0, /* properties_destroyed */
3070 0, /* todo_flags_start */
3bea341f 3071 0, /* todo_flags_finish */
7299cb99 3072};
b35366ce 3073
27a4cd48
DM
3074class pass_strip_predict_hints : public gimple_opt_pass
3075{
3076public:
c3284718
RS
3077 pass_strip_predict_hints (gcc::context *ctxt)
3078 : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
27a4cd48
DM
3079 {}
3080
3081 /* opt_pass methods: */
65d3284b 3082 opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
be55bfe6 3083 virtual unsigned int execute (function *);
27a4cd48
DM
3084
3085}; // class pass_strip_predict_hints
3086
be55bfe6
TS
3087/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
3088 we no longer need. */
3089unsigned int
3090pass_strip_predict_hints::execute (function *fun)
3091{
3092 basic_block bb;
355fe088 3093 gimple *ass_stmt;
be55bfe6
TS
3094 tree var;
3095
3096 FOR_EACH_BB_FN (bb, fun)
3097 {
3098 gimple_stmt_iterator bi;
3099 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
3100 {
355fe088 3101 gimple *stmt = gsi_stmt (bi);
be55bfe6
TS
3102
3103 if (gimple_code (stmt) == GIMPLE_PREDICT)
3104 {
3105 gsi_remove (&bi, true);
3106 continue;
3107 }
3108 else if (is_gimple_call (stmt))
3109 {
3110 tree fndecl = gimple_call_fndecl (stmt);
3111
3112 if ((fndecl
3113 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3114 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
3115 && gimple_call_num_args (stmt) == 2)
3116 || (gimple_call_internal_p (stmt)
3117 && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT))
3118 {
3119 var = gimple_call_lhs (stmt);
3120 if (var)
3121 {
3122 ass_stmt
3123 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
3124 gsi_replace (&bi, ass_stmt, true);
3125 }
3126 else
3127 {
3128 gsi_remove (&bi, true);
3129 continue;
3130 }
3131 }
3132 }
3133 gsi_next (&bi);
3134 }
3135 }
3136 return 0;
3137}
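/* Editorial example (not part of predict.c): the rewrite performed above.
   A statement

     x_1 = __builtin_expect (c_2, 1);

   becomes the plain copy

     x_1 = c_2;

   and GIMPLE_PREDICT statements are removed outright, since by this point
   the hints have already been consumed by branch prediction.  */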
3138
27a4cd48
DM
3139} // anon namespace
3140
3141gimple_opt_pass *
3142make_pass_strip_predict_hints (gcc::context *ctxt)
3143{
3144 return new pass_strip_predict_hints (ctxt);
3145}
3146
b35366ce
JH
3147/* Rebuild function frequencies. Passes are in general expected to
 3148 maintain the profile by hand; however, in some cases this is not possible:
 3149 for example, when inlining several functions with loops, frequencies might
 3150 run out of scale and thus need to be recomputed. */
3151
3152void
3153rebuild_frequencies (void)
3154{
a222c01a 3155 timevar_push (TV_REBUILD_FREQUENCIES);
67fa7880
TJ
3156
3157 /* When the max bb count in the function is small, there is a higher
3158 chance that there were truncation errors in the integer scaling
3159 of counts by inlining and other optimizations. This could lead
3160 to incorrect classification of code as being cold when it isn't.
3161 In that case, force the estimation of bb counts/frequencies from the
3162 branch probabilities, rather than computing frequencies from counts,
3163 which may also lead to frequencies incorrectly reduced to 0. There
3164 is less precision in the probabilities, so we only do this for small
3165 max counts. */
3166 gcov_type count_max = 0;
3167 basic_block bb;
fefa31b5 3168 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
67fa7880
TJ
3169 count_max = MAX (bb->count, count_max);
3170
0a6a6ac9 3171 if (profile_status_for_fn (cfun) == PROFILE_GUESSED
be3c16c4
DC
3172 || (!flag_auto_profile && profile_status_for_fn (cfun) == PROFILE_READ
3173 && count_max < REG_BR_PROB_BASE/10))
b35366ce
JH
3174 {
3175 loop_optimizer_init (0);
3176 add_noreturn_fake_exit_edges ();
3177 mark_irreducible_loops ();
3178 connect_infinite_loops_to_exit ();
67fa7880 3179 estimate_bb_frequencies (true);
b35366ce
JH
3180 remove_fake_exit_edges ();
3181 loop_optimizer_finalize ();
3182 }
0a6a6ac9 3183 else if (profile_status_for_fn (cfun) == PROFILE_READ)
b35366ce
JH
3184 counts_to_freqs ();
3185 else
3186 gcc_unreachable ();
a222c01a 3187 timevar_pop (TV_REBUILD_FREQUENCIES);
b35366ce 3188}