Commit | Line | Data |
---|---|---|
59423b59 | 1 | /* Branch prediction routines for the GNU compiler. |
fbd26352 | 2 | Copyright (C) 2000-2019 Free Software Foundation, Inc. |
59423b59 | 3 | |
e6751e9a | 4 | This file is part of GCC. |
59423b59 | 5 | |
e6751e9a | 6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
e6751e9a | 9 | version. |
59423b59 | 10 | |
e6751e9a | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
59423b59 | 15 | |
e6751e9a | 16 | You should have received a copy of the GNU General Public License |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
59423b59 | 19 | |
20 | /* References: | |
21 | ||
22 | [1] "Branch Prediction for Free" | |
23 | Ball and Larus; PLDI '93. | |
24 | [2] "Static Branch Frequency and Program Profile Analysis" | |
25 | Wu and Larus; MICRO-27. | |
26 | [3] "Corpus-based Static Branch Prediction" | |
04641143 | 27 | Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */ |
59423b59 | 28 | |
29 | ||
30 | #include "config.h" | |
31 | #include "system.h" | |
805e22b2 | 32 | #include "coretypes.h" |
9ef16211 | 33 | #include "backend.h" |
7c29e30e | 34 | #include "rtl.h" |
59423b59 | 35 | #include "tree.h" |
9ef16211 | 36 | #include "gimple.h" |
7c29e30e | 37 | #include "cfghooks.h" |
38 | #include "tree-pass.h" | |
9ef16211 | 39 | #include "ssa.h" |
ad7b10a2 | 40 | #include "memmodel.h" |
7c29e30e | 41 | #include "emit-rtl.h" |
7c29e30e | 42 | #include "cgraph.h" |
43 | #include "coverage.h" | |
44 | #include "diagnostic-core.h" | |
45 | #include "gimple-predict.h" | |
b20a8bb4 | 46 | #include "fold-const.h" |
9ed99284 | 47 | #include "calls.h" |
94ea8568 | 48 | #include "cfganal.h" |
886c1262 | 49 | #include "profile.h" |
e9d7220b | 50 | #include "sreal.h" |
429fa7fa | 51 | #include "params.h" |
862be747 | 52 | #include "cfgloop.h" |
dcf1a1ec | 53 | #include "gimple-iterator.h" |
073c1fd5 | 54 | #include "tree-cfg.h" |
05d9c18a | 55 | #include "tree-ssa-loop-niter.h" |
073c1fd5 | 56 | #include "tree-ssa-loop.h" |
d27b0b64 | 57 | #include "tree-scalar-evolution.h" |
89beffc9 | 58 | #include "ipa-utils.h" |
cbcc4297 | 59 | #include "gimple-pretty-print.h" |
71e39b3b | 60 | #include "selftest.h" |
720cfc43 | 61 | #include "cfgrtl.h" |
30a86690 | 62 | #include "stringpool.h" |
63 | #include "attribs.h" | |
56ff4880 | 64 | |
3f76cceb | 65 | /* Enum with reasons why a predictor is ignored. */ |
66 | ||
67 | enum predictor_reason | |
68 | { | |
abb2c3fe | 69 | REASON_NONE, |
70 | REASON_IGNORED, | |
71 | REASON_SINGLE_EDGE_DUPLICATE, | |
72 | REASON_EDGE_PAIR_DUPLICATE | |
3f76cceb | 73 | }; |
74 | ||
75 | /* String messages for the aforementioned enum. */ | |
76 | ||
77 | static const char *reason_messages[] = {"", " (ignored)", | |
78 | " (single edge duplicate)", " (edge pair duplicate)"}; | |
79 | ||
2e3c56e8 | 80 | /* real constants: 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
81 | 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */ | |
8201d1f6 | 82 | static sreal real_almost_one, real_br_prob_base, |
e9d7220b | 83 | real_inv_br_prob_base, real_one_half, real_bb_freq_max; |
59423b59 | 84 | |
ee5f6585 | 85 | static void combine_predictions_for_insn (rtx_insn *, basic_block); |
3f76cceb | 86 | static void dump_prediction (FILE *, enum br_predictor, int, basic_block, |
87 | enum predictor_reason, edge); | |
e09883e4 | 88 | static void predict_paths_leading_to (basic_block, enum br_predictor, |
89 | enum prediction, | |
2e966e2a | 90 | class loop *in_loop = NULL); |
e09883e4 | 91 | static void predict_paths_leading_to_edge (edge, enum br_predictor, |
92 | enum prediction, | |
2e966e2a | 93 | class loop *in_loop = NULL); |
ee5f6585 | 94 | static bool can_predict_insn_p (const rtx_insn *); |
01107f42 | 95 | static HOST_WIDE_INT get_predictor_value (br_predictor, HOST_WIDE_INT); |
63c3361d | 96 | static void determine_unlikely_bbs (); |
5e96f51e | 97 | |
13488c51 | 98 | /* Information we hold about each branch predictor. |
99 | Filled using information from predict.def. */ | |
e6751e9a | 100 | |
13488c51 | 101 | struct predictor_info |
5e96f51e | 102 | { |
e99c3a1d | 103 | const char *const name; /* Name used in the debugging dumps. */ |
104 | const int hitrate; /* Expected hitrate used by | |
105 | predict_insn_def call. */ | |
106 | const int flags; | |
13488c51 | 107 | }; |
5e96f51e | 108 | |
eb429644 | 109 | /* Use given predictor without Dempster-Shafer theory if it matches |
110 | using first_match heuristics. */ | |
111 | #define PRED_FLAG_FIRST_MATCH 1 | |
112 | ||
113 | /* Convert a hit rate given in percent to our internal representation. */ | |
114 | ||
e6751e9a | 115 | #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100) |
eb429644 | 116 | |
117 | #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS}, | |
e6751e9a | 118 | static const struct predictor_info predictor_info[]= { |
13488c51 | 119 | #include "predict.def" |
120 | ||
aa40f561 | 121 | /* Upper bound on predictors. */ |
eb429644 | 122 | {NULL, 0, 0} |
13488c51 | 123 | }; |
124 | #undef DEF_PREDICTOR | |
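The predictor table above is filled by an X-macro expansion of predict.def: each DEF_PREDICTOR line there becomes one initializer of predictor_info[]. A minimal sketch of one expansion, assuming REG_BR_PROB_BASE is 10000; the entry shown is illustrative rather than a verbatim copy of predict.def:

```c
/* Illustrative predict.def entry (not verbatim):
     DEF_PREDICTOR (PRED_LOOP_EXIT, "loop exit", HITRATE (89), 0)
   HITRATE (89) evaluates to (int) (89 * 10000 + 50) / 100 == 8900,
   so the DEF_PREDICTOR definition above expands this line into
     {"loop exit", 8900, 0},
   i.e. a name, an expected hit rate on the REG_BR_PROB_BASE scale,
   and a flags word.  */
```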
429fa7fa | 125 | |
9e179a64 | 126 | static gcov_type min_count = -1; |
127 | ||
128 | /* Determine the threshold for hot BB counts. */ | |
129 | ||
130 | gcov_type | |
131 | get_hot_bb_threshold () | |
132 | { | |
9e179a64 | 133 | if (min_count == -1) |
134 | { | |
13293fb2 | 135 | const int hot_frac = PARAM_VALUE (HOT_BB_COUNT_FRACTION); |
136 | const gcov_type min_hot_count | |
137 | = hot_frac | |
138 | ? profile_info->sum_max / hot_frac | |
139 | : (gcov_type)profile_count::max_count; | |
140 | set_hot_bb_threshold (min_hot_count); | |
56621355 | 141 | if (dump_file) |
142 | fprintf (dump_file, "Setting hotness threshold to %" PRId64 ".\n", | |
13293fb2 | 143 | min_hot_count); |
9e179a64 | 144 | } |
145 | return min_count; | |
146 | } | |
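As a worked example, assuming the default hot-bb-count-fraction of 10000 (an assumption about the --param default, not something stated in this file): if the most executed block of the training run reached profile_info->sum_max = 2,000,000, the threshold becomes 2,000,000 / 10000 = 200, and maybe_hot_count_p below treats any count of at least 200 as hot.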
147 | ||
148 | /* Set the threshold for hot BB counts. */ | |
149 | ||
150 | void | |
151 | set_hot_bb_threshold (gcov_type min) | |
152 | { | |
153 | min_count = min; | |
154 | } | |
155 | ||
8070b8d5 | 156 | /* Return TRUE if COUNT is considered to be hot in function FUN. */ |
f29b326e | 157 | |
94bed7c3 | 158 | bool |
205ce1aa | 159 | maybe_hot_count_p (struct function *fun, profile_count count) |
f29b326e | 160 | { |
db9cef39 | 161 | if (!count.initialized_p ()) |
f29b326e | 162 | return true; |
205ce1aa | 163 | if (count.ipa () == profile_count::zero ()) |
164 | return false; | |
688b6bc6 | 165 | if (!count.ipa_p ()) |
166 | { | |
167 | struct cgraph_node *node = cgraph_node::get (fun->decl); | |
168 | if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ) | |
169 | { | |
170 | if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED) | |
171 | return false; | |
172 | if (node->frequency == NODE_FREQUENCY_HOT) | |
173 | return true; | |
174 | } | |
175 | if (profile_status_for_fn (fun) == PROFILE_ABSENT) | |
176 | return true; | |
177 | if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE | |
178 | && count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3))) | |
179 | return false; | |
688b6bc6 | 180 | if (count.apply_scale (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION), 1) |
181 | < ENTRY_BLOCK_PTR_FOR_FN (fun)->count) | |
182 | return false; | |
183 | return true; | |
184 | } | |
f29b326e | 185 | /* Code executed at most once is not hot. */ |
db9cef39 | 186 | if (count <= MAX (profile_info ? profile_info->runs : 1, 1)) |
f29b326e | 187 | return false; |
db9cef39 | 188 | return (count.to_gcov_type () >= get_hot_bb_threshold ()); |
f29b326e | 189 | } |
190 | ||
8070b8d5 | 191 | /* Return true if basic block BB of function FUN can be CPU intensive |
192 | and should thus be optimized for maximum performance. */ | |
429fa7fa | 193 | |
194 | bool | |
8d672d12 | 195 | maybe_hot_bb_p (struct function *fun, const_basic_block bb) |
429fa7fa | 196 | { |
8d672d12 | 197 | gcc_checking_assert (fun); |
205ce1aa | 198 | return maybe_hot_count_p (fun, bb->count); |
eb7df8c2 | 199 | } |
200 | ||
8070b8d5 | 201 | /* Return true if edge E can be CPU intensive and should thus be optimized |
202 | for maximum performance. */ | |
eb7df8c2 | 203 | |
204 | bool | |
205 | maybe_hot_edge_p (edge e) | |
206 | { | |
205ce1aa | 207 | return maybe_hot_count_p (cfun, e->count ()); |
429fa7fa | 208 | } |
209 | ||
8070b8d5 | 210 | /* Return true if COUNT is considered to be never executed in function FUN |
211 | or if function FUN is considered so in the static profile. */ | |
dcc9b351 | 212 | |
213 | static bool | |
8070b8d5 | 214 | probably_never_executed (struct function *fun, profile_count count) |
429fa7fa | 215 | { |
8d672d12 | 216 | gcc_checking_assert (fun); |
5b94633f | 217 | if (count.ipa () == profile_count::zero ()) |
f08c22c4 | 218 | return true; |
f12452a9 | 219 | /* Do not trust adjusted counts. Otherwise we would drop code with a low |
220 | execution count into the cold section as a result of inlining. Such low | |
221 | counts are not reliable even with a read profile and could lead us to place | |
222 | code that actually gets executed into the cold section of the binary, | |
223 | which is not desirable. */ | |
224 | if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ) | |
4befb9f4 | 225 | { |
8070b8d5 | 226 | const int unlikely_frac = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION); |
227 | if (count.apply_scale (unlikely_frac, 1) >= profile_info->runs) | |
4befb9f4 | 228 | return false; |
4befb9f4 | 229 | return true; |
230 | } | |
a74a34e6 | 231 | if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ) |
415d1b9a | 232 | && (cgraph_node::get (fun->decl)->frequency |
fd6a3c41 | 233 | == NODE_FREQUENCY_UNLIKELY_EXECUTED)) |
5de92639 | 234 | return true; |
429fa7fa | 235 | return false; |
236 | } | |
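For instance, assuming unlikely-bb-count-fraction keeps its default of 20 (again an assumption about the --param default), a precise count of 50 in a program profiled over profile_info->runs = 1000 runs gives 50 * 20 = 1000 >= 1000, so the block is not classified as never executed; a count of 49 or less would be.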
237 | ||
8070b8d5 | 238 | /* Return true if basic block BB of function FUN is probably never executed. */ |
dcc9b351 | 239 | |
240 | bool | |
241 | probably_never_executed_bb_p (struct function *fun, const_basic_block bb) | |
242 | { | |
205ce1aa | 243 | return probably_never_executed (fun, bb->count); |
dcc9b351 | 244 | } |
245 | ||
8070b8d5 | 246 | /* Return true if edge E is unlikely executed for obvious reasons. */ |
655af1f9 | 247 | |
248 | static bool | |
249 | unlikely_executed_edge_p (edge e) | |
250 | { | |
ea5d3981 | 251 | return (e->count () == profile_count::zero () |
ca69b069 | 252 | || e->probability == profile_probability::never ()) |
655af1f9 | 253 | || (e->flags & (EDGE_EH | EDGE_FAKE)); |
254 | } | |
255 | ||
8070b8d5 | 256 | /* Return true if edge E of function FUN is probably never executed. */ |
80adc5a6 | 257 | |
258 | bool | |
259 | probably_never_executed_edge_p (struct function *fun, edge e) | |
429fa7fa | 260 | { |
ca69b069 | 261 | if (unlikely_executed_edge_p (e)) |
262 | return true; | |
205ce1aa | 263 | return probably_never_executed (fun, e->count ()); |
429fa7fa | 264 | } |
265 | ||
8070b8d5 | 266 | /* Return true if function FUN should always be optimized for size. */ |
cf262be9 | 267 | |
268 | bool | |
269 | optimize_function_for_size_p (struct function *fun) | |
270 | { | |
cf262be9 | 271 | if (!fun || !fun->decl) |
69ad6a32 | 272 | return optimize_size; |
415d1b9a | 273 | cgraph_node *n = cgraph_node::get (fun->decl); |
274 | return n && n->optimize_for_size_p (); | |
cf262be9 | 275 | } |
276 | ||
8070b8d5 | 277 | /* Return true if function FUN should always be optimized for speed. */ |
533af0db | 278 | |
279 | bool | |
280 | optimize_function_for_speed_p (struct function *fun) | |
281 | { | |
282 | return !optimize_function_for_size_p (fun); | |
7dfb44a0 | 283 | } |
284 | ||
8070b8d5 | 285 | /* Return the optimization type that should be used for function FUN. */ |
acdfe9e0 | 286 | |
287 | optimization_type | |
288 | function_optimization_type (struct function *fun) | |
289 | { | |
290 | return (optimize_function_for_speed_p (fun) | |
291 | ? OPTIMIZE_FOR_SPEED | |
292 | : OPTIMIZE_FOR_SIZE); | |
293 | } | |
294 | ||
8070b8d5 | 295 | /* Return TRUE if basic block BB should be optimized for size. */ |
7dfb44a0 | 296 | |
297 | bool | |
94ba1cf1 | 298 | optimize_bb_for_size_p (const_basic_block bb) |
7dfb44a0 | 299 | { |
b9ea678c | 300 | return (optimize_function_for_size_p (cfun) |
301 | || (bb && !maybe_hot_bb_p (cfun, bb))); | |
7dfb44a0 | 302 | } |
303 | ||
8070b8d5 | 304 | /* Return TRUE if basic block BB should be optimized for speed. */ |
7dfb44a0 | 305 | |
306 | bool | |
94ba1cf1 | 307 | optimize_bb_for_speed_p (const_basic_block bb) |
7dfb44a0 | 308 | { |
309 | return !optimize_bb_for_size_p (bb); | |
310 | } | |
311 | ||
8070b8d5 | 312 | /* Return the optimization type that should be used for basic block BB. */ |
acdfe9e0 | 313 | |
314 | optimization_type | |
315 | bb_optimization_type (const_basic_block bb) | |
316 | { | |
317 | return (optimize_bb_for_speed_p (bb) | |
318 | ? OPTIMIZE_FOR_SPEED | |
319 | : OPTIMIZE_FOR_SIZE); | |
320 | } | |
321 | ||
8070b8d5 | 322 | /* Return TRUE if edge E should be optimized for size. */ |
7dfb44a0 | 323 | |
324 | bool | |
325 | optimize_edge_for_size_p (edge e) | |
326 | { | |
533af0db | 327 | return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e); |
7dfb44a0 | 328 | } |
329 | ||
8070b8d5 | 330 | /* Return TRUE if edge E should be optimized for speed. */ |
7dfb44a0 | 331 | |
332 | bool | |
333 | optimize_edge_for_speed_p (edge e) | |
334 | { | |
335 | return !optimize_edge_for_size_p (e); | |
336 | } | |
337 | ||
8070b8d5 | 338 | /* Return TRUE if the current function is optimized for size. */ |
7dfb44a0 | 339 | |
340 | bool | |
341 | optimize_insn_for_size_p (void) | |
342 | { | |
533af0db | 343 | return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p; |
7dfb44a0 | 344 | } |
345 | ||
8070b8d5 | 346 | /* Return TRUE if the current function is optimized for speed. */ |
7dfb44a0 | 347 | |
348 | bool | |
349 | optimize_insn_for_speed_p (void) | |
350 | { | |
351 | return !optimize_insn_for_size_p (); | |
352 | } | |
353 | ||
8070b8d5 | 354 | /* Return TRUE if LOOP should be optimized for size. */ |
94ba1cf1 | 355 | |
356 | bool | |
2e966e2a | 357 | optimize_loop_for_size_p (class loop *loop) |
94ba1cf1 | 358 | { |
359 | return optimize_bb_for_size_p (loop->header); | |
360 | } | |
361 | ||
8070b8d5 | 362 | /* Return TRUE if LOOP should be optimized for speed. */ |
94ba1cf1 | 363 | |
364 | bool | |
2e966e2a | 365 | optimize_loop_for_speed_p (class loop *loop) |
94ba1cf1 | 366 | { |
367 | return optimize_bb_for_speed_p (loop->header); | |
368 | } | |
369 | ||
8070b8d5 | 370 | /* Return TRUE if nest rooted at LOOP should be optimized for speed. */ |
0bfd8d5c | 371 | |
372 | bool | |
2e966e2a | 373 | optimize_loop_nest_for_speed_p (class loop *loop) |
0bfd8d5c | 374 | { |
2e966e2a | 375 | class loop *l = loop; |
0bfd8d5c | 376 | if (optimize_loop_for_speed_p (loop)) |
377 | return true; | |
378 | l = loop->inner; | |
53be41ae | 379 | while (l && l != loop) |
0bfd8d5c | 380 | { |
381 | if (optimize_loop_for_speed_p (l)) | |
382 | return true; | |
383 | if (l->inner) | |
384 | l = l->inner; | |
385 | else if (l->next) | |
386 | l = l->next; | |
387 | else | |
7baffbd3 | 388 | { |
389 | while (l != loop && !l->next) | |
390 | l = loop_outer (l); | |
391 | if (l != loop) | |
392 | l = l->next; | |
393 | } | |
0bfd8d5c | 394 | } |
395 | return false; | |
396 | } | |
397 | ||
8070b8d5 | 398 | /* Return TRUE if nest rooted at LOOP should be optimized for size. */ |
0bfd8d5c | 399 | |
400 | bool | |
2e966e2a | 401 | optimize_loop_nest_for_size_p (class loop *loop) |
0bfd8d5c | 402 | { |
403 | return !optimize_loop_nest_for_speed_p (loop); | |
404 | } | |
405 | ||
8070b8d5 | 406 | /* Return true if edge E is likely to be well predictable by branch |
4a9d7ef7 | 407 | predictor. */ |
408 | ||
409 | bool | |
410 | predictable_edge_p (edge e) | |
411 | { | |
720cfc43 | 412 | if (!e->probability.initialized_p ()) |
4a9d7ef7 | 413 | return false; |
720cfc43 | 414 | if ((e->probability.to_reg_br_prob_base () |
4a9d7ef7 | 415 | <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100) |
720cfc43 | 416 | || (REG_BR_PROB_BASE - e->probability.to_reg_br_prob_base () |
4a9d7ef7 | 417 | <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)) |
418 | return true; | |
419 | return false; | |
420 | } | |
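As an example, assuming predictable-branch-outcome keeps its default of 2, an edge counts as predictable only when its probability is at most 2% or at least 98%; anything in between is treated as unpredictable.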
421 | ||
422 | ||
7dfb44a0 | 423 | /* Set RTL expansion for BB profile. */ |
424 | ||
425 | void | |
426 | rtl_profile_for_bb (basic_block bb) | |
427 | { | |
8d672d12 | 428 | crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb); |
7dfb44a0 | 429 | } |
430 | ||
431 | /* Set RTL expansion for edge profile. */ | |
432 | ||
433 | void | |
434 | rtl_profile_for_edge (edge e) | |
435 | { | |
436 | crtl->maybe_hot_insn_p = maybe_hot_edge_p (e); | |
437 | } | |
438 | ||
439 | /* Set RTL expansion to default mode (i.e. when profile info is not known). */ | |
440 | void | |
441 | default_rtl_profile (void) | |
442 | { | |
443 | crtl->maybe_hot_insn_p = true; | |
444 | } | |
445 | ||
cd0fe062 | 446 | /* Return true if one of the outgoing edges is already predicted by |
447 | PREDICTOR. */ | |
448 | ||
4ee9c684 | 449 | bool |
5493cb9a | 450 | rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor) |
cd0fe062 | 451 | { |
452 | rtx note; | |
5496dbfc | 453 | if (!INSN_P (BB_END (bb))) |
cd0fe062 | 454 | return false; |
5496dbfc | 455 | for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1)) |
cd0fe062 | 456 | if (REG_NOTE_KIND (note) == REG_BR_PRED |
457 | && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor) | |
458 | return true; | |
459 | return false; | |
460 | } | |
5e96f51e | 461 | |
eeb030c4 | 462 | /* Structure representing predictions at the tree level. */ |
463 | ||
464 | struct edge_prediction { | |
465 | struct edge_prediction *ep_next; | |
466 | edge ep_edge; | |
467 | enum br_predictor ep_predictor; | |
468 | int ep_probability; | |
469 | }; | |
470 | ||
06ecf488 | 471 | /* This map contains, for each basic block, the list of predictions for its |
472 | outgoing edges. */ | |
473 | ||
474 | static hash_map<const_basic_block, edge_prediction *> *bb_predictions; | |
475 | ||
4ee9c684 | 476 | /* Return true if one of the outgoing edges is already predicted by |
477 | PREDICTOR. */ | |
478 | ||
479 | bool | |
75a70cf9 | 480 | gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor) |
4ee9c684 | 481 | { |
7ea47fbd | 482 | struct edge_prediction *i; |
06ecf488 | 483 | edge_prediction **preds = bb_predictions->get (bb); |
b3723726 | 484 | |
485 | if (!preds) | |
486 | return false; | |
48e1416a | 487 | |
06ecf488 | 488 | for (i = *preds; i; i = i->ep_next) |
f45e9182 | 489 | if (i->ep_predictor == predictor) |
4ee9c684 | 490 | return true; |
491 | return false; | |
492 | } | |
493 | ||
f6e0b8d0 | 494 | /* Return true if edge E is already predicted by PREDICTOR |
495 | with the outcome TAKEN. */ | |
496 | ||
497 | bool | |
498 | edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken) | |
499 | { | |
500 | struct edge_prediction *i; | |
501 | basic_block bb = e->src; | |
502 | edge_prediction **preds = bb_predictions->get (bb); | |
503 | if (!preds) | |
504 | return false; | |
505 | ||
506 | int probability = predictor_info[(int) predictor].hitrate; | |
507 | ||
508 | if (taken != TAKEN) | |
509 | probability = REG_BR_PROB_BASE - probability; | |
510 | ||
511 | for (i = *preds; i; i = i->ep_next) | |
512 | if (i->ep_predictor == predictor | |
513 | && i->ep_edge == e | |
514 | && i->ep_probability == probability) | |
515 | return true; | |
516 | return false; | |
517 | } | |
518 | ||
b41438e5 | 519 | /* Same predicate as above, working on edges. */ |
520 | bool | |
7ecb5bb2 | 521 | edge_probability_reliable_p (const_edge e) |
b41438e5 | 522 | { |
61cb1816 | 523 | return e->probability.probably_reliable_p (); |
b41438e5 | 524 | } |
525 | ||
526 | /* Same predicate as edge_probability_reliable_p, working on notes. */ | |
527 | bool | |
7ecb5bb2 | 528 | br_prob_note_reliable_p (const_rtx note) |
b41438e5 | 529 | { |
530 | gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB); | |
61cb1816 | 531 | return profile_probability::from_reg_br_prob_note |
532 | (XINT (note, 0)).probably_reliable_p (); | |
b41438e5 | 533 | } |
534 | ||
aa157ca4 | 535 | static void |
ee5f6585 | 536 | predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability) |
13488c51 | 537 | { |
876760f6 | 538 | gcc_assert (any_condjump_p (insn)); |
b28bedce | 539 | if (!flag_guess_branch_prob) |
540 | return; | |
e6751e9a | 541 | |
a1ddb869 | 542 | add_reg_note (insn, REG_BR_PRED, |
543 | gen_rtx_CONCAT (VOIDmode, | |
544 | GEN_INT ((int) predictor), | |
545 | GEN_INT ((int) probability))); | |
13488c51 | 546 | } |
547 | ||
548 | /* Predict insn by given predictor. */ | |
e6751e9a | 549 | |
13488c51 | 550 | void |
ee5f6585 | 551 | predict_insn_def (rtx_insn *insn, enum br_predictor predictor, |
d598ad0d | 552 | enum prediction taken) |
13488c51 | 553 | { |
554 | int probability = predictor_info[(int) predictor].hitrate; | |
8d7738e1 | 555 | gcc_assert (probability != PROB_UNINITIALIZED); |
e6751e9a | 556 | |
13488c51 | 557 | if (taken != TAKEN) |
558 | probability = REG_BR_PROB_BASE - probability; | |
e6751e9a | 559 | |
13488c51 | 560 | predict_insn (insn, predictor, probability); |
5e96f51e | 561 | } |
562 | ||
563 | /* Predict edge E with given probability if possible. */ | |
e6751e9a | 564 | |
13488c51 | 565 | void |
4ee9c684 | 566 | rtl_predict_edge (edge e, enum br_predictor predictor, int probability) |
5e96f51e | 567 | { |
ee5f6585 | 568 | rtx_insn *last_insn; |
5496dbfc | 569 | last_insn = BB_END (e->src); |
5e96f51e | 570 | |
571 | /* We can store the branch prediction information only about | |
572 | conditional jumps. */ | |
573 | if (!any_condjump_p (last_insn)) | |
574 | return; | |
575 | ||
576 | /* We always store probability of branching. */ | |
577 | if (e->flags & EDGE_FALLTHRU) | |
578 | probability = REG_BR_PROB_BASE - probability; | |
579 | ||
13488c51 | 580 | predict_insn (last_insn, predictor, probability); |
581 | } | |
582 | ||
4ee9c684 | 583 | /* Predict edge E with the given PROBABILITY. */ |
584 | void | |
75a70cf9 | 585 | gimple_predict_edge (edge e, enum br_predictor predictor, int probability) |
4ee9c684 | 586 | { |
2d9e68f0 | 587 | if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) |
588 | && EDGE_COUNT (e->src->succs) > 1 | |
589 | && flag_guess_branch_prob | |
590 | && optimize) | |
ebd65d12 | 591 | { |
b3723726 | 592 | struct edge_prediction *i = XNEW (struct edge_prediction); |
06ecf488 | 593 | edge_prediction *&preds = bb_predictions->get_or_insert (e->src); |
4ee9c684 | 594 | |
06ecf488 | 595 | i->ep_next = preds; |
596 | preds = i; | |
f45e9182 | 597 | i->ep_probability = probability; |
598 | i->ep_predictor = predictor; | |
599 | i->ep_edge = e; | |
ebd65d12 | 600 | } |
4ee9c684 | 601 | } |
602 | ||
3651d60e | 603 | /* Filter edge predictions PREDS by a function FILTER. DATA is passed |
604 | to the filter function. */ | |
605 | ||
631fa7de | 606 | void |
3651d60e | 607 | filter_predictions (edge_prediction **preds, |
608 | bool (*filter) (edge_prediction *, void *), void *data) | |
631fa7de | 609 | { |
b3723726 | 610 | if (!bb_predictions) |
611 | return; | |
612 | ||
b3723726 | 613 | if (preds) |
631fa7de | 614 | { |
06ecf488 | 615 | struct edge_prediction **prediction = preds; |
b3723726 | 616 | struct edge_prediction *next; |
617 | ||
631fa7de | 618 | while (*prediction) |
619 | { | |
3651d60e | 620 | if ((*filter) (*prediction, data)) |
621 | prediction = &((*prediction)->ep_next); | |
622 | else | |
b3723726 | 623 | { |
624 | next = (*prediction)->ep_next; | |
625 | free (*prediction); | |
626 | *prediction = next; | |
627 | } | |
631fa7de | 628 | } |
629 | } | |
630 | } | |
631 | ||
3651d60e | 632 | /* Filter function predicate that returns true for an edge prediction P |
633 | if its edge is equal to DATA. */ | |
634 | ||
635 | bool | |
636 | equal_edge_p (edge_prediction *p, void *data) | |
637 | { | |
638 | return p->ep_edge == (edge)data; | |
639 | } | |
640 | ||
641 | /* Remove all predictions on given basic block that are attached | |
642 | to edge E. */ | |
643 | void | |
644 | remove_predictions_associated_with_edge (edge e) | |
645 | { | |
646 | if (!bb_predictions) | |
647 | return; | |
648 | ||
649 | edge_prediction **preds = bb_predictions->get (e->src); | |
650 | filter_predictions (preds, equal_edge_p, e); | |
651 | } | |
652 | ||
b3723726 | 653 | /* Clears the list of predictions stored for BB. */ |
654 | ||
655 | static void | |
656 | clear_bb_predictions (basic_block bb) | |
657 | { | |
06ecf488 | 658 | edge_prediction **preds = bb_predictions->get (bb); |
b3723726 | 659 | struct edge_prediction *pred, *next; |
660 | ||
661 | if (!preds) | |
662 | return; | |
663 | ||
06ecf488 | 664 | for (pred = *preds; pred; pred = next) |
b3723726 | 665 | { |
666 | next = pred->ep_next; | |
667 | free (pred); | |
668 | } | |
669 | *preds = NULL; | |
670 | } | |
671 | ||
1a12dac4 | 672 | /* Return true when we can store a prediction on insn INSN. |
673 | At the moment we represent predictions only on conditional | |
674 | jumps, not on computed jumps or other complicated cases. */ | |
675 | static bool | |
ee5f6585 | 676 | can_predict_insn_p (const rtx_insn *insn) |
1a12dac4 | 677 | { |
6d7dc5b9 | 678 | return (JUMP_P (insn) |
1a12dac4 | 679 | && any_condjump_p (insn) |
cd665a06 | 680 | && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2); |
1a12dac4 | 681 | } |
682 | ||
13488c51 | 683 | /* Predict edge E by given predictor if possible. */ |
e6751e9a | 684 | |
13488c51 | 685 | void |
d598ad0d | 686 | predict_edge_def (edge e, enum br_predictor predictor, |
687 | enum prediction taken) | |
13488c51 | 688 | { |
689 | int probability = predictor_info[(int) predictor].hitrate; | |
690 | ||
691 | if (taken != TAKEN) | |
692 | probability = REG_BR_PROB_BASE - probability; | |
e6751e9a | 693 | |
13488c51 | 694 | predict_edge (e, predictor, probability); |
695 | } | |
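A hedged usage sketch of this helper follows; it is not code from this file, only an illustration of how a prediction pass might mark the exit edges of a loop, assuming the headers already included above:

```c
/* Illustrative only: predict every edge leaving LOOP as not taken,
   using PRED_LOOP_EXIT's default hit rate from predict.def.  */
static void
sketch_predict_loop_exits (class loop *loop)
{
  basic_block *bbs = get_loop_body (loop);

  for (unsigned i = 0; i < loop->num_nodes; i++)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        if (loop_exit_edge_p (loop, e))
          predict_edge_def (e, PRED_LOOP_EXIT, NOT_TAKEN);
    }
  free (bbs);
}
```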
696 | ||
697 | /* Invert all branch predictions or probability notes in the INSN. This needs | |
698 | to be done each time we invert the condition used by the jump. */ | |
e6751e9a | 699 | |
13488c51 | 700 | void |
d598ad0d | 701 | invert_br_probabilities (rtx insn) |
13488c51 | 702 | { |
e6751e9a | 703 | rtx note; |
704 | ||
705 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
706 | if (REG_NOTE_KIND (note) == REG_BR_PROB) | |
61cb1816 | 707 | XINT (note, 0) = profile_probability::from_reg_br_prob_note |
708 | (XINT (note, 0)).invert ().to_reg_br_prob_note (); | |
e6751e9a | 709 | else if (REG_NOTE_KIND (note) == REG_BR_PRED) |
710 | XEXP (XEXP (note, 0), 1) | |
711 | = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1))); | |
13488c51 | 712 | } |
713 | ||
714 | /* Dump information about the branch prediction to the output file. */ | |
e6751e9a | 715 | |
13488c51 | 716 | static void |
4ee9c684 | 717 | dump_prediction (FILE *file, enum br_predictor predictor, int probability, |
abb2c3fe | 718 | basic_block bb, enum predictor_reason reason = REASON_NONE, |
3f76cceb | 719 | edge ep_edge = NULL) |
13488c51 | 720 | { |
3f76cceb | 721 | edge e = ep_edge; |
cd665a06 | 722 | edge_iterator ei; |
13488c51 | 723 | |
4ee9c684 | 724 | if (!file) |
13488c51 | 725 | return; |
726 | ||
3f76cceb | 727 | if (e == NULL) |
728 | FOR_EACH_EDGE (e, ei, bb->succs) | |
729 | if (! (e->flags & EDGE_FALLTHRU)) | |
730 | break; | |
731 | ||
732 | char edge_info_str[128]; | |
733 | if (ep_edge) | |
734 | sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index, | |
735 | ep_edge->dest->index); | |
736 | else | |
737 | edge_info_str[0] = '\0'; | |
13488c51 | 738 | |
cc647a3c | 739 | fprintf (file, " %s heuristics%s%s: %.2f%%", |
13488c51 | 740 | predictor_info[predictor].name, |
3f76cceb | 741 | edge_info_str, reason_messages[reason], |
742 | probability * 100.0 / REG_BR_PROB_BASE); | |
13488c51 | 743 | |
db9cef39 | 744 | if (bb->count.initialized_p ()) |
17a81216 | 745 | { |
db9cef39 | 746 | fprintf (file, " exec "); |
747 | bb->count.dump (file); | |
12c94d25 | 748 | if (e) |
749 | { | |
db9cef39 | 750 | fprintf (file, " hit "); |
ea5d3981 | 751 | e->count ().dump (file); |
752 | fprintf (file, " (%.1f%%)", e->count ().to_gcov_type() * 100.0 | |
db9cef39 | 753 | / bb->count.to_gcov_type ()); |
12c94d25 | 754 | } |
17a81216 | 755 | } |
e6751e9a | 756 | |
4ee9c684 | 757 | fprintf (file, "\n"); |
041d9b52 | 758 | |
759 | /* Print output that can be easily read by the analyze_brprob.py script. We are | |
760 | interested only in counts that are read from GCDA files. */ | |
761 | if (dump_file && (dump_flags & TDF_DETAILS) | |
762 | && bb->count.precise_p () | |
763 | && reason == REASON_NONE) | |
764 | { | |
765 | gcc_assert (e->count ().precise_p ()); | |
766 | fprintf (file, ";;heuristics;%s;%" PRId64 ";%" PRId64 ";%.1f;\n", | |
767 | predictor_info[predictor].name, | |
768 | bb->count.to_gcov_type (), e->count ().to_gcov_type (), | |
769 | probability * 100.0 / REG_BR_PROB_BASE); | |
770 | } | |
13488c51 | 771 | } |
772 | ||
f08c22c4 | 773 | /* Return true if STMT is known to be unlikely executed. */ |
774 | ||
775 | static bool | |
776 | unlikely_executed_stmt_p (gimple *stmt) | |
777 | { | |
778 | if (!is_gimple_call (stmt)) | |
779 | return false; | |
23653b72 | 780 | /* NORETURN attribute alone is not strong enough: exit() may be quite |
f08c22c4 | 781 | likely executed once during program run. */ |
782 | if (gimple_call_fntype (stmt) | |
783 | && lookup_attribute ("cold", | |
784 | TYPE_ATTRIBUTES (gimple_call_fntype (stmt))) | |
785 | && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))) | |
786 | return true; | |
787 | tree decl = gimple_call_fndecl (stmt); | |
788 | if (!decl) | |
789 | return false; | |
790 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)) | |
791 | && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))) | |
792 | return true; | |
793 | ||
794 | cgraph_node *n = cgraph_node::get (decl); | |
795 | if (!n) | |
796 | return false; | |
1ccb310e | 797 | |
798 | availability avail; | |
f08c22c4 | 799 | n = n->ultimate_alias_target (&avail); |
800 | if (avail < AVAIL_AVAILABLE) | |
1ccb310e | 801 | return false; |
f08c22c4 | 802 | if (!n->analyzed |
803 | || n->decl == current_function_decl) | |
804 | return false; | |
805 | return n->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED; | |
806 | } | |
807 | ||
808 | /* Return true if BB is unlikely executed. */ | |
809 | ||
810 | static bool | |
811 | unlikely_executed_bb_p (basic_block bb) | |
812 | { | |
813 | if (bb->count == profile_count::zero ()) | |
814 | return true; | |
815 | if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) || bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
816 | return false; | |
817 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); | |
818 | !gsi_end_p (gsi); gsi_next (&gsi)) | |
819 | { | |
820 | if (unlikely_executed_stmt_p (gsi_stmt (gsi))) | |
821 | return true; | |
822 | if (stmt_can_terminate_bb_p (gsi_stmt (gsi))) | |
823 | return false; | |
824 | } | |
825 | return false; | |
826 | } | |
827 | ||
f4d3c071 | 828 | /* We cannot predict the probabilities of outgoing edges of bb. Set them |
3989f1fe | 829 | evenly and hope for the best. If UNLIKELY_EDGES is not null, distribute |
830 | even probability for all edges not mentioned in the set. These edges | |
44b41fe7 | 831 | are given PROB_VERY_UNLIKELY probability. Similarly for LIKELY_EDGES, |
832 | if we have exactly one likely edge, make the other edges predicted | |
833 | as not probable. */ | |
3989f1fe | 834 | |
83c8a977 | 835 | static void |
3989f1fe | 836 | set_even_probabilities (basic_block bb, |
44b41fe7 | 837 | hash_set<edge> *unlikely_edges = NULL, |
838 | hash_set<edge_prediction *> *likely_edges = NULL) | |
83c8a977 | 839 | { |
c7a0aa22 | 840 | unsigned nedges = 0, unlikely_count = 0; |
1cda36f6 | 841 | edge e = NULL; |
cd665a06 | 842 | edge_iterator ei; |
c7a0aa22 | 843 | profile_probability all = profile_probability::always (); |
83c8a977 | 844 | |
cd665a06 | 845 | FOR_EACH_EDGE (e, ei, bb->succs) |
c7a0aa22 | 846 | if (e->probability.initialized_p ()) |
847 | all -= e->probability; | |
848 | else if (!unlikely_executed_edge_p (e)) | |
849 | { | |
44b41fe7 | 850 | nedges++; |
c7a0aa22 | 851 | if (unlikely_edges != NULL && unlikely_edges->contains (e)) |
852 | { | |
853 | all -= profile_probability::very_unlikely (); | |
854 | unlikely_count++; | |
855 | } | |
856 | } | |
3989f1fe | 857 | |
858 | /* Make the distribution even if all edges are unlikely. */ | |
44b41fe7 | 859 | unsigned likely_count = likely_edges ? likely_edges->elements () : 0; |
3989f1fe | 860 | if (unlikely_count == nedges) |
861 | { | |
862 | unlikely_edges = NULL; | |
863 | unlikely_count = 0; | |
864 | } | |
865 | ||
44b41fe7 | 866 | /* If we have one likely edge, then use its probability and distribute |
867 | the remaining probability evenly. */ | |
868 | if (likely_count == 1) | |
869 | { | |
870 | FOR_EACH_EDGE (e, ei, bb->succs) | |
871 | if (e->probability.initialized_p ()) | |
872 | ; | |
873 | else if (!unlikely_executed_edge_p (e)) | |
874 | { | |
875 | edge_prediction *prediction = *likely_edges->begin (); | |
876 | int p = prediction->ep_probability; | |
877 | profile_probability prob | |
878 | = profile_probability::from_reg_br_prob_base (p); | |
44b41fe7 | 879 | |
880 | if (prediction->ep_edge == e) | |
881 | e->probability = prob; | |
c914d1b3 | 882 | else if (unlikely_edges != NULL && unlikely_edges->contains (e)) |
883 | e->probability = profile_probability::very_unlikely (); | |
44b41fe7 | 884 | else |
79cbb1a8 | 885 | { |
886 | profile_probability remainder = prob.invert (); | |
887 | remainder -= profile_probability::very_unlikely () | |
888 | .apply_scale (unlikely_count, 1); | |
889 | int count = nedges - unlikely_count - 1; | |
890 | gcc_assert (count >= 0); | |
891 | ||
892 | e->probability = remainder.apply_scale (1, count); | |
893 | } | |
44b41fe7 | 894 | } |
3989f1fe | 895 | else |
44b41fe7 | 896 | e->probability = profile_probability::never (); |
897 | } | |
898 | else | |
899 | { | |
900 | /* Make all unlikely edges unlikely and the rest will have even | |
901 | probability. */ | |
902 | unsigned scale = nedges - unlikely_count; | |
903 | FOR_EACH_EDGE (e, ei, bb->succs) | |
904 | if (e->probability.initialized_p ()) | |
905 | ; | |
906 | else if (!unlikely_executed_edge_p (e)) | |
907 | { | |
908 | if (unlikely_edges != NULL && unlikely_edges->contains (e)) | |
909 | e->probability = profile_probability::very_unlikely (); | |
910 | else | |
911 | e->probability = all.apply_scale (1, scale); | |
912 | } | |
913 | else | |
914 | e->probability = profile_probability::never (); | |
915 | } | |
83c8a977 | 916 | } |
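Concretely: in a block with four successor edges where one edge was predicted very unlikely, that edge receives PROB_VERY_UNLIKELY and the remaining probability is split evenly over the other three; if instead exactly one edge carries a likely prediction (say 90% from __builtin_expect), that edge keeps its 90% and the leftover mass, minus any very-unlikely shares, is divided among the rest.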
917 | ||
61cb1816 | 918 | /* Add REG_BR_PROB note to JUMP with PROB. */ |
919 | ||
920 | void | |
921 | add_reg_br_prob_note (rtx_insn *jump, profile_probability prob) | |
922 | { | |
923 | gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0)); | |
924 | add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ()); | |
925 | } | |
926 | ||
13488c51 | 927 | /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB |
928 | note if not already present. Remove now useless REG_BR_PRED notes. */ | |
e6751e9a | 929 | |
13488c51 | 930 | static void |
ee5f6585 | 931 | combine_predictions_for_insn (rtx_insn *insn, basic_block bb) |
13488c51 | 932 | { |
83c8a977 | 933 | rtx prob_note; |
934 | rtx *pnote; | |
e6751e9a | 935 | rtx note; |
13488c51 | 936 | int best_probability = PROB_EVEN; |
b9c74b4d | 937 | enum br_predictor best_predictor = END_PREDICTORS; |
eb429644 | 938 | int combined_probability = REG_BR_PROB_BASE / 2; |
939 | int d; | |
49d7c0db | 940 | bool first_match = false; |
941 | bool found = false; | |
13488c51 | 942 | |
83c8a977 | 943 | if (!can_predict_insn_p (insn)) |
944 | { | |
945 | set_even_probabilities (bb); | |
946 | return; | |
947 | } | |
948 | ||
949 | prob_note = find_reg_note (insn, REG_BR_PROB, 0); | |
950 | pnote = ®_NOTES (insn); | |
450d042a | 951 | if (dump_file) |
952 | fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn), | |
b3d6de89 | 953 | bb->index); |
13488c51 | 954 | |
955 | /* We implement "first match" heuristics and use probability guessed | |
4ee9c684 | 956 | by predictor with smallest index. */ |
e6751e9a | 957 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
958 | if (REG_NOTE_KIND (note) == REG_BR_PRED) | |
959 | { | |
bc620c5c | 960 | enum br_predictor predictor = ((enum br_predictor) |
961 | INTVAL (XEXP (XEXP (note, 0), 0))); | |
e6751e9a | 962 | int probability = INTVAL (XEXP (XEXP (note, 0), 1)); |
963 | ||
964 | found = true; | |
19109630 | 965 | if (best_predictor > predictor |
966 | && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH) | |
e6751e9a | 967 | best_probability = probability, best_predictor = predictor; |
968 | ||
969 | d = (combined_probability * probability | |
970 | + (REG_BR_PROB_BASE - combined_probability) | |
971 | * (REG_BR_PROB_BASE - probability)); | |
972 | ||
973 | /* Use FP math to avoid overflows of 32bit integers. */ | |
c4a616f2 | 974 | if (d == 0) |
975 | /* If one probability is 0% and one 100%, avoid division by zero. */ | |
976 | combined_probability = REG_BR_PROB_BASE / 2; | |
977 | else | |
978 | combined_probability = (((double) combined_probability) * probability | |
979 | * REG_BR_PROB_BASE / d + 0.5); | |
e6751e9a | 980 | } |
981 | ||
982 | /* Decide which heuristic to use. In case we didn't match anything, | |
983 | use no_prediction heuristic, in case we did match, use either | |
49d7c0db | 984 | first match or Dempster-Shafer theory depending on the flags. */ |
985 | ||
19109630 | 986 | if (best_predictor != END_PREDICTORS) |
49d7c0db | 987 | first_match = true; |
988 | ||
989 | if (!found) | |
4ee9c684 | 990 | dump_prediction (dump_file, PRED_NO_PREDICTION, |
3f76cceb | 991 | combined_probability, bb); |
49d7c0db | 992 | else |
993 | { | |
19109630 | 994 | if (!first_match) |
995 | dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, | |
996 | bb, !first_match ? REASON_NONE : REASON_IGNORED); | |
997 | else | |
998 | dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, | |
999 | bb, first_match ? REASON_NONE : REASON_IGNORED); | |
49d7c0db | 1000 | } |
1001 | ||
1002 | if (first_match) | |
eb429644 | 1003 | combined_probability = best_probability; |
3f76cceb | 1004 | dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb); |
49d7c0db | 1005 | |
1006 | while (*pnote) | |
1007 | { | |
1008 | if (REG_NOTE_KIND (*pnote) == REG_BR_PRED) | |
1009 | { | |
bc620c5c | 1010 | enum br_predictor predictor = ((enum br_predictor) |
1011 | INTVAL (XEXP (XEXP (*pnote, 0), 0))); | |
49d7c0db | 1012 | int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1)); |
1013 | ||
4ee9c684 | 1014 | dump_prediction (dump_file, predictor, probability, bb, |
3f76cceb | 1015 | (!first_match || best_predictor == predictor) |
abb2c3fe | 1016 | ? REASON_NONE : REASON_IGNORED); |
195731ad | 1017 | *pnote = XEXP (*pnote, 1); |
49d7c0db | 1018 | } |
1019 | else | |
195731ad | 1020 | pnote = &XEXP (*pnote, 1); |
49d7c0db | 1021 | } |
e6751e9a | 1022 | |
13488c51 | 1023 | if (!prob_note) |
1024 | { | |
61cb1816 | 1025 | profile_probability p |
1026 | = profile_probability::from_reg_br_prob_base (combined_probability); | |
1027 | add_reg_br_prob_note (insn, p); | |
e6751e9a | 1028 | |
eb429644 | 1029 | /* Save the prediction into CFG in case we are seeing non-degenerated |
1030 | conditional jump. */ | |
ea091dfd | 1031 | if (!single_succ_p (bb)) |
eb429644 | 1032 | { |
61cb1816 | 1033 | BRANCH_EDGE (bb)->probability = p; |
e6751e9a | 1034 | FALLTHRU_EDGE (bb)->probability |
720cfc43 | 1035 | = BRANCH_EDGE (bb)->probability.invert (); |
eb429644 | 1036 | } |
13488c51 | 1037 | } |
ea091dfd | 1038 | else if (!single_succ_p (bb)) |
d8c70625 | 1039 | { |
61cb1816 | 1040 | profile_probability prob = profile_probability::from_reg_br_prob_note |
1041 | (XINT (prob_note, 0)); | |
d8c70625 | 1042 | |
61cb1816 | 1043 | BRANCH_EDGE (bb)->probability = prob; |
1044 | FALLTHRU_EDGE (bb)->probability = prob.invert (); | |
d8c70625 | 1045 | } |
1046 | else | |
720cfc43 | 1047 | single_succ_edge (bb)->probability = profile_probability::always (); |
5e96f51e | 1048 | } |
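The Dempster-Shafer style update in the loop above is easy to misread in REG_BR_PROB_BASE fixed point, so here is a standalone sketch of the same arithmetic (not part of any GCC API) together with one worked combination:

```c
/* Combine two independent branch predictions p1 and p2 given on the
   REG_BR_PROB_BASE == 10000 scale, the same way the loop above does.
   Agreeing predictors reinforce each other: 70% and 80% combine to
   roughly 90.3%.  */
#include <stdio.h>

static int
combine (int p1, int p2)
{
  const int base = 10000;
  double d = (double) p1 * p2 + (double) (base - p1) * (base - p2);
  return (int) ((double) p1 * p2 * base / d + 0.5);
}

int
main (void)
{
  printf ("%d\n", combine (7000, 8000));   /* prints 9032, i.e. ~90.3%.  */
  return 0;
}
```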
1049 | ||
3f76cceb | 1050 | /* Edge prediction hash traits. */ |
1051 | ||
1052 | struct predictor_hash: pointer_hash <edge_prediction> | |
1053 | { | |
1054 | ||
1055 | static inline hashval_t hash (const edge_prediction *); | |
1056 | static inline bool equal (const edge_prediction *, const edge_prediction *); | |
1057 | }; | |
1058 | ||
1059 | /* Calculate hash value of an edge prediction P based on predictor and | |
1060 | normalized probability. */ | |
1061 | ||
1062 | inline hashval_t | |
1063 | predictor_hash::hash (const edge_prediction *p) | |
1064 | { | |
1065 | inchash::hash hstate; | |
1066 | hstate.add_int (p->ep_predictor); | |
1067 | ||
1068 | int prob = p->ep_probability; | |
1069 | if (prob > REG_BR_PROB_BASE / 2) | |
1070 | prob = REG_BR_PROB_BASE - prob; | |
1071 | ||
1072 | hstate.add_int (prob); | |
1073 | ||
1074 | return hstate.end (); | |
1075 | } | |
1076 | ||
1077 | /* Return true if edge predictions P1 and P2 use the same predictor and | |
1078 | have equal (or opposite) probabilities. */ | |
1079 | ||
1080 | inline bool | |
1081 | predictor_hash::equal (const edge_prediction *p1, const edge_prediction *p2) | |
1082 | { | |
1083 | return (p1->ep_predictor == p2->ep_predictor | |
1084 | && (p1->ep_probability == p2->ep_probability | |
1085 | || p1->ep_probability == REG_BR_PROB_BASE - p2->ep_probability)); | |
1086 | } | |
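For example, predictions of 9000/10000 and 1000/10000 made by the same predictor hash into the same bucket, because the hash folds a probability and its complement together; the equality test then lets prune_predictions_for_bb below catch both exact duplicates on one edge and the contradictory case where the same probability is attached to both outgoing edges.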
1087 | ||
1088 | struct predictor_hash_traits: predictor_hash, | |
1089 | typed_noop_remove <edge_prediction *> {}; | |
1090 | ||
1091 | /* Return true if edge prediction P is not in DATA hash set. */ | |
1092 | ||
1093 | static bool | |
1094 | not_removed_prediction_p (edge_prediction *p, void *data) | |
1095 | { | |
1096 | hash_set<edge_prediction *> *remove = (hash_set<edge_prediction *> *) data; | |
1097 | return !remove->contains (p); | |
1098 | } | |
1099 | ||
1100 | /* Prune predictions for a basic block BB. Currently we do the following | |
1101 | clean-up steps: | |
1102 | ||
1103 | 1) remove a pair of predictions that guess the same probability | |
1104 | (other than 1/2) for both outgoing edges | |
1105 | 2) remove duplicates of a prediction that attaches the same probability | |
1106 | to a single edge | |
1107 | ||
1108 | */ | |
1109 | ||
1110 | static void | |
1111 | prune_predictions_for_bb (basic_block bb) | |
1112 | { | |
1113 | edge_prediction **preds = bb_predictions->get (bb); | |
1114 | ||
1115 | if (preds) | |
1116 | { | |
1117 | hash_table <predictor_hash_traits> s (13); | |
1118 | hash_set <edge_prediction *> remove; | |
1119 | ||
1120 | /* Step 1: identify predictors that should be removed. */ | |
1121 | for (edge_prediction *pred = *preds; pred; pred = pred->ep_next) | |
1122 | { | |
1123 | edge_prediction *existing = s.find (pred); | |
1124 | if (existing) | |
1125 | { | |
1126 | if (pred->ep_edge == existing->ep_edge | |
1127 | && pred->ep_probability == existing->ep_probability) | |
1128 | { | |
1129 | /* Remove a duplicate predictor. */ | |
1130 | dump_prediction (dump_file, pred->ep_predictor, | |
1131 | pred->ep_probability, bb, | |
abb2c3fe | 1132 | REASON_SINGLE_EDGE_DUPLICATE, pred->ep_edge); |
3f76cceb | 1133 | |
1134 | remove.add (pred); | |
1135 | } | |
1136 | else if (pred->ep_edge != existing->ep_edge | |
1137 | && pred->ep_probability == existing->ep_probability | |
1138 | && pred->ep_probability != REG_BR_PROB_BASE / 2) | |
1139 | { | |
1140 | /* Remove both predictors as they predict the same | |
1141 | for both edges. */ | |
1142 | dump_prediction (dump_file, existing->ep_predictor, | |
1143 | pred->ep_probability, bb, | |
abb2c3fe | 1144 | REASON_EDGE_PAIR_DUPLICATE, |
3f76cceb | 1145 | existing->ep_edge); |
1146 | dump_prediction (dump_file, pred->ep_predictor, | |
1147 | pred->ep_probability, bb, | |
abb2c3fe | 1148 | REASON_EDGE_PAIR_DUPLICATE, |
3f76cceb | 1149 | pred->ep_edge); |
1150 | ||
1151 | remove.add (existing); | |
1152 | remove.add (pred); | |
1153 | } | |
1154 | } | |
1155 | ||
1156 | edge_prediction **slot2 = s.find_slot (pred, INSERT); | |
1157 | *slot2 = pred; | |
1158 | } | |
1159 | ||
1160 | /* Step 2: Remove predictors. */ | |
1161 | filter_predictions (preds, not_removed_prediction_p, &remove); | |
1162 | } | |
1163 | } | |
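Two concrete cases: if a predictor records 9000/10000 twice for the same edge, one copy is dropped as a single edge duplicate; if it records 9000/10000 for both successor edges of BB, the two entries contradict each other and both are dropped as an edge pair duplicate.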
1164 | ||
4ee9c684 | 1165 | /* Combine predictions into single probability and store them into CFG. |
5a5f50e9 | 1166 | Remove now useless prediction entries. |
1167 | If DRY_RUN is set, only produce dumps and do not modify profile. */ | |
59423b59 | 1168 | |
4ee9c684 | 1169 | static void |
5a5f50e9 | 1170 | combine_predictions_for_bb (basic_block bb, bool dry_run) |
59423b59 | 1171 | { |
4ee9c684 | 1172 | int best_probability = PROB_EVEN; |
b9c74b4d | 1173 | enum br_predictor best_predictor = END_PREDICTORS; |
4ee9c684 | 1174 | int combined_probability = REG_BR_PROB_BASE / 2; |
1175 | int d; | |
1176 | bool first_match = false; | |
1177 | bool found = false; | |
1178 | struct edge_prediction *pred; | |
1179 | int nedges = 0; | |
1180 | edge e, first = NULL, second = NULL; | |
cd665a06 | 1181 | edge_iterator ei; |
3f9a545c | 1182 | int nzero = 0; |
1183 | int nunknown = 0; | |
59423b59 | 1184 | |
cd665a06 | 1185 | FOR_EACH_EDGE (e, ei, bb->succs) |
3f9a545c | 1186 | { |
1187 | if (!unlikely_executed_edge_p (e)) | |
1188 | { | |
1189 | nedges ++; | |
1190 | if (first && !second) | |
1191 | second = e; | |
1192 | if (!first) | |
1193 | first = e; | |
1194 | } | |
1195 | else if (!e->probability.initialized_p ()) | |
1196 | e->probability = profile_probability::never (); | |
1197 | if (!e->probability.initialized_p ()) | |
1198 | nunknown++; | |
1199 | else if (e->probability == profile_probability::never ()) | |
1200 | nzero++; | |
1201 | } | |
4ee9c684 | 1202 | |
48e1416a | 1203 | /* When there is no successor or only one choice, prediction is easy. |
4ee9c684 | 1204 | |
3989f1fe | 1205 | When we have a basic block with more than 2 successors, the situation |
1206 | is more complicated as DS theory cannot be used literally. | |
1207 | More precisely, let's assume we predicted edge e1 with probability p1, | |
1208 | thus: m1({b1}) = p1. As we're going to combine more than 2 edges, we | |
1209 | need to find probability of e.g. m1({b2}), which we don't know. | |
1210 | The only approximation is to equally distribute 1-p1 to all edges | |
1211 | different from b1. | |
1212 | ||
1213 | According to the numbers we've got from the SPEC2006 benchmark, there's only | |
1214 | one interesting reliable predictor (noreturn call), which can be | |
1215 | handled with a somewhat simpler approach. */ | |
4ee9c684 | 1216 | if (nedges != 2) |
1217 | { | |
3989f1fe | 1218 | hash_set<edge> unlikely_edges (4); |
44b41fe7 | 1219 | hash_set<edge_prediction *> likely_edges (4); |
3989f1fe | 1220 | |
1221 | /* Identify all edges that have a probability close to very unlikely. | |
1222 | Extending the approach beyond very unlikely edges is not worth doing, as | |
1223 | no such probability shows up in the SPEC2006 benchmark. */ | |
1224 | edge_prediction **preds = bb_predictions->get (bb); | |
1225 | if (preds) | |
1226 | for (pred = *preds; pred; pred = pred->ep_next) | |
44b41fe7 | 1227 | { |
c914d1b3 | 1228 | if (pred->ep_probability <= PROB_VERY_UNLIKELY |
1229 | || pred->ep_predictor == PRED_COLD_LABEL) | |
44b41fe7 | 1230 | unlikely_edges.add (pred->ep_edge); |
99e80969 | 1231 | else if (pred->ep_probability >= PROB_VERY_LIKELY |
1232 | || pred->ep_predictor == PRED_BUILTIN_EXPECT | |
1233 | || pred->ep_predictor == PRED_HOT_LABEL) | |
44b41fe7 | 1234 | likely_edges.add (pred); |
1235 | } | |
3989f1fe | 1236 | |
99e80969 | 1237 | /* It can happen that an edge is both in likely_edges and unlikely_edges. |
1238 | Clear both sets in that situation. */ | |
1239 | for (hash_set<edge_prediction *>::iterator it = likely_edges.begin (); | |
1240 | it != likely_edges.end (); ++it) | |
1241 | if (unlikely_edges.contains ((*it)->ep_edge)) | |
1242 | { | |
1243 | likely_edges.empty (); | |
1244 | unlikely_edges.empty (); | |
1245 | break; | |
1246 | } | |
1247 | ||
c7a0aa22 | 1248 | if (!dry_run) |
44b41fe7 | 1249 | set_even_probabilities (bb, &unlikely_edges, &likely_edges); |
b3723726 | 1250 | clear_bb_predictions (bb); |
3f5be5f4 | 1251 | if (dump_file) |
3989f1fe | 1252 | { |
1253 | fprintf (dump_file, "Predictions for bb %i\n", bb->index); | |
9a78b979 | 1254 | if (unlikely_edges.is_empty ()) |
3989f1fe | 1255 | fprintf (dump_file, |
1256 | "%i edges in bb %i predicted to even probabilities\n", | |
1257 | nedges, bb->index); | |
1258 | else | |
1259 | { | |
1260 | fprintf (dump_file, | |
1261 | "%i edges in bb %i predicted with some unlikely edges\n", | |
1262 | nedges, bb->index); | |
1263 | FOR_EACH_EDGE (e, ei, bb->succs) | |
f08c22c4 | 1264 | if (!unlikely_executed_edge_p (e)) |
720cfc43 | 1265 | dump_prediction (dump_file, PRED_COMBINED, |
1266 | e->probability.to_reg_br_prob_base (), bb, REASON_NONE, e); | |
3989f1fe | 1267 | } |
1268 | } | |
4ee9c684 | 1269 | return; |
1270 | } | |
1271 | ||
3f5be5f4 | 1272 | if (dump_file) |
1273 | fprintf (dump_file, "Predictions for bb %i\n", bb->index); | |
4ee9c684 | 1274 | |
3f76cceb | 1275 | prune_predictions_for_bb (bb); |
1276 | ||
06ecf488 | 1277 | edge_prediction **preds = bb_predictions->get (bb); |
3f76cceb | 1278 | |
b3723726 | 1279 | if (preds) |
4ee9c684 | 1280 | { |
b3723726 | 1281 | /* We implement "first match" heuristics and use probability guessed |
1282 | by predictor with smallest index. */ | |
06ecf488 | 1283 | for (pred = *preds; pred; pred = pred->ep_next) |
b3723726 | 1284 | { |
b9c74b4d | 1285 | enum br_predictor predictor = pred->ep_predictor; |
b3723726 | 1286 | int probability = pred->ep_probability; |
4ee9c684 | 1287 | |
b3723726 | 1288 | if (pred->ep_edge != first) |
1289 | probability = REG_BR_PROB_BASE - probability; | |
4ee9c684 | 1290 | |
b3723726 | 1291 | found = true; |
9f694a82 | 1292 | /* First match heuristics would be wildly confused if we predicted |
1293 | both directions. */ | |
19109630 | 1294 | if (best_predictor > predictor |
1295 | && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH) | |
9f694a82 | 1296 | { |
1297 | struct edge_prediction *pred2; | |
1298 | int prob = probability; | |
1299 | ||
c83059be | 1300 | for (pred2 = (struct edge_prediction *) *preds; |
1301 | pred2; pred2 = pred2->ep_next) | |
9f694a82 | 1302 | if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor) |
1303 | { | |
9ae89a4e | 1304 | int probability2 = pred2->ep_probability; |
9f694a82 | 1305 | |
1306 | if (pred2->ep_edge != first) | |
1307 | probability2 = REG_BR_PROB_BASE - probability2; | |
1308 | ||
48e1416a | 1309 | if ((probability < REG_BR_PROB_BASE / 2) != |
9f694a82 | 1310 | (probability2 < REG_BR_PROB_BASE / 2)) |
1311 | break; | |
1312 | ||
1313 | /* If the same predictor later gave better result, go for it! */ | |
1314 | if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability)) | |
1315 | || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability))) | |
1316 | prob = probability2; | |
1317 | } | |
1318 | if (!pred2) | |
1319 | best_probability = prob, best_predictor = predictor; | |
1320 | } | |
4ee9c684 | 1321 | |
b3723726 | 1322 | d = (combined_probability * probability |
1323 | + (REG_BR_PROB_BASE - combined_probability) | |
1324 | * (REG_BR_PROB_BASE - probability)); | |
4ee9c684 | 1325 | |
b3723726 | 1326 | /* Use FP math to avoid overflows of 32bit integers. */ |
1327 | if (d == 0) | |
1328 | /* If one probability is 0% and one 100%, avoid division by zero. */ | |
1329 | combined_probability = REG_BR_PROB_BASE / 2; | |
1330 | else | |
1331 | combined_probability = (((double) combined_probability) | |
1332 | * probability | |
1333 | * REG_BR_PROB_BASE / d + 0.5); | |
1334 | } | |
4ee9c684 | 1335 | } |
1336 | ||
1337 | /* Decide which heuristic to use. In case we didn't match anything, | |
1338 | use no_prediction heuristic, in case we did match, use either | |
1339 | first match or Dempster-Shafer theory depending on the flags. */ | |
1340 | ||
19109630 | 1341 | if (best_predictor != END_PREDICTORS) |
4ee9c684 | 1342 | first_match = true; |
1343 | ||
1344 | if (!found) | |
3f76cceb | 1345 | dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb); |
4ee9c684 | 1346 | else |
1347 | { | |
19109630 | 1348 | if (!first_match) |
1349 | dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb, | |
1350 | !first_match ? REASON_NONE : REASON_IGNORED); | |
1351 | else | |
1352 | dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb, | |
1353 | first_match ? REASON_NONE : REASON_IGNORED); | |
4ee9c684 | 1354 | } |
1355 | ||
1356 | if (first_match) | |
1357 | combined_probability = best_probability; | |
3f76cceb | 1358 | dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb); |
4ee9c684 | 1359 | |
b3723726 | 1360 | if (preds) |
4ee9c684 | 1361 | { |
4077bf7a | 1362 | for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next) |
b3723726 | 1363 | { |
b9c74b4d | 1364 | enum br_predictor predictor = pred->ep_predictor; |
b3723726 | 1365 | int probability = pred->ep_probability; |
4ee9c684 | 1366 | |
b3723726 | 1367 | dump_prediction (dump_file, predictor, probability, bb, |
3f76cceb | 1368 | (!first_match || best_predictor == predictor) |
abb2c3fe | 1369 | ? REASON_NONE : REASON_IGNORED, pred->ep_edge); |
b3723726 | 1370 | } |
4ee9c684 | 1371 | } |
b3723726 | 1372 | clear_bb_predictions (bb); |
4ee9c684 | 1373 | |
3f9a545c | 1374 | |
1375 | /* If only one successor edge has an unknown probability, we can compute the | |
1376 | missing probability. */ | |
1377 | if (nunknown == 1) | |
1378 | { | |
1379 | profile_probability prob = profile_probability::always (); | |
1380 | edge missing = NULL; | |
1381 | ||
1382 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1383 | if (e->probability.initialized_p ()) | |
1384 | prob -= e->probability; | |
1385 | else if (missing == NULL) | |
1386 | missing = e; | |
1387 | else | |
1388 | gcc_unreachable (); | |
1389 | missing->probability = prob; | |
1390 | } | |
1391 | /* If nothing is unknown, we have nothing to update. */ | |
1392 | else if (!nunknown && nzero != (int)EDGE_COUNT (bb->succs)) | |
1393 | ; | |
1394 | else if (!dry_run) | |
83c8a977 | 1395 | { |
720cfc43 | 1396 | first->probability |
1397 | = profile_probability::from_reg_br_prob_base (combined_probability); | |
1398 | second->probability = first->probability.invert (); | |
83c8a977 | 1399 | } |
4ee9c684 | 1400 | } |
1401 | ||
fd757b76 | 1402 | /* Check if T1 and T2 satisfy the IV_COMPARE condition. |
1403 | Return the SSA_NAME if the condition is satisfied, NULL otherwise. | |
1404 | ||
1405 | T1 and T2 should be one of the following cases: | |
1406 | 1. T1 is SSA_NAME, T2 is NULL | |
1407 | 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4] | |
1408 | 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */ | |
1409 | ||
1410 | static tree | |
1411 | strips_small_constant (tree t1, tree t2) | |
1412 | { | |
1413 | tree ret = NULL; | |
1414 | int value = 0; | |
1415 | ||
1416 | if (!t1) | |
1417 | return NULL; | |
1418 | else if (TREE_CODE (t1) == SSA_NAME) | |
1419 | ret = t1; | |
e913b5cd | 1420 | else if (tree_fits_shwi_p (t1)) |
1421 | value = tree_to_shwi (t1); | |
fd757b76 | 1422 | else |
1423 | return NULL; | |
1424 | ||
1425 | if (!t2) | |
1426 | return ret; | |
e913b5cd | 1427 | else if (tree_fits_shwi_p (t2)) |
1428 | value = tree_to_shwi (t2); | |
fd757b76 | 1429 | else if (TREE_CODE (t2) == SSA_NAME) |
1430 | { | |
1431 | if (ret) | |
1432 | return NULL; | |
1433 | else | |
1434 | ret = t2; | |
1435 | } | |
1436 | ||
1437 | if (value <= 4 && value >= -4) | |
1438 | return ret; | |
1439 | else | |
1440 | return NULL; | |
1441 | } | |
1442 | ||
1443 | /* Return the SSA_NAME in T or T's operands. | |
1444 | Return NULL if SSA_NAME cannot be found. */ | |
1445 | ||
1446 | static tree | |
1447 | get_base_value (tree t) | |
1448 | { | |
1449 | if (TREE_CODE (t) == SSA_NAME) | |
1450 | return t; | |
1451 | ||
1452 | if (!BINARY_CLASS_P (t)) | |
1453 | return NULL; | |
1454 | ||
1455 | switch (TREE_OPERAND_LENGTH (t)) | |
1456 | { | |
1457 | case 1: | |
1458 | return strips_small_constant (TREE_OPERAND (t, 0), NULL); | |
1459 | case 2: | |
1460 | return strips_small_constant (TREE_OPERAND (t, 0), | |
1461 | TREE_OPERAND (t, 1)); | |
1462 | default: | |
1463 | return NULL; | |
1464 | } | |
1465 | } | |
1466 | ||
1467 | /* Check the compare STMT in LOOP. If it compares an induction | |
1468 | variable to a loop invariant, return true, and save | |
1469 | LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP. | |
1470 | Otherwise return false and set LOOP_INVARIANT to NULL. |
1471 | ||
1472 | static bool | |
2e966e2a | 1473 | is_comparison_with_loop_invariant_p (gcond *stmt, class loop *loop, |
fd757b76 | 1474 | tree *loop_invariant, |
1475 | enum tree_code *compare_code, | |
b3269f54 | 1476 | tree *loop_step, |
fd757b76 | 1477 | tree *loop_iv_base) |
1478 | { | |
1479 | tree op0, op1, bound, base; | |
1480 | affine_iv iv0, iv1; | |
1481 | enum tree_code code; | |
b3269f54 | 1482 | tree step; |
fd757b76 | 1483 | |
1484 | code = gimple_cond_code (stmt); | |
1485 | *loop_invariant = NULL; | |
1486 | ||
1487 | switch (code) | |
1488 | { | |
1489 | case GT_EXPR: | |
1490 | case GE_EXPR: | |
1491 | case NE_EXPR: | |
1492 | case LT_EXPR: | |
1493 | case LE_EXPR: | |
1494 | case EQ_EXPR: | |
1495 | break; | |
1496 | ||
1497 | default: | |
1498 | return false; | |
1499 | } | |
1500 | ||
1501 | op0 = gimple_cond_lhs (stmt); | |
1502 | op1 = gimple_cond_rhs (stmt); | |
1503 | ||
1504 | if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST) | |
1505 | || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST)) | |
1506 | return false; | |
1507 | if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true)) | |
1508 | return false; | |
1509 | if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true)) | |
1510 | return false; | |
1511 | if (TREE_CODE (iv0.step) != INTEGER_CST | |
1512 | || TREE_CODE (iv1.step) != INTEGER_CST) | |
1513 | return false; | |
1514 | if ((integer_zerop (iv0.step) && integer_zerop (iv1.step)) | |
1515 | || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step))) | |
1516 | return false; | |
1517 | ||
1518 | if (integer_zerop (iv0.step)) | |
1519 | { | |
1520 | if (code != NE_EXPR && code != EQ_EXPR) | |
1521 | code = invert_tree_comparison (code, false); | |
1522 | bound = iv0.base; | |
1523 | base = iv1.base; | |
e913b5cd | 1524 | if (tree_fits_shwi_p (iv1.step)) |
b3269f54 | 1525 | step = iv1.step; |
fd757b76 | 1526 | else |
1527 | return false; | |
1528 | } | |
1529 | else | |
1530 | { | |
1531 | bound = iv1.base; | |
1532 | base = iv0.base; | |
e913b5cd | 1533 | if (tree_fits_shwi_p (iv0.step)) |
b3269f54 | 1534 | step = iv0.step; |
fd757b76 | 1535 | else |
1536 | return false; | |
1537 | } | |
1538 | ||
1539 | if (TREE_CODE (bound) != INTEGER_CST) | |
1540 | bound = get_base_value (bound); | |
1541 | if (!bound) | |
1542 | return false; | |
1543 | if (TREE_CODE (base) != INTEGER_CST) | |
1544 | base = get_base_value (base); | |
1545 | if (!base) | |
1546 | return false; | |
1547 | ||
1548 | *loop_invariant = bound; | |
1549 | *compare_code = code; | |
1550 | *loop_step = step; | |
1551 | *loop_iv_base = base; | |
1552 | return true; | |
1553 | } | |
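/* As an illustration (hypothetical SSA names): for the loop

     for (i = 0; i < n; i++)

   whose exit condition is "if (i_1 < n_2)", where i_1 is an induction
   variable with base 0 and step 1 and n_2 is loop invariant, the function
   returns true with *LOOP_INVARIANT = n_2, *COMPARE_CODE = LT_EXPR,
   *LOOP_STEP = 1 and *LOOP_IV_BASE = 0.  */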
1554 | ||
1555 | /* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */ | |
1556 | ||
1557 | static bool | |
1558 | expr_coherent_p (tree t1, tree t2) | |
1559 | { | |
42acab1c | 1560 | gimple *stmt; |
fd757b76 | 1561 | tree ssa_name_1 = NULL; |
1562 | tree ssa_name_2 = NULL; | |
1563 | ||
1564 | gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST); | |
1565 | gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST); | |
1566 | ||
1567 | if (t1 == t2) | |
1568 | return true; | |
1569 | ||
1570 | if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST) | |
1571 | return true; | |
1572 | if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST) | |
1573 | return false; | |
1574 | ||
1575 | /* Check to see if t1 is expressed/defined with t2. */ | |
1576 | stmt = SSA_NAME_DEF_STMT (t1); | |
1577 | gcc_assert (stmt != NULL); | |
1578 | if (is_gimple_assign (stmt)) | |
1579 | { | |
1580 | ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE); | |
1581 | if (ssa_name_1 && ssa_name_1 == t2) | |
1582 | return true; | |
1583 | } | |
1584 | ||
1585 | /* Check to see if t2 is expressed/defined with t1. */ | |
1586 | stmt = SSA_NAME_DEF_STMT (t2); | |
1587 | gcc_assert (stmt != NULL); | |
1588 | if (is_gimple_assign (stmt)) | |
1589 | { | |
1590 | ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE); | |
1591 | if (ssa_name_2 && ssa_name_2 == t1) | |
1592 | return true; | |
1593 | } | |
1594 | ||
1595 | /* Compare if t1 and t2's def_stmts are identical. */ | |
1596 | if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2) | |
1597 | return true; | |
1598 | else | |
1599 | return false; | |
1600 | } | |
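/* For instance (hypothetical SSA names): given the definition
   "b_2 = a_1 + 4", expr_coherent_p (b_2, a_1) returns true because a_1 is
   the single SSA operand of the statement defining b_2.  Two integer
   constants are always coherent, while a constant paired with an SSA_NAME
   is not.  */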
1601 | ||
d07b2b6f | 1602 | /* Return true if BB is predicted by one of the loop heuristics. */ |
1603 | ||
1604 | static bool | |
1605 | predicted_by_loop_heuristics_p (basic_block bb) | |
1606 | { | |
1607 | struct edge_prediction *i; | |
1608 | edge_prediction **preds = bb_predictions->get (bb); | |
1609 | ||
1610 | if (!preds) | |
1611 | return false; | |
1612 | ||
1613 | for (i = *preds; i; i = i->ep_next) | |
1614 | if (i->ep_predictor == PRED_LOOP_ITERATIONS_GUESSED | |
1615 | || i->ep_predictor == PRED_LOOP_ITERATIONS_MAX | |
1616 | || i->ep_predictor == PRED_LOOP_ITERATIONS | |
1617 | || i->ep_predictor == PRED_LOOP_EXIT | |
cbcc4297 | 1618 | || i->ep_predictor == PRED_LOOP_EXIT_WITH_RECURSION |
d07b2b6f | 1619 | || i->ep_predictor == PRED_LOOP_EXTRA_EXIT) |
1620 | return true; | |
1621 | return false; | |
1622 | } | |
1623 | ||
fd757b76 | 1624 | /* Predict branch probability of BB when BB contains a branch that compares |
1625 | an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The | |
1626 | loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP. | |
1627 | ||
1628 | E.g. | |
1629 | for (int i = 0; i < bound; i++) { | |
1630 | if (i < bound - 2) | |
1631 | computation_1(); | |
1632 | else | |
1633 | computation_2(); | |
1634 | } | |
1635 | ||
1636 | In this loop, we will predict the branch inside the loop to be taken. */ | |
1637 | ||
1638 | static void | |
2e966e2a | 1639 | predict_iv_comparison (class loop *loop, basic_block bb, |
fd757b76 | 1640 | tree loop_bound_var, |
1641 | tree loop_iv_base_var, | |
1642 | enum tree_code loop_bound_code, | |
1643 | int loop_bound_step) | |
1644 | { | |
42acab1c | 1645 | gimple *stmt; |
fd757b76 | 1646 | tree compare_var, compare_base; |
1647 | enum tree_code compare_code; | |
b3269f54 | 1648 | tree compare_step_var; |
fd757b76 | 1649 | edge then_edge; |
1650 | edge_iterator ei; | |
1651 | ||
d07b2b6f | 1652 | if (predicted_by_loop_heuristics_p (bb)) |
fd757b76 | 1653 | return; |
1654 | ||
1655 | stmt = last_stmt (bb); | |
1656 | if (!stmt || gimple_code (stmt) != GIMPLE_COND) | |
1657 | return; | |
1a91d914 | 1658 | if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt), |
1659 | loop, &compare_var, | |
fd757b76 | 1660 | &compare_code, |
b3269f54 | 1661 | &compare_step_var, |
fd757b76 | 1662 | &compare_base)) |
1663 | return; | |
1664 | ||
1665 | /* Find the taken edge. */ | |
1666 | FOR_EACH_EDGE (then_edge, ei, bb->succs) | |
1667 | if (then_edge->flags & EDGE_TRUE_VALUE) | |
1668 | break; | |
1669 | ||
1670 | /* When comparing an IV to a loop invariant, NE is more likely to be | |
1671 | taken while EQ is more likely to be not-taken. */ | |
1672 | if (compare_code == NE_EXPR) | |
1673 | { | |
1674 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1675 | return; | |
1676 | } | |
1677 | else if (compare_code == EQ_EXPR) | |
1678 | { | |
1679 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1680 | return; | |
1681 | } | |
1682 | ||
1683 | if (!expr_coherent_p (loop_iv_base_var, compare_base)) | |
1684 | return; | |
1685 | ||
1686 | /* If loop bound, base and compare bound are all constants, we can | |
1687 | calculate the probability directly. */ | |
e913b5cd | 1688 | if (tree_fits_shwi_p (loop_bound_var) |
1689 | && tree_fits_shwi_p (compare_var) | |
1690 | && tree_fits_shwi_p (compare_base)) | |
fd757b76 | 1691 | { |
1692 | int probability; | |
30b5769f | 1693 | wi::overflow_type overflow; |
1694 | bool overall_overflow = false; | |
ab2c1de8 | 1695 | widest_int compare_count, tem; |
b3269f54 | 1696 | |
b3269f54 | 1697 | /* (loop_bound - base) / compare_step */ |
c311b856 | 1698 | tem = wi::sub (wi::to_widest (loop_bound_var), |
1699 | wi::to_widest (compare_base), SIGNED, &overflow); | |
e913b5cd | 1700 | overall_overflow |= overflow; |
c311b856 | 1701 | widest_int loop_count = wi::div_trunc (tem, |
1702 | wi::to_widest (compare_step_var), | |
1703 | SIGNED, &overflow); | |
e913b5cd | 1704 | overall_overflow |= overflow; |
1705 | ||
c311b856 | 1706 | if (!wi::neg_p (wi::to_widest (compare_step_var)) |
fd757b76 | 1707 | ^ (compare_code == LT_EXPR || compare_code == LE_EXPR)) |
b3269f54 | 1708 | { |
1709 | /* (loop_bound - compare_bound) / compare_step */ | |
c311b856 | 1710 | tem = wi::sub (wi::to_widest (loop_bound_var), |
1711 | wi::to_widest (compare_var), SIGNED, &overflow); | |
e913b5cd | 1712 | overall_overflow |= overflow; |
c311b856 | 1713 | compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var), |
1714 | SIGNED, &overflow); | |
e913b5cd | 1715 | overall_overflow |= overflow; |
b3269f54 | 1716 | } |
fd757b76 | 1717 | else |
b3269f54 | 1718 | { |
1719 | /* (compare_bound - base) / compare_step */ | |
c311b856 | 1720 | tem = wi::sub (wi::to_widest (compare_var), |
1721 | wi::to_widest (compare_base), SIGNED, &overflow); | |
e913b5cd | 1722 | overall_overflow |= overflow; |
c311b856 | 1723 | compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var), |
1724 | SIGNED, &overflow); | |
e913b5cd | 1725 | overall_overflow |= overflow; |
b3269f54 | 1726 | } |
fd757b76 | 1727 | if (compare_code == LE_EXPR || compare_code == GE_EXPR) |
b3269f54 | 1728 | ++compare_count; |
fd757b76 | 1729 | if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR) |
b3269f54 | 1730 | ++loop_count; |
796b6678 | 1731 | if (wi::neg_p (compare_count)) |
e913b5cd | 1732 | compare_count = 0; |
796b6678 | 1733 | if (wi::neg_p (loop_count)) |
e913b5cd | 1734 | loop_count = 0; |
796b6678 | 1735 | if (loop_count == 0) |
fd757b76 | 1736 | probability = 0; |
796b6678 | 1737 | else if (wi::cmps (compare_count, loop_count) == 1) |
fd757b76 | 1738 | probability = REG_BR_PROB_BASE; |
1739 | else | |
b3269f54 | 1740 | { |
e913b5cd | 1741 | tem = compare_count * REG_BR_PROB_BASE; |
796b6678 | 1742 | tem = wi::udiv_trunc (tem, loop_count); |
b3269f54 | 1743 | probability = tem.to_uhwi (); |
1744 | } | |
1745 | ||
d3cb49c9 | 1746 | /* FIXME: The branch prediction seems broken. It has only 20% hitrate. */ |
e913b5cd | 1747 | if (!overall_overflow) |
b3269f54 | 1748 | predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability); |
1749 | ||
fd757b76 | 1750 | return; |
1751 | } | |
1752 | ||
1753 | if (expr_coherent_p (loop_bound_var, compare_var)) | |
1754 | { | |
1755 | if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR) | |
1756 | && (compare_code == LT_EXPR || compare_code == LE_EXPR)) | |
1757 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1758 | else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR) | |
1759 | && (compare_code == GT_EXPR || compare_code == GE_EXPR)) | |
1760 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1761 | else if (loop_bound_code == NE_EXPR) | |
1762 | { | |
1763 | /* If the loop backedge condition is "(i != bound)", we do | |
1764 | the comparison based on the step of IV: | |
1765 | * step < 0 : backedge condition is like (i > bound) | |
1766 | * step > 0 : backedge condition is like (i < bound) */ | |
1767 | gcc_assert (loop_bound_step != 0); | |
1768 | if (loop_bound_step > 0 | |
1769 | && (compare_code == LT_EXPR | |
1770 | || compare_code == LE_EXPR)) | |
1771 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1772 | else if (loop_bound_step < 0 | |
1773 | && (compare_code == GT_EXPR | |
1774 | || compare_code == GE_EXPR)) | |
1775 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1776 | else | |
1777 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1778 | } | |
1779 | else | |
1780 | /* The branch is predicted not-taken if loop_bound_code is |
1781 | the opposite of compare_code. */ |
1782 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1783 | } | |
1784 | else if (expr_coherent_p (loop_iv_base_var, compare_var)) | |
1785 | { | |
1786 | /* For cases like: | |
1787 | for (i = s; i < h; i++) | |
1788 | if (i > s + 2) .... | |
1789 | The branch should be predicted taken. */ | |
1790 | if (loop_bound_step > 0 | |
1791 | && (compare_code == GT_EXPR || compare_code == GE_EXPR)) | |
1792 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1793 | else if (loop_bound_step < 0 | |
1794 | && (compare_code == LT_EXPR || compare_code == LE_EXPR)) | |
1795 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1796 | else | |
1797 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1798 | } | |
1799 | } | |
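/* A worked example of the constant case above (illustrative numbers): for

     for (i = 0; i < 100; i++)
       if (i < 30)
         ...

   loop_count is (100 - 0) / 1 = 100 and compare_count is (30 - 0) / 1 = 30,
   so the then edge of the inner comparison gets probability
   30 * REG_BR_PROB_BASE / 100, i.e. roughly 30%.  */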
4ca17abf | 1800 | |
1801 | /* Predict for extra loop exits that will lead to EXIT_EDGE. The extra loop | |
1802 | exits result from short-circuit conditions that will generate an |
1803 | if_tmp. E.g.: | |
1804 | ||
1805 | if (foo() || global > 10) | |
1806 | break; | |
1807 | ||
1808 | This will be translated into: | |
1809 | ||
1810 | BB3: | |
1811 | loop header... | |
1812 | BB4: | |
1813 | if foo() goto BB6 else goto BB5 | |
1814 | BB5: | |
1815 | if global > 10 goto BB6 else goto BB7 | |
1816 | BB6: | |
1817 | goto BB7 | |
1818 | BB7: | |
1819 | iftmp = (PHI 0(BB5), 1(BB6)) | |
1820 | if iftmp == 1 goto BB8 else goto BB3 | |
1821 | BB8: | |
1822 | outside of the loop... | |
1823 | ||
1824 | The edge BB7->BB8 is loop exit because BB8 is outside of the loop. | |
1825 | From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop | |
1826 | exits. This function takes BB7->BB8 as input, and finds out the extra loop | |
76f8f901 | 1827 | exits to predict them using PRED_LOOP_EXTRA_EXIT. */ |
4ca17abf | 1828 | |
1829 | static void | |
1830 | predict_extra_loop_exits (edge exit_edge) | |
1831 | { | |
1832 | unsigned i; | |
1833 | bool check_value_one; | |
42acab1c | 1834 | gimple *lhs_def_stmt; |
1a91d914 | 1835 | gphi *phi_stmt; |
4ca17abf | 1836 | tree cmp_rhs, cmp_lhs; |
42acab1c | 1837 | gimple *last; |
1a91d914 | 1838 | gcond *cmp_stmt; |
4ca17abf | 1839 | |
1a91d914 | 1840 | last = last_stmt (exit_edge->src); |
1841 | if (!last) | |
1842 | return; | |
1843 | cmp_stmt = dyn_cast <gcond *> (last); | |
1844 | if (!cmp_stmt) | |
4ca17abf | 1845 | return; |
1a91d914 | 1846 | |
4ca17abf | 1847 | cmp_rhs = gimple_cond_rhs (cmp_stmt); |
1848 | cmp_lhs = gimple_cond_lhs (cmp_stmt); | |
1849 | if (!TREE_CONSTANT (cmp_rhs) | |
1850 | || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs))) | |
1851 | return; | |
1852 | if (TREE_CODE (cmp_lhs) != SSA_NAME) | |
1853 | return; | |
1854 | ||
1855 | /* If check_value_one is true, only the phi_args with value '1' will lead | |
1856 | to loop exit. Otherwise, only the phi_args with value '0' will lead to | |
1857 | loop exit. */ | |
1858 | check_value_one = (((integer_onep (cmp_rhs)) | |
1859 | ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR)) | |
1860 | ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0)); | |
1861 | ||
1a91d914 | 1862 | lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs); |
1863 | if (!lhs_def_stmt) | |
1864 | return; | |
1865 | ||
1866 | phi_stmt = dyn_cast <gphi *> (lhs_def_stmt); | |
1867 | if (!phi_stmt) | |
4ca17abf | 1868 | return; |
1869 | ||
1870 | for (i = 0; i < gimple_phi_num_args (phi_stmt); i++) | |
1871 | { | |
1872 | edge e1; | |
1873 | edge_iterator ei; | |
1874 | tree val = gimple_phi_arg_def (phi_stmt, i); | |
1875 | edge e = gimple_phi_arg_edge (phi_stmt, i); | |
1876 | ||
1877 | if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val))) | |
1878 | continue; | |
1879 | if ((check_value_one ^ integer_onep (val)) == 1) | |
1880 | continue; | |
1881 | if (EDGE_COUNT (e->src->succs) != 1) | |
1882 | { | |
76f8f901 | 1883 | predict_paths_leading_to_edge (e, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN); |
4ca17abf | 1884 | continue; |
1885 | } | |
1886 | ||
1887 | FOR_EACH_EDGE (e1, ei, e->src->preds) | |
76f8f901 | 1888 | predict_paths_leading_to_edge (e1, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN); |
4ca17abf | 1889 | } |
1890 | } | |
1891 | ||
d07b2b6f | 1892 | |
7194de72 | 1893 | /* Predict edge probabilities by exploiting loop structure. */ |
1894 | ||
4ee9c684 | 1895 | static void |
7194de72 | 1896 | predict_loops (void) |
4ee9c684 | 1897 | { |
2e966e2a | 1898 | class loop *loop; |
cbcc4297 | 1899 | basic_block bb; |
2e966e2a | 1900 | hash_set <class loop *> with_recursion(10); |
cbcc4297 | 1901 | |
1902 | FOR_EACH_BB_FN (bb, cfun) | |
1903 | { | |
1904 | gimple_stmt_iterator gsi; | |
1905 | tree decl; | |
1906 | ||
1907 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1908 | if (is_gimple_call (gsi_stmt (gsi)) | |
1909 | && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL | |
1910 | && recursive_call_p (current_function_decl, decl)) | |
1911 | { | |
1912 | loop = bb->loop_father; | |
1913 | while (loop && !with_recursion.add (loop)) | |
1914 | loop = loop_outer (loop); | |
1915 | } | |
1916 | } | |
c12f2fcb | 1917 | |
7fcadf62 | 1918 | /* Try to predict out blocks in a loop that are not part of a |
1919 | natural loop. */ | |
d07b2b6f | 1920 | FOR_EACH_LOOP (loop, LI_FROM_INNERMOST) |
59423b59 | 1921 | { |
7fb12188 | 1922 | basic_block bb, *bbs; |
d07b2b6f | 1923 | unsigned j, n_exits = 0; |
f1f41a6c | 1924 | vec<edge> exits; |
2e966e2a | 1925 | class tree_niter_desc niter_desc; |
749ea85f | 1926 | edge ex; |
2e966e2a | 1927 | class nb_iter_bound *nb_iter; |
fd757b76 | 1928 | enum tree_code loop_bound_code = ERROR_MARK; |
b3269f54 | 1929 | tree loop_bound_step = NULL; |
fd757b76 | 1930 | tree loop_bound_var = NULL; |
1931 | tree loop_iv_base = NULL; | |
1a91d914 | 1932 | gcond *stmt = NULL; |
cbcc4297 | 1933 | bool recursion = with_recursion.contains (loop); |
59423b59 | 1934 | |
749ea85f | 1935 | exits = get_loop_exit_edges (loop); |
d07b2b6f | 1936 | FOR_EACH_VEC_ELT (exits, j, ex) |
f08c22c4 | 1937 | if (!unlikely_executed_edge_p (ex) && !(ex->flags & EDGE_ABNORMAL_CALL)) |
d07b2b6f | 1938 | n_exits ++; |
5d865361 | 1939 | if (!n_exits) |
1940 | { | |
f1f41a6c | 1941 | exits.release (); |
5d865361 | 1942 | continue; |
1943 | } | |
ba38e12b | 1944 | |
cbcc4297 | 1945 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1946 | fprintf (dump_file, "Predicting loop %i%s with %i exits.\n", | |
1947 | loop->num, recursion ? " (with recursion)":"", n_exits); | |
1948 | if (dump_file && (dump_flags & TDF_DETAILS) | |
1949 | && max_loop_iterations_int (loop) >= 0) | |
1950 | { | |
1951 | fprintf (dump_file, | |
1952 | "Loop %d iterates at most %i times.\n", loop->num, | |
1953 | (int)max_loop_iterations_int (loop)); | |
1954 | } | |
1955 | if (dump_file && (dump_flags & TDF_DETAILS) | |
1956 | && likely_max_loop_iterations_int (loop) >= 0) | |
1957 | { | |
1958 | fprintf (dump_file, "Loop %d likely iterates at most %i times.\n", | |
1959 | loop->num, (int)likely_max_loop_iterations_int (loop)); | |
1960 | } | |
1961 | ||
f1f41a6c | 1962 | FOR_EACH_VEC_ELT (exits, j, ex) |
d27b0b64 | 1963 | { |
3b0b2309 | 1964 | tree niter = NULL; |
d500fef3 | 1965 | HOST_WIDE_INT nitercst; |
1966 | int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS); | |
1967 | int probability; | |
1968 | enum br_predictor predictor; | |
d07b2b6f | 1969 | widest_int nit; |
d27b0b64 | 1970 | |
f08c22c4 | 1971 | if (unlikely_executed_edge_p (ex) |
1972 | || (ex->flags & EDGE_ABNORMAL_CALL)) | |
d07b2b6f | 1973 | continue; |
1974 | /* Loop heuristics do not expect the exit conditional to be inside |
1975 | an inner loop.  We predict from the innermost to the outermost loop. */ |
1976 | if (predicted_by_loop_heuristics_p (ex->src)) | |
cbcc4297 | 1977 | { |
1978 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1979 | fprintf (dump_file, "Skipping exit %i->%i because " | |
1980 | "it is already predicted.\n", | |
1981 | ex->src->index, ex->dest->index); | |
1982 | continue; | |
1983 | } | |
4ca17abf | 1984 | predict_extra_loop_exits (ex); |
1985 | ||
3f78e715 | 1986 | if (number_of_iterations_exit (loop, ex, &niter_desc, false, false)) |
3b0b2309 | 1987 | niter = niter_desc.niter; |
1988 | if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST) | |
749ea85f | 1989 | niter = loop_niter_by_eval (loop, ex); |
cbcc4297 | 1990 | if (dump_file && (dump_flags & TDF_DETAILS) |
1991 | && TREE_CODE (niter) == INTEGER_CST) | |
1992 | { | |
1993 | fprintf (dump_file, "Exit %i->%i %d iterates ", | |
1994 | ex->src->index, ex->dest->index, | |
1995 | loop->num); | |
1996 | print_generic_expr (dump_file, niter, TDF_SLIM); | |
1997 | fprintf (dump_file, " times.\n"); | |
1998 | } | |
d27b0b64 | 1999 | |
3b0b2309 | 2000 | if (TREE_CODE (niter) == INTEGER_CST) |
2001 | { | |
e913b5cd | 2002 | if (tree_fits_uhwi_p (niter) |
ed60f27f | 2003 | && max |
2004 | && compare_tree_int (niter, max - 1) == -1) | |
e913b5cd | 2005 | nitercst = tree_to_uhwi (niter) + 1; |
3b0b2309 | 2006 | else |
d500fef3 | 2007 | nitercst = max; |
2008 | predictor = PRED_LOOP_ITERATIONS; | |
2009 | } | |
2010 | /* If we have just one exit and we can derive some information about | |
2011 | the number of iterations of the loop from the statements inside | |
2012 | the loop, use it to predict this exit. */ | |
d07b2b6f | 2013 | else if (n_exits == 1 |
2014 | && estimated_stmt_executions (loop, &nit)) | |
d500fef3 | 2015 | { |
d07b2b6f | 2016 | if (wi::gtu_p (nit, max)) |
d500fef3 | 2017 | nitercst = max; |
d07b2b6f | 2018 | else |
2019 | nitercst = nit.to_shwi (); | |
d500fef3 | 2020 | predictor = PRED_LOOP_ITERATIONS_GUESSED; |
3b0b2309 | 2021 | } |
d07b2b6f | 2022 | /* If we have likely upper bound, trust it for very small iteration |
2023 | counts. Such loops would otherwise get mispredicted by standard | |
2024 | LOOP_EXIT heuristics. */ | |
2025 | else if (n_exits == 1 | |
2026 | && likely_max_stmt_executions (loop, &nit) | |
2027 | && wi::ltu_p (nit, | |
2028 | RDIV (REG_BR_PROB_BASE, | |
2029 | REG_BR_PROB_BASE | |
2030 | - predictor_info | |
cbcc4297 | 2031 | [recursion |
2032 | ? PRED_LOOP_EXIT_WITH_RECURSION | |
2033 | : PRED_LOOP_EXIT].hitrate))) | |
d07b2b6f | 2034 | { |
2035 | nitercst = nit.to_shwi (); | |
2036 | predictor = PRED_LOOP_ITERATIONS_MAX; | |
2037 | } | |
d500fef3 | 2038 | else |
cbcc4297 | 2039 | { |
2040 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2041 | fprintf (dump_file, "Nothing known about exit %i->%i.\n", | |
2042 | ex->src->index, ex->dest->index); | |
2043 | continue; | |
2044 | } | |
d500fef3 | 2045 | |
cbcc4297 | 2046 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2047 | fprintf (dump_file, "Recording prediction to %i iterations by %s.\n", | |
2048 | (int)nitercst, predictor_info[predictor].name); | |
afa7ed87 | 2049 | /* If the prediction for the number of iterations is zero, do not |
2050 | predict the exit edges. */ | |
2051 | if (nitercst == 0) | |
2052 | continue; | |
2053 | ||
d07b2b6f | 2054 | probability = RDIV (REG_BR_PROB_BASE, nitercst); |
d500fef3 | 2055 | predict_edge (ex, predictor, probability); |
d27b0b64 | 2056 | } |
f1f41a6c | 2057 | exits.release (); |
862be747 | 2058 | |
fd757b76 | 2059 | /* Find information about loop bound variables. */ |
2060 | for (nb_iter = loop->bounds; nb_iter; | |
2061 | nb_iter = nb_iter->next) | |
2062 | if (nb_iter->stmt | |
2063 | && gimple_code (nb_iter->stmt) == GIMPLE_COND) | |
2064 | { | |
1a91d914 | 2065 | stmt = as_a <gcond *> (nb_iter->stmt); |
fd757b76 | 2066 | break; |
2067 | } | |
2068 | if (!stmt && last_stmt (loop->header) | |
2069 | && gimple_code (last_stmt (loop->header)) == GIMPLE_COND) | |
1a91d914 | 2070 | stmt = as_a <gcond *> (last_stmt (loop->header)); |
fd757b76 | 2071 | if (stmt) |
2072 | is_comparison_with_loop_invariant_p (stmt, loop, | |
2073 | &loop_bound_var, | |
2074 | &loop_bound_code, | |
2075 | &loop_bound_step, | |
2076 | &loop_iv_base); | |
2077 | ||
7fb12188 | 2078 | bbs = get_loop_body (loop); |
4ee9c684 | 2079 | |
7fb12188 | 2080 | for (j = 0; j < loop->num_nodes; j++) |
2081 | { | |
7fb12188 | 2082 | edge e; |
cd665a06 | 2083 | edge_iterator ei; |
7fb12188 | 2084 | |
2085 | bb = bbs[j]; | |
e6751e9a | 2086 | |
cd0fe062 | 2087 | /* Bypass loop heuristics on continue statements.  These |
2088 | statements construct loops via "non-loop" constructs |
2089 | in the source language and are better handled |
2090 | separately. */ |
3b0b2309 | 2091 | if (predicted_by_p (bb, PRED_CONTINUE)) |
cbcc4297 | 2092 | { |
2093 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2094 | fprintf (dump_file, "BB %i predicted by continue.\n", | |
2095 | bb->index); | |
2096 | continue; | |
2097 | } | |
cd0fe062 | 2098 | |
cbcc4297 | 2099 | /* If we already used more reliable loop exit predictors, do not |
2100 | bother with PRED_LOOP_EXIT. */ | |
2101 | if (!predicted_by_loop_heuristics_p (bb)) | |
b41438e5 | 2102 | { |
2103 | /* For a loop with many exits we don't want to predict all exits |
2104 | with a pretty large probability, because if all exits are |
2105 | considered in a row, the loop would be predicted to iterate |
2106 | almost never.  The code that divides the probability by the number |
2107 | of exits is very rough.  It should compute the number of exits |
2108 | taken in each path through the function (not the overall number |
2109 | of exits, which might be a lot higher for loops with wide switch |
2110 | statements in them) and compute the n-th square root. |
2111 | |
2112 | We limit the minimal probability to 2% to keep |
2113 | EDGE_PROBABILITY_RELIABLE from trusting the branch prediction, |
2114 | as this was causing a regression in the perl benchmark, which |
2115 | contains such a wide loop. */ |
48e1416a | 2116 | |
b41438e5 | 2117 | int probability = ((REG_BR_PROB_BASE |
cbcc4297 | 2118 | - predictor_info |
2119 | [recursion | |
2120 | ? PRED_LOOP_EXIT_WITH_RECURSION | |
2121 | : PRED_LOOP_EXIT].hitrate) | |
b41438e5 | 2122 | / n_exits); |
2123 | if (probability < HITRATE (2)) | |
2124 | probability = HITRATE (2); | |
2125 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2126 | if (e->dest->index < NUM_FIXED_BLOCKS | |
2127 | || !flow_bb_inside_loop_p (loop, e->dest)) | |
cbcc4297 | 2128 | { |
2129 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2130 | fprintf (dump_file, | |
2131 | "Predicting exit %i->%i with prob %i.\n", | |
2132 | e->src->index, e->dest->index, probability); | |
2133 | predict_edge (e, | |
2134 | recursion ? PRED_LOOP_EXIT_WITH_RECURSION | |
2135 | : PRED_LOOP_EXIT, probability); | |
2136 | } | |
b41438e5 | 2137 | } |
fd757b76 | 2138 | if (loop_bound_var) |
2139 | predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base, | |
2140 | loop_bound_code, | |
e913b5cd | 2141 | tree_to_shwi (loop_bound_step)); |
7fb12188 | 2142 | } |
48e1416a | 2143 | |
e09883e4 | 2144 | /* In the following code |
2145 | for (loop1) | |
2146 | if (cond) | |
2147 | for (loop2) | |
2148 | body; | |
2149 | guess that cond is unlikely. */ | |
2150 | if (loop_outer (loop)->num) | |
2151 | { | |
2152 | basic_block bb = NULL; | |
2153 | edge preheader_edge = loop_preheader_edge (loop); | |
2154 | ||
2155 | if (single_pred_p (preheader_edge->src) | |
2156 | && single_succ_p (preheader_edge->src)) | |
2157 | preheader_edge = single_pred_edge (preheader_edge->src); | |
2158 | ||
2159 | gimple *stmt = last_stmt (preheader_edge->src); | |
2160 | /* Pattern match fortran loop preheader: | |
2161 | _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER); | |
2162 | _17 = (logical(kind=4)) _16; | |
2163 | if (_17 != 0) | |
2164 | goto <bb 11>; | |
2165 | else | |
2166 | goto <bb 13>; | |
2167 | ||
2168 | Loop guard branch prediction says nothing about duplicated loop | |
2169 | headers produced by fortran frontend and in this case we want | |
2170 | to predict paths leading to this preheader. */ | |
2171 | ||
2172 | if (stmt | |
2173 | && gimple_code (stmt) == GIMPLE_COND | |
2174 | && gimple_cond_code (stmt) == NE_EXPR | |
2175 | && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME | |
2176 | && integer_zerop (gimple_cond_rhs (stmt))) | |
2177 | { | |
2178 | gimple *call_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt)); | |
2179 | if (gimple_code (call_stmt) == GIMPLE_ASSIGN | |
2180 | && gimple_expr_code (call_stmt) == NOP_EXPR | |
2181 | && TREE_CODE (gimple_assign_rhs1 (call_stmt)) == SSA_NAME) | |
2182 | call_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt)); | |
7408cd7d | 2183 | if (gimple_call_internal_p (call_stmt, IFN_BUILTIN_EXPECT) |
e09883e4 | 2184 | && TREE_CODE (gimple_call_arg (call_stmt, 2)) == INTEGER_CST |
2185 | && tree_fits_uhwi_p (gimple_call_arg (call_stmt, 2)) | |
2186 | && tree_to_uhwi (gimple_call_arg (call_stmt, 2)) | |
2187 | == PRED_FORTRAN_LOOP_PREHEADER) | |
2188 | bb = preheader_edge->src; | |
2189 | } | |
2190 | if (!bb) | |
2191 | { | |
2192 | if (!dominated_by_p (CDI_DOMINATORS, | |
2193 | loop_outer (loop)->latch, loop->header)) | |
2194 | predict_paths_leading_to_edge (loop_preheader_edge (loop), | |
cbcc4297 | 2195 | recursion |
2196 | ? PRED_LOOP_GUARD_WITH_RECURSION | |
2197 | : PRED_LOOP_GUARD, | |
e09883e4 | 2198 | NOT_TAKEN, |
2199 | loop_outer (loop)); | |
2200 | } | |
2201 | else | |
2202 | { | |
2203 | if (!dominated_by_p (CDI_DOMINATORS, | |
2204 | loop_outer (loop)->latch, bb)) | |
2205 | predict_paths_leading_to (bb, | |
cbcc4297 | 2206 | recursion |
2207 | ? PRED_LOOP_GUARD_WITH_RECURSION | |
2208 | : PRED_LOOP_GUARD, | |
e09883e4 | 2209 | NOT_TAKEN, |
2210 | loop_outer (loop)); | |
2211 | } | |
2212 | } | |
2213 | ||
21dda4ee | 2214 | /* Free basic blocks from get_loop_body. */ |
dcd8fd01 | 2215 | free (bbs); |
59423b59 | 2216 | } |
4ee9c684 | 2217 | } |
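/* To make the exit heuristics above concrete (illustrative numbers): when
   an exit is predicted to be reached after nitercst = 10 iterations, its
   probability becomes RDIV (REG_BR_PROB_BASE, 10), i.e. the exit edge is
   given about a 10% chance on each pass through the exit test, which
   corresponds to a loop iterating roughly ten times.  */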
2218 | ||
83c8a977 | 2219 | /* Attempt to predict probabilities of BB outgoing edges using local |
2220 | properties. */ | |
2221 | static void | |
2222 | bb_estimate_probability_locally (basic_block bb) | |
2223 | { | |
ee5f6585 | 2224 | rtx_insn *last_insn = BB_END (bb); |
83c8a977 | 2225 | rtx cond; |
2226 | ||
2227 | if (! can_predict_insn_p (last_insn)) | |
2228 | return; | |
2229 | cond = get_condition (last_insn, NULL, false, false); | |
2230 | if (! cond) | |
2231 | return; | |
2232 | ||
2233 | /* Try "pointer heuristic." | |
2234 | A comparison ptr == 0 is predicted as false. | |
2235 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
2236 | if (COMPARISON_P (cond) | |
2237 | && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0))) | |
2238 | || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1))))) | |
2239 | { | |
2240 | if (GET_CODE (cond) == EQ) | |
2241 | predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN); | |
2242 | else if (GET_CODE (cond) == NE) | |
2243 | predict_insn_def (last_insn, PRED_POINTER, TAKEN); | |
2244 | } | |
2245 | else | |
2246 | ||
2247 | /* Try "opcode heuristic." | |
2248 | EQ tests are usually false and NE tests are usually true. Also, | |
2249 | most quantities are positive, so we can make the appropriate guesses | |
2250 | about signed comparisons against zero. */ | |
2251 | switch (GET_CODE (cond)) | |
2252 | { | |
2253 | case CONST_INT: | |
2254 | /* Unconditional branch. */ | |
2255 | predict_insn_def (last_insn, PRED_UNCONDITIONAL, | |
2256 | cond == const0_rtx ? NOT_TAKEN : TAKEN); | |
2257 | break; | |
2258 | ||
2259 | case EQ: | |
2260 | case UNEQ: | |
2261 | /* Floating point comparisons appear to behave in a very |
2262 | unpredictable way because of the special role of = tests in |
2263 | FP code. */ |
2264 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
2265 | ; | |
2266 | /* Comparisons with 0 are often used for booleans and there is | |
2267 | nothing useful to predict about them. */ | |
2268 | else if (XEXP (cond, 1) == const0_rtx | |
2269 | || XEXP (cond, 0) == const0_rtx) | |
2270 | ; | |
2271 | else | |
2272 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN); | |
2273 | break; | |
2274 | ||
2275 | case NE: | |
2276 | case LTGT: | |
2277 | /* Floating point comparisons appear to behave in a very |
2278 | unpredictable way because of the special role of = tests in |
2279 | FP code. */ |
2280 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
2281 | ; | |
2282 | /* Comparisons with 0 are often used for booleans and there is | |
2283 | nothing useful to predict about them. */ | |
2284 | else if (XEXP (cond, 1) == const0_rtx | |
2285 | || XEXP (cond, 0) == const0_rtx) | |
2286 | ; | |
2287 | else | |
2288 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN); | |
2289 | break; | |
2290 | ||
2291 | case ORDERED: | |
2292 | predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN); | |
2293 | break; | |
2294 | ||
2295 | case UNORDERED: | |
2296 | predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN); | |
2297 | break; | |
2298 | ||
2299 | case LE: | |
2300 | case LT: | |
2301 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
2302 | || XEXP (cond, 1) == constm1_rtx) | |
2303 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN); | |
2304 | break; | |
2305 | ||
2306 | case GE: | |
2307 | case GT: | |
2308 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
2309 | || XEXP (cond, 1) == constm1_rtx) | |
2310 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN); | |
2311 | break; | |
2312 | ||
2313 | default: | |
2314 | break; | |
2315 | } | |
2316 | } | |
2317 | ||
7edd21a5 | 2318 | /* Set edge->probability for each successor edge of BB. */ |
83c8a977 | 2319 | void |
2320 | guess_outgoing_edge_probabilities (basic_block bb) | |
2321 | { | |
2322 | bb_estimate_probability_locally (bb); | |
2323 | combine_predictions_for_insn (BB_END (bb), bb); | |
2324 | } | |
4ee9c684 | 2325 | \f |
01107f42 | 2326 | static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor, |
2327 | HOST_WIDE_INT *probability); | |
75a70cf9 | 2328 | |
2329 | /* Helper function for expr_expected_value. */ | |
42975b1f | 2330 | |
2331 | static tree | |
2380e91e | 2332 | expr_expected_value_1 (tree type, tree op0, enum tree_code code, |
01107f42 | 2333 | tree op1, bitmap visited, enum br_predictor *predictor, |
2334 | HOST_WIDE_INT *probability) | |
42975b1f | 2335 | { |
42acab1c | 2336 | gimple *def; |
75a70cf9 | 2337 | |
01107f42 | 2338 | /* Reset returned probability value. */ |
2339 | *probability = -1; | |
2340 | *predictor = PRED_UNCONDITIONAL; | |
c83059be | 2341 | |
75a70cf9 | 2342 | if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS) |
42975b1f | 2343 | { |
75a70cf9 | 2344 | if (TREE_CONSTANT (op0)) |
2345 | return op0; | |
2346 | ||
5a5ef659 | 2347 | if (code == IMAGPART_EXPR) |
2348 | { | |
2349 | if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME) | |
2350 | { | |
2351 | def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0)); | |
2352 | if (is_gimple_call (def) | |
2353 | && gimple_call_internal_p (def) | |
2354 | && (gimple_call_internal_fn (def) | |
2355 | == IFN_ATOMIC_COMPARE_EXCHANGE)) | |
2356 | { | |
2357 | /* Assume that any given atomic operation has low contention, | |
2358 | and thus the compare-and-swap operation succeeds. */ | |
01107f42 | 2359 | *predictor = PRED_COMPARE_AND_SWAP; |
5a5ef659 | 2360 | return build_one_cst (TREE_TYPE (op0)); |
2361 | } | |
2362 | } | |
2363 | } | |
2364 | ||
75a70cf9 | 2365 | if (code != SSA_NAME) |
2366 | return NULL_TREE; | |
2367 | ||
2368 | def = SSA_NAME_DEF_STMT (op0); | |
42975b1f | 2369 | |
2370 | /* If we were already here, break the infinite cycle. */ | |
6ef9bbe0 | 2371 | if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0))) |
42975b1f | 2372 | return NULL; |
42975b1f | 2373 | |
75a70cf9 | 2374 | if (gimple_code (def) == GIMPLE_PHI) |
42975b1f | 2375 | { |
2376 | /* All the arguments of the PHI node must have the same constant | |
2377 | value. */ |
75a70cf9 | 2378 | int i, n = gimple_phi_num_args (def); |
42975b1f | 2379 | tree val = NULL, new_val; |
4ee9c684 | 2380 | |
75a70cf9 | 2381 | for (i = 0; i < n; i++) |
42975b1f | 2382 | { |
2383 | tree arg = PHI_ARG_DEF (def, i); | |
c83059be | 2384 | enum br_predictor predictor2; |
42975b1f | 2385 | |
2386 | /* If this PHI has itself as an argument, we cannot | |
2387 | determine the expected value of this argument.  However, |
86481e89 | 2388 | if we can find an expected constant value for the other |
42975b1f | 2389 | PHI args then we can still be sure that this is |
2390 | likely a constant. So be optimistic and just | |
2391 | continue with the next argument. */ | |
2392 | if (arg == PHI_RESULT (def)) | |
2393 | continue; | |
2394 | ||
e431579a | 2395 | HOST_WIDE_INT probability2; |
01107f42 | 2396 | new_val = expr_expected_value (arg, visited, &predictor2, |
e431579a | 2397 | &probability2); |
c83059be | 2398 | |
2399 | /* It is difficult to combine value predictors. Simply assume | |
2400 | that the later predictor is weaker and take its prediction. */ |
01107f42 | 2401 | if (*predictor < predictor2) |
e431579a | 2402 | { |
2403 | *predictor = predictor2; | |
2404 | *probability = probability2; | |
2405 | } | |
42975b1f | 2406 | if (!new_val) |
2407 | return NULL; | |
2408 | if (!val) | |
2409 | val = new_val; | |
2410 | else if (!operand_equal_p (val, new_val, false)) | |
2411 | return NULL; | |
2412 | } | |
2413 | return val; | |
2414 | } | |
75a70cf9 | 2415 | if (is_gimple_assign (def)) |
42975b1f | 2416 | { |
75a70cf9 | 2417 | if (gimple_assign_lhs (def) != op0) |
2418 | return NULL; | |
42975b1f | 2419 | |
75a70cf9 | 2420 | return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)), |
2421 | gimple_assign_rhs1 (def), | |
2422 | gimple_assign_rhs_code (def), | |
2423 | gimple_assign_rhs2 (def), | |
01107f42 | 2424 | visited, predictor, probability); |
75a70cf9 | 2425 | } |
2426 | ||
2427 | if (is_gimple_call (def)) | |
2428 | { | |
2429 | tree decl = gimple_call_fndecl (def); | |
2430 | if (!decl) | |
c83059be | 2431 | { |
2432 | if (gimple_call_internal_p (def) | |
2433 | && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT) | |
2434 | { | |
2435 | gcc_assert (gimple_call_num_args (def) == 3); | |
2436 | tree val = gimple_call_arg (def, 0); | |
2437 | if (TREE_CONSTANT (val)) | |
2438 | return val; | |
01107f42 | 2439 | tree val2 = gimple_call_arg (def, 2); |
2440 | gcc_assert (TREE_CODE (val2) == INTEGER_CST | |
2441 | && tree_fits_uhwi_p (val2) | |
2442 | && tree_to_uhwi (val2) < END_PREDICTORS); | |
2443 | *predictor = (enum br_predictor) tree_to_uhwi (val2); | |
2444 | if (*predictor == PRED_BUILTIN_EXPECT) | |
2445 | *probability | |
2446 | = HITRATE (PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY)); | |
c83059be | 2447 | return gimple_call_arg (def, 1); |
2448 | } | |
2449 | return NULL; | |
2450 | } | |
ba9d6f44 | 2451 | |
92e4277c | 2452 | if (DECL_IS_MALLOC (decl) || DECL_IS_OPERATOR_NEW_P (decl)) |
ba9d6f44 | 2453 | { |
2454 | if (predictor) | |
2455 | *predictor = PRED_MALLOC_NONNULL; | |
2456 | return boolean_true_node; | |
2457 | } | |
2458 | ||
2380e91e | 2459 | if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL) |
2460 | switch (DECL_FUNCTION_CODE (decl)) | |
2461 | { | |
2462 | case BUILT_IN_EXPECT: | |
2463 | { | |
2464 | tree val; | |
2465 | if (gimple_call_num_args (def) != 2) | |
2466 | return NULL; | |
2467 | val = gimple_call_arg (def, 0); | |
2468 | if (TREE_CONSTANT (val)) | |
2469 | return val; | |
01107f42 | 2470 | *predictor = PRED_BUILTIN_EXPECT; |
2471 | *probability | |
2472 | = HITRATE (PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY)); | |
2473 | return gimple_call_arg (def, 1); | |
2474 | } | |
2475 | case BUILT_IN_EXPECT_WITH_PROBABILITY: | |
2476 | { | |
2477 | tree val; | |
2478 | if (gimple_call_num_args (def) != 3) | |
2479 | return NULL; | |
2480 | val = gimple_call_arg (def, 0); | |
2481 | if (TREE_CONSTANT (val)) | |
2482 | return val; | |
2483 | /* Compute final probability as: | |
2484 | probability * REG_BR_PROB_BASE. */ | |
2485 | tree prob = gimple_call_arg (def, 2); | |
2486 | tree t = TREE_TYPE (prob); | |
2487 | tree base = build_int_cst (integer_type_node, | |
2488 | REG_BR_PROB_BASE); | |
2489 | base = build_real_from_int_cst (t, base); | |
e431579a | 2490 | tree r = fold_build2_initializer_loc (UNKNOWN_LOCATION, |
2491 | MULT_EXPR, t, prob, base); | |
26edb6f6 | 2492 | if (TREE_CODE (r) != REAL_CST) |
2493 | { | |
2494 | error_at (gimple_location (def), | |
5bebfaa2 | 2495 | "probability %qE must be " |
2496 | "constant floating-point expression", prob); | |
26edb6f6 | 2497 | return NULL; |
2498 | } | |
01107f42 | 2499 | HOST_WIDE_INT probi |
2500 | = real_to_integer (TREE_REAL_CST_PTR (r)); | |
2501 | if (probi >= 0 && probi <= REG_BR_PROB_BASE) | |
2502 | { | |
2503 | *predictor = PRED_BUILTIN_EXPECT_WITH_PROBABILITY; | |
2504 | *probability = probi; | |
2505 | } | |
26edb6f6 | 2506 | else |
2507 | error_at (gimple_location (def), | |
5bebfaa2 | 2508 | "probability %qE is outside " |
2509 | "the range [0.0, 1.0]", prob); | |
26edb6f6 | 2510 | |
2380e91e | 2511 | return gimple_call_arg (def, 1); |
2512 | } | |
75a70cf9 | 2513 | |
2380e91e | 2514 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N: |
2515 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1: | |
2516 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2: | |
2517 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4: | |
2518 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8: | |
2519 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16: | |
2520 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE: | |
2521 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N: | |
2522 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: | |
2523 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: | |
2524 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: | |
2525 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: | |
2526 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: | |
2527 | /* Assume that any given atomic operation has low contention, | |
2528 | and thus the compare-and-swap operation succeeds. */ | |
01107f42 | 2529 | *predictor = PRED_COMPARE_AND_SWAP; |
2380e91e | 2530 | return boolean_true_node; |
ba9d6f44 | 2531 | case BUILT_IN_REALLOC: |
2532 | if (predictor) | |
2533 | *predictor = PRED_MALLOC_NONNULL; | |
2534 | return boolean_true_node; | |
5213d6c9 | 2535 | default: |
2536 | break; | |
75a70cf9 | 2537 | } |
42975b1f | 2538 | } |
75a70cf9 | 2539 | |
2540 | return NULL; | |
42975b1f | 2541 | } |
75a70cf9 | 2542 | |
2543 | if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS) | |
42975b1f | 2544 | { |
75a70cf9 | 2545 | tree res; |
c83059be | 2546 | enum br_predictor predictor2; |
01107f42 | 2547 | HOST_WIDE_INT probability2; |
2548 | op0 = expr_expected_value (op0, visited, predictor, probability); | |
42975b1f | 2549 | if (!op0) |
2550 | return NULL; | |
01107f42 | 2551 | op1 = expr_expected_value (op1, visited, &predictor2, &probability2); |
42975b1f | 2552 | if (!op1) |
2553 | return NULL; | |
75a70cf9 | 2554 | res = fold_build2 (code, type, op0, op1); |
01107f42 | 2555 | if (TREE_CODE (res) == INTEGER_CST |
2556 | && TREE_CODE (op0) == INTEGER_CST | |
2557 | && TREE_CODE (op1) == INTEGER_CST) | |
2558 | { | |
2559 | /* Combine binary predictions. */ | |
2560 | if (*probability != -1 || probability2 != -1) | |
2561 | { | |
2562 | HOST_WIDE_INT p1 = get_predictor_value (*predictor, *probability); | |
2563 | HOST_WIDE_INT p2 = get_predictor_value (predictor2, probability2); | |
2564 | *probability = RDIV (p1 * p2, REG_BR_PROB_BASE); | |
2565 | } | |
2566 | ||
2567 | if (*predictor < predictor2) | |
2568 | *predictor = predictor2; | |
2569 | ||
2570 | return res; | |
2571 | } | |
42975b1f | 2572 | return NULL; |
2573 | } | |
75a70cf9 | 2574 | if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS) |
42975b1f | 2575 | { |
75a70cf9 | 2576 | tree res; |
01107f42 | 2577 | op0 = expr_expected_value (op0, visited, predictor, probability); |
42975b1f | 2578 | if (!op0) |
2579 | return NULL; | |
75a70cf9 | 2580 | res = fold_build1 (code, type, op0); |
42975b1f | 2581 | if (TREE_CONSTANT (res)) |
2582 | return res; | |
2583 | return NULL; | |
2584 | } | |
2585 | return NULL; | |
2586 | } | |
75a70cf9 | 2587 | |
48e1416a | 2588 | /* Return the constant EXPR is likely to have at execution time, NULL if unknown. |
75a70cf9 | 2589 | The function is used by the builtin_expect branch predictor, so the evidence |
2590 | must come from this construct and additional possible constant folding. |
48e1416a | 2591 | |
75a70cf9 | 2592 | We may want to implement a more involved value guess (such as value range |
2593 | propagation based prediction), but such tricks shall go into a new |
2594 | implementation. */ |
2595 | ||
2596 | static tree | |
c83059be | 2597 | expr_expected_value (tree expr, bitmap visited, |
01107f42 | 2598 | enum br_predictor *predictor, |
2599 | HOST_WIDE_INT *probability) | |
75a70cf9 | 2600 | { |
2601 | enum tree_code code; | |
2602 | tree op0, op1; | |
2603 | ||
2604 | if (TREE_CONSTANT (expr)) | |
c83059be | 2605 | { |
01107f42 | 2606 | *predictor = PRED_UNCONDITIONAL; |
2607 | *probability = -1; | |
c83059be | 2608 | return expr; |
2609 | } | |
75a70cf9 | 2610 | |
2611 | extract_ops_from_tree (expr, &code, &op0, &op1); | |
2612 | return expr_expected_value_1 (TREE_TYPE (expr), | |
01107f42 | 2613 | op0, code, op1, visited, predictor, |
2614 | probability); | |
75a70cf9 | 2615 | } |
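/* For example (hypothetical SSA names): for

     tmp_3 = __builtin_expect (cond_1, 1);

   expr_expected_value (tmp_3, ...) returns the constant 1 (the second
   argument of the call), sets *PREDICTOR to PRED_BUILTIN_EXPECT and
   *PROBABILITY to HITRATE (PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY)),
   which defaults to 90%.  */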
42975b1f | 2616 | \f |
01107f42 | 2617 | |
2618 | /* Return the probability of PREDICTOR.  If the predictor has a variable |
2619 | probability, return the passed PROBABILITY. */ |
2620 | ||
2621 | static HOST_WIDE_INT | |
2622 | get_predictor_value (br_predictor predictor, HOST_WIDE_INT probability) | |
2623 | { | |
2624 | switch (predictor) | |
2625 | { | |
2626 | case PRED_BUILTIN_EXPECT: | |
2627 | case PRED_BUILTIN_EXPECT_WITH_PROBABILITY: | |
2628 | gcc_assert (probability != -1); | |
2629 | return probability; | |
2630 | default: | |
2631 | gcc_assert (probability == -1); | |
2632 | return predictor_info[(int) predictor].hitrate; | |
2633 | } | |
2634 | } | |
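/* As a concrete illustration of the contract above:
   get_predictor_value (PRED_BUILTIN_EXPECT_WITH_PROBABILITY, 8000) returns
   the caller-supplied 8000, while for a fixed-outcome predictor such as
   PRED_LOOP_EXIT the PROBABILITY argument must be -1 and the value comes
   from predictor_info[].hitrate.  */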
2635 | ||
4ee9c684 | 2636 | /* Predict using the opcode of the last statement in the basic block. */ |
2637 | static void | |
2638 | tree_predict_by_opcode (basic_block bb) | |
2639 | { | |
42acab1c | 2640 | gimple *stmt = last_stmt (bb); |
4ee9c684 | 2641 | edge then_edge; |
75a70cf9 | 2642 | tree op0, op1; |
4ee9c684 | 2643 | tree type; |
42975b1f | 2644 | tree val; |
75a70cf9 | 2645 | enum tree_code cmp; |
cd665a06 | 2646 | edge_iterator ei; |
c83059be | 2647 | enum br_predictor predictor; |
01107f42 | 2648 | HOST_WIDE_INT probability; |
4ee9c684 | 2649 | |
44b41fe7 | 2650 | if (!stmt) |
2651 | return; | |
2652 | ||
2653 | if (gswitch *sw = dyn_cast <gswitch *> (stmt)) | |
2654 | { | |
2655 | tree index = gimple_switch_index (sw); | |
2656 | tree val = expr_expected_value (index, auto_bitmap (), | |
2657 | &predictor, &probability); | |
2658 | if (val && TREE_CODE (val) == INTEGER_CST) | |
2659 | { | |
2660 | edge e = find_taken_edge_switch_expr (sw, val); | |
2661 | if (predictor == PRED_BUILTIN_EXPECT) | |
2662 | { | |
2663 | int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY); | |
2664 | gcc_assert (percent >= 0 && percent <= 100); | |
2665 | predict_edge (e, PRED_BUILTIN_EXPECT, | |
2666 | HITRATE (percent)); | |
2667 | } | |
2668 | else | |
2669 | predict_edge_def (e, predictor, TAKEN); | |
2670 | } | |
2671 | } | |
2672 | ||
2673 | if (gimple_code (stmt) != GIMPLE_COND) | |
4ee9c684 | 2674 | return; |
cd665a06 | 2675 | FOR_EACH_EDGE (then_edge, ei, bb->succs) |
4ee9c684 | 2676 | if (then_edge->flags & EDGE_TRUE_VALUE) |
cd665a06 | 2677 | break; |
75a70cf9 | 2678 | op0 = gimple_cond_lhs (stmt); |
2679 | op1 = gimple_cond_rhs (stmt); | |
2680 | cmp = gimple_cond_code (stmt); | |
4ee9c684 | 2681 | type = TREE_TYPE (op0); |
035def86 | 2682 | val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (), |
01107f42 | 2683 | &predictor, &probability); |
c83059be | 2684 | if (val && TREE_CODE (val) == INTEGER_CST) |
42975b1f | 2685 | { |
01107f42 | 2686 | HOST_WIDE_INT prob = get_predictor_value (predictor, probability); |
2687 | if (integer_zerop (val)) | |
2688 | prob = REG_BR_PROB_BASE - prob; | |
2689 | predict_edge (then_edge, predictor, prob); | |
42975b1f | 2690 | } |
4ee9c684 | 2691 | /* Try "pointer heuristic." |
2692 | A comparison ptr == 0 is predicted as false. | |
2693 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
2694 | if (POINTER_TYPE_P (type)) | |
2695 | { | |
75a70cf9 | 2696 | if (cmp == EQ_EXPR) |
4ee9c684 | 2697 | predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN); |
75a70cf9 | 2698 | else if (cmp == NE_EXPR) |
4ee9c684 | 2699 | predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN); |
2700 | } | |
2701 | else | |
2702 | ||
2703 | /* Try "opcode heuristic." | |
2704 | EQ tests are usually false and NE tests are usually true. Also, | |
2705 | most quantities are positive, so we can make the appropriate guesses | |
2706 | about signed comparisons against zero. */ | |
75a70cf9 | 2707 | switch (cmp) |
4ee9c684 | 2708 | { |
2709 | case EQ_EXPR: | |
2710 | case UNEQ_EXPR: | |
2711 | /* Floating point comparisons appear to behave in a very |
2712 | unpredictable way because of the special role of = tests in |
2713 | FP code. */ |
2714 | if (FLOAT_TYPE_P (type)) | |
2715 | ; | |
2716 | /* Comparisons with 0 are often used for booleans and there is | |
2717 | nothing useful to predict about them. */ | |
75a70cf9 | 2718 | else if (integer_zerop (op0) || integer_zerop (op1)) |
4ee9c684 | 2719 | ; |
2720 | else | |
2721 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN); | |
2722 | break; | |
2723 | ||
2724 | case NE_EXPR: | |
318a728f | 2725 | case LTGT_EXPR: |
4ee9c684 | 2726 | /* Floating point comparisons appear to behave in a very |
2727 | unpredictable way because of the special role of = tests in |
2728 | FP code. */ |
2729 | if (FLOAT_TYPE_P (type)) | |
2730 | ; | |
2731 | /* Comparisons with 0 are often used for booleans and there is | |
2732 | nothing useful to predict about them. */ | |
2733 | else if (integer_zerop (op0) | |
75a70cf9 | 2734 | || integer_zerop (op1)) |
4ee9c684 | 2735 | ; |
2736 | else | |
2737 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN); | |
2738 | break; | |
2739 | ||
2740 | case ORDERED_EXPR: | |
2741 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN); | |
2742 | break; | |
2743 | ||
2744 | case UNORDERED_EXPR: | |
2745 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN); | |
2746 | break; | |
2747 | ||
2748 | case LE_EXPR: | |
2749 | case LT_EXPR: | |
75a70cf9 | 2750 | if (integer_zerop (op1) |
2751 | || integer_onep (op1) | |
2752 | || integer_all_onesp (op1) | |
2753 | || real_zerop (op1) | |
2754 | || real_onep (op1) | |
2755 | || real_minus_onep (op1)) | |
4ee9c684 | 2756 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN); |
2757 | break; | |
2758 | ||
2759 | case GE_EXPR: | |
2760 | case GT_EXPR: | |
75a70cf9 | 2761 | if (integer_zerop (op1) |
2762 | || integer_onep (op1) | |
2763 | || integer_all_onesp (op1) | |
2764 | || real_zerop (op1) | |
2765 | || real_onep (op1) | |
2766 | || real_minus_onep (op1)) | |
4ee9c684 | 2767 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN); |
2768 | break; | |
2769 | ||
2770 | default: | |
2771 | break; | |
2772 | } | |
2773 | } | |
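/* Illustrations of the heuristics above (hypothetical sources):
   "if (ptr_1 == 0)" is predicted not taken by PRED_TREE_POINTER,
   "if (n_2 != m_3)" on integer operands is predicted taken by
   PRED_TREE_OPCODE_NONEQUAL, and "if (count_4 > 0)" is predicted taken
   by PRED_TREE_OPCODE_POSITIVE.  */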
2774 | ||
30f2983e | 2775 | /* Returns TRUE if STMT is an exit(0)-like statement. */ |
2776 | ||
2777 | static bool | |
2778 | is_exit_with_zero_arg (const gimple *stmt) | |
2779 | { | |
2780 | /* This is not exit, _exit or _Exit. */ | |
2781 | if (!gimple_call_builtin_p (stmt, BUILT_IN_EXIT) | |
2782 | && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT) | |
2783 | && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT2)) | |
2784 | return false; | |
2785 | ||
2786 | /* The argument is an integer zero. */ |
2787 | return integer_zerop (gimple_call_arg (stmt, 0)); | |
2788 | } | |
2789 | ||
f816ec49 | 2790 | /* Try to guess whether the return value VAL represents an error code. */ |
75a70cf9 | 2791 | |
f816ec49 | 2792 | static enum br_predictor |
2793 | return_prediction (tree val, enum prediction *prediction) | |
2794 | { | |
2795 | /* VOID. */ | |
2796 | if (!val) | |
2797 | return PRED_NO_PREDICTION; | |
2798 | /* Different heuristics for pointers and scalars. */ | |
2799 | if (POINTER_TYPE_P (TREE_TYPE (val))) | |
2800 | { | |
2801 | /* NULL is usually not returned. */ | |
2802 | if (integer_zerop (val)) | |
2803 | { | |
2804 | *prediction = NOT_TAKEN; | |
2805 | return PRED_NULL_RETURN; | |
2806 | } | |
2807 | } | |
2808 | else if (INTEGRAL_TYPE_P (TREE_TYPE (val))) | |
2809 | { | |
2810 | /* Negative return values are often used to indicate | |
2811 | errors. */ | |
2812 | if (TREE_CODE (val) == INTEGER_CST | |
2813 | && tree_int_cst_sgn (val) < 0) | |
2814 | { | |
2815 | *prediction = NOT_TAKEN; | |
2816 | return PRED_NEGATIVE_RETURN; | |
2817 | } | |
2818 | /* Constant return values seem to be commonly taken. |
2819 | Zero/one often represent booleans so exclude them from the | |
2820 | heuristics. */ | |
2821 | if (TREE_CONSTANT (val) | |
2822 | && (!integer_zerop (val) && !integer_onep (val))) | |
2823 | { | |
d3cb49c9 | 2824 | *prediction = NOT_TAKEN; |
4a4e4487 | 2825 | return PRED_CONST_RETURN; |
f816ec49 | 2826 | } |
2827 | } | |
2828 | return PRED_NO_PREDICTION; | |
2829 | } | |
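/* For instance: "return NULL;" in a pointer-valued function yields
   PRED_NULL_RETURN with *PREDICTION = NOT_TAKEN, "return -1;" yields
   PRED_NEGATIVE_RETURN, "return 42;" yields PRED_CONST_RETURN, while
   returning 0 or 1 gives PRED_NO_PREDICTION.  */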
2830 | ||
7c4f5bde | 2831 | /* Return zero if phi result could have values other than -1, 0 or 1, |
2832 | otherwise return a bitmask, with bits 0, 1 and 2 set if -1, 0 and 1 | |
2833 | values are used or likely. */ | |
2834 | ||
2835 | static int | |
2836 | zero_one_minusone (gphi *phi, int limit) | |
2837 | { | |
2838 | int phi_num_args = gimple_phi_num_args (phi); | |
2839 | int ret = 0; | |
2840 | for (int i = 0; i < phi_num_args; i++) | |
2841 | { | |
2842 | tree t = PHI_ARG_DEF (phi, i); | |
2843 | if (TREE_CODE (t) != INTEGER_CST) | |
2844 | continue; | |
2845 | wide_int w = wi::to_wide (t); | |
2846 | if (w == -1) | |
2847 | ret |= 1; | |
2848 | else if (w == 0) | |
2849 | ret |= 2; | |
2850 | else if (w == 1) | |
2851 | ret |= 4; | |
2852 | else | |
2853 | return 0; | |
2854 | } | |
2855 | for (int i = 0; i < phi_num_args; i++) | |
2856 | { | |
2857 | tree t = PHI_ARG_DEF (phi, i); | |
2858 | if (TREE_CODE (t) == INTEGER_CST) | |
2859 | continue; | |
2860 | if (TREE_CODE (t) != SSA_NAME) | |
2861 | return 0; | |
2862 | gimple *g = SSA_NAME_DEF_STMT (t); | |
2863 | if (gimple_code (g) == GIMPLE_PHI && limit > 0) | |
2864 | if (int r = zero_one_minusone (as_a <gphi *> (g), limit - 1)) | |
2865 | { | |
2866 | ret |= r; | |
2867 | continue; | |
2868 | } | |
2869 | if (!is_gimple_assign (g)) | |
2870 | return 0; | |
2871 | if (gimple_assign_cast_p (g)) | |
2872 | { | |
2873 | tree rhs1 = gimple_assign_rhs1 (g); | |
2874 | if (TREE_CODE (rhs1) != SSA_NAME | |
2875 | || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2876 | || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1 | |
2877 | || !TYPE_UNSIGNED (TREE_TYPE (rhs1))) | |
2878 | return 0; | |
2879 | ret |= (2 | 4); | |
2880 | continue; | |
2881 | } | |
2882 | if (TREE_CODE_CLASS (gimple_assign_rhs_code (g)) != tcc_comparison) | |
2883 | return 0; | |
2884 | ret |= (2 | 4); | |
2885 | } | |
2886 | return ret; | |
2887 | } | |
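/* For a qsort-style comparator whose result PHI merges only the constants
   -1, 0 and 1, this returns 1 | 2 | 4 == 7; if any other constant (say 2)
   reaches the PHI, the function returns 0.  */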
2888 | ||
f816ec49 | 2889 | /* Find the basic block with the return expression and look for a possible |
2890 | return value, trying to apply the RETURN_PREDICTION heuristics. */ |
2891 | static void | |
d704ea82 | 2892 | apply_return_prediction (void) |
f816ec49 | 2893 | { |
1a91d914 | 2894 | greturn *return_stmt = NULL; |
f816ec49 | 2895 | tree return_val; |
2896 | edge e; | |
1a91d914 | 2897 | gphi *phi; |
f816ec49 | 2898 | int phi_num_args, i; |
2899 | enum br_predictor pred; | |
2900 | enum prediction direction; | |
cd665a06 | 2901 | edge_iterator ei; |
f816ec49 | 2902 | |
34154e27 | 2903 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
f816ec49 | 2904 | { |
42acab1c | 2905 | gimple *last = last_stmt (e->src); |
1a91d914 | 2906 | if (last |
2907 | && gimple_code (last) == GIMPLE_RETURN) | |
2908 | { | |
2909 | return_stmt = as_a <greturn *> (last); | |
2910 | break; | |
2911 | } | |
f816ec49 | 2912 | } |
2913 | if (!e) | |
2914 | return; | |
75a70cf9 | 2915 | return_val = gimple_return_retval (return_stmt); |
f816ec49 | 2916 | if (!return_val) |
2917 | return; | |
f816ec49 | 2918 | if (TREE_CODE (return_val) != SSA_NAME |
2919 | || !SSA_NAME_DEF_STMT (return_val) | |
75a70cf9 | 2920 | || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI) |
f816ec49 | 2921 | return; |
1a91d914 | 2922 | phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val)); |
75a70cf9 | 2923 | phi_num_args = gimple_phi_num_args (phi); |
f816ec49 | 2924 | pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction); |
2925 | ||
7c4f5bde | 2926 | /* Avoid the case where the function returns -1, 0 and 1 values and |
2927 | nothing else. Those could be qsort etc. comparison functions | |
2928 | where the negative return isn't less probable than positive. | |
2929 | For this, require that the function returns both -1 and 1 (the 1 may also | |
2930 | come from a boolean value or comparison result), so that functions | |
2931 | returning just -1 and 0 are treated as if -1 represents an error value. */ | |
2932 | if (INTEGRAL_TYPE_P (TREE_TYPE (return_val)) | |
2933 | && !TYPE_UNSIGNED (TREE_TYPE (return_val)) | |
2934 | && TYPE_PRECISION (TREE_TYPE (return_val)) > 1) | |
2935 | if (int r = zero_one_minusone (phi, 3)) | |
2936 | if ((r & (1 | 4)) == (1 | 4)) | |
2937 | return; | |
2938 | ||
f816ec49 | 2939 | /* Avoid the degenerate case where all return values from the function |
2940 | belong to the same category (i.e. they are all positive constants), | |
2941 | so we can hardly say anything about them. */ | |
2942 | for (i = 1; i < phi_num_args; i++) | |
2943 | if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction)) | |
2944 | break; | |
2945 | if (i != phi_num_args) | |
2946 | for (i = 0; i < phi_num_args; i++) | |
2947 | { | |
2948 | pred = return_prediction (PHI_ARG_DEF (phi, i), &direction); | |
2949 | if (pred != PRED_NO_PREDICTION) | |
5707768a | 2950 | predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred, |
2951 | direction); | |
f816ec49 | 2952 | } |
2953 | } | |
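/* A source-level sketch of the distinction made above (the function names
   are invented for illustration):

     int cmp (int a, int b)          // return PHI merges -1, 0 and 1, so bits
     { return a < b ? -1 : a > b; }  // 0 and 2 are both set and no return
                                     // prediction is applied

     int lookup (int key)            // returns -1 only on failure; the paths
     {                               // reaching "return -1" are predicted as
       if (!found (key))             // unlikely by the return-value heuristics
         return -1;
       return value_of (key);
     }  */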
2954 | ||
2955 | /* Look for basic blocks that contain unlikely-to-happen events | |
2956 | (such as noreturn calls) and mark all paths leading to execution | |
2957 | of these basic blocks as unlikely. */ | |
2958 | ||
2959 | static void | |
2960 | tree_bb_level_predictions (void) | |
2961 | { | |
2962 | basic_block bb; | |
9f694a82 | 2963 | bool has_return_edges = false; |
2964 | edge e; | |
2965 | edge_iterator ei; | |
2966 | ||
34154e27 | 2967 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
f08c22c4 | 2968 | if (!unlikely_executed_edge_p (e) && !(e->flags & EDGE_ABNORMAL_CALL)) |
9f694a82 | 2969 | { |
2970 | has_return_edges = true; | |
2971 | break; | |
2972 | } | |
f816ec49 | 2973 | |
d704ea82 | 2974 | apply_return_prediction (); |
f816ec49 | 2975 | |
fc00614f | 2976 | FOR_EACH_BB_FN (bb, cfun) |
f816ec49 | 2977 | { |
75a70cf9 | 2978 | gimple_stmt_iterator gsi; |
f816ec49 | 2979 | |
1add270f | 2980 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
f816ec49 | 2981 | { |
42acab1c | 2982 | gimple *stmt = gsi_stmt (gsi); |
5de92639 | 2983 | tree decl; |
3ed4a4a1 | 2984 | |
75a70cf9 | 2985 | if (is_gimple_call (stmt)) |
f816ec49 | 2986 | { |
30f2983e | 2987 | if (gimple_call_noreturn_p (stmt) |
2988 | && has_return_edges | |
2989 | && !is_exit_with_zero_arg (stmt)) | |
75a70cf9 | 2990 | predict_paths_leading_to (bb, PRED_NORETURN, |
2991 | NOT_TAKEN); | |
2992 | decl = gimple_call_fndecl (stmt); | |
2993 | if (decl | |
2994 | && lookup_attribute ("cold", | |
2995 | DECL_ATTRIBUTES (decl))) | |
2996 | predict_paths_leading_to (bb, PRED_COLD_FUNCTION, | |
2997 | NOT_TAKEN); | |
89beffc9 | 2998 | if (decl && recursive_call_p (current_function_decl, decl)) |
2999 | predict_paths_leading_to (bb, PRED_RECURSIVE_CALL, | |
3000 | NOT_TAKEN); | |
f816ec49 | 3001 | } |
75a70cf9 | 3002 | else if (gimple_code (stmt) == GIMPLE_PREDICT) |
3003 | { | |
3004 | predict_paths_leading_to (bb, gimple_predict_predictor (stmt), | |
3005 | gimple_predict_outcome (stmt)); | |
1add270f | 3006 | /* Keep GIMPLE_PREDICT around so early inlining will propagate |
3007 | hints to callers. */ | |
75a70cf9 | 3008 | } |
f816ec49 | 3009 | } |
3010 | } | |
f816ec49 | 3011 | } |
3012 | ||
06ecf488 | 3013 | /* Callback for hash_map::traverse, asserts that the pointer map is |
b3723726 | 3014 | empty. */ |
3015 | ||
06ecf488 | 3016 | bool |
3017 | assert_is_empty (const_basic_block const &, edge_prediction *const &value, | |
3018 | void *) | |
b3723726 | 3019 | { |
06ecf488 | 3020 | gcc_assert (!value); |
b3723726 | 3021 | return false; |
3022 | } | |
b3723726 | 3023 | |
fc935416 | 3024 | /* Predict branch probabilities and estimate profile for basic block BB. |
3025 | When LOCAL_ONLY is set do not use any global properties of CFG. */ | |
675d86b2 | 3026 | |
3027 | static void | |
fc935416 | 3028 | tree_estimate_probability_bb (basic_block bb, bool local_only) |
675d86b2 | 3029 | { |
3030 | edge e; | |
3031 | edge_iterator ei; | |
675d86b2 | 3032 | |
3033 | FOR_EACH_EDGE (e, ei, bb->succs) | |
3034 | { | |
675d86b2 | 3035 | /* Look for a block we are guarding (i.e. we dominate it, |
3036 | but it doesn't postdominate us). */ | |
34154e27 | 3037 | if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb |
fc935416 | 3038 | && !local_only |
675d86b2 | 3039 | && dominated_by_p (CDI_DOMINATORS, e->dest, e->src) |
3040 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest)) | |
3041 | { | |
3042 | gimple_stmt_iterator bi; | |
3043 | ||
3044 | /* The call heuristic claims that a guarded function call | |
3045 | is improbable. This is because such calls are often used | |
3046 | to signal exceptional situations such as printing error | |
3047 | messages. */ | |
3048 | for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi); | |
3049 | gsi_next (&bi)) | |
3050 | { | |
42acab1c | 3051 | gimple *stmt = gsi_stmt (bi); |
675d86b2 | 3052 | if (is_gimple_call (stmt) |
89beffc9 | 3053 | && !gimple_inexpensive_call_p (as_a <gcall *> (stmt)) |
675d86b2 | 3054 | /* Constant and pure calls are hardly used to signal |
3055 | something exceptional. */ | |
3056 | && gimple_has_side_effects (stmt)) | |
3057 | { | |
1cb6c2eb | 3058 | if (gimple_call_fndecl (stmt)) |
3059 | predict_edge_def (e, PRED_CALL, NOT_TAKEN); | |
3060 | else if (virtual_method_call_p (gimple_call_fn (stmt))) | |
1d07104f | 3061 | predict_edge_def (e, PRED_POLYMORPHIC_CALL, NOT_TAKEN); |
1cb6c2eb | 3062 | else |
3063 | predict_edge_def (e, PRED_INDIR_CALL, TAKEN); | |
675d86b2 | 3064 | break; |
3065 | } | |
3066 | } | |
3067 | } | |
3068 | } | |
3069 | tree_predict_by_opcode (bb); | |
3070 | } | |
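/* A source-level sketch of the guarded-call heuristic above (the identifiers
   are invented for illustration):

     if (status != OK)          // this block dominates the call block below
       report_error (status);   // but is not postdominated by it

   report_error has side effects and is not trivially cheap, so the edge into
   its block is predicted not taken with PRED_CALL; polymorphic and indirect
   calls use PRED_POLYMORPHIC_CALL and PRED_INDIR_CALL instead.  */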
3071 | ||
3072 | /* Predict branch probabilities and estimate profile of the tree CFG. | |
3073 | This function can be called from the loop optimizers to recompute | |
5a5f50e9 | 3074 | the profile information. |
3075 | If DRY_RUN is set, do not modify CFG and only produce dump files. */ | |
675d86b2 | 3076 | |
3077 | void | |
5a5f50e9 | 3078 | tree_estimate_probability (bool dry_run) |
4ee9c684 | 3079 | { |
3080 | basic_block bb; | |
4ee9c684 | 3081 | |
f816ec49 | 3082 | add_noreturn_fake_exit_edges (); |
4ee9c684 | 3083 | connect_infinite_loops_to_exit (); |
d8a0d6b8 | 3084 | /* We use loop_niter_by_eval, which requires that the loops have |
3085 | preheaders. */ | |
3086 | create_preheaders (CP_SIMPLE_PREHEADERS); | |
4ee9c684 | 3087 | calculate_dominance_info (CDI_POST_DOMINATORS); |
63c3361d | 3088 | /* Decide which edges are known to be unlikely. This improves later |
3089 | branch prediction. */ | |
3090 | determine_unlikely_bbs (); | |
4ee9c684 | 3091 | |
06ecf488 | 3092 | bb_predictions = new hash_map<const_basic_block, edge_prediction *>; |
f816ec49 | 3093 | tree_bb_level_predictions (); |
d500fef3 | 3094 | record_loop_exits (); |
675d86b2 | 3095 | |
41f75a99 | 3096 | if (number_of_loops (cfun) > 1) |
7194de72 | 3097 | predict_loops (); |
4ee9c684 | 3098 | |
fc00614f | 3099 | FOR_EACH_BB_FN (bb, cfun) |
fc935416 | 3100 | tree_estimate_probability_bb (bb, false); |
4ee9c684 | 3101 | |
fc00614f | 3102 | FOR_EACH_BB_FN (bb, cfun) |
5a5f50e9 | 3103 | combine_predictions_for_bb (bb, dry_run); |
f81d9f78 | 3104 | |
382ecba7 | 3105 | if (flag_checking) |
3106 | bb_predictions->traverse<void *, assert_is_empty> (NULL); | |
3107 | ||
06ecf488 | 3108 | delete bb_predictions; |
b3723726 | 3109 | bb_predictions = NULL; |
3110 | ||
5a5f50e9 | 3111 | if (!dry_run) |
3112 | estimate_bb_frequencies (false); | |
4ee9c684 | 3113 | free_dominance_info (CDI_POST_DOMINATORS); |
41d24834 | 3114 | remove_fake_exit_edges (); |
675d86b2 | 3115 | } |
fc935416 | 3116 | |
3117 | /* Set edge->probability for each successor edge of BB. */ | |
3118 | void | |
3119 | tree_guess_outgoing_edge_probabilities (basic_block bb) | |
3120 | { | |
3121 | bb_predictions = new hash_map<const_basic_block, edge_prediction *>; | |
3122 | tree_estimate_probability_bb (bb, true); | |
3123 | combine_predictions_for_bb (bb, false); | |
3124 | if (flag_checking) | |
3125 | bb_predictions->traverse<void *, assert_is_empty> (NULL); | |
3126 | delete bb_predictions; | |
3127 | bb_predictions = NULL; | |
3128 | } | |
89cfe6e5 | 3129 | \f |
f0b5f617 | 3130 | /* Predict edges to successors of CUR whose sources are not postdominated by |
d704ea82 | 3131 | BB by PRED and recurse to all postdominators. */ |
f816ec49 | 3132 | |
3133 | static void | |
d704ea82 | 3134 | predict_paths_for_bb (basic_block cur, basic_block bb, |
3135 | enum br_predictor pred, | |
d3443011 | 3136 | enum prediction taken, |
2e966e2a | 3137 | bitmap visited, class loop *in_loop = NULL) |
f816ec49 | 3138 | { |
3139 | edge e; | |
cd665a06 | 3140 | edge_iterator ei; |
d704ea82 | 3141 | basic_block son; |
f816ec49 | 3142 | |
e09883e4 | 3143 | /* If we exited the loop or CUR is unconditional in the loop, there is |
3144 | nothing to do. */ | |
3145 | if (in_loop | |
3146 | && (!flow_bb_inside_loop_p (in_loop, cur) | |
3147 | || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur))) | |
3148 | return; | |
3149 | ||
d704ea82 | 3150 | /* We are looking for all edges forming edge cut induced by |
3151 | set of all blocks postdominated by BB. */ | |
3152 | FOR_EACH_EDGE (e, ei, cur->preds) | |
3153 | if (e->src->index >= NUM_FIXED_BLOCKS | |
3154 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb)) | |
f816ec49 | 3155 | { |
f1d5a92b | 3156 | edge e2; |
3157 | edge_iterator ei2; | |
3158 | bool found = false; | |
3159 | ||
5707768a | 3160 | /* Ignore fake and EH edges; we predict them as not taken anyway. */ |
f08c22c4 | 3161 | if (unlikely_executed_edge_p (e)) |
f1d5a92b | 3162 | continue; |
d704ea82 | 3163 | gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb)); |
f1d5a92b | 3164 | |
d3443011 | 3165 | /* See if there is an edge from e->src that is not abnormal |
e09883e4 | 3166 | and does not lead to BB and does not exit the loop. */ |
f1d5a92b | 3167 | FOR_EACH_EDGE (e2, ei2, e->src->succs) |
3168 | if (e2 != e | |
f08c22c4 | 3169 | && !unlikely_executed_edge_p (e2) |
e09883e4 | 3170 | && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb) |
3171 | && (!in_loop || !loop_exit_edge_p (in_loop, e2))) | |
f1d5a92b | 3172 | { |
3173 | found = true; | |
3174 | break; | |
3175 | } | |
3176 | ||
3177 | /* If there is a non-abnormal path leaving e->src, predict the edge | |
3178 | using the predictor. Otherwise we need to look for paths | |
d3443011 | 3179 | leading to e->src. |
3180 | | |
3181 | The second case may lead to an infinite loop when we are predicting | |
3182 | regions that are only reachable by abnormal edges. We simply | |
3183 | prevent visiting a given BB twice. */ | |
f1d5a92b | 3184 | if (found) |
f6e0b8d0 | 3185 | { |
3186 | if (!edge_predicted_by_p (e, pred, taken)) | |
3187 | predict_edge_def (e, pred, taken); | |
3188 | } | |
6e3803fb | 3189 | else if (bitmap_set_bit (visited, e->src->index)) |
e09883e4 | 3190 | predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop); |
f816ec49 | 3191 | } |
d704ea82 | 3192 | for (son = first_dom_son (CDI_POST_DOMINATORS, cur); |
3193 | son; | |
3194 | son = next_dom_son (CDI_POST_DOMINATORS, son)) | |
e09883e4 | 3195 | predict_paths_for_bb (son, bb, pred, taken, visited, in_loop); |
d704ea82 | 3196 | } |
f816ec49 | 3197 | |
d704ea82 | 3198 | /* Set branch probabilities on the paths leading to BB according to the |
3199 | predictor PRED and prediction TAKEN. */ | |
f816ec49 | 3200 | |
d704ea82 | 3201 | static void |
3202 | predict_paths_leading_to (basic_block bb, enum br_predictor pred, | |
2e966e2a | 3203 | enum prediction taken, class loop *in_loop) |
d704ea82 | 3204 | { |
035def86 | 3205 | predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop); |
f816ec49 | 3206 | } |
5707768a | 3207 | |
3208 | /* Like predict_paths_leading_to but take edge instead of basic block. */ | |
3209 | ||
3210 | static void | |
3211 | predict_paths_leading_to_edge (edge e, enum br_predictor pred, | |
2e966e2a | 3212 | enum prediction taken, class loop *in_loop) |
5707768a | 3213 | { |
3214 | bool has_nonloop_edge = false; | |
3215 | edge_iterator ei; | |
3216 | edge e2; | |
3217 | ||
3218 | basic_block bb = e->src; | |
3219 | FOR_EACH_EDGE (e2, ei, bb->succs) | |
3220 | if (e2->dest != e->src && e2->dest != e->dest | |
f08c22c4 | 3221 | && !unlikely_executed_edge_p (e) |
5707768a | 3222 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest)) |
3223 | { | |
3224 | has_nonloop_edge = true; | |
3225 | break; | |
3226 | } | |
3227 | if (!has_nonloop_edge) | |
d3443011 | 3228 | { |
035def86 | 3229 | predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop); |
d3443011 | 3230 | } |
5707768a | 3231 | else |
3232 | predict_edge_def (e, pred, taken); | |
3233 | } | |
cd0fe062 | 3234 | \f |
e725f898 | 3235 | /* This is used to carry information about basic blocks. It is |
f81d9f78 | 3236 | attached to the AUX field of the standard CFG block. */ |
3237 | ||
251317e4 | 3238 | class block_info |
f81d9f78 | 3239 | { |
251317e4 | 3240 | public: |
f81d9f78 | 3241 | /* Estimated frequency of execution of basic_block. */ |
e9d7220b | 3242 | sreal frequency; |
f81d9f78 | 3243 | |
3244 | /* To keep queue of basic blocks to process. */ | |
3245 | basic_block next; | |
3246 | ||
4a82352a | 3247 | /* Number of predecessors we need to visit first. */ |
4ad72a03 | 3248 | int npredecessors; |
9908fe4d | 3249 | }; |
f81d9f78 | 3250 | |
3251 | /* Similar information for edges. */ | |
251317e4 | 3252 | class edge_prob_info |
f81d9f78 | 3253 | { |
251317e4 | 3254 | public: |
77aa6362 | 3255 | /* In case the edge is a loopback edge, the probability that the edge will |
f81d9f78 | 3256 | be reached provided that the header is. The estimated number of iterations |
56ff4880 | 3257 | of the loop can then be computed as 1 / (1 - back_edge_prob). */ |
e9d7220b | 3258 | sreal back_edge_prob; |
77aa6362 | 3259 | /* True if the edge is a loopback edge in the natural loop. */ |
74cbb553 | 3260 | unsigned int back_edge:1; |
9908fe4d | 3261 | }; |
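/* For instance (numbers invented): a latch edge whose back_edge_prob is 0.9
   yields an estimated 1 / (1 - 0.9) = 10 iterations of the loop.  */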
f81d9f78 | 3262 | |
9908fe4d | 3263 | #define BLOCK_INFO(B) ((block_info *) (B)->aux) |
886c1262 | 3264 | #undef EDGE_INFO |
9908fe4d | 3265 | #define EDGE_INFO(E) ((edge_prob_info *) (E)->aux) |
f81d9f78 | 3266 | |
3267 | /* Helper function for estimate_bb_frequencies. | |
88e6f696 | 3268 | Propagate the frequencies in blocks marked in |
3269 | TOVISIT, starting in HEAD. */ | |
e6751e9a | 3270 | |
f81d9f78 | 3271 | static void |
88e6f696 | 3272 | propagate_freq (basic_block head, bitmap tovisit) |
f81d9f78 | 3273 | { |
4c26117a | 3274 | basic_block bb; |
3275 | basic_block last; | |
9ea83aa5 | 3276 | unsigned i; |
f81d9f78 | 3277 | edge e; |
3278 | basic_block nextbb; | |
b1bb9b10 | 3279 | bitmap_iterator bi; |
312866af | 3280 | |
4a82352a | 3281 | /* For each basic block we need to visit, count the number of its |
312866af | 3282 | predecessors that we need to visit first. */ |
b1bb9b10 | 3283 | EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi) |
312866af | 3284 | { |
b1bb9b10 | 3285 | edge_iterator ei; |
3286 | int count = 0; | |
3287 | ||
f5a6b05f | 3288 | bb = BASIC_BLOCK_FOR_FN (cfun, i); |
e6751e9a | 3289 | |
b1bb9b10 | 3290 | FOR_EACH_EDGE (e, ei, bb->preds) |
3291 | { | |
3292 | bool visit = bitmap_bit_p (tovisit, e->src->index); | |
3293 | ||
3294 | if (visit && !(e->flags & EDGE_DFS_BACK)) | |
3295 | count++; | |
3296 | else if (visit && dump_file && !EDGE_INFO (e)->back_edge) | |
3297 | fprintf (dump_file, | |
3298 | "Irreducible region hit, ignoring edge to %i->%i\n", | |
3299 | e->src->index, bb->index); | |
312866af | 3300 | } |
9ea83aa5 | 3301 | BLOCK_INFO (bb)->npredecessors = count; |
555e8b05 | 3302 | /* When function never returns, we will never process exit block. */ |
34154e27 | 3303 | if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
205ce1aa | 3304 | bb->count = profile_count::zero (); |
312866af | 3305 | } |
f81d9f78 | 3306 | |
8201d1f6 | 3307 | BLOCK_INFO (head)->frequency = 1; |
4c26117a | 3308 | last = head; |
3309 | for (bb = head; bb; bb = nextbb) | |
f81d9f78 | 3310 | { |
cd665a06 | 3311 | edge_iterator ei; |
8201d1f6 | 3312 | sreal cyclic_probability = 0; |
3313 | sreal frequency = 0; | |
f81d9f78 | 3314 | |
3315 | nextbb = BLOCK_INFO (bb)->next; | |
3316 | BLOCK_INFO (bb)->next = NULL; | |
3317 | ||
3318 | /* Compute frequency of basic block. */ | |
3319 | if (bb != head) | |
3320 | { | |
382ecba7 | 3321 | if (flag_checking) |
3322 | FOR_EACH_EDGE (e, ei, bb->preds) | |
3323 | gcc_assert (!bitmap_bit_p (tovisit, e->src->index) | |
3324 | || (e->flags & EDGE_DFS_BACK)); | |
f81d9f78 | 3325 | |
cd665a06 | 3326 | FOR_EACH_EDGE (e, ei, bb->preds) |
f81d9f78 | 3327 | if (EDGE_INFO (e)->back_edge) |
56ff4880 | 3328 | { |
23a92fc7 | 3329 | cyclic_probability += EDGE_INFO (e)->back_edge_prob; |
56ff4880 | 3330 | } |
312866af | 3331 | else if (!(e->flags & EDGE_DFS_BACK)) |
56ff4880 | 3332 | { |
56ff4880 | 3333 | /* frequency += (e->probability |
3334 | * BLOCK_INFO (e->src)->frequency / | |
3335 | REG_BR_PROB_BASE); */ | |
3336 | ||
205ce1aa | 3337 | /* FIXME: Graphite is producing edges with no profile. Once |
3338 | this is fixed, drop this. */ | |
3339 | sreal tmp = e->probability.initialized_p () ? | |
3340 | e->probability.to_reg_br_prob_base () : 0; | |
23a92fc7 | 3341 | tmp *= BLOCK_INFO (e->src)->frequency; |
3342 | tmp *= real_inv_br_prob_base; | |
3343 | frequency += tmp; | |
56ff4880 | 3344 | } |
3345 | ||
8201d1f6 | 3346 | if (cyclic_probability == 0) |
e9d7220b | 3347 | { |
23a92fc7 | 3348 | BLOCK_INFO (bb)->frequency = frequency; |
e9d7220b | 3349 | } |
2e3c56e8 | 3350 | else |
3351 | { | |
23a92fc7 | 3352 | if (cyclic_probability > real_almost_one) |
3353 | cyclic_probability = real_almost_one; | |
f81d9f78 | 3354 | |
d598ad0d | 3355 | /* BLOCK_INFO (bb)->frequency = frequency |
e9d7220b | 3356 | / (1 - cyclic_probability) */ |
f81d9f78 | 3357 | |
8201d1f6 | 3358 | cyclic_probability = sreal (1) - cyclic_probability; |
23a92fc7 | 3359 | BLOCK_INFO (bb)->frequency = frequency / cyclic_probability; |
2e3c56e8 | 3360 | } |
f81d9f78 | 3361 | } |
3362 | ||
b1bb9b10 | 3363 | bitmap_clear_bit (tovisit, bb->index); |
f81d9f78 | 3364 | |
c6356c17 | 3365 | e = find_edge (bb, head); |
3366 | if (e) | |
3367 | { | |
c6356c17 | 3368 | /* EDGE_INFO (e)->back_edge_prob |
3369 | = ((e->probability * BLOCK_INFO (bb)->frequency) | |
3370 | / REG_BR_PROB_BASE); */ | |
48e1416a | 3371 | |
205ce1aa | 3372 | /* FIXME: Graphite is producing edges with no profile. Once |
3373 | this is fixed, drop this. */ | |
3374 | sreal tmp = e->probability.initialized_p () ? | |
3375 | e->probability.to_reg_br_prob_base () : 0; | |
23a92fc7 | 3376 | tmp *= BLOCK_INFO (bb)->frequency; |
3377 | EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base; | |
c6356c17 | 3378 | } |
f81d9f78 | 3379 | |
e725f898 | 3380 | /* Propagate to successor blocks. */ |
cd665a06 | 3381 | FOR_EACH_EDGE (e, ei, bb->succs) |
312866af | 3382 | if (!(e->flags & EDGE_DFS_BACK) |
4ad72a03 | 3383 | && BLOCK_INFO (e->dest)->npredecessors) |
f81d9f78 | 3384 | { |
4ad72a03 | 3385 | BLOCK_INFO (e->dest)->npredecessors--; |
3386 | if (!BLOCK_INFO (e->dest)->npredecessors) | |
312866af | 3387 | { |
3388 | if (!nextbb) | |
3389 | nextbb = e->dest; | |
3390 | else | |
3391 | BLOCK_INFO (last)->next = e->dest; | |
48e1416a | 3392 | |
312866af | 3393 | last = e->dest; |
3394 | } | |
cd665a06 | 3395 | } |
f81d9f78 | 3396 | } |
3397 | } | |
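/* A small worked example of the propagation above (all probabilities are
   invented): with the head at frequency 1 and a condition whose outgoing
   probabilities are 0.3 and 0.7, the two arms receive frequencies 0.3 and
   0.7 and their join receives 0.3 + 0.7 = 1.  If the join is also the loop
   latch and its back edge is taken with probability 0.75, back_edge_prob is
   recorded as 0.75 * 1 = 0.75, so when the enclosing region is propagated
   the loop header gets frequency incoming / (1 - 0.75), i.e. four times the
   frequency that enters the loop.  */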
3398 | ||
5327650f | 3399 | /* Estimate frequencies in loops at same nest level. */ |
e6751e9a | 3400 | |
f81d9f78 | 3401 | static void |
2e966e2a | 3402 | estimate_loops_at_level (class loop *first_loop) |
f81d9f78 | 3403 | { |
2e966e2a | 3404 | class loop *loop; |
f81d9f78 | 3405 | |
3406 | for (loop = first_loop; loop; loop = loop->next) | |
3407 | { | |
f81d9f78 | 3408 | edge e; |
7fb12188 | 3409 | basic_block *bbs; |
862be747 | 3410 | unsigned i; |
035def86 | 3411 | auto_bitmap tovisit; |
f81d9f78 | 3412 | |
88e6f696 | 3413 | estimate_loops_at_level (loop->inner); |
d598ad0d | 3414 | |
88e6f696 | 3415 | /* Find current loop back edge and mark it. */ |
3416 | e = loop_latch_edge (loop); | |
3417 | EDGE_INFO (e)->back_edge = 1; | |
7fb12188 | 3418 | |
3419 | bbs = get_loop_body (loop); | |
3420 | for (i = 0; i < loop->num_nodes; i++) | |
b1bb9b10 | 3421 | bitmap_set_bit (tovisit, bbs[i]->index); |
7fb12188 | 3422 | free (bbs); |
88e6f696 | 3423 | propagate_freq (loop->header, tovisit); |
f81d9f78 | 3424 | } |
3425 | } | |
3426 | ||
fa7637bd | 3427 | /* Propagates frequencies through structure of loops. */ |
88e6f696 | 3428 | |
3429 | static void | |
7194de72 | 3430 | estimate_loops (void) |
88e6f696 | 3431 | { |
035def86 | 3432 | auto_bitmap tovisit; |
88e6f696 | 3433 | basic_block bb; |
3434 | ||
3435 | /* Start by estimating the frequencies in the loops. */ | |
41f75a99 | 3436 | if (number_of_loops (cfun) > 1) |
7194de72 | 3437 | estimate_loops_at_level (current_loops->tree_root->inner); |
88e6f696 | 3438 | |
3439 | /* Now propagate the frequencies through all the blocks. */ | |
ed7d889a | 3440 | FOR_ALL_BB_FN (bb, cfun) |
88e6f696 | 3441 | { |
3442 | bitmap_set_bit (tovisit, bb->index); | |
3443 | } | |
34154e27 | 3444 | propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit); |
88e6f696 | 3445 | } |
3446 | ||
38a65d4e | 3447 | /* Drop the profile for NODE to guessed, and update its frequency based on |
901d3ddc | 3448 | whether it is expected to be hot given the CALL_COUNT. */ |
38a65d4e | 3449 | |
3450 | static void | |
db9cef39 | 3451 | drop_profile (struct cgraph_node *node, profile_count call_count) |
38a65d4e | 3452 | { |
3453 | struct function *fn = DECL_STRUCT_FUNCTION (node->decl); | |
901d3ddc | 3454 | /* In the case where this was called by another function with a |
3455 | dropped profile, call_count will be 0. Since there are no | |
3456 | non-zero call counts to this function, we don't know for sure | |
3457 | whether it is hot, and therefore it will be marked normal below. */ | |
3458 | bool hot = maybe_hot_count_p (NULL, call_count); | |
38a65d4e | 3459 | |
3460 | if (dump_file) | |
3461 | fprintf (dump_file, | |
0e388735 | 3462 | "Dropping 0 profile for %s. %s based on calls.\n", |
3463 | node->dump_name (), | |
3464 | hot ? "Function is hot" : "Function is normal"); | |
38a65d4e | 3465 | /* We only expect to miss profiles for functions that are reached |
3466 | via non-zero call edges in cases where the function may have | |
3467 | been linked from another module or library (COMDATs and extern | |
901d3ddc | 3468 | templates). See the comments below for handle_missing_profiles. |
3469 | Also, only warn in cases where the missing counts exceed the | |
3470 | number of training runs. In certain cases with an execv followed | |
3471 | by a no-return call the profile for the no-return call is not | |
3472 | dumped and there can be a mismatch. */ | |
3473 | if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl) | |
3474 | && call_count > profile_info->runs) | |
38a65d4e | 3475 | { |
3476 | if (flag_profile_correction) | |
3477 | { | |
3478 | if (dump_file) | |
3479 | fprintf (dump_file, | |
0e388735 | 3480 | "Missing counts for called function %s\n", |
3481 | node->dump_name ()); | |
38a65d4e | 3482 | } |
3483 | else | |
0e388735 | 3484 | warning (0, "Missing counts for called function %s", |
3485 | node->dump_name ()); | |
38a65d4e | 3486 | } |
3487 | ||
2cbbeb51 | 3488 | basic_block bb; |
22af35f6 | 3489 | if (opt_for_fn (node->decl, flag_guess_branch_prob)) |
205ce1aa | 3490 | { |
3491 | bool clear_zeros | |
22af35f6 | 3492 | = !ENTRY_BLOCK_PTR_FOR_FN (fn)->count.nonzero_p (); |
205ce1aa | 3493 | FOR_ALL_BB_FN (bb, fn) |
3494 | if (clear_zeros || !(bb->count == profile_count::zero ())) | |
3495 | bb->count = bb->count.guessed_local (); | |
22af35f6 | 3496 | fn->cfg->count_max = fn->cfg->count_max.guessed_local (); |
205ce1aa | 3497 | } |
3498 | else | |
2cbbeb51 | 3499 | { |
205ce1aa | 3500 | FOR_ALL_BB_FN (bb, fn) |
3501 | bb->count = profile_count::uninitialized (); | |
22af35f6 | 3502 | fn->cfg->count_max = profile_count::uninitialized (); |
2cbbeb51 | 3503 | } |
3504 | ||
3505 | struct cgraph_edge *e; | |
151b9ff5 | 3506 | for (e = node->callees; e; e = e->next_callee) |
3507 | e->count = gimple_bb (e->call_stmt)->count; | |
3508 | for (e = node->indirect_calls; e; e = e->next_callee) | |
3509 | e->count = gimple_bb (e->call_stmt)->count; | |
22af35f6 | 3510 | node->count = ENTRY_BLOCK_PTR_FOR_FN (fn)->count; |
2cbbeb51 | 3511 | |
3bedbae3 | 3512 | profile_status_for_fn (fn) |
38a65d4e | 3513 | = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT); |
3514 | node->frequency | |
3515 | = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL; | |
3516 | } | |
3517 | ||
3518 | /* In the case of COMDAT routines, multiple object files will contain the same | |
3519 | function and the linker will select one for the binary. In that case | |
3520 | all the other copies from the profile-instrumented binary will be missing | |
3521 | profile counts. Look for cases where this happened, due to non-zero | |
3522 | call counts going to 0-count functions, and drop the profile to guessed | |
3523 | so that we can use the estimated probabilities and avoid optimizing only | |
3524 | for size. | |
3525 | ||
3526 | The other case where the profile may be missing is when the routine | |
3527 | is not going to be emitted to the object file, e.g. for "extern template" | |
3528 | class methods. Those will be marked DECL_EXTERNAL. Emit a warning in | |
3529 | all other cases of non-zero calls to 0-count functions. */ | |
3530 | ||
3531 | void | |
3532 | handle_missing_profiles (void) | |
3533 | { | |
8070b8d5 | 3534 | const int unlikely_frac = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION); |
38a65d4e | 3535 | struct cgraph_node *node; |
e9258aee | 3536 | auto_vec<struct cgraph_node *, 64> worklist; |
38a65d4e | 3537 | |
3538 | /* See if 0 count function has non-0 count callers. In this case we | |
3539 | lost some profile. Drop its function profile to PROFILE_GUESSED. */ | |
3540 | FOR_EACH_DEFINED_FUNCTION (node) | |
3541 | { | |
3542 | struct cgraph_edge *e; | |
db9cef39 | 3543 | profile_count call_count = profile_count::zero (); |
af48f0b1 | 3544 | gcov_type max_tp_first_run = 0; |
38a65d4e | 3545 | struct function *fn = DECL_STRUCT_FUNCTION (node->decl); |
3546 | ||
22af35f6 | 3547 | if (node->count.ipa ().nonzero_p ()) |
38a65d4e | 3548 | continue; |
3549 | for (e = node->callers; e; e = e->next_caller) | |
22af35f6 | 3550 | if (e->count.ipa ().initialized_p () && e->count.ipa () > 0) |
db9cef39 | 3551 | { |
22af35f6 | 3552 | call_count = call_count + e->count.ipa (); |
af48f0b1 | 3553 | |
db9cef39 | 3554 | if (e->caller->tp_first_run > max_tp_first_run) |
3555 | max_tp_first_run = e->caller->tp_first_run; | |
3556 | } | |
af48f0b1 | 3557 | |
3558 | /* If the time profile is missing, assign the maximum that comes from the | |
3559 | caller functions. */ | |
3560 | if (!node->tp_first_run && max_tp_first_run) | |
3561 | node->tp_first_run = max_tp_first_run + 1; | |
3562 | ||
db9cef39 | 3563 | if (call_count > 0 |
38a65d4e | 3564 | && fn && fn->cfg |
8070b8d5 | 3565 | && call_count.apply_scale (unlikely_frac, 1) >= profile_info->runs) |
38a65d4e | 3566 | { |
901d3ddc | 3567 | drop_profile (node, call_count); |
38a65d4e | 3568 | worklist.safe_push (node); |
3569 | } | |
3570 | } | |
3571 | ||
3572 | /* Propagate the profile dropping to other 0-count COMDATs that are | |
3573 | potentially called by COMDATs we already dropped the profile on. */ | |
3574 | while (worklist.length () > 0) | |
3575 | { | |
3576 | struct cgraph_edge *e; | |
3577 | ||
3578 | node = worklist.pop (); | |
3579 | for (e = node->callees; e; e = e->next_caller) | |
3580 | { | |
3581 | struct cgraph_node *callee = e->callee; | |
3582 | struct function *fn = DECL_STRUCT_FUNCTION (callee->decl); | |
3583 | ||
22af35f6 | 3584 | if (!(e->count.ipa () == profile_count::zero ()) |
3585 | && callee->count.ipa ().nonzero_p ()) | |
38a65d4e | 3586 | continue; |
2cbbeb51 | 3587 | if ((DECL_COMDAT (callee->decl) || DECL_EXTERNAL (callee->decl)) |
3588 | && fn && fn->cfg | |
3bedbae3 | 3589 | && profile_status_for_fn (fn) == PROFILE_READ) |
38a65d4e | 3590 | { |
db9cef39 | 3591 | drop_profile (node, profile_count::zero ()); |
38a65d4e | 3592 | worklist.safe_push (callee); |
3593 | } | |
3594 | } | |
3595 | } | |
38a65d4e | 3596 | } |
3597 | ||
3f18719c | 3598 | /* Record the maximal basic block count in cfun->cfg->count_max. |
3599 | Return true iff there was any nonzero IPA execution count. */ |
e6751e9a | 3600 | |
db9cef39 | 3601 | bool |
688b6bc6 | 3602 | update_max_bb_count (void) |
f81d9f78 | 3603 | { |
205ce1aa | 3604 | profile_count true_count_max = profile_count::uninitialized (); |
4c26117a | 3605 | basic_block bb; |
b3d6de89 | 3606 | |
34154e27 | 3607 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
84895d28 | 3608 | true_count_max = true_count_max.max (bb->count); |
db9cef39 | 3609 | |
205ce1aa | 3610 | cfun->cfg->count_max = true_count_max; |
2cbbeb51 | 3611 | |
688b6bc6 | 3612 | return true_count_max.ipa ().nonzero_p (); |
f81d9f78 | 3613 | } |
3614 | ||
e6751e9a | 3615 | /* Return true if the function is likely to be expensive, so there is no point |
3616 | in optimizing the performance of the prologue or epilogue, or doing inlining | |
41a6f238 | 3617 | at the expense of code size growth. THRESHOLD is the limit on the number of |
e6751e9a | 3618 | instructions the function can execute on average to still be considered not expensive. */ |
3619 | ||
f4c0c1a2 | 3620 | bool |
d598ad0d | 3621 | expensive_function_p (int threshold) |
f4c0c1a2 | 3622 | { |
4c26117a | 3623 | basic_block bb; |
f4c0c1a2 | 3624 | |
688b6bc6 | 3625 | /* If the profile was scaled in such a way that the entry block has count 0, |
3626 | then the function is definitely taking a lot of time. */ | |
3627 | if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ()) | |
f4c0c1a2 | 3628 | return true; |
195731ad | 3629 | |
688b6bc6 | 3630 | profile_count limit = ENTRY_BLOCK_PTR_FOR_FN |
3631 | (cfun)->count.apply_scale (threshold, 1); | |
3632 | profile_count sum = profile_count::zero (); | |
fc00614f | 3633 | FOR_EACH_BB_FN (bb, cfun) |
f4c0c1a2 | 3634 | { |
ee5f6585 | 3635 | rtx_insn *insn; |
f4c0c1a2 | 3636 | |
688b6bc6 | 3637 | if (!bb->count.initialized_p ()) |
3638 | { | |
3639 | if (dump_file) | |
3640 | fprintf (dump_file, "Function is considered expensive because" | |
3641 | " count of bb %i is not initialized\n", bb->index); | |
3642 | return true; | |
3643 | } | |
3644 | ||
91f71fa3 | 3645 | FOR_BB_INSNS (bb, insn) |
e6751e9a | 3646 | if (active_insn_p (insn)) |
3647 | { | |
688b6bc6 | 3648 | sum += bb->count; |
e6751e9a | 3649 | if (sum > limit) |
3650 | return true; | |
f4c0c1a2 | 3651 | } |
3652 | } | |
e6751e9a | 3653 | |
f4c0c1a2 | 3654 | return false; |
3655 | } | |
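/* For instance (threshold and counts invented): with THRESHOLD == 100 and an
   entry-block count of 1000, the limit is 100000; as soon as the per-BB
   counts summed over active insns exceed that limit, the function is
   considered expensive.  */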
3656 | ||
84242aee | 3657 | /* All basic blocks that are reachable only from unlikely basic blocks are |
3658 | unlikely. */ | |
3659 | ||
3660 | void | |
3661 | propagate_unlikely_bbs_forward (void) | |
3662 | { | |
3663 | auto_vec<basic_block, 64> worklist; | |
3664 | basic_block bb; | |
3665 | edge_iterator ei; | |
3666 | edge e; | |
3667 | ||
3668 | if (!(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())) | |
3669 | { | |
3670 | ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(size_t) 1; | |
3671 | worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | |
3672 | ||
3673 | while (worklist.length () > 0) | |
3674 | { | |
3675 | bb = worklist.pop (); | |
3676 | FOR_EACH_EDGE (e, ei, bb->succs) | |
ea5d3981 | 3677 | if (!(e->count () == profile_count::zero ()) |
84242aee | 3678 | && !(e->dest->count == profile_count::zero ()) |
3679 | && !e->dest->aux) | |
3680 | { | |
3681 | e->dest->aux = (void *)(size_t) 1; | |
3682 | worklist.safe_push (e->dest); | |
3683 | } | |
3684 | } | |
3685 | } | |
3686 | ||
3687 | FOR_ALL_BB_FN (bb, cfun) | |
3688 | { | |
3689 | if (!bb->aux) | |
3690 | { | |
3691 | if (!(bb->count == profile_count::zero ()) | |
3692 | && (dump_file && (dump_flags & TDF_DETAILS))) | |
3693 | fprintf (dump_file, | |
3694 | "Basic block %i is marked unlikely by forward prop\n", | |
3695 | bb->index); | |
3696 | bb->count = profile_count::zero (); | |
84242aee | 3697 | } |
3698 | else | |
3699 | bb->aux = NULL; | |
3700 | } | |
3701 | } | |
3702 | ||
f08c22c4 | 3703 | /* Determine basic blocks/edges that are known to be unlikely executed and set |
3704 | their counters to zero. | |
3705 | This is done by first identifying obviously unlikely BBs/edges and then | |
3706 | propagating in both directions. */ | |
3707 | ||
3708 | static void | |
3709 | determine_unlikely_bbs () | |
3710 | { | |
3711 | basic_block bb; | |
3712 | auto_vec<basic_block, 64> worklist; | |
3713 | edge_iterator ei; | |
3714 | edge e; | |
3715 | ||
3716 | FOR_EACH_BB_FN (bb, cfun) | |
3717 | { | |
3718 | if (!(bb->count == profile_count::zero ()) | |
3719 | && unlikely_executed_bb_p (bb)) | |
3720 | { | |
3721 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3722 | fprintf (dump_file, "Basic block %i is locally unlikely\n", | |
3723 | bb->index); | |
3724 | bb->count = profile_count::zero (); | |
3725 | } | |
3726 | ||
f08c22c4 | 3727 | FOR_EACH_EDGE (e, ei, bb->succs) |
ea5d3981 | 3728 | if (!(e->probability == profile_probability::never ()) |
f08c22c4 | 3729 | && unlikely_executed_edge_p (e)) |
3730 | { | |
3731 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3732 | fprintf (dump_file, "Edge %i->%i is locally unlikely\n", | |
3733 | bb->index, e->dest->index); | |
ea5d3981 | 3734 | e->probability = profile_probability::never (); |
f08c22c4 | 3735 | } |
3736 | ||
3737 | gcc_checking_assert (!bb->aux); | |
3738 | } | |
3f9a545c | 3739 | propagate_unlikely_bbs_forward (); |
f08c22c4 | 3740 | |
f08c22c4 | 3741 | auto_vec<int, 64> nsuccs; |
3742 | nsuccs.safe_grow_cleared (last_basic_block_for_fn (cfun)); | |
3743 | FOR_ALL_BB_FN (bb, cfun) | |
3744 | if (!(bb->count == profile_count::zero ()) | |
3745 | && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
3746 | { | |
3747 | nsuccs[bb->index] = 0; | |
3748 | FOR_EACH_EDGE (e, ei, bb->succs) | |
ea5d3981 | 3749 | if (!(e->probability == profile_probability::never ()) |
3750 | && !(e->dest->count == profile_count::zero ())) | |
f08c22c4 | 3751 | nsuccs[bb->index]++; |
3752 | if (!nsuccs[bb->index]) | |
3753 | worklist.safe_push (bb); | |
3754 | } | |
3755 | while (worklist.length () > 0) | |
3756 | { | |
3757 | bb = worklist.pop (); | |
6595f9c7 | 3758 | if (bb->count == profile_count::zero ()) |
3759 | continue; | |
f08c22c4 | 3760 | if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
3761 | { | |
3762 | bool found = false; | |
3763 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); | |
3764 | !gsi_end_p (gsi); gsi_next (&gsi)) | |
3765 | if (stmt_can_terminate_bb_p (gsi_stmt (gsi)) | |
3766 | /* stmt_can_terminate_bb_p special cases noreturns because it | |
3767 | assumes that fake edges are created. We want to know that | |
3768 | noreturn alone does not imply BB to be unlikely. */ | |
3769 | || (is_gimple_call (gsi_stmt (gsi)) | |
3770 | && (gimple_call_flags (gsi_stmt (gsi)) & ECF_NORETURN))) | |
3771 | { | |
3772 | found = true; | |
3773 | break; | |
3774 | } | |
3775 | if (found) | |
3776 | continue; | |
3777 | } | |
6595f9c7 | 3778 | if (dump_file && (dump_flags & TDF_DETAILS)) |
f08c22c4 | 3779 | fprintf (dump_file, |
3780 | "Basic block %i is marked unlikely by backward prop\n", | |
3781 | bb->index); | |
3782 | bb->count = profile_count::zero (); | |
f08c22c4 | 3783 | FOR_EACH_EDGE (e, ei, bb->preds) |
ea5d3981 | 3784 | if (!(e->probability == profile_probability::never ())) |
f08c22c4 | 3785 | { |
f08c22c4 | 3786 | if (!(e->src->count == profile_count::zero ())) |
3787 | { | |
6595f9c7 | 3788 | gcc_checking_assert (nsuccs[e->src->index] > 0); |
f08c22c4 | 3789 | nsuccs[e->src->index]--; |
3790 | if (!nsuccs[e->src->index]) | |
3791 | worklist.safe_push (e->src); | |
3792 | } | |
3793 | } | |
3794 | } | |
3f9a545c | 3795 | /* Finally all edges from non-0 regions to 0 are unlikely. */ |
3796 | FOR_ALL_BB_FN (bb, cfun) | |
63c3361d | 3797 | { |
3798 | if (!(bb->count == profile_count::zero ())) | |
3799 | FOR_EACH_EDGE (e, ei, bb->succs) | |
3800 | if (!(e->probability == profile_probability::never ()) | |
3801 | && e->dest->count == profile_count::zero ()) | |
3802 | { | |
3803 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3804 | fprintf (dump_file, "Edge %i->%i is unlikely because " | |
3805 | "it enters unlikely block\n", | |
3806 | bb->index, e->dest->index); | |
3807 | e->probability = profile_probability::never (); | |
3808 | } | |
3809 | ||
3810 | edge other = NULL; | |
3811 | ||
3f9a545c | 3812 | FOR_EACH_EDGE (e, ei, bb->succs) |
63c3361d | 3813 | if (e->probability == profile_probability::never ()) |
3814 | ; | |
3815 | else if (other) | |
3816 | { | |
3817 | other = NULL; | |
3818 | break; | |
3819 | } | |
3820 | else | |
3821 | other = e; | |
3822 | if (other | |
3823 | && !(other->probability == profile_probability::always ())) | |
3824 | { | |
3825 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3826 | fprintf (dump_file, "Edge %i->%i is locally likely\n", | |
3827 | bb->index, other->dest->index); | |
3828 | other->probability = profile_probability::always (); | |
3829 | } | |
3830 | } | |
7ae0128a | 3831 | if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()) |
3832 | cgraph_node::get (current_function_decl)->count = profile_count::zero (); | |
f08c22c4 | 3833 | } |
3834 | ||
5327650f | 3835 | /* Estimate and propagate basic block frequencies using the given branch |
3836 | probabilities. If FORCE is true, the frequencies are used to estimate | |
3837 | the counts even when there are already non-zero profile counts. */ | |
e6751e9a | 3838 | |
4ae20857 | 3839 | void |
5327650f | 3840 | estimate_bb_frequencies (bool force) |
f81d9f78 | 3841 | { |
4c26117a | 3842 | basic_block bb; |
e9d7220b | 3843 | sreal freq_max; |
56ff4880 | 3844 | |
f08c22c4 | 3845 | determine_unlikely_bbs (); |
3846 | ||
3847 | if (force || profile_status_for_fn (cfun) != PROFILE_READ | |
688b6bc6 | 3848 | || !update_max_bb_count ()) |
429fa7fa | 3849 | { |
31e4010e | 3850 | static int real_values_initialized = 0; |
3851 | ||
3852 | if (!real_values_initialized) | |
3853 | { | |
fc22704f | 3854 | real_values_initialized = 1; |
8201d1f6 | 3855 | real_br_prob_base = REG_BR_PROB_BASE; |
7ae0128a | 3856 | /* Scaling frequencies up to maximal profile count may result in |
3857 | frequent overflows especially when inlining loops. | |
3858 | Small scaling results in unnecessary precision loss. Stay in | |
3859 | the middle of the (exponential) range. */ | |
3860 | real_bb_freq_max = (uint64_t)1 << (profile_count::n_bits / 2); | |
23a92fc7 | 3861 | real_one_half = sreal (1, -1); |
8201d1f6 | 3862 | real_inv_br_prob_base = sreal (1) / real_br_prob_base; |
3863 | real_almost_one = sreal (1) - real_inv_br_prob_base; | |
31e4010e | 3864 | } |
f81d9f78 | 3865 | |
429fa7fa | 3866 | mark_dfs_back_edges (); |
429fa7fa | 3867 | |
34154e27 | 3868 | single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability = |
720cfc43 | 3869 | profile_probability::always (); |
429fa7fa | 3870 | |
3871 | /* Set up block info for each basic block. */ | |
9908fe4d | 3872 | alloc_aux_for_blocks (sizeof (block_info)); |
3873 | alloc_aux_for_edges (sizeof (edge_prob_info)); | |
34154e27 | 3874 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
f81d9f78 | 3875 | { |
f81d9f78 | 3876 | edge e; |
cd665a06 | 3877 | edge_iterator ei; |
429fa7fa | 3878 | |
cd665a06 | 3879 | FOR_EACH_EDGE (e, ei, bb->succs) |
f81d9f78 | 3880 | { |
205ce1aa | 3881 | /* FIXME: Graphite is producing edges with no profile. Once |
3882 | this is fixed, drop this. */ | |
3883 | if (e->probability.initialized_p ()) | |
3884 | EDGE_INFO (e)->back_edge_prob | |
3885 | = e->probability.to_reg_br_prob_base (); | |
3886 | else | |
3887 | EDGE_INFO (e)->back_edge_prob = REG_BR_PROB_BASE / 2; | |
23a92fc7 | 3888 | EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base; |
f81d9f78 | 3889 | } |
f81d9f78 | 3890 | } |
e6751e9a | 3891 | |
5327650f | 3892 | /* First compute frequencies locally for each loop from innermost |
3893 | to outermost to examine frequencies for back edges. */ | |
7194de72 | 3894 | estimate_loops (); |
f81d9f78 | 3895 | |
8201d1f6 | 3896 | freq_max = 0; |
fc00614f | 3897 | FOR_EACH_BB_FN (bb, cfun) |
23a92fc7 | 3898 | if (freq_max < BLOCK_INFO (bb)->frequency) |
3899 | freq_max = BLOCK_INFO (bb)->frequency; | |
2e3c56e8 | 3900 | |
23a92fc7 | 3901 | freq_max = real_bb_freq_max / freq_max; |
7ae0128a | 3902 | if (freq_max < 16) |
3903 | freq_max = 16; | |
ed0831a9 | 3904 | profile_count ipa_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa (); |
205ce1aa | 3905 | cfun->cfg->count_max = profile_count::uninitialized (); |
34154e27 | 3906 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
56ff4880 | 3907 | { |
23a92fc7 | 3908 | sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half; |
205ce1aa | 3909 | profile_count count = profile_count::from_gcov_type (tmp.to_int ()); |
3910 | ||
3911 | /* If we have profile feedback in which this function was never | |
3912 | executed, then preserve this info. */ | |
ed0831a9 | 3913 | if (!(bb->count == profile_count::zero ())) |
3914 | bb->count = count.guessed_local ().combine_with_ipa_count (ipa_count); | |
205ce1aa | 3915 | cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count); |
429fa7fa | 3916 | } |
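/* Illustration of the scaling above (numbers invented): if the hottest block
   ended up with relative frequency 2.5, freq_max becomes
   real_bb_freq_max / 2.5, so that block receives a guessed local count of
   about real_bb_freq_max while a block with frequency 1.0 receives about
   40% of it.  */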
e6751e9a | 3917 | |
429fa7fa | 3918 | free_aux_for_blocks (); |
3919 | free_aux_for_edges (); | |
3920 | } | |
3921 | compute_function_frequency (); | |
429fa7fa | 3922 | } |
f81d9f78 | 3923 | |
429fa7fa | 3924 | /* Decide whether function is hot, cold or unlikely executed. */ |
63aab97d | 3925 | void |
d598ad0d | 3926 | compute_function_frequency (void) |
429fa7fa | 3927 | { |
4c26117a | 3928 | basic_block bb; |
415d1b9a | 3929 | struct cgraph_node *node = cgraph_node::get (current_function_decl); |
e27f29dd | 3930 | |
0f9fb931 | 3931 | if (DECL_STATIC_CONSTRUCTOR (current_function_decl) |
3932 | || MAIN_NAME_P (DECL_NAME (current_function_decl))) | |
3933 | node->only_called_at_startup = true; | |
3934 | if (DECL_STATIC_DESTRUCTOR (current_function_decl)) | |
3935 | node->only_called_at_exit = true; | |
4c26117a | 3936 | |
f26d8580 | 3937 | if (profile_status_for_fn (cfun) != PROFILE_READ) |
5de92639 | 3938 | { |
125b6d78 | 3939 | int flags = flags_from_decl_or_type (current_function_decl); |
205ce1aa | 3940 | if ((ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa_p () |
3941 | && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa() == profile_count::zero ()) | |
f08c22c4 | 3942 | || lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)) |
3943 | != NULL) | |
60722a03 | 3944 | { |
3945 | node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED; | |
3946 | warn_function_cold (current_function_decl); | |
3947 | } | |
5de92639 | 3948 | else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl)) |
3949 | != NULL) | |
125b6d78 | 3950 | node->frequency = NODE_FREQUENCY_HOT; |
3951 | else if (flags & ECF_NORETURN) | |
3952 | node->frequency = NODE_FREQUENCY_EXECUTED_ONCE; | |
3953 | else if (MAIN_NAME_P (DECL_NAME (current_function_decl))) | |
3954 | node->frequency = NODE_FREQUENCY_EXECUTED_ONCE; | |
3955 | else if (DECL_STATIC_CONSTRUCTOR (current_function_decl) | |
3956 | || DECL_STATIC_DESTRUCTOR (current_function_decl)) | |
3957 | node->frequency = NODE_FREQUENCY_EXECUTED_ONCE; | |
5de92639 | 3958 | return; |
3959 | } | |
e27f29dd | 3960 | |
f12452a9 | 3961 | node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED; |
3962 | warn_function_cold (current_function_decl); | |
3963 | if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa() == profile_count::zero ()) | |
3964 | return; | |
fc00614f | 3965 | FOR_EACH_BB_FN (bb, cfun) |
f81d9f78 | 3966 | { |
8d672d12 | 3967 | if (maybe_hot_bb_p (cfun, bb)) |
429fa7fa | 3968 | { |
125b6d78 | 3969 | node->frequency = NODE_FREQUENCY_HOT; |
429fa7fa | 3970 | return; |
3971 | } | |
8d672d12 | 3972 | if (!probably_never_executed_bb_p (cfun, bb)) |
125b6d78 | 3973 | node->frequency = NODE_FREQUENCY_NORMAL; |
f81d9f78 | 3974 | } |
429fa7fa | 3975 | } |
f81d9f78 | 3976 | |
4a1849e3 | 3977 | /* Build PREDICT_EXPR. */ |
3978 | tree | |
3979 | build_predict_expr (enum br_predictor predictor, enum prediction taken) | |
3980 | { | |
08f62b1b | 3981 | tree t = build1 (PREDICT_EXPR, void_type_node, |
b3d480fb | 3982 | build_int_cst (integer_type_node, predictor)); |
b9c74b4d | 3983 | SET_PREDICT_EXPR_OUTCOME (t, taken); |
4a1849e3 | 3984 | return t; |
3985 | } | |
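/* For example (illustrative use only):
     tree hint = build_predict_expr (PRED_TREE_EARLY_RETURN, NOT_TAKEN);
   builds a void PREDICT_EXPR that a front end or lowering pass can emit into
   the IL; it is later turned into a GIMPLE_PREDICT statement and consumed by
   the heuristics above.  */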
3986 | ||
3987 | const char * | |
3988 | predictor_name (enum br_predictor predictor) | |
3989 | { | |
3990 | return predictor_info[predictor].name; | |
3991 | } | |
3992 | ||
65b0537f | 3993 | /* Predict branch probabilities and estimate profile of the tree CFG. */ |
3994 | ||
cbe8bda8 | 3995 | namespace { |
3996 | ||
3997 | const pass_data pass_data_profile = | |
3998 | { | |
3999 | GIMPLE_PASS, /* type */ | |
4000 | "profile_estimate", /* name */ | |
4001 | OPTGROUP_NONE, /* optinfo_flags */ | |
cbe8bda8 | 4002 | TV_BRANCH_PROB, /* tv_id */ |
4003 | PROP_cfg, /* properties_required */ | |
4004 | 0, /* properties_provided */ | |
4005 | 0, /* properties_destroyed */ | |
4006 | 0, /* todo_flags_start */ | |
8b88439e | 4007 | 0, /* todo_flags_finish */ |
4ee9c684 | 4008 | }; |
1add270f | 4009 | |
cbe8bda8 | 4010 | class pass_profile : public gimple_opt_pass |
4011 | { | |
4012 | public: | |
9af5ce0c | 4013 | pass_profile (gcc::context *ctxt) |
4014 | : gimple_opt_pass (pass_data_profile, ctxt) | |
cbe8bda8 | 4015 | {} |
4016 | ||
4017 | /* opt_pass methods: */ | |
31315c24 | 4018 | virtual bool gate (function *) { return flag_guess_branch_prob; } |
65b0537f | 4019 | virtual unsigned int execute (function *); |
cbe8bda8 | 4020 | |
4021 | }; // class pass_profile | |
4022 | ||
65b0537f | 4023 | unsigned int |
4024 | pass_profile::execute (function *fun) | |
4025 | { | |
4026 | unsigned nb_loops; | |
4027 | ||
3a9f48e7 | 4028 | if (profile_status_for_fn (cfun) == PROFILE_GUESSED) |
4029 | return 0; | |
4030 | ||
65b0537f | 4031 | loop_optimizer_init (LOOPS_NORMAL); |
4032 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4033 | flow_loops_dump (dump_file, NULL, 0); | |
4034 | ||
4035 | mark_irreducible_loops (); | |
4036 | ||
4037 | nb_loops = number_of_loops (fun); | |
4038 | if (nb_loops > 1) | |
4039 | scev_initialize (); | |
4040 | ||
5a5f50e9 | 4041 | tree_estimate_probability (false); |
65b0537f | 4042 | |
4043 | if (nb_loops > 1) | |
4044 | scev_finalize (); | |
4045 | ||
4046 | loop_optimizer_finalize (); | |
4047 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4048 | gimple_dump_cfg (dump_file, dump_flags); | |
4049 | if (profile_status_for_fn (fun) == PROFILE_ABSENT) | |
4050 | profile_status_for_fn (fun) = PROFILE_GUESSED; | |
cbcc4297 | 4051 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4052 | { | |
2e966e2a | 4053 | class loop *loop; |
cbcc4297 | 4054 | FOR_EACH_LOOP (loop, LI_FROM_INNERMOST) |
205ce1aa | 4055 | if (loop->header->count.initialized_p ()) |
cbcc4297 | 4056 | fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n", |
4057 | loop->num, | |
4058 | (int)expected_loop_iterations_unbounded (loop)); | |
4059 | } | |
65b0537f | 4060 | return 0; |
4061 | } | |
4062 | ||
cbe8bda8 | 4063 | } // anon namespace |
4064 | ||
4065 | gimple_opt_pass * | |
4066 | make_pass_profile (gcc::context *ctxt) | |
4067 | { | |
4068 | return new pass_profile (ctxt); | |
4069 | } | |
4070 | ||
c03de6c9 | 4071 | /* Return true when PRED predictor should be removed after early |
4072 | tree passes. Most of the predictors are beneficial to keep, since | |
4073 | early inlining can also distribute them into callers' bodies. */ | |
cbe8bda8 | 4074 | |
c03de6c9 | 4075 | static bool |
4076 | strip_predictor_early (enum br_predictor pred) | |
cbe8bda8 | 4077 | { |
c03de6c9 | 4078 | switch (pred) |
4079 | { | |
4080 | case PRED_TREE_EARLY_RETURN: | |
4081 | return true; | |
4082 | default: | |
4083 | return false; | |
4084 | } | |
4085 | } | |
cbe8bda8 | 4086 | |
65b0537f | 4087 | /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements |
c03de6c9 | 4088 | we no longer need. EARLY is set to true when called from early |
4089 | optimizations. */ | |
4090 | ||
65b0537f | 4091 | unsigned int |
c03de6c9 | 4092 | strip_predict_hints (function *fun, bool early) |
65b0537f | 4093 | { |
4094 | basic_block bb; | |
42acab1c | 4095 | gimple *ass_stmt; |
65b0537f | 4096 | tree var; |
7349d65b | 4097 | bool changed = false; |
65b0537f | 4098 | |
4099 | FOR_EACH_BB_FN (bb, fun) | |
4100 | { | |
4101 | gimple_stmt_iterator bi; | |
4102 | for (bi = gsi_start_bb (bb); !gsi_end_p (bi);) | |
4103 | { | |
42acab1c | 4104 | gimple *stmt = gsi_stmt (bi); |
65b0537f | 4105 | |
4106 | if (gimple_code (stmt) == GIMPLE_PREDICT) | |
4107 | { | |
c03de6c9 | 4108 | if (!early |
4109 | || strip_predictor_early (gimple_predict_predictor (stmt))) | |
4110 | { | |
4111 | gsi_remove (&bi, true); | |
4112 | changed = true; | |
4113 | continue; | |
4114 | } | |
65b0537f | 4115 | } |
4116 | else if (is_gimple_call (stmt)) | |
4117 | { | |
4118 | tree fndecl = gimple_call_fndecl (stmt); | |
4119 | ||
01107f42 | 4120 | if (!early |
a0e9bfbb | 4121 | && ((fndecl != NULL_TREE |
4122 | && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT) | |
01107f42 | 4123 | && gimple_call_num_args (stmt) == 2) |
a0e9bfbb | 4124 | || (fndecl != NULL_TREE |
4125 | && fndecl_built_in_p (fndecl, | |
4126 | BUILT_IN_EXPECT_WITH_PROBABILITY) | |
01107f42 | 4127 | && gimple_call_num_args (stmt) == 3) |
4128 | || (gimple_call_internal_p (stmt) | |
4129 | && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT))) | |
65b0537f | 4130 | { |
4131 | var = gimple_call_lhs (stmt); | |
7349d65b | 4132 | changed = true; |
65b0537f | 4133 | if (var) |
4134 | { | |
4135 | ass_stmt | |
4136 | = gimple_build_assign (var, gimple_call_arg (stmt, 0)); | |
4137 | gsi_replace (&bi, ass_stmt, true); | |
4138 | } | |
4139 | else | |
4140 | { | |
4141 | gsi_remove (&bi, true); | |
4142 | continue; | |
4143 | } | |
4144 | } | |
4145 | } | |
4146 | gsi_next (&bi); | |
4147 | } | |
4148 | } | |
7349d65b | 4149 | return changed ? TODO_cleanup_cfg : 0; |
65b0537f | 4150 | } |
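/* Sketch of the effect of the late pass on __builtin_expect (the SSA names
   are invented):

     _1 = __builtin_expect (x_2, 1);   // before
     if (_1 != 0) ...

     _1 = x_2;                         // after stripping: the call's first
     if (_1 != 0) ...                  // argument simply replaces its result

   GIMPLE_PREDICT statements are deleted outright: all of them in the late
   pass, only PRED_TREE_EARLY_RETURN ones in the early pass.  */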
4151 | ||
c03de6c9 | 4152 | namespace { |
4153 | ||
4154 | const pass_data pass_data_strip_predict_hints = | |
4155 | { | |
4156 | GIMPLE_PASS, /* type */ | |
4157 | "*strip_predict_hints", /* name */ | |
4158 | OPTGROUP_NONE, /* optinfo_flags */ | |
4159 | TV_BRANCH_PROB, /* tv_id */ | |
4160 | PROP_cfg, /* properties_required */ | |
4161 | 0, /* properties_provided */ | |
4162 | 0, /* properties_destroyed */ | |
4163 | 0, /* todo_flags_start */ | |
4164 | 0, /* todo_flags_finish */ | |
4165 | }; | |
4166 | ||
4167 | class pass_strip_predict_hints : public gimple_opt_pass | |
4168 | { | |
4169 | public: | |
4170 | pass_strip_predict_hints (gcc::context *ctxt) | |
4171 | : gimple_opt_pass (pass_data_strip_predict_hints, ctxt) | |
4172 | {} | |
4173 | ||
4174 | /* opt_pass methods: */ | |
4175 | opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); } | |
4176 | void set_pass_param (unsigned int n, bool param) | |
4177 | { | |
4178 | gcc_assert (n == 0); | |
4179 | early_p = param; | |
4180 | } | |
4181 | ||
4182 | virtual unsigned int execute (function *); | |
4183 | ||
4184 | private: | |
4185 | bool early_p; | |
4186 | ||
4187 | }; // class pass_strip_predict_hints | |
4188 | ||
4189 | unsigned int | |
4190 | pass_strip_predict_hints::execute (function *fun) | |
4191 | { | |
4192 | return strip_predict_hints (fun, early_p); | |
4193 | } | |
4194 | ||
cbe8bda8 | 4195 | } // anon namespace |
4196 | ||
4197 | gimple_opt_pass * | |
4198 | make_pass_strip_predict_hints (gcc::context *ctxt) | |
4199 | { | |
4200 | return new pass_strip_predict_hints (ctxt); | |
4201 | } | |
4202 | ||
555e8b05 | 4203 | /* Rebuild function frequencies. Passes are in general expected to |
4204 | maintain the profile by hand; however, in some cases this is not possible: | |
4205 | for example, when inlining several functions with loops, frequencies might | |
4206 | run out of scale and thus need to be recomputed. */ | |
4207 | ||
4208 | void | |
4209 | rebuild_frequencies (void) | |
4210 | { | |
4b366dd3 | 4211 | timevar_push (TV_REBUILD_FREQUENCIES); |
5327650f | 4212 | |
4213 | /* When the max bb count in the function is small, there is a higher | |
4214 | chance that there were truncation errors in the integer scaling | |
4215 | of counts by inlining and other optimizations. This could lead | |
4216 | to incorrect classification of code as being cold when it isn't. | |
4217 | In that case, force the estimation of bb counts/frequencies from the | |
4218 | branch probabilities, rather than computing frequencies from counts, | |
4219 | which may also lead to frequencies incorrectly reduced to 0. There | |
4220 | is less precision in the probabilities, so we only do this for small | |
4221 | max counts. */ | |
205ce1aa | 4222 | cfun->cfg->count_max = profile_count::uninitialized (); |
5327650f | 4223 | basic_block bb; |
34154e27 | 4224 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
205ce1aa | 4225 | cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count); |
5327650f | 4226 | |
205ce1aa | 4227 | if (profile_status_for_fn (cfun) == PROFILE_GUESSED) |
555e8b05 | 4228 | { |
4229 | loop_optimizer_init (0); | |
4230 | add_noreturn_fake_exit_edges (); | |
4231 | mark_irreducible_loops (); | |
4232 | connect_infinite_loops_to_exit (); | |
5327650f | 4233 | estimate_bb_frequencies (true); |
555e8b05 | 4234 | remove_fake_exit_edges (); |
4235 | loop_optimizer_finalize (); | |
4236 | } | |
f26d8580 | 4237 | else if (profile_status_for_fn (cfun) == PROFILE_READ) |
688b6bc6 | 4238 | update_max_bb_count (); |
c06305e2 | 4239 | else if (profile_status_for_fn (cfun) == PROFILE_ABSENT |
4240 | && !flag_guess_branch_prob) | |
4241 | ; | |
555e8b05 | 4242 | else |
4243 | gcc_unreachable (); | |
4b366dd3 | 4244 | timevar_pop (TV_REBUILD_FREQUENCIES); |
555e8b05 | 4245 | } |
5a5f50e9 | 4246 | |
4247 | /* Perform a dry run of the branch prediction pass and report a comparison of | |
4248 | the predicted and real profile in the dump file. */ | |
4249 | ||
4250 | void | |
4251 | report_predictor_hitrates (void) | |
4252 | { | |
4253 | unsigned nb_loops; | |
4254 | ||
4255 | loop_optimizer_init (LOOPS_NORMAL); | |
4256 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4257 | flow_loops_dump (dump_file, NULL, 0); | |
4258 | ||
4259 | mark_irreducible_loops (); | |
4260 | ||
4261 | nb_loops = number_of_loops (cfun); | |
4262 | if (nb_loops > 1) | |
4263 | scev_initialize (); | |
4264 | ||
4265 | tree_estimate_probability (true); | |
4266 | ||
4267 | if (nb_loops > 1) | |
4268 | scev_finalize (); | |
4269 | ||
4270 | loop_optimizer_finalize (); | |
4271 | } | |
4272 | ||
eedd711b | 4273 | /* Force edge E to be cold. |
4274 | If IMPOSSIBLE is true, force the edge to have count and probability 0; | |
4275 | otherwise keep a low probability to represent a possible error in a guess. | |
4276 | This is used e.g. when we predict a loop to likely iterate a given number | |
4277 | of times but we are not 100% sure. | |
4278 | ||
4279 | This function locally updates profile without attempt to keep global | |
f4d3c071 | 4280 | consistency which cannot be reached in full generality without full profile |
eedd711b | 4281 | rebuild from probabilities alone. Doing so is not necessarily a good idea |
4282 | because frequencies and counts may be more realistic than probabilities. | |
4283 | ||
4284 | In some cases (such as for elimination of early exits during full loop | |
4285 | unrolling) the caller can ensure that the profile will become consistent | |
4286 | afterwards. */ | |
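/* For example (probabilities invented): with IMPOSSIBLE false, E is lowered
   to profile_probability::very_unlikely () and each remaining edge out of
   E->src is divided by prob_sum / (1 - very_unlikely), so the outgoing
   probabilities still sum to 1.  */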
4287 | ||
4288 | void | |
4289 | force_edge_cold (edge e, bool impossible) | |
4290 | { | |
db9cef39 | 4291 | profile_count count_sum = profile_count::zero (); |
720cfc43 | 4292 | profile_probability prob_sum = profile_probability::never (); |
eedd711b | 4293 | edge_iterator ei; |
4294 | edge e2; | |
caa64124 | 4295 | bool uninitialized_exit = false; |
eedd711b | 4296 | |
1b06efcb | 4297 | /* When branch probability guesses are not known, then do nothing. */ |
4298 | if (!impossible && !e->count ().initialized_p ()) | |
4299 | return; | |
4300 | ||
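/* GOAL is the largest probability we are willing to leave on E: zero when
   the edge is known to be impossible, a small nonzero value when it is only
   believed to be very unlikely.  */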
720cfc43 | 4301 | profile_probability goal = (impossible ? profile_probability::never () |
4302 | : profile_probability::very_unlikely ()); | |
4303 | ||
eedd711b | 4304 | /* If the edge is already improbable or cold, just return. */
720cfc43 | 4305 | if (e->probability <= goal |
ea5d3981 | 4306 | && (!impossible || e->count () == profile_count::zero ())) |
eedd711b | 4307 | return; |
4308 | FOR_EACH_EDGE (e2, ei, e->src->succs) | |
4309 | if (e2 != e) | |
4310 | { | |
1b06efcb | 4311 | if (e2->flags & EDGE_FAKE)
4312 | continue; | |
ea5d3981 | 4313 | if (e2->count ().initialized_p ()) |
4314 | count_sum += e2->count (); | |
720cfc43 | 4315 | if (e2->probability.initialized_p ()) |
4316 | prob_sum += e2->probability; | |
1b06efcb | 4317 | else |
4318 | uninitialized_exit = true; | |
eedd711b | 4319 | } |
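/* At this point COUNT_SUM and PROB_SUM hold the accumulated counts and
   probabilities of the remaining successor edges of E->src, and
   UNINITIALIZED_EXIT is set if any of them has no recorded probability.  */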
4320 | ||
1b06efcb | 4321 | /* If we are not guessing the profile and some other outgoing edges have
4322 | no recorded probability, just assume the control flow goes elsewhere. */
4323 | if (uninitialized_exit) | |
4324 | e->probability = goal; | |
eedd711b | 4325 | /* If there are other edges out of e->src, redistribute probability
4326 | there. */
1b06efcb | 4327 | else if (prob_sum > profile_probability::never ()) |
eedd711b | 4328 | { |
720cfc43 | 4329 | if (!(e->probability < goal)) |
4330 | e->probability = goal; | |
eedd711b | 4331 | |
720cfc43 | 4332 | profile_probability prob_comp = prob_sum / e->probability.invert (); |
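/* Dividing each remaining edge by PROB_COMP below scales the group by
   E->probability.invert () / PROB_SUM, so the other edges again sum to
   1 - E->probability and the outgoing probabilities stay normalized.  */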
4333 | ||
eedd711b | 4334 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4335 | fprintf (dump_file, "Making edge %i->%i %s by redistributing " | |
4336 | "probability to other edges.\n", | |
4337 | e->src->index, e->dest->index, | |
7b3e5dc9 | 4338 | impossible ? "impossible" : "cold"); |
eedd711b | 4339 | FOR_EACH_EDGE (e2, ei, e->src->succs) |
4340 | if (e2 != e) | |
4341 | { | |
720cfc43 | 4342 | e2->probability /= prob_comp; |
eedd711b | 4343 | } |
4bb697cd | 4344 | if (current_ir_type () != IR_GIMPLE |
4345 | && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)) | |
720cfc43 | 4346 | update_br_prob_note (e->src); |
eedd711b | 4347 | } |
4348 | /* If all edges out of e->src are unlikely, the basic block itself | |
4349 | is unlikely. */ | |
4350 | else | |
4351 | { | |
4bb697cd | 4352 | if (prob_sum == profile_probability::never ()) |
4353 | e->probability = profile_probability::always (); | |
4354 | else | |
4355 | { | |
4356 | if (impossible) | |
4357 | e->probability = profile_probability::never (); | |
f4d3c071 | 4358 | /* If BB has some edges out that are not impossible, we cannot |
4bb697cd | 4359 | assume that BB itself is. */ |
4360 | impossible = false; | |
4361 | } | |
4362 | if (current_ir_type () != IR_GIMPLE | |
4363 | && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)) | |
720cfc43 | 4364 | update_br_prob_note (e->src); |
caa64124 | 4365 | if (e->src->count == profile_count::zero ()) |
4366 | return; | |
1b06efcb | 4367 | if (count_sum == profile_count::zero () && impossible) |
caa64124 | 4368 | { |
4369 | bool found = false; | |
720cfc43 | 4370 | if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
4371 | ; | |
4372 | else if (current_ir_type () == IR_GIMPLE) | |
4373 | for (gimple_stmt_iterator gsi = gsi_start_bb (e->src); | |
4374 | !gsi_end_p (gsi); gsi_next (&gsi)) | |
4375 | { | |
4376 | if (stmt_can_terminate_bb_p (gsi_stmt (gsi))) | |
4377 | { | |
4378 | found = true; | |
4379 | break; | |
4380 | } | |
4381 | } | |
4382 | /* FIXME: Implement RTL path. */ | |
4383 | else | |
4384 | found = true; | |
caa64124 | 4385 | if (!found) |
4386 | { | |
4387 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4388 | fprintf (dump_file, | |
4389 | "Making bb %i impossible and dropping count to 0.\n", | |
4390 | e->src->index); | |
caa64124 | 4391 | e->src->count = profile_count::zero (); |
4392 | FOR_EACH_EDGE (e2, ei, e->src->preds) | |
4393 | force_edge_cold (e2, impossible); | |
4394 | return; | |
4395 | } | |
4396 | } | |
eedd711b | 4397 | |
4398 | /* If we did not adjust anything, the source basic block has no likely
4399 | edges leaving in the other direction. In that case force that bb cold,
4400 | too. This is in general a difficult task to do, but handle the special
4401 | case when BB has only one predecessor. This is a common case when we
4402 | are updating the profile after loop transforms. */
720cfc43 | 4403 | if (!(prob_sum > profile_probability::never ()) |
4404 | && count_sum == profile_count::zero () | |
205ce1aa | 4405 | && single_pred_p (e->src) && e->src->count.to_frequency (cfun) |
4406 | > (impossible ? 0 : 1)) | |
eedd711b | 4407 | { |
205ce1aa | 4408 | int old_frequency = e->src->count.to_frequency (cfun); |
eedd711b | 4409 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4410 | fprintf (dump_file, "Making bb %i %s.\n", e->src->index, | |
7b3e5dc9 | 4411 | impossible ? "impossible" : "cold"); |
205ce1aa | 4412 | int new_frequency = MIN (e->src->count.to_frequency (cfun), |
4413 | impossible ? 0 : 1); | |
db9cef39 | 4414 | if (impossible) |
ea5d3981 | 4415 | e->src->count = profile_count::zero (); |
db9cef39 | 4416 | else |
205ce1aa | 4417 | e->src->count = e->count ().apply_scale (new_frequency, |
ea5d3981 | 4418 | old_frequency); |
eedd711b | 4419 | force_edge_cold (single_pred_edge (e->src), impossible); |
4420 | } | |
4421 | else if (dump_file && (dump_flags & TDF_DETAILS) | |
4422 | && maybe_hot_bb_p (cfun, e->src)) | |
4423 | fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index, | |
7b3e5dc9 | 4424 | impossible ? "impossible" : "cold"); |
eedd711b | 4425 | } |
4426 | } | |
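/* Example (an illustrative sketch, not taken from a particular caller): a
   transform that has just proven that an exit edge EXIT of a fully unrolled
   loop can never be taken could use

     force_edge_cold (exit, /*impossible=*/true);

   while a pass that merely predicts the exit to be unlikely would pass
   impossible=false, so the edge keeps a very_unlikely probability instead of
   zero, as described in the comment above the function.  */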
71e39b3b | 4427 | |
4428 | #if CHECKING_P | |
4429 | ||
4430 | namespace selftest { | |
4431 | ||
4432 | /* Test that the probability values defined in predict.def fall within
4433 | the range (50, 100]. */
4434 | ||
4435 | struct branch_predictor | |
4436 | { | |
4437 | const char *name; | |
8d7738e1 | 4438 | int probability; |
71e39b3b | 4439 | }; |
4440 | ||
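/* Expand every entry of predict.def into a { NAME, HITRATE } initializer so
   the table below covers all predictors.  */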
4441 | #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) { NAME, HITRATE }, | |
4442 | ||
4443 | static void | |
4444 | test_prediction_value_range () | |
4445 | { | |
4446 | branch_predictor predictors[] = { | |
4447 | #include "predict.def" | |
3be5a61d | 4448 | { NULL, PROB_UNINITIALIZED } |
71e39b3b | 4449 | }; |
4450 | ||
4451 | for (unsigned i = 0; predictors[i].name != NULL; i++) | |
4452 | { | |
8d7738e1 | 4453 | if (predictors[i].probability == PROB_UNINITIALIZED) |
4454 | continue; | |
4455 | ||
71e39b3b | 4456 | unsigned p = 100 * predictors[i].probability / REG_BR_PROB_BASE; |
3a600fec | 4457 | ASSERT_TRUE (p >= 50 && p <= 100); |
71e39b3b | 4458 | } |
4459 | } | |
4460 | ||
4461 | #undef DEF_PREDICTOR | |
4462 | ||
4463 | /* Run all of the selftests within this file. */
4464 | ||
4465 | void | |
4466 | predict_c_tests () | |
4467 | { | |
4468 | test_prediction_value_range (); | |
4469 | } | |
4470 | ||
4471 | } // namespace selftest | |
4472 | #endif /* CHECKING_P. */ |