Commit | Line | Data |
---|---|---|
59423b59 | 1 | /* Branch prediction routines for the GNU compiler. |
aad93da1 | 2 | Copyright (C) 2000-2017 Free Software Foundation, Inc. |
59423b59 | 3 | |
e6751e9a | 4 | This file is part of GCC. |
59423b59 | 5 | |
e6751e9a | 6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
e6751e9a | 9 | version. |
59423b59 | 10 | |
e6751e9a | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
59423b59 | 15 | |
e6751e9a | 16 | You should have received a copy of the GNU General Public License |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
59423b59 | 19 | |
20 | /* References: | |
21 | ||
22 | [1] "Branch Prediction for Free" | |
23 | Ball and Larus; PLDI '93. | |
24 | [2] "Static Branch Frequency and Program Profile Analysis" | |
25 | Wu and Larus; MICRO-27. | |
26 | [3] "Corpus-based Static Branch Prediction" | |
04641143 | 27 | Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */ |
59423b59 | 28 | |
29 | ||
30 | #include "config.h" | |
31 | #include "system.h" | |
805e22b2 | 32 | #include "coretypes.h" |
9ef16211 | 33 | #include "backend.h" |
7c29e30e | 34 | #include "rtl.h" |
59423b59 | 35 | #include "tree.h" |
9ef16211 | 36 | #include "gimple.h" |
7c29e30e | 37 | #include "cfghooks.h" |
38 | #include "tree-pass.h" | |
9ef16211 | 39 | #include "ssa.h" |
ad7b10a2 | 40 | #include "memmodel.h" |
7c29e30e | 41 | #include "emit-rtl.h" |
7c29e30e | 42 | #include "cgraph.h" |
43 | #include "coverage.h" | |
44 | #include "diagnostic-core.h" | |
45 | #include "gimple-predict.h" | |
b20a8bb4 | 46 | #include "fold-const.h" |
9ed99284 | 47 | #include "calls.h" |
94ea8568 | 48 | #include "cfganal.h" |
886c1262 | 49 | #include "profile.h" |
e9d7220b | 50 | #include "sreal.h" |
429fa7fa | 51 | #include "params.h" |
862be747 | 52 | #include "cfgloop.h" |
dcf1a1ec | 53 | #include "gimple-iterator.h" |
073c1fd5 | 54 | #include "tree-cfg.h" |
05d9c18a | 55 | #include "tree-ssa-loop-niter.h" |
073c1fd5 | 56 | #include "tree-ssa-loop.h" |
d27b0b64 | 57 | #include "tree-scalar-evolution.h" |
89beffc9 | 58 | #include "ipa-utils.h" |
cbcc4297 | 59 | #include "gimple-pretty-print.h" |
71e39b3b | 60 | #include "selftest.h" |
720cfc43 | 61 | #include "cfgrtl.h" |
30a86690 | 62 | #include "stringpool.h" |
63 | #include "attribs.h" | |
56ff4880 | 64 | |
3f76cceb | 65 | /* Enum with reasons why a predictor is ignored. */ |
66 | ||
67 | enum predictor_reason | |
68 | { | |
abb2c3fe | 69 | REASON_NONE, |
70 | REASON_IGNORED, | |
71 | REASON_SINGLE_EDGE_DUPLICATE, | |
72 | REASON_EDGE_PAIR_DUPLICATE | |
3f76cceb | 73 | }; |
74 | ||
75 | /* String messages for the aforementioned enum. */ | |
76 | ||
77 | static const char *reason_messages[] = {"", " (ignored)", | |
78 | " (single edge duplicate)", " (edge pair duplicate)"}; | |
79 | ||
2e3c56e8 | 80 | /* Real constants: 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE, |
81 | 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */ | |
8201d1f6 | 82 | static sreal real_almost_one, real_br_prob_base, |
e9d7220b | 83 | real_inv_br_prob_base, real_one_half, real_bb_freq_max; |
59423b59 | 84 | |
ee5f6585 | 85 | static void combine_predictions_for_insn (rtx_insn *, basic_block); |
3f76cceb | 86 | static void dump_prediction (FILE *, enum br_predictor, int, basic_block, |
87 | enum predictor_reason, edge); | |
e09883e4 | 88 | static void predict_paths_leading_to (basic_block, enum br_predictor, |
89 | enum prediction, | |
90 | struct loop *in_loop = NULL); | |
91 | static void predict_paths_leading_to_edge (edge, enum br_predictor, | |
92 | enum prediction, | |
93 | struct loop *in_loop = NULL); | |
ee5f6585 | 94 | static bool can_predict_insn_p (const rtx_insn *); |
5e96f51e | 95 | |
13488c51 | 96 | /* Information we hold about each branch predictor. |
97 | Filled using information from predict.def. */ | |
e6751e9a | 98 | |
13488c51 | 99 | struct predictor_info |
5e96f51e | 100 | { |
e99c3a1d | 101 | const char *const name; /* Name used in the debugging dumps. */ |
102 | const int hitrate; /* Expected hitrate used by | |
103 | predict_insn_def call. */ | |
104 | const int flags; | |
13488c51 | 105 | }; |
5e96f51e | 106 | |
eb429644 | 107 | /* Use given predictor without Dempster-Shafer theory if it matches |
108 | using first_match heuristics. */ | |
109 | #define PRED_FLAG_FIRST_MATCH 1 | |
110 | ||
111 | /* Convert a hit rate given in percent to our fixed-point representation. */ | |
112 | ||
e6751e9a | 113 | #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100) |
eb429644 | 114 | |
115 | #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS}, | |
e6751e9a | 116 | static const struct predictor_info predictor_info[]= { |
13488c51 | 117 | #include "predict.def" |
118 | ||
aa40f561 | 119 | /* Upper bound on predictors. */ |
eb429644 | 120 | {NULL, 0, 0} |
13488c51 | 121 | }; |
122 | #undef DEF_PREDICTOR | |
429fa7fa | 123 | |
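As an aside on the HITRATE macro above: it converts a hit rate expressed in percent into the REG_BR_PROB_BASE fixed-point scale, with the `+ 50` term rounding to the nearest unit rather than truncating. A minimal standalone sketch of the arithmetic (assuming REG_BR_PROB_BASE is 10000, its usual value in GCC; the test harness itself is hypothetical):

```c
#include <stdio.h>

#define REG_BR_PROB_BASE 10000
#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

int
main (void)
{
  /* 90% maps to 9000 in the fixed-point scale; the + 50 rounds
     to the nearest unit instead of truncating.  */
  printf ("%d\n", HITRATE (90));   /* prints 9000 */
  printf ("%d\n", HITRATE (54));   /* prints 5400 */
  return 0;
}
```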
eb7df8c2 | 124 | /* Return TRUE if frequency FREQ is considered to be hot. */ |
f29b326e | 125 | |
126 | static inline bool | |
8d672d12 | 127 | maybe_hot_frequency_p (struct function *fun, int freq) |
eb7df8c2 | 128 | { |
415d1b9a | 129 | struct cgraph_node *node = cgraph_node::get (fun->decl); |
a74a34e6 | 130 | if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ) |
eb7df8c2 | 131 | { |
125b6d78 | 132 | if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED) |
eb7df8c2 | 133 | return false; |
125b6d78 | 134 | if (node->frequency == NODE_FREQUENCY_HOT) |
eb7df8c2 | 135 | return true; |
136 | } | |
3bedbae3 | 137 | if (profile_status_for_fn (fun) == PROFILE_ABSENT) |
aa5f4f32 | 138 | return true; |
125b6d78 | 139 | if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE |
34154e27 | 140 | && freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3)) |
125b6d78 | 141 | return false; |
6040d650 | 142 | if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0) |
143 | return false; | |
367782e7 | 144 | if (freq * PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) |
145 | < ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency) | |
eb7df8c2 | 146 | return false; |
147 | return true; | |
148 | } | |
149 | ||
9e179a64 | 150 | static gcov_type min_count = -1; |
151 | ||
152 | /* Determine the threshold for hot BB counts. */ | |
153 | ||
154 | gcov_type | |
155 | get_hot_bb_threshold () | |
156 | { | |
157 | gcov_working_set_t *ws; | |
158 | if (min_count == -1) | |
159 | { | |
160 | ws = find_working_set (PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE)); | |
161 | gcc_assert (ws); | |
162 | min_count = ws->min_counter; | |
163 | } | |
164 | return min_count; | |
165 | } | |
166 | ||
167 | /* Set the threshold for hot BB counts. */ | |
168 | ||
169 | void | |
170 | set_hot_bb_threshold (gcov_type min) | |
171 | { | |
172 | min_count = min; | |
173 | } | |
174 | ||
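Together these two accessors implement a lazily computed, overridable threshold: the first call to get_hot_bb_threshold derives min_count from the profile working set at HOT_BB_COUNT_WS_PERMILLE, later calls reuse the cached value, and set_hot_bb_threshold lets a reader of streamed-in profile data pin it explicitly. A hypothetical caller (not part of this file), sketched under the assumption that it already holds a raw gcov count:

```c
/* Hypothetical helper: classify a raw block count as hot the same way
   maybe_hot_count_p ultimately does, via the cached threshold.  */
static bool
block_count_is_hot_p (gcov_type bb_count)
{
  return bb_count >= get_hot_bb_threshold ();
}
```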
f29b326e | 175 | /* Return TRUE if profile count COUNT is considered to be hot. */ |
176 | ||
94bed7c3 | 177 | bool |
db9cef39 | 178 | maybe_hot_count_p (struct function *, profile_count count) |
f29b326e | 179 | { |
db9cef39 | 180 | if (!count.initialized_p ()) |
f29b326e | 181 | return true; |
182 | /* Code executed at most once is not hot. */ | |
db9cef39 | 183 | if (count <= MAX (profile_info ? profile_info->runs : 1, 1)) |
f29b326e | 184 | return false; |
db9cef39 | 185 | return (count.to_gcov_type () >= get_hot_bb_threshold ()); |
f29b326e | 186 | } |
187 | ||
429fa7fa | 188 | /* Return true in case BB can be CPU intensive and should be optimized |
41a6f238 | 189 | for maximal performance. */ |
429fa7fa | 190 | |
191 | bool | |
8d672d12 | 192 | maybe_hot_bb_p (struct function *fun, const_basic_block bb) |
429fa7fa | 193 | { |
8d672d12 | 194 | gcc_checking_assert (fun); |
f08c22c4 | 195 | if (!maybe_hot_count_p (fun, bb->count)) |
196 | return false; | |
8d672d12 | 197 | return maybe_hot_frequency_p (fun, bb->frequency); |
eb7df8c2 | 198 | } |
199 | ||
200 | /* Return true in case edge E can be CPU intensive and should be | |
201 | optimized for maximal performance. */ | |
202 | ||
203 | bool | |
204 | maybe_hot_edge_p (edge e) | |
205 | { | |
f08c22c4 | 206 | if (!maybe_hot_count_p (cfun, e->count)) |
207 | return false; | |
8d672d12 | 208 | return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e)); |
429fa7fa | 209 | } |
210 | ||
dcc9b351 | 211 | /* Return true if profile COUNT and FREQUENCY, or function FUN's static |
212 | node frequency, reflect never being executed. */ | |
213 | ||
214 | static bool | |
215 | probably_never_executed (struct function *fun, | |
db9cef39 | 216 | profile_count count, int) |
429fa7fa | 217 | { |
8d672d12 | 218 | gcc_checking_assert (fun); |
f08c22c4 | 219 | if (count == profile_count::zero ()) |
220 | return true; | |
db9cef39 | 221 | if (count.initialized_p () && profile_status_for_fn (fun) == PROFILE_READ) |
4befb9f4 | 222 | { |
db9cef39 | 223 | int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION); |
224 | if (count.apply_scale (unlikely_count_fraction, 1) >= profile_info->runs) | |
4befb9f4 | 225 | return false; |
4befb9f4 | 226 | return true; |
227 | } | |
a74a34e6 | 228 | if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ) |
415d1b9a | 229 | && (cgraph_node::get (fun->decl)->frequency |
fd6a3c41 | 230 | == NODE_FREQUENCY_UNLIKELY_EXECUTED)) |
5de92639 | 231 | return true; |
429fa7fa | 232 | return false; |
233 | } | |
234 | ||
80adc5a6 | 235 | |
dcc9b351 | 236 | /* Return true in case BB is probably never executed. */ |
237 | ||
238 | bool | |
239 | probably_never_executed_bb_p (struct function *fun, const_basic_block bb) | |
240 | { | |
241 | return probably_never_executed (fun, bb->count, bb->frequency); | |
242 | } | |
243 | ||
244 | ||
655af1f9 | 245 | /* Return true if E is unlikely executed for obvious reasons. */ |
246 | ||
247 | static bool | |
248 | unlikely_executed_edge_p (edge e) | |
249 | { | |
ca69b069 | 250 | return (e->count == profile_count::zero () |
251 | || e->probability == profile_probability::never ()) | |
655af1f9 | 252 | || (e->flags & (EDGE_EH | EDGE_FAKE)); |
253 | } | |
254 | ||
80adc5a6 | 255 | /* Return true in case edge E is probably never executed. */ |
256 | ||
257 | bool | |
258 | probably_never_executed_edge_p (struct function *fun, edge e) | |
429fa7fa | 259 | { |
ca69b069 | 260 | if (unlikely_executed_edge_p (e)) |
261 | return true; | |
dcc9b351 | 262 | return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e)); |
429fa7fa | 263 | } |
264 | ||
cf262be9 | 265 | /* Return true when current function should always be optimized for size. */ |
266 | ||
267 | bool | |
268 | optimize_function_for_size_p (struct function *fun) | |
269 | { | |
cf262be9 | 270 | if (!fun || !fun->decl) |
69ad6a32 | 271 | return optimize_size; |
415d1b9a | 272 | cgraph_node *n = cgraph_node::get (fun->decl); |
273 | return n && n->optimize_for_size_p (); | |
cf262be9 | 274 | } |
275 | ||
533af0db | 276 | /* Return true when current function should always be optimized for speed. */ |
277 | ||
278 | bool | |
279 | optimize_function_for_speed_p (struct function *fun) | |
280 | { | |
281 | return !optimize_function_for_size_p (fun); | |
7dfb44a0 | 282 | } |
283 | ||
acdfe9e0 | 284 | /* Return the optimization type that should be used for the function FUN. */ |
285 | ||
286 | optimization_type | |
287 | function_optimization_type (struct function *fun) | |
288 | { | |
289 | return (optimize_function_for_speed_p (fun) | |
290 | ? OPTIMIZE_FOR_SPEED | |
291 | : OPTIMIZE_FOR_SIZE); | |
292 | } | |
293 | ||
7dfb44a0 | 294 | /* Return TRUE when BB should be optimized for size. */ |
295 | ||
296 | bool | |
94ba1cf1 | 297 | optimize_bb_for_size_p (const_basic_block bb) |
7dfb44a0 | 298 | { |
b9ea678c | 299 | return (optimize_function_for_size_p (cfun) |
300 | || (bb && !maybe_hot_bb_p (cfun, bb))); | |
7dfb44a0 | 301 | } |
302 | ||
303 | /* Return TRUE when BB should be optimized for speed. */ | |
304 | ||
305 | bool | |
94ba1cf1 | 306 | optimize_bb_for_speed_p (const_basic_block bb) |
7dfb44a0 | 307 | { |
308 | return !optimize_bb_for_size_p (bb); | |
309 | } | |
310 | ||
acdfe9e0 | 311 | /* Return the optimization type that should be used for block BB. */ |
312 | ||
313 | optimization_type | |
314 | bb_optimization_type (const_basic_block bb) | |
315 | { | |
316 | return (optimize_bb_for_speed_p (bb) | |
317 | ? OPTIMIZE_FOR_SPEED | |
318 | : OPTIMIZE_FOR_SIZE); | |
319 | } | |
320 | ||
7dfb44a0 | 321 | /* Return TRUE when edge E should be optimized for size. */ |
322 | ||
323 | bool | |
324 | optimize_edge_for_size_p (edge e) | |
325 | { | |
533af0db | 326 | return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e); |
7dfb44a0 | 327 | } |
328 | ||
329 | /* Return TRUE when edge E should be optimized for speed. */ | |
330 | ||
331 | bool | |
332 | optimize_edge_for_speed_p (edge e) | |
333 | { | |
334 | return !optimize_edge_for_size_p (e); | |
335 | } | |
336 | ||
337 | /* Return TRUE when the current instruction should be optimized for size. */ | |
338 | ||
339 | bool | |
340 | optimize_insn_for_size_p (void) | |
341 | { | |
533af0db | 342 | return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p; |
7dfb44a0 | 343 | } |
344 | ||
345 | /* Return TRUE when the current instruction should be optimized for speed. */ | |
346 | ||
347 | bool | |
348 | optimize_insn_for_speed_p (void) | |
349 | { | |
350 | return !optimize_insn_for_size_p (); | |
351 | } | |
352 | ||
94ba1cf1 | 353 | /* Return TRUE when LOOP should be optimized for size. */ |
354 | ||
355 | bool | |
356 | optimize_loop_for_size_p (struct loop *loop) | |
357 | { | |
358 | return optimize_bb_for_size_p (loop->header); | |
359 | } | |
360 | ||
361 | /* Return TRUE when LOOP should be optimized for speed. */ | |
362 | ||
363 | bool | |
364 | optimize_loop_for_speed_p (struct loop *loop) | |
365 | { | |
366 | return optimize_bb_for_speed_p (loop->header); | |
367 | } | |
368 | ||
0bfd8d5c | 369 | /* Return TRUE when LOOP nest should be optimized for speed. */ |
370 | ||
371 | bool | |
372 | optimize_loop_nest_for_speed_p (struct loop *loop) | |
373 | { | |
374 | struct loop *l = loop; | |
375 | if (optimize_loop_for_speed_p (loop)) | |
376 | return true; | |
377 | l = loop->inner; | |
53be41ae | 378 | while (l && l != loop) |
0bfd8d5c | 379 | { |
380 | if (optimize_loop_for_speed_p (l)) | |
381 | return true; | |
382 | if (l->inner) | |
383 | l = l->inner; | |
384 | else if (l->next) | |
385 | l = l->next; | |
386 | else | |
7baffbd3 | 387 | { |
388 | while (l != loop && !l->next) | |
389 | l = loop_outer (l); | |
390 | if (l != loop) | |
391 | l = l->next; | |
392 | } | |
0bfd8d5c | 393 | } |
394 | return false; | |
395 | } | |
396 | ||
397 | /* Return TRUE when LOOP nest should be optimized for size. */ | |
398 | ||
399 | bool | |
400 | optimize_loop_nest_for_size_p (struct loop *loop) | |
401 | { | |
402 | return !optimize_loop_nest_for_speed_p (loop); | |
403 | } | |
404 | ||
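The walk in optimize_loop_nest_for_speed_p above visits every loop of the nest without recursion: descend into `inner` when possible, otherwise step to `next`, otherwise climb via loop_outer until a sibling exists. The same pattern restated as a generic sketch, over a hypothetical node type carrying the same three links:

```c
#include <stdbool.h>
#include <stddef.h>

struct node { struct node *inner, *next, *outer; };

/* Return true if PRED holds for ROOT or any node nested below it;
   a non-recursive pre-order walk mirroring the loop-nest traversal.  */
static bool
any_node_matches (struct node *root, bool (*pred) (struct node *))
{
  if (pred (root))
    return true;
  struct node *l = root->inner;
  while (l && l != root)
    {
      if (pred (l))
        return true;
      if (l->inner)
        l = l->inner;            /* descend first */
      else if (l->next)
        l = l->next;             /* then try the next sibling */
      else
        {
          while (l != root && !l->next)
            l = l->outer;        /* climb until a sibling exists */
          if (l != root)
            l = l->next;
        }
    }
  return false;
}
```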
4a9d7ef7 | 405 | /* Return true when edge E is likely to be well predictable by branch |
406 | predictor. */ | |
407 | ||
408 | bool | |
409 | predictable_edge_p (edge e) | |
410 | { | |
720cfc43 | 411 | if (!e->probability.initialized_p ()) |
4a9d7ef7 | 412 | return false; |
720cfc43 | 413 | if ((e->probability.to_reg_br_prob_base () |
4a9d7ef7 | 414 | <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100) |
720cfc43 | 415 | || (REG_BR_PROB_BASE - e->probability.to_reg_br_prob_base () |
4a9d7ef7 | 416 | <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)) |
417 | return true; | |
418 | return false; | |
419 | } | |
420 | ||
421 | ||
7dfb44a0 | 422 | /* Set RTL expansion for BB profile. */ |
423 | ||
424 | void | |
425 | rtl_profile_for_bb (basic_block bb) | |
426 | { | |
8d672d12 | 427 | crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb); |
7dfb44a0 | 428 | } |
429 | ||
430 | /* Set RTL expansion for edge profile. */ | |
431 | ||
432 | void | |
433 | rtl_profile_for_edge (edge e) | |
434 | { | |
435 | crtl->maybe_hot_insn_p = maybe_hot_edge_p (e); | |
436 | } | |
437 | ||
438 | /* Set RTL expansion to default mode (i.e. when profile info is not known). */ | |
439 | void | |
440 | default_rtl_profile (void) | |
441 | { | |
442 | crtl->maybe_hot_insn_p = true; | |
443 | } | |
444 | ||
cd0fe062 | 445 | /* Return true if one of the outgoing edges is already predicted by |
446 | PREDICTOR. */ | |
447 | ||
4ee9c684 | 448 | bool |
5493cb9a | 449 | rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor) |
cd0fe062 | 450 | { |
451 | rtx note; | |
5496dbfc | 452 | if (!INSN_P (BB_END (bb))) |
cd0fe062 | 453 | return false; |
5496dbfc | 454 | for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1)) |
cd0fe062 | 455 | if (REG_NOTE_KIND (note) == REG_BR_PRED |
456 | && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor) | |
457 | return true; | |
458 | return false; | |
459 | } | |
5e96f51e | 460 | |
eeb030c4 | 461 | /* Structure representing predictions at the tree level. */ |
462 | ||
463 | struct edge_prediction { | |
464 | struct edge_prediction *ep_next; | |
465 | edge ep_edge; | |
466 | enum br_predictor ep_predictor; | |
467 | int ep_probability; | |
468 | }; | |
469 | ||
06ecf488 | 470 | /* This map contains for a basic block the list of predictions for the |
471 | outgoing edges. */ | |
472 | ||
473 | static hash_map<const_basic_block, edge_prediction *> *bb_predictions; | |
474 | ||
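To make the data structure concrete: each basic block maps to the head of a singly linked list of edge_prediction records threaded through ep_next. An illustrative walk over that list (not part of the file; it just shows the access pattern the routines below rely on):

```c
/* Illustrative only: count the predictions recorded for BB.  */
static int
count_predictions_for_bb (const_basic_block bb)
{
  edge_prediction **preds = bb_predictions->get (bb);
  int n = 0;
  if (preds)
    for (edge_prediction *i = *preds; i; i = i->ep_next)
      n++;
  return n;
}
```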
4ee9c684 | 475 | /* Return true if one of the outgoing edges is already predicted by |
476 | PREDICTOR. */ | |
477 | ||
478 | bool | |
75a70cf9 | 479 | gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor) |
4ee9c684 | 480 | { |
7ea47fbd | 481 | struct edge_prediction *i; |
06ecf488 | 482 | edge_prediction **preds = bb_predictions->get (bb); |
b3723726 | 483 | |
484 | if (!preds) | |
485 | return false; | |
48e1416a | 486 | |
06ecf488 | 487 | for (i = *preds; i; i = i->ep_next) |
f45e9182 | 488 | if (i->ep_predictor == predictor) |
4ee9c684 | 489 | return true; |
490 | return false; | |
491 | } | |
492 | ||
f6e0b8d0 | 493 | /* Return true if edge E is already predicted by PREDICTOR with the |
494 | outcome TAKEN. */ | |
495 | ||
496 | bool | |
497 | edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken) | |
498 | { | |
499 | struct edge_prediction *i; | |
500 | basic_block bb = e->src; | |
501 | edge_prediction **preds = bb_predictions->get (bb); | |
502 | if (!preds) | |
503 | return false; | |
504 | ||
505 | int probability = predictor_info[(int) predictor].hitrate; | |
506 | ||
507 | if (taken != TAKEN) | |
508 | probability = REG_BR_PROB_BASE - probability; | |
509 | ||
510 | for (i = *preds; i; i = i->ep_next) | |
511 | if (i->ep_predictor == predictor | |
512 | && i->ep_edge == e | |
513 | && i->ep_probability == probability) | |
514 | return true; | |
515 | return false; | |
516 | } | |
517 | ||
b41438e5 | 518 | /* Same predicate as above, working on edges. */ |
519 | bool | |
7ecb5bb2 | 520 | edge_probability_reliable_p (const_edge e) |
b41438e5 | 521 | { |
61cb1816 | 522 | return e->probability.probably_reliable_p (); |
b41438e5 | 523 | } |
524 | ||
525 | /* Same predicate as edge_probability_reliable_p, working on notes. */ | |
526 | bool | |
7ecb5bb2 | 527 | br_prob_note_reliable_p (const_rtx note) |
b41438e5 | 528 | { |
529 | gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB); | |
61cb1816 | 530 | return profile_probability::from_reg_br_prob_note |
531 | (XINT (note, 0)).probably_reliable_p (); | |
b41438e5 | 532 | } |
533 | ||
aa157ca4 | 534 | static void |
ee5f6585 | 535 | predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability) |
13488c51 | 536 | { |
876760f6 | 537 | gcc_assert (any_condjump_p (insn)); |
b28bedce | 538 | if (!flag_guess_branch_prob) |
539 | return; | |
e6751e9a | 540 | |
a1ddb869 | 541 | add_reg_note (insn, REG_BR_PRED, |
542 | gen_rtx_CONCAT (VOIDmode, | |
543 | GEN_INT ((int) predictor), | |
544 | GEN_INT ((int) probability))); | |
13488c51 | 545 | } |
546 | ||
547 | /* Predict insn by given predictor. */ | |
e6751e9a | 548 | |
13488c51 | 549 | void |
ee5f6585 | 550 | predict_insn_def (rtx_insn *insn, enum br_predictor predictor, |
d598ad0d | 551 | enum prediction taken) |
13488c51 | 552 | { |
553 | int probability = predictor_info[(int) predictor].hitrate; | |
e6751e9a | 554 | |
13488c51 | 555 | if (taken != TAKEN) |
556 | probability = REG_BR_PROB_BASE - probability; | |
e6751e9a | 557 | |
13488c51 | 558 | predict_insn (insn, predictor, probability); |
5e96f51e | 559 | } |
560 | ||
561 | /* Predict edge E with given probability if possible. */ | |
e6751e9a | 562 | |
13488c51 | 563 | void |
4ee9c684 | 564 | rtl_predict_edge (edge e, enum br_predictor predictor, int probability) |
5e96f51e | 565 | { |
ee5f6585 | 566 | rtx_insn *last_insn; |
5496dbfc | 567 | last_insn = BB_END (e->src); |
5e96f51e | 568 | |
569 | /* We can store the branch prediction information only about | |
570 | conditional jumps. */ | |
571 | if (!any_condjump_p (last_insn)) | |
572 | return; | |
573 | ||
574 | /* We always store probability of branching. */ | |
575 | if (e->flags & EDGE_FALLTHRU) | |
576 | probability = REG_BR_PROB_BASE - probability; | |
577 | ||
13488c51 | 578 | predict_insn (last_insn, predictor, probability); |
579 | } | |
580 | ||
4ee9c684 | 581 | /* Predict edge E with the given PROBABILITY. */ |
582 | void | |
75a70cf9 | 583 | gimple_predict_edge (edge e, enum br_predictor predictor, int probability) |
4ee9c684 | 584 | { |
2d9e68f0 | 585 | if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) |
586 | && EDGE_COUNT (e->src->succs) > 1 | |
587 | && flag_guess_branch_prob | |
588 | && optimize) | |
ebd65d12 | 589 | { |
b3723726 | 590 | struct edge_prediction *i = XNEW (struct edge_prediction); |
06ecf488 | 591 | edge_prediction *&preds = bb_predictions->get_or_insert (e->src); |
4ee9c684 | 592 | |
06ecf488 | 593 | i->ep_next = preds; |
594 | preds = i; | |
f45e9182 | 595 | i->ep_probability = probability; |
596 | i->ep_predictor = predictor; | |
597 | i->ep_edge = e; | |
ebd65d12 | 598 | } |
4ee9c684 | 599 | } |
600 | ||
3651d60e | 601 | /* Filter edge predictions PREDS by a function FILTER. DATA is passed |
602 | to the filter function. */ | |
603 | ||
631fa7de | 604 | void |
3651d60e | 605 | filter_predictions (edge_prediction **preds, |
606 | bool (*filter) (edge_prediction *, void *), void *data) | |
631fa7de | 607 | { |
b3723726 | 608 | if (!bb_predictions) |
609 | return; | |
610 | ||
b3723726 | 611 | if (preds) |
631fa7de | 612 | { |
06ecf488 | 613 | struct edge_prediction **prediction = preds; |
b3723726 | 614 | struct edge_prediction *next; |
615 | ||
631fa7de | 616 | while (*prediction) |
617 | { | |
3651d60e | 618 | if ((*filter) (*prediction, data)) |
619 | prediction = &((*prediction)->ep_next); | |
620 | else | |
b3723726 | 621 | { |
622 | next = (*prediction)->ep_next; | |
623 | free (*prediction); | |
624 | *prediction = next; | |
625 | } | |
631fa7de | 626 | } |
627 | } | |
628 | } | |
629 | ||
3651d60e | 630 | /* Filter function predicate that returns true for an edge prediction P |
631 | if its edge is equal to DATA. */ | |
632 | ||
633 | bool | |
634 | equal_edge_p (edge_prediction *p, void *data) | |
635 | { | |
636 | return p->ep_edge == (edge)data; | |
637 | } | |
638 | ||
639 | /* Remove all predictions on given basic block that are attached | |
640 | to edge E. */ | |
641 | void | |
642 | remove_predictions_associated_with_edge (edge e) | |
643 | { | |
644 | if (!bb_predictions) | |
645 | return; | |
646 | ||
647 | edge_prediction **preds = bb_predictions->get (e->src); | |
648 | filter_predictions (preds, equal_edge_p, e); | |
649 | } | |
650 | ||
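filter_predictions is a generic keep/drop pass over the prediction list, and equal_edge_p above is one concrete callback for it. Another hypothetical callback, shown only to illustrate the DATA-passing convention; neither the function nor its use below exists in the file:

```c
/* Hypothetical filter: keep only predictions whose probability is at
   least the threshold pointed to by DATA.  */
static bool
probability_at_least_p (edge_prediction *p, void *data)
{
  return p->ep_probability >= *(int *) data;
}

/* Possible use:
     int threshold = REG_BR_PROB_BASE / 2;
     filter_predictions (bb_predictions->get (bb),
                         probability_at_least_p, &threshold);  */
```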
b3723726 | 651 | /* Clears the list of predictions stored for BB. */ |
652 | ||
653 | static void | |
654 | clear_bb_predictions (basic_block bb) | |
655 | { | |
06ecf488 | 656 | edge_prediction **preds = bb_predictions->get (bb); |
b3723726 | 657 | struct edge_prediction *pred, *next; |
658 | ||
659 | if (!preds) | |
660 | return; | |
661 | ||
06ecf488 | 662 | for (pred = *preds; pred; pred = next) |
b3723726 | 663 | { |
664 | next = pred->ep_next; | |
665 | free (pred); | |
666 | } | |
667 | *preds = NULL; | |
668 | } | |
669 | ||
1a12dac4 | 670 | /* Return true when we can store a prediction on insn INSN. |
671 | At the moment we represent predictions only on conditional | |
672 | jumps, not on computed jumps or other complicated cases. */ | |
673 | static bool | |
ee5f6585 | 674 | can_predict_insn_p (const rtx_insn *insn) |
1a12dac4 | 675 | { |
6d7dc5b9 | 676 | return (JUMP_P (insn) |
1a12dac4 | 677 | && any_condjump_p (insn) |
cd665a06 | 678 | && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2); |
1a12dac4 | 679 | } |
680 | ||
13488c51 | 681 | /* Predict edge E by given predictor if possible. */ |
e6751e9a | 682 | |
13488c51 | 683 | void |
d598ad0d | 684 | predict_edge_def (edge e, enum br_predictor predictor, |
685 | enum prediction taken) | |
13488c51 | 686 | { |
687 | int probability = predictor_info[(int) predictor].hitrate; | |
688 | ||
689 | if (taken != TAKEN) | |
690 | probability = REG_BR_PROB_BASE - probability; | |
e6751e9a | 691 | |
13488c51 | 692 | predict_edge (e, predictor, probability); |
693 | } | |
694 | ||
695 | /* Invert all branch predictions or probability notes in the INSN. This needs | |
696 | to be done each time we invert the condition used by the jump. */ | |
e6751e9a | 697 | |
13488c51 | 698 | void |
d598ad0d | 699 | invert_br_probabilities (rtx insn) |
13488c51 | 700 | { |
e6751e9a | 701 | rtx note; |
702 | ||
703 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
704 | if (REG_NOTE_KIND (note) == REG_BR_PROB) | |
61cb1816 | 705 | XINT (note, 0) = profile_probability::from_reg_br_prob_note |
706 | (XINT (note, 0)).invert ().to_reg_br_prob_note (); | |
e6751e9a | 707 | else if (REG_NOTE_KIND (note) == REG_BR_PRED) |
708 | XEXP (XEXP (note, 0), 1) | |
709 | = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1))); | |
13488c51 | 710 | } |
711 | ||
712 | /* Dump information about the branch prediction to the output file. */ | |
e6751e9a | 713 | |
13488c51 | 714 | static void |
4ee9c684 | 715 | dump_prediction (FILE *file, enum br_predictor predictor, int probability, |
abb2c3fe | 716 | basic_block bb, enum predictor_reason reason = REASON_NONE, |
3f76cceb | 717 | edge ep_edge = NULL) |
13488c51 | 718 | { |
3f76cceb | 719 | edge e = ep_edge; |
cd665a06 | 720 | edge_iterator ei; |
13488c51 | 721 | |
4ee9c684 | 722 | if (!file) |
13488c51 | 723 | return; |
724 | ||
3f76cceb | 725 | if (e == NULL) |
726 | FOR_EACH_EDGE (e, ei, bb->succs) | |
727 | if (! (e->flags & EDGE_FALLTHRU)) | |
728 | break; | |
729 | ||
730 | char edge_info_str[128]; | |
731 | if (ep_edge) | |
732 | sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index, | |
733 | ep_edge->dest->index); | |
734 | else | |
735 | edge_info_str[0] = '\0'; | |
13488c51 | 736 | |
3f76cceb | 737 | fprintf (file, " %s heuristics%s%s: %.1f%%", |
13488c51 | 738 | predictor_info[predictor].name, |
3f76cceb | 739 | edge_info_str, reason_messages[reason], |
740 | probability * 100.0 / REG_BR_PROB_BASE); | |
13488c51 | 741 | |
db9cef39 | 742 | if (bb->count.initialized_p ()) |
17a81216 | 743 | { |
db9cef39 | 744 | fprintf (file, " exec "); |
745 | bb->count.dump (file); | |
12c94d25 | 746 | if (e) |
747 | { | |
db9cef39 | 748 | fprintf (file, " hit "); |
749 | e->count.dump (file); | |
750 | fprintf (file, " (%.1f%%)", e->count.to_gcov_type() * 100.0 | |
751 | / bb->count.to_gcov_type ()); | |
12c94d25 | 752 | } |
17a81216 | 753 | } |
e6751e9a | 754 | |
4ee9c684 | 755 | fprintf (file, "\n"); |
13488c51 | 756 | } |
757 | ||
f08c22c4 | 758 | /* Return true if STMT is known to be unlikely executed. */ |
759 | ||
760 | static bool | |
761 | unlikely_executed_stmt_p (gimple *stmt) | |
762 | { | |
763 | if (!is_gimple_call (stmt)) | |
764 | return false; | |
23653b72 | 765 | /* NORETURN attribute alone is not strong enough: exit() may be quite |
f08c22c4 | 766 | likely executed once during the program run. */ |
767 | if (gimple_call_fntype (stmt) | |
768 | && lookup_attribute ("cold", | |
769 | TYPE_ATTRIBUTES (gimple_call_fntype (stmt))) | |
770 | && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))) | |
771 | return true; | |
772 | tree decl = gimple_call_fndecl (stmt); | |
773 | if (!decl) | |
774 | return false; | |
775 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)) | |
776 | && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))) | |
777 | return true; | |
778 | ||
779 | cgraph_node *n = cgraph_node::get (decl); | |
780 | if (!n) | |
781 | return false; | |
1ccb310e | 782 | |
783 | availability avail; | |
f08c22c4 | 784 | n = n->ultimate_alias_target (&avail); |
785 | if (avail < AVAIL_AVAILABLE) | |
1ccb310e | 786 | return false; |
f08c22c4 | 787 | if (!n->analyzed |
788 | || n->decl == current_function_decl) | |
789 | return false; | |
790 | return n->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED; | |
791 | } | |
792 | ||
793 | /* Return true if BB is unlikely executed. */ | |
794 | ||
795 | static bool | |
796 | unlikely_executed_bb_p (basic_block bb) | |
797 | { | |
798 | if (bb->count == profile_count::zero ()) | |
799 | return true; | |
800 | if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) || bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
801 | return false; | |
802 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); | |
803 | !gsi_end_p (gsi); gsi_next (&gsi)) | |
804 | { | |
805 | if (unlikely_executed_stmt_p (gsi_stmt (gsi))) | |
806 | return true; | |
807 | if (stmt_can_terminate_bb_p (gsi_stmt (gsi))) | |
808 | return false; | |
809 | } | |
810 | return false; | |
811 | } | |
812 | ||
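At the source level, the situation unlikely_executed_stmt_p looks for is the following (a made-up example; the attribute-based test in the function is what fires here):

```c
/* Hypothetical user code: a call to a "cold" function makes the
   enclosing block satisfy unlikely_executed_bb_p.  */
extern void report_fatal_error (const char *) __attribute__ ((cold));

int
checked_div (int a, int b)
{
  if (b == 0)
    {
      /* This block ends in a cold call, so it is treated as
         unlikely executed.  */
      report_fatal_error ("division by zero");
      return 0;
    }
  return a / b;
}
```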
7edd21a5 | 813 | /* We cannot predict the probabilities of outgoing edges of BB. Set them |
3989f1fe | 814 | evenly and hope for the best. If UNLIKELY_EDGES is not null, the edges |
815 | it contains are given PROB_VERY_UNLIKELY probability, and the remaining | |
816 | probability is distributed evenly among the other edges. */ | |
817 | ||
83c8a977 | 818 | static void |
3989f1fe | 819 | set_even_probabilities (basic_block bb, |
820 | hash_set<edge> *unlikely_edges = NULL) | |
83c8a977 | 821 | { |
c7a0aa22 | 822 | unsigned nedges = 0, unlikely_count = 0; |
1cda36f6 | 823 | edge e = NULL; |
cd665a06 | 824 | edge_iterator ei; |
c7a0aa22 | 825 | profile_probability all = profile_probability::always (); |
83c8a977 | 826 | |
cd665a06 | 827 | FOR_EACH_EDGE (e, ei, bb->succs) |
c7a0aa22 | 828 | if (e->probability.initialized_p ()) |
829 | all -= e->probability; | |
830 | else if (!unlikely_executed_edge_p (e)) | |
831 | { | |
832 | nedges ++; | |
833 | if (unlikely_edges != NULL && unlikely_edges->contains (e)) | |
834 | { | |
835 | all -= profile_probability::very_unlikely (); | |
836 | unlikely_count++; | |
837 | } | |
838 | } | |
3989f1fe | 839 | |
840 | /* Make the distribution even if all edges are unlikely. */ | |
3989f1fe | 841 | if (unlikely_count == nedges) |
842 | { | |
843 | unlikely_edges = NULL; | |
844 | unlikely_count = 0; | |
845 | } | |
846 | ||
847 | unsigned c = nedges - unlikely_count; | |
848 | ||
cd665a06 | 849 | FOR_EACH_EDGE (e, ei, bb->succs) |
c7a0aa22 | 850 | if (e->probability.initialized_p ()) |
851 | ; | |
852 | else if (!unlikely_executed_edge_p (e)) | |
3989f1fe | 853 | { |
854 | if (unlikely_edges != NULL && unlikely_edges->contains (e)) | |
720cfc43 | 855 | e->probability = profile_probability::very_unlikely (); |
3989f1fe | 856 | else |
c7a0aa22 | 857 | e->probability = all.apply_scale (1, c).guessed (); |
3989f1fe | 858 | } |
83c8a977 | 859 | else |
720cfc43 | 860 | e->probability = profile_probability::never (); |
83c8a977 | 861 | } |
862 | ||
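A back-of-the-envelope version of the distribution set_even_probabilities computes, in plain REG_BR_PROB_BASE units (REG_BR_PROB_BASE taken as 10000 and, purely for illustration, the very-unlikely probability taken as 2%; the real code works on profile_probability values):

```c
/* Three successors, none with an initialized probability, one of
   them in UNLIKELY_EDGES.  */
int all = 10000;          /* profile_probability::always ()           */
int very_unlikely = 200;  /* illustrative 2% for the unlikely edge    */
all -= very_unlikely;     /* 9800 remains for the ordinary edges      */
int c = 3 - 1;            /* nedges minus unlikely_count              */
int each = all / c;       /* the two ordinary edges get 4900 = 49%    */
```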
61cb1816 | 863 | /* Add REG_BR_PROB note to JUMP with PROB. */ |
864 | ||
865 | void | |
866 | add_reg_br_prob_note (rtx_insn *jump, profile_probability prob) | |
867 | { | |
868 | gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0)); | |
869 | add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ()); | |
870 | } | |
871 | ||
13488c51 | 872 | /* Combine all REG_BR_PRED notes into a single probability and attach a |
873 | REG_BR_PROB note if not already present. Remove now useless REG_BR_PRED notes. */ | |
e6751e9a | 874 | |
13488c51 | 875 | static void |
ee5f6585 | 876 | combine_predictions_for_insn (rtx_insn *insn, basic_block bb) |
13488c51 | 877 | { |
83c8a977 | 878 | rtx prob_note; |
879 | rtx *pnote; | |
e6751e9a | 880 | rtx note; |
13488c51 | 881 | int best_probability = PROB_EVEN; |
b9c74b4d | 882 | enum br_predictor best_predictor = END_PREDICTORS; |
eb429644 | 883 | int combined_probability = REG_BR_PROB_BASE / 2; |
884 | int d; | |
49d7c0db | 885 | bool first_match = false; |
886 | bool found = false; | |
13488c51 | 887 | |
83c8a977 | 888 | if (!can_predict_insn_p (insn)) |
889 | { | |
890 | set_even_probabilities (bb); | |
891 | return; | |
892 | } | |
893 | ||
894 | prob_note = find_reg_note (insn, REG_BR_PROB, 0); | |
895 | pnote = ®_NOTES (insn); | |
450d042a | 896 | if (dump_file) |
897 | fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn), | |
b3d6de89 | 898 | bb->index); |
13488c51 | 899 | |
900 | /* We implement "first match" heuristics and use the probability guessed | |
4ee9c684 | 901 | by the predictor with the smallest index. */ |
e6751e9a | 902 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
903 | if (REG_NOTE_KIND (note) == REG_BR_PRED) | |
904 | { | |
bc620c5c | 905 | enum br_predictor predictor = ((enum br_predictor) |
906 | INTVAL (XEXP (XEXP (note, 0), 0))); | |
e6751e9a | 907 | int probability = INTVAL (XEXP (XEXP (note, 0), 1)); |
908 | ||
909 | found = true; | |
19109630 | 910 | if (best_predictor > predictor |
911 | && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH) | |
e6751e9a | 912 | best_probability = probability, best_predictor = predictor; |
913 | ||
914 | d = (combined_probability * probability | |
915 | + (REG_BR_PROB_BASE - combined_probability) | |
916 | * (REG_BR_PROB_BASE - probability)); | |
917 | ||
918 | /* Use FP math to avoid overflows of 32bit integers. */ | |
c4a616f2 | 919 | if (d == 0) |
920 | /* If one probability is 0% and one 100%, avoid division by zero. */ | |
921 | combined_probability = REG_BR_PROB_BASE / 2; | |
922 | else | |
923 | combined_probability = (((double) combined_probability) * probability | |
924 | * REG_BR_PROB_BASE / d + 0.5); | |
e6751e9a | 925 | } |
926 | ||
927 | /* Decide which heuristic to use. In case we didn't match anything, | |
928 | use the no_prediction heuristic; in case we did match, use either | |
49d7c0db | 929 | first match or Dempster-Shafer theory depending on the flags. */ |
930 | ||
19109630 | 931 | if (best_predictor != END_PREDICTORS) |
49d7c0db | 932 | first_match = true; |
933 | ||
934 | if (!found) | |
4ee9c684 | 935 | dump_prediction (dump_file, PRED_NO_PREDICTION, |
3f76cceb | 936 | combined_probability, bb); |
49d7c0db | 937 | else |
938 | { | |
19109630 | 939 | if (!first_match) |
940 | dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, | |
941 | bb, !first_match ? REASON_NONE : REASON_IGNORED); | |
942 | else | |
943 | dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, | |
944 | bb, first_match ? REASON_NONE : REASON_IGNORED); | |
49d7c0db | 945 | } |
946 | ||
947 | if (first_match) | |
eb429644 | 948 | combined_probability = best_probability; |
3f76cceb | 949 | dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb); |
49d7c0db | 950 | |
951 | while (*pnote) | |
952 | { | |
953 | if (REG_NOTE_KIND (*pnote) == REG_BR_PRED) | |
954 | { | |
bc620c5c | 955 | enum br_predictor predictor = ((enum br_predictor) |
956 | INTVAL (XEXP (XEXP (*pnote, 0), 0))); | |
49d7c0db | 957 | int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1)); |
958 | ||
4ee9c684 | 959 | dump_prediction (dump_file, predictor, probability, bb, |
3f76cceb | 960 | (!first_match || best_predictor == predictor) |
abb2c3fe | 961 | ? REASON_NONE : REASON_IGNORED); |
195731ad | 962 | *pnote = XEXP (*pnote, 1); |
49d7c0db | 963 | } |
964 | else | |
195731ad | 965 | pnote = &XEXP (*pnote, 1); |
49d7c0db | 966 | } |
e6751e9a | 967 | |
13488c51 | 968 | if (!prob_note) |
969 | { | |
61cb1816 | 970 | profile_probability p |
971 | = profile_probability::from_reg_br_prob_base (combined_probability); | |
972 | add_reg_br_prob_note (insn, p); | |
e6751e9a | 973 | |
eb429644 | 974 | /* Save the prediction into the CFG in case we are seeing a |
975 | non-degenerate conditional jump. */ | |
ea091dfd | 976 | if (!single_succ_p (bb)) |
eb429644 | 977 | { |
61cb1816 | 978 | BRANCH_EDGE (bb)->probability = p; |
e6751e9a | 979 | FALLTHRU_EDGE (bb)->probability |
720cfc43 | 980 | = BRANCH_EDGE (bb)->probability.invert (); |
eb429644 | 981 | } |
13488c51 | 982 | } |
ea091dfd | 983 | else if (!single_succ_p (bb)) |
d8c70625 | 984 | { |
61cb1816 | 985 | profile_probability prob = profile_probability::from_reg_br_prob_note |
986 | (XINT (prob_note, 0)); | |
d8c70625 | 987 | |
61cb1816 | 988 | BRANCH_EDGE (bb)->probability = prob; |
989 | FALLTHRU_EDGE (bb)->probability = prob.invert (); | |
d8c70625 | 990 | } |
991 | else | |
720cfc43 | 992 | single_succ_edge (bb)->probability = profile_probability::always (); |
5e96f51e | 993 | } |
994 | ||
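The combination step buried in the loop above is the two-hypothesis Dempster-Shafer rule: with independent guesses p1 and p2 for the same edge, the combined probability is p1*p2 / (p1*p2 + (1-p1)*(1-p2)). A standalone sketch of the arithmetic (assuming REG_BR_PROB_BASE is 10000), showing that agreeing predictors reinforce each other while disagreeing ones cancel toward 50%:

```c
#include <stdio.h>

#define REG_BR_PROB_BASE 10000

static int
ds_combine (int p1, int p2)
{
  /* Use FP math to avoid overflowing 32-bit integers, as above.  */
  double d = (double) p1 * p2
             + (double) (REG_BR_PROB_BASE - p1) * (REG_BR_PROB_BASE - p2);
  if (d == 0)
    return REG_BR_PROB_BASE / 2;  /* 0% against 100%: avoid dividing by 0. */
  return (double) p1 * p2 * REG_BR_PROB_BASE / d + 0.5;
}

int
main (void)
{
  printf ("%d\n", ds_combine (7000, 7000));  /* ~8448: reinforcement */
  printf ("%d\n", ds_combine (7000, 3000));  /* 5000: they cancel    */
  return 0;
}
```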
3f76cceb | 995 | /* Edge prediction hash traits. */ |
996 | ||
997 | struct predictor_hash: pointer_hash <edge_prediction> | |
998 | { | |
999 | ||
1000 | static inline hashval_t hash (const edge_prediction *); | |
1001 | static inline bool equal (const edge_prediction *, const edge_prediction *); | |
1002 | }; | |
1003 | ||
1004 | /* Calculate hash value of an edge prediction P based on predictor and | |
1005 | normalized probability. */ | |
1006 | ||
1007 | inline hashval_t | |
1008 | predictor_hash::hash (const edge_prediction *p) | |
1009 | { | |
1010 | inchash::hash hstate; | |
1011 | hstate.add_int (p->ep_predictor); | |
1012 | ||
1013 | int prob = p->ep_probability; | |
1014 | if (prob > REG_BR_PROB_BASE / 2) | |
1015 | prob = REG_BR_PROB_BASE - prob; | |
1016 | ||
1017 | hstate.add_int (prob); | |
1018 | ||
1019 | return hstate.end (); | |
1020 | } | |
1021 | ||
1022 | /* Return true when edge predictions P1 and P2 use the same predictor and | |
1023 | have equal (or opposite) probability. */ | |
1024 | ||
1025 | inline bool | |
1026 | predictor_hash::equal (const edge_prediction *p1, const edge_prediction *p2) | |
1027 | { | |
1028 | return (p1->ep_predictor == p2->ep_predictor | |
1029 | && (p1->ep_probability == p2->ep_probability | |
1030 | || p1->ep_probability == REG_BR_PROB_BASE - p2->ep_probability)); | |
1031 | } | |
1032 | ||
1033 | struct predictor_hash_traits: predictor_hash, | |
1034 | typed_noop_remove <edge_prediction *> {}; | |
1035 | ||
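The normalization in predictor_hash::hash (folding prob and REG_BR_PROB_BASE - prob onto the same value) is what lets complementary guesses collide. An illustrative pair, where E1 and E2 are hypothetical distinct edges out of the same block:

```c
/* Both records hash to the same bucket and compare equal under
   predictor_hash, which is how step 1 of prune_predictions_for_bb
   (below) spots an edge-pair duplicate.  Field order follows the
   struct: ep_next, ep_edge, ep_predictor, ep_probability.  */
edge_prediction a = { NULL, e1, PRED_CALL, 9000 };
edge_prediction b = { NULL, e2, PRED_CALL, 1000 };  /* 10000 - 9000 */
```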
1036 | /* Return true if edge prediction P is not in DATA hash set. */ | |
1037 | ||
1038 | static bool | |
1039 | not_removed_prediction_p (edge_prediction *p, void *data) | |
1040 | { | |
1041 | hash_set<edge_prediction *> *remove = (hash_set<edge_prediction *> *) data; | |
1042 | return !remove->contains (p); | |
1043 | } | |
1044 | ||
1045 | /* Prune predictions for a basic block BB. Currently we perform the | |
1046 | following clean-up steps: | |
1047 | ||
1048 | 1) remove a pair of predictions that guess the same probability | |
1049 | (other than 1/2) for both edges | |
1050 | 2) remove duplicates of a prediction that assigns the same probability | |
1051 | to a single edge | |
1052 | ||
1053 | */ | |
1054 | ||
1055 | static void | |
1056 | prune_predictions_for_bb (basic_block bb) | |
1057 | { | |
1058 | edge_prediction **preds = bb_predictions->get (bb); | |
1059 | ||
1060 | if (preds) | |
1061 | { | |
1062 | hash_table <predictor_hash_traits> s (13); | |
1063 | hash_set <edge_prediction *> remove; | |
1064 | ||
1065 | /* Step 1: identify predictors that should be removed. */ | |
1066 | for (edge_prediction *pred = *preds; pred; pred = pred->ep_next) | |
1067 | { | |
1068 | edge_prediction *existing = s.find (pred); | |
1069 | if (existing) | |
1070 | { | |
1071 | if (pred->ep_edge == existing->ep_edge | |
1072 | && pred->ep_probability == existing->ep_probability) | |
1073 | { | |
1074 | /* Remove a duplicate predictor. */ | |
1075 | dump_prediction (dump_file, pred->ep_predictor, | |
1076 | pred->ep_probability, bb, | |
abb2c3fe | 1077 | REASON_SINGLE_EDGE_DUPLICATE, pred->ep_edge); |
3f76cceb | 1078 | |
1079 | remove.add (pred); | |
1080 | } | |
1081 | else if (pred->ep_edge != existing->ep_edge | |
1082 | && pred->ep_probability == existing->ep_probability | |
1083 | && pred->ep_probability != REG_BR_PROB_BASE / 2) | |
1084 | { | |
1085 | /* Remove both predictors as they predict the same | |
1086 | for both edges. */ | |
1087 | dump_prediction (dump_file, existing->ep_predictor, | |
1088 | pred->ep_probability, bb, | |
abb2c3fe | 1089 | REASON_EDGE_PAIR_DUPLICATE, |
3f76cceb | 1090 | existing->ep_edge); |
1091 | dump_prediction (dump_file, pred->ep_predictor, | |
1092 | pred->ep_probability, bb, | |
abb2c3fe | 1093 | REASON_EDGE_PAIR_DUPLICATE, |
3f76cceb | 1094 | pred->ep_edge); |
1095 | ||
1096 | remove.add (existing); | |
1097 | remove.add (pred); | |
1098 | } | |
1099 | } | |
1100 | ||
1101 | edge_prediction **slot2 = s.find_slot (pred, INSERT); | |
1102 | *slot2 = pred; | |
1103 | } | |
1104 | ||
1105 | /* Step 2: Remove predictors. */ | |
1106 | filter_predictions (preds, not_removed_prediction_p, &remove); | |
1107 | } | |
1108 | } | |
1109 | ||
4ee9c684 | 1110 | /* Combine predictions into a single probability and store it into the CFG.
5a5f50e9 | 1111 | Remove now useless prediction entries.
1112 | If DRY_RUN is set, only produce dumps and do not modify the profile. */ | |
59423b59 | 1113 | |
4ee9c684 | 1114 | static void |
5a5f50e9 | 1115 | combine_predictions_for_bb (basic_block bb, bool dry_run) |
59423b59 | 1116 | { |
4ee9c684 | 1117 | int best_probability = PROB_EVEN; |
b9c74b4d | 1118 | enum br_predictor best_predictor = END_PREDICTORS; |
4ee9c684 | 1119 | int combined_probability = REG_BR_PROB_BASE / 2; |
1120 | int d; | |
1121 | bool first_match = false; | |
1122 | bool found = false; | |
1123 | struct edge_prediction *pred; | |
1124 | int nedges = 0; | |
1125 | edge e, first = NULL, second = NULL; | |
cd665a06 | 1126 | edge_iterator ei; |
59423b59 | 1127 | |
cd665a06 | 1128 | FOR_EACH_EDGE (e, ei, bb->succs) |
f08c22c4 | 1129 | if (!unlikely_executed_edge_p (e)) |
4ee9c684 | 1130 | { |
cd665a06 | 1131 | nedges ++; |
4ee9c684 | 1132 | if (first && !second) |
1133 | second = e; | |
1134 | if (!first) | |
1135 | first = e; | |
1136 | } | |
720cfc43 | 1137 | else if (!e->probability.initialized_p ()) |
1138 | e->probability = profile_probability::never (); | |
4ee9c684 | 1139 | |
48e1416a | 1140 | /* When there is no successor or only one choice, prediction is easy. |
4ee9c684 | 1141 | |
3989f1fe | 1142 | When we have a basic block with more than 2 successors, the situation |
1143 | is more complicated as DS theory cannot be used literally. | |
1144 | More precisely, let's assume we predicted edge e1 with probability p1, | |
1145 | thus: m1({b1}) = p1. As we're going to combine more than 2 edges, we | |
1146 | need to find probability of e.g. m1({b2}), which we don't know. | |
1147 | The only approximation is to equally distribute 1-p1 to all edges | |
1148 | different from b1. | |
1149 | ||
1150 | According to numbers we've got from the SPEC2006 benchmark, there's only | |
1151 | one interesting reliable predictor (noreturn call), which can be | |
1152 | handled with a somewhat simpler approach. */ | |
4ee9c684 | 1153 | if (nedges != 2) |
1154 | { | |
3989f1fe | 1155 | hash_set<edge> unlikely_edges (4); |
1156 | ||
1157 | /* Identify all edges that have a probability close to very unlikely. | |
1158 | Restricting this to exactly very unlikely is not worth doing, as | |
1159 | no such probability shows up in the SPEC2006 benchmark. */ | |
1160 | edge_prediction **preds = bb_predictions->get (bb); | |
1161 | if (preds) | |
1162 | for (pred = *preds; pred; pred = pred->ep_next) | |
1163 | if (pred->ep_probability <= PROB_VERY_UNLIKELY) | |
1164 | unlikely_edges.add (pred->ep_edge); | |
1165 | ||
c7a0aa22 | 1166 | if (!dry_run) |
3989f1fe | 1167 | set_even_probabilities (bb, &unlikely_edges); |
b3723726 | 1168 | clear_bb_predictions (bb); |
3f5be5f4 | 1169 | if (dump_file) |
3989f1fe | 1170 | { |
1171 | fprintf (dump_file, "Predictions for bb %i\n", bb->index); | |
1172 | if (unlikely_edges.elements () == 0) | |
1173 | fprintf (dump_file, | |
1174 | "%i edges in bb %i predicted to even probabilities\n", | |
1175 | nedges, bb->index); | |
1176 | else | |
1177 | { | |
1178 | fprintf (dump_file, | |
1179 | "%i edges in bb %i predicted with some unlikely edges\n", | |
1180 | nedges, bb->index); | |
1181 | FOR_EACH_EDGE (e, ei, bb->succs) | |
f08c22c4 | 1182 | if (!unlikely_executed_edge_p (e)) |
720cfc43 | 1183 | dump_prediction (dump_file, PRED_COMBINED, |
1184 | e->probability.to_reg_br_prob_base (), bb, REASON_NONE, e); | |
3989f1fe | 1185 | } |
1186 | } | |
4ee9c684 | 1187 | return; |
1188 | } | |
1189 | ||
3f5be5f4 | 1190 | if (dump_file) |
1191 | fprintf (dump_file, "Predictions for bb %i\n", bb->index); | |
4ee9c684 | 1192 | |
3f76cceb | 1193 | prune_predictions_for_bb (bb); |
1194 | ||
06ecf488 | 1195 | edge_prediction **preds = bb_predictions->get (bb); |
3f76cceb | 1196 | |
b3723726 | 1197 | if (preds) |
4ee9c684 | 1198 | { |
b3723726 | 1199 | /* We implement "first match" heuristics and use the probability guessed |
1200 | by the predictor with the smallest index. */ | |
06ecf488 | 1201 | for (pred = *preds; pred; pred = pred->ep_next) |
b3723726 | 1202 | { |
b9c74b4d | 1203 | enum br_predictor predictor = pred->ep_predictor; |
b3723726 | 1204 | int probability = pred->ep_probability; |
4ee9c684 | 1205 | |
b3723726 | 1206 | if (pred->ep_edge != first) |
1207 | probability = REG_BR_PROB_BASE - probability; | |
4ee9c684 | 1208 | |
b3723726 | 1209 | found = true; |
9f694a82 | 1210 | /* First match heuristics would be wildly confused if we predicted |
1211 | both directions. */ | |
19109630 | 1212 | if (best_predictor > predictor |
1213 | && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH) | |
9f694a82 | 1214 | { |
1215 | struct edge_prediction *pred2; | |
1216 | int prob = probability; | |
1217 | ||
c83059be | 1218 | for (pred2 = (struct edge_prediction *) *preds; |
1219 | pred2; pred2 = pred2->ep_next) | |
9f694a82 | 1220 | if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor) |
1221 | { | |
9ae89a4e | 1222 | int probability2 = pred2->ep_probability; |
9f694a82 | 1223 | |
1224 | if (pred2->ep_edge != first) | |
1225 | probability2 = REG_BR_PROB_BASE - probability2; | |
1226 | ||
48e1416a | 1227 | if ((probability < REG_BR_PROB_BASE / 2) != |
9f694a82 | 1228 | (probability2 < REG_BR_PROB_BASE / 2)) |
1229 | break; | |
1230 | ||
1231 | /* If the same predictor later gave a better result, go for it! */ | |
1232 | if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability)) | |
1233 | || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability))) | |
1234 | prob = probability2; | |
1235 | } | |
1236 | if (!pred2) | |
1237 | best_probability = prob, best_predictor = predictor; | |
1238 | } | |
4ee9c684 | 1239 | |
b3723726 | 1240 | d = (combined_probability * probability |
1241 | + (REG_BR_PROB_BASE - combined_probability) | |
1242 | * (REG_BR_PROB_BASE - probability)); | |
4ee9c684 | 1243 | |
b3723726 | 1244 | /* Use FP math to avoid overflows of 32bit integers. */ |
1245 | if (d == 0) | |
1246 | /* If one probability is 0% and one 100%, avoid division by zero. */ | |
1247 | combined_probability = REG_BR_PROB_BASE / 2; | |
1248 | else | |
1249 | combined_probability = (((double) combined_probability) | |
1250 | * probability | |
1251 | * REG_BR_PROB_BASE / d + 0.5); | |
1252 | } | |
4ee9c684 | 1253 | } |
1254 | ||
1255 | /* Decide which heuristic to use. In case we didn't match anything, | |
1256 | use the no_prediction heuristic; in case we did match, use either | |
1257 | first match or Dempster-Shafer theory depending on the flags. */ | |
1258 | ||
19109630 | 1259 | if (best_predictor != END_PREDICTORS) |
4ee9c684 | 1260 | first_match = true; |
1261 | ||
1262 | if (!found) | |
3f76cceb | 1263 | dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb); |
4ee9c684 | 1264 | else |
1265 | { | |
19109630 | 1266 | if (!first_match) |
1267 | dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb, | |
1268 | !first_match ? REASON_NONE : REASON_IGNORED); | |
1269 | else | |
1270 | dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb, | |
1271 | first_match ? REASON_NONE : REASON_IGNORED); | |
4ee9c684 | 1272 | } |
1273 | ||
1274 | if (first_match) | |
1275 | combined_probability = best_probability; | |
3f76cceb | 1276 | dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb); |
4ee9c684 | 1277 | |
b3723726 | 1278 | if (preds) |
4ee9c684 | 1279 | { |
4077bf7a | 1280 | for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next) |
b3723726 | 1281 | { |
b9c74b4d | 1282 | enum br_predictor predictor = pred->ep_predictor; |
b3723726 | 1283 | int probability = pred->ep_probability; |
4ee9c684 | 1284 | |
b3723726 | 1285 | dump_prediction (dump_file, predictor, probability, bb, |
3f76cceb | 1286 | (!first_match || best_predictor == predictor) |
abb2c3fe | 1287 | ? REASON_NONE : REASON_IGNORED, pred->ep_edge); |
b3723726 | 1288 | } |
4ee9c684 | 1289 | } |
b3723726 | 1290 | clear_bb_predictions (bb); |
4ee9c684 | 1291 | |
db9cef39 | 1292 | if (!bb->count.initialized_p () && !dry_run) |
83c8a977 | 1293 | { |
720cfc43 | 1294 | first->probability |
1295 | = profile_probability::from_reg_br_prob_base (combined_probability); | |
1296 | second->probability = first->probability.invert (); | |
83c8a977 | 1297 | } |
4ee9c684 | 1298 | } |
1299 | ||
fd757b76 | 1300 | /* Check if T1 and T2 satisfy the IV_COMPARE condition. |
1301 | Return the SSA_NAME if the condition is satisfied, NULL otherwise. | |
1302 | ||
1303 | T1 and T2 should be one of the following cases: | |
1304 | 1. T1 is SSA_NAME, T2 is NULL | |
1305 | 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4] | |
1306 | 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */ | |
1307 | ||
1308 | static tree | |
1309 | strips_small_constant (tree t1, tree t2) | |
1310 | { | |
1311 | tree ret = NULL; | |
1312 | int value = 0; | |
1313 | ||
1314 | if (!t1) | |
1315 | return NULL; | |
1316 | else if (TREE_CODE (t1) == SSA_NAME) | |
1317 | ret = t1; | |
e913b5cd | 1318 | else if (tree_fits_shwi_p (t1)) |
1319 | value = tree_to_shwi (t1); | |
fd757b76 | 1320 | else |
1321 | return NULL; | |
1322 | ||
1323 | if (!t2) | |
1324 | return ret; | |
e913b5cd | 1325 | else if (tree_fits_shwi_p (t2)) |
1326 | value = tree_to_shwi (t2); | |
fd757b76 | 1327 | else if (TREE_CODE (t2) == SSA_NAME) |
1328 | { | |
1329 | if (ret) | |
1330 | return NULL; | |
1331 | else | |
1332 | ret = t2; | |
1333 | } | |
1334 | ||
1335 | if (value <= 4 && value >= -4) | |
1336 | return ret; | |
1337 | else | |
1338 | return NULL; | |
1339 | } | |
1340 | ||
1341 | /* Return the SSA_NAME in T or T's operands. | |
1342 | Return NULL if SSA_NAME cannot be found. */ | |
1343 | ||
1344 | static tree | |
1345 | get_base_value (tree t) | |
1346 | { | |
1347 | if (TREE_CODE (t) == SSA_NAME) | |
1348 | return t; | |
1349 | ||
1350 | if (!BINARY_CLASS_P (t)) | |
1351 | return NULL; | |
1352 | ||
1353 | switch (TREE_OPERAND_LENGTH (t)) | |
1354 | { | |
1355 | case 1: | |
1356 | return strips_small_constant (TREE_OPERAND (t, 0), NULL); | |
1357 | case 2: | |
1358 | return strips_small_constant (TREE_OPERAND (t, 0), | |
1359 | TREE_OPERAND (t, 1)); | |
1360 | default: | |
1361 | return NULL; | |
1362 | } | |
1363 | } | |
1364 | ||
1365 | /* Check the compare STMT in LOOP. If it compares an induction | |
1366 | variable to a loop invariant, return true, and save | |
1367 | LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP. | |
1368 | Otherwise return false and set LOOP_INVARIANT to NULL. */ | |
1369 | ||
1370 | static bool | |
1a91d914 | 1371 | is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop, |
fd757b76 | 1372 | tree *loop_invariant, |
1373 | enum tree_code *compare_code, | |
b3269f54 | 1374 | tree *loop_step, |
fd757b76 | 1375 | tree *loop_iv_base) |
1376 | { | |
1377 | tree op0, op1, bound, base; | |
1378 | affine_iv iv0, iv1; | |
1379 | enum tree_code code; | |
b3269f54 | 1380 | tree step; |
fd757b76 | 1381 | |
1382 | code = gimple_cond_code (stmt); | |
1383 | *loop_invariant = NULL; | |
1384 | ||
1385 | switch (code) | |
1386 | { | |
1387 | case GT_EXPR: | |
1388 | case GE_EXPR: | |
1389 | case NE_EXPR: | |
1390 | case LT_EXPR: | |
1391 | case LE_EXPR: | |
1392 | case EQ_EXPR: | |
1393 | break; | |
1394 | ||
1395 | default: | |
1396 | return false; | |
1397 | } | |
1398 | ||
1399 | op0 = gimple_cond_lhs (stmt); | |
1400 | op1 = gimple_cond_rhs (stmt); | |
1401 | ||
1402 | if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST) | |
1403 | || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST)) | |
1404 | return false; | |
1405 | if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true)) | |
1406 | return false; | |
1407 | if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true)) | |
1408 | return false; | |
1409 | if (TREE_CODE (iv0.step) != INTEGER_CST | |
1410 | || TREE_CODE (iv1.step) != INTEGER_CST) | |
1411 | return false; | |
1412 | if ((integer_zerop (iv0.step) && integer_zerop (iv1.step)) | |
1413 | || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step))) | |
1414 | return false; | |
1415 | ||
1416 | if (integer_zerop (iv0.step)) | |
1417 | { | |
1418 | if (code != NE_EXPR && code != EQ_EXPR) | |
1419 | code = invert_tree_comparison (code, false); | |
1420 | bound = iv0.base; | |
1421 | base = iv1.base; | |
e913b5cd | 1422 | if (tree_fits_shwi_p (iv1.step)) |
b3269f54 | 1423 | step = iv1.step; |
fd757b76 | 1424 | else |
1425 | return false; | |
1426 | } | |
1427 | else | |
1428 | { | |
1429 | bound = iv1.base; | |
1430 | base = iv0.base; | |
e913b5cd | 1431 | if (tree_fits_shwi_p (iv0.step)) |
b3269f54 | 1432 | step = iv0.step; |
fd757b76 | 1433 | else |
1434 | return false; | |
1435 | } | |
1436 | ||
1437 | if (TREE_CODE (bound) != INTEGER_CST) | |
1438 | bound = get_base_value (bound); | |
1439 | if (!bound) | |
1440 | return false; | |
1441 | if (TREE_CODE (base) != INTEGER_CST) | |
1442 | base = get_base_value (base); | |
1443 | if (!base) | |
1444 | return false; | |
1445 | ||
1446 | *loop_invariant = bound; | |
1447 | *compare_code = code; | |
1448 | *loop_step = step; | |
1449 | *loop_iv_base = base; | |
1450 | return true; | |
1451 | } | |
1452 | ||
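A hypothetical input illustrating what is_comparison_with_loop_invariant_p extracts. In the loop below, the left operand is an IV with base 0 and step 2, the right operand is loop-invariant, and the small constant -4 (within the [-4, 4] window) is stripped by get_base_value:

```c
/* Made-up source; i and n stand in for the corresponding SSA names.  */
for (i = 0; i < n; i += 2)
  if (i != n - 4)
    do_something ();

/* For the condition i != n - 4 the function returns true with:
     *loop_invariant = n   (get_base_value strips the in-range -4)
     *compare_code   = NE_EXPR
     *loop_step      = 2
     *loop_iv_base   = 0  */
```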
1453 | /* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */ | |
1454 | ||
1455 | static bool | |
1456 | expr_coherent_p (tree t1, tree t2) | |
1457 | { | |
42acab1c | 1458 | gimple *stmt; |
fd757b76 | 1459 | tree ssa_name_1 = NULL; |
1460 | tree ssa_name_2 = NULL; | |
1461 | ||
1462 | gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST); | |
1463 | gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST); | |
1464 | ||
1465 | if (t1 == t2) | |
1466 | return true; | |
1467 | ||
1468 | if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST) | |
1469 | return true; | |
1470 | if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST) | |
1471 | return false; | |
1472 | ||
1473 | /* Check to see if t1 is expressed/defined with t2. */ | |
1474 | stmt = SSA_NAME_DEF_STMT (t1); | |
1475 | gcc_assert (stmt != NULL); | |
1476 | if (is_gimple_assign (stmt)) | |
1477 | { | |
1478 | ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE); | |
1479 | if (ssa_name_1 && ssa_name_1 == t2) | |
1480 | return true; | |
1481 | } | |
1482 | ||
1483 | /* Check to see if t2 is expressed/defined with t1. */ | |
1484 | stmt = SSA_NAME_DEF_STMT (t2); | |
1485 | gcc_assert (stmt != NULL); | |
1486 | if (is_gimple_assign (stmt)) | |
1487 | { | |
1488 | ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE); | |
1489 | if (ssa_name_2 && ssa_name_2 == t1) | |
1490 | return true; | |
1491 | } | |
1492 | ||
1493 | /* Compare if t1 and t2's def_stmts are identical. */ | |
1494 | if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2) | |
1495 | return true; | |
1496 | else | |
1497 | return false; | |
1498 | } | |
1499 | ||
d07b2b6f | 1500 | /* Return true if BB is predicted by one of the loop heuristics. */ |
1501 | ||
1502 | static bool | |
1503 | predicted_by_loop_heuristics_p (basic_block bb) | |
1504 | { | |
1505 | struct edge_prediction *i; | |
1506 | edge_prediction **preds = bb_predictions->get (bb); | |
1507 | ||
1508 | if (!preds) | |
1509 | return false; | |
1510 | ||
1511 | for (i = *preds; i; i = i->ep_next) | |
1512 | if (i->ep_predictor == PRED_LOOP_ITERATIONS_GUESSED | |
1513 | || i->ep_predictor == PRED_LOOP_ITERATIONS_MAX | |
1514 | || i->ep_predictor == PRED_LOOP_ITERATIONS | |
1515 | || i->ep_predictor == PRED_LOOP_EXIT | |
cbcc4297 | 1516 | || i->ep_predictor == PRED_LOOP_EXIT_WITH_RECURSION |
d07b2b6f | 1517 | || i->ep_predictor == PRED_LOOP_EXTRA_EXIT) |
1518 | return true; | |
1519 | return false; | |
1520 | } | |
1521 | ||
fd757b76 | 1522 | /* Predict the branch probability of BB when BB contains a branch comparing | |
1523 | an induction variable in LOOP (with base LOOP_IV_BASE_VAR) to | |
1524 | LOOP_BOUND_VAR, using LOOP_BOUND_CODE; the IV steps by LOOP_BOUND_STEP. | |
1525 | ||
1526 | E.g. | |
1527 | for (int i = 0; i < bound; i++) { | |
1528 | if (i < bound - 2) | |
1529 | computation_1(); | |
1530 | else | |
1531 | computation_2(); | |
1532 | } | |
1533 | ||
1534 | In this loop, we will predict the branch inside the loop to be taken. */ | |
1535 | ||
1536 | static void | |
1537 | predict_iv_comparison (struct loop *loop, basic_block bb, | |
1538 | tree loop_bound_var, | |
1539 | tree loop_iv_base_var, | |
1540 | enum tree_code loop_bound_code, | |
1541 | int loop_bound_step) | |
1542 | { | |
42acab1c | 1543 | gimple *stmt; |
fd757b76 | 1544 | tree compare_var, compare_base; |
1545 | enum tree_code compare_code; | |
b3269f54 | 1546 | tree compare_step_var; |
fd757b76 | 1547 | edge then_edge; |
1548 | edge_iterator ei; | |
1549 | ||
d07b2b6f | 1550 | if (predicted_by_loop_heuristics_p (bb)) |
fd757b76 | 1551 | return; |
1552 | ||
1553 | stmt = last_stmt (bb); | |
1554 | if (!stmt || gimple_code (stmt) != GIMPLE_COND) | |
1555 | return; | |
1a91d914 | 1556 | if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt), |
1557 | loop, &compare_var, | |
fd757b76 | 1558 | &compare_code, |
b3269f54 | 1559 | &compare_step_var, |
fd757b76 | 1560 | &compare_base)) |
1561 | return; | |
1562 | ||
1563 | /* Find the taken edge. */ | |
1564 | FOR_EACH_EDGE (then_edge, ei, bb->succs) | |
1565 | if (then_edge->flags & EDGE_TRUE_VALUE) | |
1566 | break; | |
1567 | ||
1568 | /* When comparing an IV to a loop invariant, NE is more likely to be | |
1569 | taken while EQ is more likely to be not-taken. */ | |
1570 | if (compare_code == NE_EXPR) | |
1571 | { | |
1572 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1573 | return; | |
1574 | } | |
1575 | else if (compare_code == EQ_EXPR) | |
1576 | { | |
1577 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1578 | return; | |
1579 | } | |
1580 | ||
1581 | if (!expr_coherent_p (loop_iv_base_var, compare_base)) | |
1582 | return; | |
1583 | ||
1584 | /* If loop bound, base and compare bound are all constants, we can | |
1585 | calculate the probability directly. */ | |
e913b5cd | 1586 | if (tree_fits_shwi_p (loop_bound_var) |
1587 | && tree_fits_shwi_p (compare_var) | |
1588 | && tree_fits_shwi_p (compare_base)) | |
fd757b76 | 1589 | { |
1590 | int probability; | |
e913b5cd | 1591 | bool overflow, overall_overflow = false; |
ab2c1de8 | 1592 | widest_int compare_count, tem; |
b3269f54 | 1593 | |
b3269f54 | 1594 | /* (loop_bound - base) / compare_step */ |
c311b856 | 1595 | tem = wi::sub (wi::to_widest (loop_bound_var), |
1596 | wi::to_widest (compare_base), SIGNED, &overflow); | |
e913b5cd | 1597 | overall_overflow |= overflow; |
c311b856 | 1598 | widest_int loop_count = wi::div_trunc (tem, |
1599 | wi::to_widest (compare_step_var), | |
1600 | SIGNED, &overflow); | |
e913b5cd | 1601 | overall_overflow |= overflow; |
1602 | ||
c311b856 | 1603 | if (!wi::neg_p (wi::to_widest (compare_step_var)) |
fd757b76 | 1604 | ^ (compare_code == LT_EXPR || compare_code == LE_EXPR)) |
b3269f54 | 1605 | { |
1606 | /* (loop_bound - compare_bound) / compare_step */ | |
c311b856 | 1607 | tem = wi::sub (wi::to_widest (loop_bound_var), |
1608 | wi::to_widest (compare_var), SIGNED, &overflow); | |
e913b5cd | 1609 | overall_overflow |= overflow; |
c311b856 | 1610 | compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var), |
1611 | SIGNED, &overflow); | |
e913b5cd | 1612 | overall_overflow |= overflow; |
b3269f54 | 1613 | } |
fd757b76 | 1614 | else |
b3269f54 | 1615 | { |
1616 | /* (compare_bound - base) / compare_step */ | |
c311b856 | 1617 | tem = wi::sub (wi::to_widest (compare_var), |
1618 | wi::to_widest (compare_base), SIGNED, &overflow); | |
e913b5cd | 1619 | overall_overflow |= overflow; |
c311b856 | 1620 | compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var), |
1621 | SIGNED, &overflow); | |
e913b5cd | 1622 | overall_overflow |= overflow; |
b3269f54 | 1623 | } |
fd757b76 | 1624 | if (compare_code == LE_EXPR || compare_code == GE_EXPR) |
b3269f54 | 1625 | ++compare_count; |
fd757b76 | 1626 | if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR) |
b3269f54 | 1627 | ++loop_count; |
796b6678 | 1628 | if (wi::neg_p (compare_count)) |
e913b5cd | 1629 | compare_count = 0; |
796b6678 | 1630 | if (wi::neg_p (loop_count)) |
e913b5cd | 1631 | loop_count = 0; |
796b6678 | 1632 | if (loop_count == 0) |
fd757b76 | 1633 | probability = 0; |
796b6678 | 1634 | else if (wi::cmps (compare_count, loop_count) == 1) |
fd757b76 | 1635 | probability = REG_BR_PROB_BASE; |
1636 | else | |
b3269f54 | 1637 | { |
e913b5cd | 1638 | tem = compare_count * REG_BR_PROB_BASE; |
796b6678 | 1639 | tem = wi::udiv_trunc (tem, loop_count); |
b3269f54 | 1640 | probability = tem.to_uhwi (); |
1641 | } | |
1642 | ||
d3cb49c9 | 1643 | /* FIXME: The branch prediction seems broken. It has only 20% hitrate. */ |
e913b5cd | 1644 | if (!overall_overflow) |
b3269f54 | 1645 | predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability); |
1646 | ||
fd757b76 | 1647 | return; |
1648 | } | |
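| /* A worked example of the constant case above (numbers | |
| hypothetical): with base 0, compare step 1, loop bound 100 | |
| (LT_EXPR) and compare bound 30 (LT_EXPR), loop_count = 100 and | |
| compare_count = 30, so the then-edge gets probability | |
| 30 * REG_BR_PROB_BASE / 100, i.e. a 30% hitrate. */ | |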
1649 | ||
1650 | if (expr_coherent_p (loop_bound_var, compare_var)) | |
1651 | { | |
1652 | if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR) | |
1653 | && (compare_code == LT_EXPR || compare_code == LE_EXPR)) | |
1654 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1655 | else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR) | |
1656 | && (compare_code == GT_EXPR || compare_code == GE_EXPR)) | |
1657 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1658 | else if (loop_bound_code == NE_EXPR) | |
1659 | { | |
1660 | /* If the loop backedge condition is "(i != bound)", we do | |
1661 | the comparison based on the step of IV: | |
1662 | * step < 0 : backedge condition is like (i > bound) | |
1663 | * step > 0 : backedge condition is like (i < bound) */ | |
1664 | gcc_assert (loop_bound_step != 0); | |
1665 | if (loop_bound_step > 0 | |
1666 | && (compare_code == LT_EXPR | |
1667 | || compare_code == LE_EXPR)) | |
1668 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1669 | else if (loop_bound_step < 0 | |
1670 | && (compare_code == GT_EXPR | |
1671 | || compare_code == GE_EXPR)) | |
1672 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1673 | else | |
1674 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1675 | } | |
1676 | else | |
1677 | /* The branch is predicted not-taken if loop_bound_code is | |
1678 | the opposite of compare_code. */ | |
1679 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1680 | } | |
1681 | else if (expr_coherent_p (loop_iv_base_var, compare_var)) | |
1682 | { | |
1683 | /* For cases like: | |
1684 | for (i = s; i < h; i++) | |
1685 | if (i > s + 2) .... | |
1686 | The branch should be predicted taken. */ | |
1687 | if (loop_bound_step > 0 | |
1688 | && (compare_code == GT_EXPR || compare_code == GE_EXPR)) | |
1689 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1690 | else if (loop_bound_step < 0 | |
1691 | && (compare_code == LT_EXPR || compare_code == LE_EXPR)) | |
1692 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1693 | else | |
1694 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1695 | } | |
1696 | } | |
4ca17abf | 1697 | |
1698 | /* Predict extra loop exits that lead to EXIT_EDGE. These extra loop | |
1699 | exits result from short-circuit conditions that generate an | |
1700 | if_tmp. E.g.: | |
1701 | ||
1702 | if (foo() || global > 10) | |
1703 | break; | |
1704 | ||
1705 | This will be translated into: | |
1706 | ||
1707 | BB3: | |
1708 | loop header... | |
1709 | BB4: | |
1710 | if foo() goto BB6 else goto BB5 | |
1711 | BB5: | |
1712 | if global > 10 goto BB6 else goto BB7 | |
1713 | BB6: | |
1714 | goto BB7 | |
1715 | BB7: | |
1716 | iftmp = (PHI 0(BB5), 1(BB6)) | |
1717 | if iftmp == 1 goto BB8 else goto BB3 | |
1718 | BB8: | |
1719 | outside of the loop... | |
1720 | ||
1721 | The edge BB7->BB8 is a loop exit because BB8 is outside of the loop. | |
1722 | From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop | |
1723 | exits. This function takes BB7->BB8 as input, finds the extra loop | |
76f8f901 | 1724 | exits and predicts them using PRED_LOOP_EXTRA_EXIT. */ | |
4ca17abf | 1725 | |
1726 | static void | |
1727 | predict_extra_loop_exits (edge exit_edge) | |
1728 | { | |
1729 | unsigned i; | |
1730 | bool check_value_one; | |
42acab1c | 1731 | gimple *lhs_def_stmt; |
1a91d914 | 1732 | gphi *phi_stmt; |
4ca17abf | 1733 | tree cmp_rhs, cmp_lhs; |
42acab1c | 1734 | gimple *last; |
1a91d914 | 1735 | gcond *cmp_stmt; |
4ca17abf | 1736 | |
1a91d914 | 1737 | last = last_stmt (exit_edge->src); |
1738 | if (!last) | |
1739 | return; | |
1740 | cmp_stmt = dyn_cast <gcond *> (last); | |
1741 | if (!cmp_stmt) | |
4ca17abf | 1742 | return; |
1a91d914 | 1743 | |
4ca17abf | 1744 | cmp_rhs = gimple_cond_rhs (cmp_stmt); |
1745 | cmp_lhs = gimple_cond_lhs (cmp_stmt); | |
1746 | if (!TREE_CONSTANT (cmp_rhs) | |
1747 | || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs))) | |
1748 | return; | |
1749 | if (TREE_CODE (cmp_lhs) != SSA_NAME) | |
1750 | return; | |
1751 | ||
1752 | /* If check_value_one is true, only the phi_args with value '1' will lead | |
1753 | to loop exit. Otherwise, only the phi_args with value '0' will lead to | |
1754 | loop exit. */ | |
1755 | check_value_one = (((integer_onep (cmp_rhs)) | |
1756 | ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR)) | |
1757 | ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0)); | |
1758 | ||
1a91d914 | 1759 | lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs); |
1760 | if (!lhs_def_stmt) | |
1761 | return; | |
1762 | ||
1763 | phi_stmt = dyn_cast <gphi *> (lhs_def_stmt); | |
1764 | if (!phi_stmt) | |
4ca17abf | 1765 | return; |
1766 | ||
1767 | for (i = 0; i < gimple_phi_num_args (phi_stmt); i++) | |
1768 | { | |
1769 | edge e1; | |
1770 | edge_iterator ei; | |
1771 | tree val = gimple_phi_arg_def (phi_stmt, i); | |
1772 | edge e = gimple_phi_arg_edge (phi_stmt, i); | |
1773 | ||
1774 | if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val))) | |
1775 | continue; | |
1776 | if ((check_value_one ^ integer_onep (val)) == 1) | |
1777 | continue; | |
1778 | if (EDGE_COUNT (e->src->succs) != 1) | |
1779 | { | |
76f8f901 | 1780 | predict_paths_leading_to_edge (e, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN); |
4ca17abf | 1781 | continue; |
1782 | } | |
1783 | ||
1784 | FOR_EACH_EDGE (e1, ei, e->src->preds) | |
76f8f901 | 1785 | predict_paths_leading_to_edge (e1, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN); |
4ca17abf | 1786 | } |
1787 | } | |
1788 | ||
d07b2b6f | 1789 | |
7194de72 | 1790 | /* Predict edge probabilities by exploiting loop structure. */ |
1791 | ||
4ee9c684 | 1792 | static void |
7194de72 | 1793 | predict_loops (void) |
4ee9c684 | 1794 | { |
17519ba0 | 1795 | struct loop *loop; |
cbcc4297 | 1796 | basic_block bb; |
1797 | hash_set <struct loop *> with_recursion(10); | |
1798 | ||
1799 | FOR_EACH_BB_FN (bb, cfun) | |
1800 | { | |
1801 | gimple_stmt_iterator gsi; | |
1802 | tree decl; | |
1803 | ||
1804 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1805 | if (is_gimple_call (gsi_stmt (gsi)) | |
1806 | && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL | |
1807 | && recursive_call_p (current_function_decl, decl)) | |
1808 | { | |
1809 | loop = bb->loop_father; | |
1810 | while (loop && !with_recursion.add (loop)) | |
1811 | loop = loop_outer (loop); | |
1812 | } | |
1813 | } | |
c12f2fcb | 1814 | |
7fcadf62 | 1815 | /* Predict exit edges and guard conditions of each natural loop, | |
1816 | processing inner loops first. */ | |
d07b2b6f | 1817 | FOR_EACH_LOOP (loop, LI_FROM_INNERMOST) |
59423b59 | 1818 | { |
7fb12188 | 1819 | basic_block bb, *bbs; |
d07b2b6f | 1820 | unsigned j, n_exits = 0; |
f1f41a6c | 1821 | vec<edge> exits; |
3b0b2309 | 1822 | struct tree_niter_desc niter_desc; |
749ea85f | 1823 | edge ex; |
fd757b76 | 1824 | struct nb_iter_bound *nb_iter; |
1825 | enum tree_code loop_bound_code = ERROR_MARK; | |
b3269f54 | 1826 | tree loop_bound_step = NULL; |
fd757b76 | 1827 | tree loop_bound_var = NULL; |
1828 | tree loop_iv_base = NULL; | |
1a91d914 | 1829 | gcond *stmt = NULL; |
cbcc4297 | 1830 | bool recursion = with_recursion.contains (loop); |
59423b59 | 1831 | |
749ea85f | 1832 | exits = get_loop_exit_edges (loop); |
d07b2b6f | 1833 | FOR_EACH_VEC_ELT (exits, j, ex) |
f08c22c4 | 1834 | if (!unlikely_executed_edge_p (ex) && !(ex->flags & EDGE_ABNORMAL_CALL)) |
d07b2b6f | 1835 | n_exits ++; |
5d865361 | 1836 | if (!n_exits) |
1837 | { | |
f1f41a6c | 1838 | exits.release (); |
5d865361 | 1839 | continue; |
1840 | } | |
ba38e12b | 1841 | |
cbcc4297 | 1842 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1843 | fprintf (dump_file, "Predicting loop %i%s with %i exits.\n", | |
1844 | loop->num, recursion ? " (with recursion)":"", n_exits); | |
1845 | if (dump_file && (dump_flags & TDF_DETAILS) | |
1846 | && max_loop_iterations_int (loop) >= 0) | |
1847 | { | |
1848 | fprintf (dump_file, | |
1849 | "Loop %d iterates at most %i times.\n", loop->num, | |
1850 | (int)max_loop_iterations_int (loop)); | |
1851 | } | |
1852 | if (dump_file && (dump_flags & TDF_DETAILS) | |
1853 | && likely_max_loop_iterations_int (loop) >= 0) | |
1854 | { | |
1855 | fprintf (dump_file, "Loop %d likely iterates at most %i times.\n", | |
1856 | loop->num, (int)likely_max_loop_iterations_int (loop)); | |
1857 | } | |
1858 | ||
f1f41a6c | 1859 | FOR_EACH_VEC_ELT (exits, j, ex) |
d27b0b64 | 1860 | { |
3b0b2309 | 1861 | tree niter = NULL; |
d500fef3 | 1862 | HOST_WIDE_INT nitercst; |
1863 | int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS); | |
1864 | int probability; | |
1865 | enum br_predictor predictor; | |
d07b2b6f | 1866 | widest_int nit; |
d27b0b64 | 1867 | |
f08c22c4 | 1868 | if (unlikely_executed_edge_p (ex) |
1869 | || (ex->flags & EDGE_ABNORMAL_CALL)) | |
d07b2b6f | 1870 | continue; |
1871 | /* Loop heuristics do not expect the exit conditional to be inside | |
1872 | an inner loop. We predict from the innermost to the outermost loop. */ | |
1873 | if (predicted_by_loop_heuristics_p (ex->src)) | |
cbcc4297 | 1874 | { |
1875 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1876 | fprintf (dump_file, "Skipping exit %i->%i because " | |
1877 | "it is already predicted.\n", | |
1878 | ex->src->index, ex->dest->index); | |
1879 | continue; | |
1880 | } | |
4ca17abf | 1881 | predict_extra_loop_exits (ex); |
1882 | ||
3f78e715 | 1883 | if (number_of_iterations_exit (loop, ex, &niter_desc, false, false)) |
3b0b2309 | 1884 | niter = niter_desc.niter; |
1885 | if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST) | |
749ea85f | 1886 | niter = loop_niter_by_eval (loop, ex); |
cbcc4297 | 1887 | if (dump_file && (dump_flags & TDF_DETAILS) |
1888 | && TREE_CODE (niter) == INTEGER_CST) | |
1889 | { | |
1890 | fprintf (dump_file, "Exit %i->%i of loop %d iterates ", | |
1891 | ex->src->index, ex->dest->index, | |
1892 | loop->num); | |
1893 | print_generic_expr (dump_file, niter, TDF_SLIM); | |
1894 | fprintf (dump_file, " times.\n"); | |
1895 | } | |
d27b0b64 | 1896 | |
3b0b2309 | 1897 | if (TREE_CODE (niter) == INTEGER_CST) |
1898 | { | |
e913b5cd | 1899 | if (tree_fits_uhwi_p (niter) |
ed60f27f | 1900 | && max |
1901 | && compare_tree_int (niter, max - 1) == -1) | |
e913b5cd | 1902 | nitercst = tree_to_uhwi (niter) + 1; |
3b0b2309 | 1903 | else |
d500fef3 | 1904 | nitercst = max; |
1905 | predictor = PRED_LOOP_ITERATIONS; | |
1906 | } | |
1907 | /* If we have just one exit and we can derive some information about | |
1908 | the number of iterations of the loop from the statements inside | |
1909 | the loop, use it to predict this exit. */ | |
d07b2b6f | 1910 | else if (n_exits == 1 |
1911 | && estimated_stmt_executions (loop, &nit)) | |
d500fef3 | 1912 | { |
d07b2b6f | 1913 | if (wi::gtu_p (nit, max)) |
d500fef3 | 1914 | nitercst = max; |
d07b2b6f | 1915 | else |
1916 | nitercst = nit.to_shwi (); | |
d500fef3 | 1917 | predictor = PRED_LOOP_ITERATIONS_GUESSED; |
3b0b2309 | 1918 | } |
d07b2b6f | 1919 | /* If we have a likely upper bound, trust it for very small iteration | |
1920 | counts. Such loops would otherwise get mispredicted by the standard | |
1921 | LOOP_EXIT heuristics. */ | |
1922 | else if (n_exits == 1 | |
1923 | && likely_max_stmt_executions (loop, &nit) | |
1924 | && wi::ltu_p (nit, | |
1925 | RDIV (REG_BR_PROB_BASE, | |
1926 | REG_BR_PROB_BASE | |
1927 | - predictor_info | |
cbcc4297 | 1928 | [recursion |
1929 | ? PRED_LOOP_EXIT_WITH_RECURSION | |
1930 | : PRED_LOOP_EXIT].hitrate))) | |
d07b2b6f | 1931 | { |
1932 | nitercst = nit.to_shwi (); | |
1933 | predictor = PRED_LOOP_ITERATIONS_MAX; | |
1934 | } | |
d500fef3 | 1935 | else |
cbcc4297 | 1936 | { |
1937 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1938 | fprintf (dump_file, "Nothing known about exit %i->%i.\n", | |
1939 | ex->src->index, ex->dest->index); | |
1940 | continue; | |
1941 | } | |
d500fef3 | 1942 | |
cbcc4297 | 1943 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1944 | fprintf (dump_file, "Recording prediction to %i iterations by %s.\n", | |
1945 | (int)nitercst, predictor_info[predictor].name); | |
afa7ed87 | 1946 | /* If the prediction for the number of iterations is zero, do not | |
1947 | predict the exit edges. */ | |
1948 | if (nitercst == 0) | |
1949 | continue; | |
1950 | ||
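| /* The exit is assumed to be equally likely on each iteration, so | |
| e.g. a predicted iteration count of 10 gives the exit edge a | |
| probability of REG_BR_PROB_BASE / 10, i.e. 10%. */ | |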
d07b2b6f | 1951 | probability = RDIV (REG_BR_PROB_BASE, nitercst); |
d500fef3 | 1952 | predict_edge (ex, predictor, probability); |
d27b0b64 | 1953 | } |
f1f41a6c | 1954 | exits.release (); |
862be747 | 1955 | |
fd757b76 | 1956 | /* Find information about loop bound variables. */ |
1957 | for (nb_iter = loop->bounds; nb_iter; | |
1958 | nb_iter = nb_iter->next) | |
1959 | if (nb_iter->stmt | |
1960 | && gimple_code (nb_iter->stmt) == GIMPLE_COND) | |
1961 | { | |
1a91d914 | 1962 | stmt = as_a <gcond *> (nb_iter->stmt); |
fd757b76 | 1963 | break; |
1964 | } | |
1965 | if (!stmt && last_stmt (loop->header) | |
1966 | && gimple_code (last_stmt (loop->header)) == GIMPLE_COND) | |
1a91d914 | 1967 | stmt = as_a <gcond *> (last_stmt (loop->header)); |
fd757b76 | 1968 | if (stmt) |
1969 | is_comparison_with_loop_invariant_p (stmt, loop, | |
1970 | &loop_bound_var, | |
1971 | &loop_bound_code, | |
1972 | &loop_bound_step, | |
1973 | &loop_iv_base); | |
1974 | ||
7fb12188 | 1975 | bbs = get_loop_body (loop); |
4ee9c684 | 1976 | |
7fb12188 | 1977 | for (j = 0; j < loop->num_nodes; j++) |
1978 | { | |
7fb12188 | 1979 | edge e; |
cd665a06 | 1980 | edge_iterator ei; |
7fb12188 | 1981 | |
1982 | bb = bbs[j]; | |
e6751e9a | 1983 | |
cd0fe062 | 1984 | /* Bypass loop heuristics on continue statements. These | |
1985 | statements construct loops via "non-loop" constructs | |
1986 | in the source language and are better handled | |
1987 | separately. */ | |
3b0b2309 | 1988 | if (predicted_by_p (bb, PRED_CONTINUE)) |
cbcc4297 | 1989 | { |
1990 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1991 | fprintf (dump_file, "BB %i predicted by continue.\n", | |
1992 | bb->index); | |
1993 | continue; | |
1994 | } | |
cd0fe062 | 1995 | |
cbcc4297 | 1996 | /* If we already used more reliable loop exit predictors, do not |
1997 | bother with PRED_LOOP_EXIT. */ | |
1998 | if (!predicted_by_loop_heuristics_p (bb)) | |
b41438e5 | 1999 | { |
2000 | /* For a loop with many exits we don't want to predict all exits | |
2001 | with a pretty large probability, because if all exits are | |
2002 | considered in a row, the loop would be predicted to iterate | |
2003 | almost never. The code that divides the probability by the | |
2004 | number of exits is very rough. It should compute the number of | |
2005 | exits taken in each path through the function (not the overall | |
2006 | number of exits, which might be a lot higher for loops with wide | |
2007 | switch statements in them) and compute the n-th root. | |
2008 | ||
2009 | We limit the minimal probability to 2% to keep | |
2010 | EDGE_PROBABILITY_RELIABLE from trusting the branch prediction, | |
2011 | as this was causing a regression in the perl benchmark containing | |
2012 | such a wide loop. */ | |
48e1416a | 2013 | |
b41438e5 | 2014 | int probability = ((REG_BR_PROB_BASE |
cbcc4297 | 2015 | - predictor_info |
2016 | [recursion | |
2017 | ? PRED_LOOP_EXIT_WITH_RECURSION | |
2018 | : PRED_LOOP_EXIT].hitrate) | |
b41438e5 | 2019 | / n_exits); |
2020 | if (probability < HITRATE (2)) | |
2021 | probability = HITRATE (2); | |
2022 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2023 | if (e->dest->index < NUM_FIXED_BLOCKS | |
2024 | || !flow_bb_inside_loop_p (loop, e->dest)) | |
cbcc4297 | 2025 | { |
2026 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2027 | fprintf (dump_file, | |
2028 | "Predicting exit %i->%i with prob %i.\n", | |
2029 | e->src->index, e->dest->index, probability); | |
2030 | predict_edge (e, | |
2031 | recursion ? PRED_LOOP_EXIT_WITH_RECURSION | |
2032 | : PRED_LOOP_EXIT, probability); | |
2033 | } | |
b41438e5 | 2034 | } |
fd757b76 | 2035 | if (loop_bound_var) |
2036 | predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base, | |
2037 | loop_bound_code, | |
e913b5cd | 2038 | tree_to_shwi (loop_bound_step)); |
7fb12188 | 2039 | } |
48e1416a | 2040 | |
e09883e4 | 2041 | /* In the following code |
2042 | for (loop1) | |
2043 | if (cond) | |
2044 | for (loop2) | |
2045 | body; | |
2046 | guess that cond is unlikely. */ | |
2047 | if (loop_outer (loop)->num) | |
2048 | { | |
2049 | basic_block bb = NULL; | |
2050 | edge preheader_edge = loop_preheader_edge (loop); | |
2051 | ||
2052 | if (single_pred_p (preheader_edge->src) | |
2053 | && single_succ_p (preheader_edge->src)) | |
2054 | preheader_edge = single_pred_edge (preheader_edge->src); | |
2055 | ||
2056 | gimple *stmt = last_stmt (preheader_edge->src); | |
2057 | /* Pattern match the Fortran loop preheader: | |
2058 | _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER); | |
2059 | _17 = (logical(kind=4)) _16; | |
2060 | if (_17 != 0) | |
2061 | goto <bb 11>; | |
2062 | else | |
2063 | goto <bb 13>; | |
2064 | ||
2065 | Loop guard branch prediction says nothing about duplicated loop | |
2066 | headers produced by the Fortran frontend; in this case we want | |
2067 | to predict paths leading to this preheader. */ | |
2068 | ||
2069 | if (stmt | |
2070 | && gimple_code (stmt) == GIMPLE_COND | |
2071 | && gimple_cond_code (stmt) == NE_EXPR | |
2072 | && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME | |
2073 | && integer_zerop (gimple_cond_rhs (stmt))) | |
2074 | { | |
2075 | gimple *call_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt)); | |
2076 | if (gimple_code (call_stmt) == GIMPLE_ASSIGN | |
2077 | && gimple_expr_code (call_stmt) == NOP_EXPR | |
2078 | && TREE_CODE (gimple_assign_rhs1 (call_stmt)) == SSA_NAME) | |
2079 | call_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt)); | |
7408cd7d | 2080 | if (gimple_call_internal_p (call_stmt, IFN_BUILTIN_EXPECT) |
e09883e4 | 2081 | && TREE_CODE (gimple_call_arg (call_stmt, 2)) == INTEGER_CST |
2082 | && tree_fits_uhwi_p (gimple_call_arg (call_stmt, 2)) | |
2083 | && tree_to_uhwi (gimple_call_arg (call_stmt, 2)) | |
2084 | == PRED_FORTRAN_LOOP_PREHEADER) | |
2085 | bb = preheader_edge->src; | |
2086 | } | |
2087 | if (!bb) | |
2088 | { | |
2089 | if (!dominated_by_p (CDI_DOMINATORS, | |
2090 | loop_outer (loop)->latch, loop->header)) | |
2091 | predict_paths_leading_to_edge (loop_preheader_edge (loop), | |
cbcc4297 | 2092 | recursion |
2093 | ? PRED_LOOP_GUARD_WITH_RECURSION | |
2094 | : PRED_LOOP_GUARD, | |
e09883e4 | 2095 | NOT_TAKEN, |
2096 | loop_outer (loop)); | |
2097 | } | |
2098 | else | |
2099 | { | |
2100 | if (!dominated_by_p (CDI_DOMINATORS, | |
2101 | loop_outer (loop)->latch, bb)) | |
2102 | predict_paths_leading_to (bb, | |
cbcc4297 | 2103 | recursion |
2104 | ? PRED_LOOP_GUARD_WITH_RECURSION | |
2105 | : PRED_LOOP_GUARD, | |
e09883e4 | 2106 | NOT_TAKEN, |
2107 | loop_outer (loop)); | |
2108 | } | |
2109 | } | |
2110 | ||
21dda4ee | 2111 | /* Free basic blocks from get_loop_body. */ |
dcd8fd01 | 2112 | free (bbs); |
59423b59 | 2113 | } |
4ee9c684 | 2114 | } |
2115 | ||
83c8a977 | 2116 | /* Attempt to predict probabilities of BB outgoing edges using local |
2117 | properties. */ | |
2118 | static void | |
2119 | bb_estimate_probability_locally (basic_block bb) | |
2120 | { | |
ee5f6585 | 2121 | rtx_insn *last_insn = BB_END (bb); |
83c8a977 | 2122 | rtx cond; |
2123 | ||
2124 | if (! can_predict_insn_p (last_insn)) | |
2125 | return; | |
2126 | cond = get_condition (last_insn, NULL, false, false); | |
2127 | if (! cond) | |
2128 | return; | |
2129 | ||
2130 | /* Try "pointer heuristic." | |
2131 | A comparison ptr == 0 is predicted as false. | |
2132 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
2133 | if (COMPARISON_P (cond) | |
2134 | && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0))) | |
2135 | || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1))))) | |
2136 | { | |
2137 | if (GET_CODE (cond) == EQ) | |
2138 | predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN); | |
2139 | else if (GET_CODE (cond) == NE) | |
2140 | predict_insn_def (last_insn, PRED_POINTER, TAKEN); | |
2141 | } | |
2142 | else | |
2143 | ||
2144 | /* Try "opcode heuristic." | |
2145 | EQ tests are usually false and NE tests are usually true. Also, | |
2146 | most quantities are positive, so we can make the appropriate guesses | |
2147 | about signed comparisons against zero. */ | |
2148 | switch (GET_CODE (cond)) | |
2149 | { | |
2150 | case CONST_INT: | |
2151 | /* Unconditional branch. */ | |
2152 | predict_insn_def (last_insn, PRED_UNCONDITIONAL, | |
2153 | cond == const0_rtx ? NOT_TAKEN : TAKEN); | |
2154 | break; | |
2155 | ||
2156 | case EQ: | |
2157 | case UNEQ: | |
2158 | /* Floating point comparisons appear to behave in a very | |
2159 | unpredictable way because of the special role of = tests in | |
2160 | FP code. */ | |
2161 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
2162 | ; | |
2163 | /* Comparisons with 0 are often used for booleans and there is | |
2164 | nothing useful to predict about them. */ | |
2165 | else if (XEXP (cond, 1) == const0_rtx | |
2166 | || XEXP (cond, 0) == const0_rtx) | |
2167 | ; | |
2168 | else | |
2169 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN); | |
2170 | break; | |
2171 | ||
2172 | case NE: | |
2173 | case LTGT: | |
2174 | /* Floating point comparisons appear to behave in a very | |
2175 | unpredictable way because of the special role of = tests in | |
2176 | FP code. */ | |
2177 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
2178 | ; | |
2179 | /* Comparisons with 0 are often used for booleans and there is | |
2180 | nothing useful to predict about them. */ | |
2181 | else if (XEXP (cond, 1) == const0_rtx | |
2182 | || XEXP (cond, 0) == const0_rtx) | |
2183 | ; | |
2184 | else | |
2185 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN); | |
2186 | break; | |
2187 | ||
2188 | case ORDERED: | |
2189 | predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN); | |
2190 | break; | |
2191 | ||
2192 | case UNORDERED: | |
2193 | predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN); | |
2194 | break; | |
2195 | ||
2196 | case LE: | |
2197 | case LT: | |
2198 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
2199 | || XEXP (cond, 1) == constm1_rtx) | |
2200 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN); | |
2201 | break; | |
2202 | ||
2203 | case GE: | |
2204 | case GT: | |
2205 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
2206 | || XEXP (cond, 1) == constm1_rtx) | |
2207 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN); | |
2208 | break; | |
2209 | ||
2210 | default: | |
2211 | break; | |
2212 | } | |
2213 | } | |
2214 | ||
7edd21a5 | 2215 | /* Set edge->probability for each successor edge of BB. */ |
83c8a977 | 2216 | void |
2217 | guess_outgoing_edge_probabilities (basic_block bb) | |
2218 | { | |
2219 | bb_estimate_probability_locally (bb); | |
2220 | combine_predictions_for_insn (BB_END (bb), bb); | |
2221 | } | |
4ee9c684 | 2222 | \f |
c83059be | 2223 | static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor); |
75a70cf9 | 2224 | |
2225 | /* Helper function for expr_expected_value. */ | |
42975b1f | 2226 | |
2227 | static tree | |
2380e91e | 2228 | expr_expected_value_1 (tree type, tree op0, enum tree_code code, |
c83059be | 2229 | tree op1, bitmap visited, enum br_predictor *predictor) |
42975b1f | 2230 | { |
42acab1c | 2231 | gimple *def; |
75a70cf9 | 2232 | |
c83059be | 2233 | if (predictor) |
2234 | *predictor = PRED_UNCONDITIONAL; | |
2235 | ||
75a70cf9 | 2236 | if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS) |
42975b1f | 2237 | { |
75a70cf9 | 2238 | if (TREE_CONSTANT (op0)) |
2239 | return op0; | |
2240 | ||
5a5ef659 | 2241 | if (code == IMAGPART_EXPR) |
2242 | { | |
2243 | if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME) | |
2244 | { | |
2245 | def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0)); | |
2246 | if (is_gimple_call (def) | |
2247 | && gimple_call_internal_p (def) | |
2248 | && (gimple_call_internal_fn (def) | |
2249 | == IFN_ATOMIC_COMPARE_EXCHANGE)) | |
2250 | { | |
2251 | /* Assume that any given atomic operation has low contention, | |
2252 | and thus the compare-and-swap operation succeeds. */ | |
2253 | if (predictor) | |
2254 | *predictor = PRED_COMPARE_AND_SWAP; | |
2255 | return build_one_cst (TREE_TYPE (op0)); | |
2256 | } | |
2257 | } | |
2258 | } | |
2259 | ||
75a70cf9 | 2260 | if (code != SSA_NAME) |
2261 | return NULL_TREE; | |
2262 | ||
2263 | def = SSA_NAME_DEF_STMT (op0); | |
42975b1f | 2264 | |
2265 | /* If we were already here, break the infinite cycle. */ | |
6ef9bbe0 | 2266 | if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0))) |
42975b1f | 2267 | return NULL; |
42975b1f | 2268 | |
75a70cf9 | 2269 | if (gimple_code (def) == GIMPLE_PHI) |
42975b1f | 2270 | { |
2271 | /* All the arguments of the PHI node must have the same expected | |
2272 | constant value. */ | |
75a70cf9 | 2273 | int i, n = gimple_phi_num_args (def); |
42975b1f | 2274 | tree val = NULL, new_val; |
4ee9c684 | 2275 | |
75a70cf9 | 2276 | for (i = 0; i < n; i++) |
42975b1f | 2277 | { |
2278 | tree arg = PHI_ARG_DEF (def, i); | |
c83059be | 2279 | enum br_predictor predictor2; |
42975b1f | 2280 | |
2281 | /* If this PHI has itself as an argument, we cannot | |
2282 | determine the expected value of this argument. However, | |
86481e89 | 2283 | if we can find an expected constant value for the other |
42975b1f | 2284 | PHI args then we can still be sure that this is |
2285 | likely a constant. So be optimistic and just | |
2286 | continue with the next argument. */ | |
2287 | if (arg == PHI_RESULT (def)) | |
2288 | continue; | |
2289 | ||
c83059be | 2290 | new_val = expr_expected_value (arg, visited, &predictor2); |
2291 | ||
2292 | /* It is difficult to combine value predictors. Simply assume | |
2293 | that the later predictor is weaker and take its prediction. */ | |
2294 | if (predictor && *predictor < predictor2) | |
2295 | *predictor = predictor2; | |
42975b1f | 2296 | if (!new_val) |
2297 | return NULL; | |
2298 | if (!val) | |
2299 | val = new_val; | |
2300 | else if (!operand_equal_p (val, new_val, false)) | |
2301 | return NULL; | |
2302 | } | |
2303 | return val; | |
2304 | } | |
75a70cf9 | 2305 | if (is_gimple_assign (def)) |
42975b1f | 2306 | { |
75a70cf9 | 2307 | if (gimple_assign_lhs (def) != op0) |
2308 | return NULL; | |
42975b1f | 2309 | |
75a70cf9 | 2310 | return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)), |
2311 | gimple_assign_rhs1 (def), | |
2312 | gimple_assign_rhs_code (def), | |
2313 | gimple_assign_rhs2 (def), | |
c83059be | 2314 | visited, predictor); |
75a70cf9 | 2315 | } |
2316 | ||
2317 | if (is_gimple_call (def)) | |
2318 | { | |
2319 | tree decl = gimple_call_fndecl (def); | |
2320 | if (!decl) | |
c83059be | 2321 | { |
2322 | if (gimple_call_internal_p (def) | |
2323 | && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT) | |
2324 | { | |
2325 | gcc_assert (gimple_call_num_args (def) == 3); | |
2326 | tree val = gimple_call_arg (def, 0); | |
2327 | if (TREE_CONSTANT (val)) | |
2328 | return val; | |
2329 | if (predictor) | |
2330 | { | |
c83059be | 2331 | tree val2 = gimple_call_arg (def, 2); |
2332 | gcc_assert (TREE_CODE (val2) == INTEGER_CST | |
2333 | && tree_fits_uhwi_p (val2) | |
2334 | && tree_to_uhwi (val2) < END_PREDICTORS); | |
2335 | *predictor = (enum br_predictor) tree_to_uhwi (val2); | |
2336 | } | |
2337 | return gimple_call_arg (def, 1); | |
2338 | } | |
2339 | return NULL; | |
2340 | } | |
2380e91e | 2341 | if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL) |
2342 | switch (DECL_FUNCTION_CODE (decl)) | |
2343 | { | |
2344 | case BUILT_IN_EXPECT: | |
2345 | { | |
2346 | tree val; | |
2347 | if (gimple_call_num_args (def) != 2) | |
2348 | return NULL; | |
2349 | val = gimple_call_arg (def, 0); | |
2350 | if (TREE_CONSTANT (val)) | |
2351 | return val; | |
c83059be | 2352 | if (predictor) |
2353 | *predictor = PRED_BUILTIN_EXPECT; | |
2380e91e | 2354 | return gimple_call_arg (def, 1); |
2355 | } | |
75a70cf9 | 2356 | |
2380e91e | 2357 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N: |
2358 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1: | |
2359 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2: | |
2360 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4: | |
2361 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8: | |
2362 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16: | |
2363 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE: | |
2364 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N: | |
2365 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: | |
2366 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: | |
2367 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: | |
2368 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: | |
2369 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: | |
2370 | /* Assume that any given atomic operation has low contention, | |
2371 | and thus the compare-and-swap operation succeeds. */ | |
c83059be | 2372 | if (predictor) |
2373 | *predictor = PRED_COMPARE_AND_SWAP; | |
2380e91e | 2374 | return boolean_true_node; |
5213d6c9 | 2375 | default: |
2376 | break; | |
75a70cf9 | 2377 | } |
42975b1f | 2378 | } |
75a70cf9 | 2379 | |
2380 | return NULL; | |
42975b1f | 2381 | } |
75a70cf9 | 2382 | |
2383 | if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS) | |
42975b1f | 2384 | { |
75a70cf9 | 2385 | tree res; |
c83059be | 2386 | enum br_predictor predictor2; |
2387 | op0 = expr_expected_value (op0, visited, predictor); | |
42975b1f | 2388 | if (!op0) |
2389 | return NULL; | |
c83059be | 2390 | op1 = expr_expected_value (op1, visited, &predictor2); |
2391 | if (predictor && *predictor < predictor2) | |
2392 | *predictor = predictor2; | |
42975b1f | 2393 | if (!op1) |
2394 | return NULL; | |
75a70cf9 | 2395 | res = fold_build2 (code, type, op0, op1); |
42975b1f | 2396 | if (TREE_CONSTANT (res)) |
2397 | return res; | |
2398 | return NULL; | |
2399 | } | |
75a70cf9 | 2400 | if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS) |
42975b1f | 2401 | { |
75a70cf9 | 2402 | tree res; |
c83059be | 2403 | op0 = expr_expected_value (op0, visited, predictor); |
42975b1f | 2404 | if (!op0) |
2405 | return NULL; | |
75a70cf9 | 2406 | res = fold_build1 (code, type, op0); |
42975b1f | 2407 | if (TREE_CONSTANT (res)) |
2408 | return res; | |
2409 | return NULL; | |
2410 | } | |
2411 | return NULL; | |
2412 | } | |
75a70cf9 | 2413 | |
48e1416a | 2414 | /* Return the constant EXPR is likely to have at execution time, or NULL | |
75a70cf9 | 2415 | if unknown.  The function is used by the builtin_expect branch predictor, | |
2416 | so the evidence must come from this construct and possible constant folding. | |
48e1416a | 2417 | | |
75a70cf9 | 2418 | We may want to implement a more involved value guess (such as prediction | |
2419 | based on value range propagation), but such tricks should go into a new | |
2420 | implementation. */ | |
2421 | ||
2422 | static tree | |
c83059be | 2423 | expr_expected_value (tree expr, bitmap visited, |
2424 | enum br_predictor *predictor) | |
75a70cf9 | 2425 | { |
2426 | enum tree_code code; | |
2427 | tree op0, op1; | |
2428 | ||
2429 | if (TREE_CONSTANT (expr)) | |
c83059be | 2430 | { |
2431 | if (predictor) | |
2432 | *predictor = PRED_UNCONDITIONAL; | |
2433 | return expr; | |
2434 | } | |
75a70cf9 | 2435 | |
2436 | extract_ops_from_tree (expr, &code, &op0, &op1); | |
2437 | return expr_expected_value_1 (TREE_TYPE (expr), | |
c83059be | 2438 | op0, code, op1, visited, predictor); |
75a70cf9 | 2439 | } |
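| /* For example (hypothetical source): for | |
| if (__builtin_expect (x, 42) == 42) ... | |
| expr_expected_value on the left-hand operand returns the constant | |
| 42 and sets *PREDICTOR to PRED_BUILTIN_EXPECT, so the then-edge is | |
| later predicted taken with the BUILTIN_EXPECT_PROBABILITY hitrate. */ | |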
42975b1f | 2440 | \f |
4ee9c684 | 2441 | /* Predict using the opcode of the last statement in the basic block. */ | |
2442 | static void | |
2443 | tree_predict_by_opcode (basic_block bb) | |
2444 | { | |
42acab1c | 2445 | gimple *stmt = last_stmt (bb); |
4ee9c684 | 2446 | edge then_edge; |
75a70cf9 | 2447 | tree op0, op1; |
4ee9c684 | 2448 | tree type; |
42975b1f | 2449 | tree val; |
75a70cf9 | 2450 | enum tree_code cmp; |
cd665a06 | 2451 | edge_iterator ei; |
c83059be | 2452 | enum br_predictor predictor; |
4ee9c684 | 2453 | |
75a70cf9 | 2454 | if (!stmt || gimple_code (stmt) != GIMPLE_COND) |
4ee9c684 | 2455 | return; |
cd665a06 | 2456 | FOR_EACH_EDGE (then_edge, ei, bb->succs) |
4ee9c684 | 2457 | if (then_edge->flags & EDGE_TRUE_VALUE) |
cd665a06 | 2458 | break; |
75a70cf9 | 2459 | op0 = gimple_cond_lhs (stmt); |
2460 | op1 = gimple_cond_rhs (stmt); | |
2461 | cmp = gimple_cond_code (stmt); | |
4ee9c684 | 2462 | type = TREE_TYPE (op0); |
035def86 | 2463 | val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (), |
c83059be | 2464 | &predictor); |
c83059be | 2465 | if (val && TREE_CODE (val) == INTEGER_CST) |
42975b1f | 2466 | { |
c83059be | 2467 | if (predictor == PRED_BUILTIN_EXPECT) |
2468 | { | |
2469 | int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY); | |
21853731 | 2470 | |
c83059be | 2471 | gcc_assert (percent >= 0 && percent <= 100); |
2472 | if (integer_zerop (val)) | |
2473 | percent = 100 - percent; | |
2474 | predict_edge (then_edge, PRED_BUILTIN_EXPECT, HITRATE (percent)); | |
2475 | } | |
2476 | else | |
076d1a59 | 2477 | predict_edge_def (then_edge, predictor, |
2478 | integer_zerop (val) ? NOT_TAKEN : TAKEN); | |
42975b1f | 2479 | } |
4ee9c684 | 2480 | /* Try "pointer heuristic." |
2481 | A comparison ptr == 0 is predicted as false. | |
2482 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
2483 | if (POINTER_TYPE_P (type)) | |
2484 | { | |
75a70cf9 | 2485 | if (cmp == EQ_EXPR) |
4ee9c684 | 2486 | predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN); |
75a70cf9 | 2487 | else if (cmp == NE_EXPR) |
4ee9c684 | 2488 | predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN); |
2489 | } | |
2490 | else | |
2491 | ||
2492 | /* Try "opcode heuristic." | |
2493 | EQ tests are usually false and NE tests are usually true. Also, | |
2494 | most quantities are positive, so we can make the appropriate guesses | |
2495 | about signed comparisons against zero. */ | |
75a70cf9 | 2496 | switch (cmp) |
4ee9c684 | 2497 | { |
2498 | case EQ_EXPR: | |
2499 | case UNEQ_EXPR: | |
2500 | /* Floating point comparisons appear to behave in a very | |
2501 | unpredictable way because of the special role of = tests in | |
2502 | FP code. */ | |
2503 | if (FLOAT_TYPE_P (type)) | |
2504 | ; | |
2505 | /* Comparisons with 0 are often used for booleans and there is | |
2506 | nothing useful to predict about them. */ | |
75a70cf9 | 2507 | else if (integer_zerop (op0) || integer_zerop (op1)) |
4ee9c684 | 2508 | ; |
2509 | else | |
2510 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN); | |
2511 | break; | |
2512 | ||
2513 | case NE_EXPR: | |
318a728f | 2514 | case LTGT_EXPR: |
4ee9c684 | 2515 | /* Floating point comparisons appear to behave in a very | |
2516 | unpredictable way because of the special role of = tests in | |
2517 | FP code. */ | |
2518 | if (FLOAT_TYPE_P (type)) | |
2519 | ; | |
2520 | /* Comparisons with 0 are often used for booleans and there is | |
2521 | nothing useful to predict about them. */ | |
2522 | else if (integer_zerop (op0) | |
75a70cf9 | 2523 | || integer_zerop (op1)) |
4ee9c684 | 2524 | ; |
2525 | else | |
2526 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN); | |
2527 | break; | |
2528 | ||
2529 | case ORDERED_EXPR: | |
2530 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN); | |
2531 | break; | |
2532 | ||
2533 | case UNORDERED_EXPR: | |
2534 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN); | |
2535 | break; | |
2536 | ||
2537 | case LE_EXPR: | |
2538 | case LT_EXPR: | |
75a70cf9 | 2539 | if (integer_zerop (op1) |
2540 | || integer_onep (op1) | |
2541 | || integer_all_onesp (op1) | |
2542 | || real_zerop (op1) | |
2543 | || real_onep (op1) | |
2544 | || real_minus_onep (op1)) | |
4ee9c684 | 2545 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN); |
2546 | break; | |
2547 | ||
2548 | case GE_EXPR: | |
2549 | case GT_EXPR: | |
75a70cf9 | 2550 | if (integer_zerop (op1) |
2551 | || integer_onep (op1) | |
2552 | || integer_all_onesp (op1) | |
2553 | || real_zerop (op1) | |
2554 | || real_onep (op1) | |
2555 | || real_minus_onep (op1)) | |
4ee9c684 | 2556 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN); |
2557 | break; | |
2558 | ||
2559 | default: | |
2560 | break; | |
2561 | } | |
2562 | } | |
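| /* Illustrative examples of the heuristics above: "if (ptr == 0)" is | |
| predicted not taken by the pointer heuristic, while "if (n > 0)" | |
| is predicted taken by the opcode heuristic, since most quantities | |
| are positive. */ | |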
2563 | ||
30f2983e | 2564 | /* Returns TRUE if STMT is an exit(0)-like statement. */ | |
2565 | ||
2566 | static bool | |
2567 | is_exit_with_zero_arg (const gimple *stmt) | |
2568 | { | |
2569 | /* Bail out if this is not a call to exit, _exit or _Exit. */ | |
2570 | if (!gimple_call_builtin_p (stmt, BUILT_IN_EXIT) | |
2571 | && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT) | |
2572 | && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT2)) | |
2573 | return false; | |
2574 | ||
2575 | /* The argument is an integer zero. */ | |
2576 | return integer_zerop (gimple_call_arg (stmt, 0)); | |
2577 | } | |
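| /* E.g. "exit (0)" matches, while "exit (1)" or "abort ()" does not. */ | |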
2578 | ||
f816ec49 | 2579 | /* Try to guess whether the value of return means error code. */ |
75a70cf9 | 2580 | |
f816ec49 | 2581 | static enum br_predictor |
2582 | return_prediction (tree val, enum prediction *prediction) | |
2583 | { | |
2584 | /* VOID. */ | |
2585 | if (!val) | |
2586 | return PRED_NO_PREDICTION; | |
2587 | /* Different heuristics for pointers and scalars. */ | |
2588 | if (POINTER_TYPE_P (TREE_TYPE (val))) | |
2589 | { | |
2590 | /* NULL is usually not returned. */ | |
2591 | if (integer_zerop (val)) | |
2592 | { | |
2593 | *prediction = NOT_TAKEN; | |
2594 | return PRED_NULL_RETURN; | |
2595 | } | |
2596 | } | |
2597 | else if (INTEGRAL_TYPE_P (TREE_TYPE (val))) | |
2598 | { | |
2599 | /* Negative return values are often used to indicate | |
2600 | errors. */ | |
2601 | if (TREE_CODE (val) == INTEGER_CST | |
2602 | && tree_int_cst_sgn (val) < 0) | |
2603 | { | |
2604 | *prediction = NOT_TAKEN; | |
2605 | return PRED_NEGATIVE_RETURN; | |
2606 | } | |
2607 | /* Constant return values seem to be commonly taken. | |
2608 | Zero/one often represent booleans, so exclude them from the | |
2609 | heuristics. */ | |
2610 | if (TREE_CONSTANT (val) | |
2611 | && (!integer_zerop (val) && !integer_onep (val))) | |
2612 | { | |
d3cb49c9 | 2613 | *prediction = NOT_TAKEN; |
4a4e4487 | 2614 | return PRED_CONST_RETURN; |
f816ec49 | 2615 | } |
2616 | } | |
2617 | return PRED_NO_PREDICTION; | |
2618 | } | |
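| /* For instance: "return NULL;" yields PRED_NULL_RETURN and | |
| "return -1;" yields PRED_NEGATIVE_RETURN, both with *PREDICTION | |
| set to NOT_TAKEN, while "return 0;" from an integer-valued function | |
| gives no prediction, since zero often represents a boolean. */ | |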
2619 | ||
2620 | /* Find the basic block with the return expression and look for a possible | |
2621 | return value, trying to apply the RETURN_PREDICTION heuristics. */ | |
2622 | static void | |
d704ea82 | 2623 | apply_return_prediction (void) |
f816ec49 | 2624 | { |
1a91d914 | 2625 | greturn *return_stmt = NULL; |
f816ec49 | 2626 | tree return_val; |
2627 | edge e; | |
1a91d914 | 2628 | gphi *phi; |
f816ec49 | 2629 | int phi_num_args, i; |
2630 | enum br_predictor pred; | |
2631 | enum prediction direction; | |
cd665a06 | 2632 | edge_iterator ei; |
f816ec49 | 2633 | |
34154e27 | 2634 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
f816ec49 | 2635 | { |
42acab1c | 2636 | gimple *last = last_stmt (e->src); |
1a91d914 | 2637 | if (last |
2638 | && gimple_code (last) == GIMPLE_RETURN) | |
2639 | { | |
2640 | return_stmt = as_a <greturn *> (last); | |
2641 | break; | |
2642 | } | |
f816ec49 | 2643 | } |
2644 | if (!e) | |
2645 | return; | |
75a70cf9 | 2646 | return_val = gimple_return_retval (return_stmt); |
f816ec49 | 2647 | if (!return_val) |
2648 | return; | |
f816ec49 | 2649 | if (TREE_CODE (return_val) != SSA_NAME |
2650 | || !SSA_NAME_DEF_STMT (return_val) | |
75a70cf9 | 2651 | || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI) |
f816ec49 | 2652 | return; |
1a91d914 | 2653 | phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val)); |
75a70cf9 | 2654 | phi_num_args = gimple_phi_num_args (phi); |
f816ec49 | 2655 | pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction); |
2656 | ||
2657 | /* Avoid the degenerate case where all return values from the function | |
2658 | belong to the same category (i.e. they are all positive constants), | |
2659 | in which case we can hardly say anything about them. */ | |
2660 | for (i = 1; i < phi_num_args; i++) | |
2661 | if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction)) | |
2662 | break; | |
2663 | if (i != phi_num_args) | |
2664 | for (i = 0; i < phi_num_args; i++) | |
2665 | { | |
2666 | pred = return_prediction (PHI_ARG_DEF (phi, i), &direction); | |
2667 | if (pred != PRED_NO_PREDICTION) | |
5707768a | 2668 | predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred, |
2669 | direction); | |
f816ec49 | 2670 | } |
2671 | } | |
2672 | ||
2673 | /* Look for basic blocks that contain unlikely-to-happen events | |
2674 | (such as noreturn calls) and mark all paths leading to the execution | |
2675 | of these basic blocks as unlikely. */ | |
2676 | ||
2677 | static void | |
2678 | tree_bb_level_predictions (void) | |
2679 | { | |
2680 | basic_block bb; | |
9f694a82 | 2681 | bool has_return_edges = false; |
2682 | edge e; | |
2683 | edge_iterator ei; | |
2684 | ||
34154e27 | 2685 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
f08c22c4 | 2686 | if (!unlikely_executed_edge_p (e) && !(e->flags & EDGE_ABNORMAL_CALL)) |
9f694a82 | 2687 | { |
2688 | has_return_edges = true; | |
2689 | break; | |
2690 | } | |
f816ec49 | 2691 | |
d704ea82 | 2692 | apply_return_prediction (); |
f816ec49 | 2693 | |
fc00614f | 2694 | FOR_EACH_BB_FN (bb, cfun) |
f816ec49 | 2695 | { |
75a70cf9 | 2696 | gimple_stmt_iterator gsi; |
f816ec49 | 2697 | |
1add270f | 2698 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
f816ec49 | 2699 | { |
42acab1c | 2700 | gimple *stmt = gsi_stmt (gsi); |
5de92639 | 2701 | tree decl; |
3ed4a4a1 | 2702 | |
75a70cf9 | 2703 | if (is_gimple_call (stmt)) |
f816ec49 | 2704 | { |
30f2983e | 2705 | if (gimple_call_noreturn_p (stmt) |
2706 | && has_return_edges | |
2707 | && !is_exit_with_zero_arg (stmt)) | |
75a70cf9 | 2708 | predict_paths_leading_to (bb, PRED_NORETURN, |
2709 | NOT_TAKEN); | |
2710 | decl = gimple_call_fndecl (stmt); | |
2711 | if (decl | |
2712 | && lookup_attribute ("cold", | |
2713 | DECL_ATTRIBUTES (decl))) | |
2714 | predict_paths_leading_to (bb, PRED_COLD_FUNCTION, | |
2715 | NOT_TAKEN); | |
89beffc9 | 2716 | if (decl && recursive_call_p (current_function_decl, decl)) |
2717 | predict_paths_leading_to (bb, PRED_RECURSIVE_CALL, | |
2718 | NOT_TAKEN); | |
f816ec49 | 2719 | } |
75a70cf9 | 2720 | else if (gimple_code (stmt) == GIMPLE_PREDICT) |
2721 | { | |
2722 | predict_paths_leading_to (bb, gimple_predict_predictor (stmt), | |
2723 | gimple_predict_outcome (stmt)); | |
1add270f | 2724 | /* Keep GIMPLE_PREDICT around so early inlining will propagate |
2725 | hints to callers. */ | |
75a70cf9 | 2726 | } |
f816ec49 | 2727 | } |
2728 | } | |
f816ec49 | 2729 | } |
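| /* E.g. any path that must reach a noreturn call such as "abort ()", | |
| or a call to a function declared with __attribute__((cold)), is | |
| predicted as unlikely to be executed. */ | |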
2730 | ||
06ecf488 | 2731 | /* Callback for hash_map::traverse, asserts that the pointer map is |
b3723726 | 2732 | empty. */ |
2733 | ||
06ecf488 | 2734 | bool |
2735 | assert_is_empty (const_basic_block const &, edge_prediction *const &value, | |
2736 | void *) | |
b3723726 | 2737 | { |
06ecf488 | 2738 | gcc_assert (!value); |
b3723726 | 2739 | return false; |
2740 | } | |
b3723726 | 2741 | |
fc935416 | 2742 | /* Predict branch probabilities and estimate profile for basic block BB. |
2743 | When LOCAL_ONLY is set, do not use any global properties of the CFG. */ | |
675d86b2 | 2744 | |
2745 | static void | |
fc935416 | 2746 | tree_estimate_probability_bb (basic_block bb, bool local_only) |
675d86b2 | 2747 | { |
2748 | edge e; | |
2749 | edge_iterator ei; | |
675d86b2 | 2750 | |
2751 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2752 | { | |
675d86b2 | 2753 | /* Look for a block we are guarding (i.e. we dominate it, | |
2754 | but it doesn't postdominate us). */ | |
34154e27 | 2755 | if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb |
fc935416 | 2756 | && !local_only |
675d86b2 | 2757 | && dominated_by_p (CDI_DOMINATORS, e->dest, e->src) |
2758 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest)) | |
2759 | { | |
2760 | gimple_stmt_iterator bi; | |
2761 | ||
2762 | /* The call heuristic claims that a guarded function call | |
2763 | is improbable. This is because such calls are often used | |
2764 | to signal exceptional situations such as printing error | |
2765 | messages. */ | |
2766 | for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi); | |
2767 | gsi_next (&bi)) | |
2768 | { | |
42acab1c | 2769 | gimple *stmt = gsi_stmt (bi); |
675d86b2 | 2770 | if (is_gimple_call (stmt) |
89beffc9 | 2771 | && !gimple_inexpensive_call_p (as_a <gcall *> (stmt)) |
675d86b2 | 2772 | /* Constant and pure calls are hardly ever used to signal | |
2773 | something exceptional. */ | |
2774 | && gimple_has_side_effects (stmt)) | |
2775 | { | |
1cb6c2eb | 2776 | if (gimple_call_fndecl (stmt)) |
2777 | predict_edge_def (e, PRED_CALL, NOT_TAKEN); | |
2778 | else if (virtual_method_call_p (gimple_call_fn (stmt))) | |
1d07104f | 2779 | predict_edge_def (e, PRED_POLYMORPHIC_CALL, NOT_TAKEN); |
1cb6c2eb | 2780 | else |
2781 | predict_edge_def (e, PRED_INDIR_CALL, TAKEN); | |
675d86b2 | 2782 | break; |
2783 | } | |
2784 | } | |
2785 | } | |
2786 | } | |
2787 | tree_predict_by_opcode (bb); | |
2788 | } | |
2789 | ||
2790 | /* Predict branch probabilities and estimate profile of the tree CFG. | |
2791 | This function can be called from the loop optimizers to recompute | |
5a5f50e9 | 2792 | the profile information. |
2793 | If DRY_RUN is set, do not modify the CFG and only produce dump files. */ | |
675d86b2 | 2794 | |
2795 | void | |
5a5f50e9 | 2796 | tree_estimate_probability (bool dry_run) |
4ee9c684 | 2797 | { |
2798 | basic_block bb; | |
4ee9c684 | 2799 | |
f816ec49 | 2800 | add_noreturn_fake_exit_edges (); |
4ee9c684 | 2801 | connect_infinite_loops_to_exit (); |
d8a0d6b8 | 2802 | /* We use loop_niter_by_eval, which requires that the loops have |
2803 | preheaders. */ | |
2804 | create_preheaders (CP_SIMPLE_PREHEADERS); | |
4ee9c684 | 2805 | calculate_dominance_info (CDI_POST_DOMINATORS); |
2806 | ||
06ecf488 | 2807 | bb_predictions = new hash_map<const_basic_block, edge_prediction *>; |
f816ec49 | 2808 | tree_bb_level_predictions (); |
d500fef3 | 2809 | record_loop_exits (); |
675d86b2 | 2810 | |
41f75a99 | 2811 | if (number_of_loops (cfun) > 1) |
7194de72 | 2812 | predict_loops (); |
4ee9c684 | 2813 | |
fc00614f | 2814 | FOR_EACH_BB_FN (bb, cfun) |
fc935416 | 2815 | tree_estimate_probability_bb (bb, false); |
4ee9c684 | 2816 | |
fc00614f | 2817 | FOR_EACH_BB_FN (bb, cfun) |
5a5f50e9 | 2818 | combine_predictions_for_bb (bb, dry_run); |
f81d9f78 | 2819 | |
382ecba7 | 2820 | if (flag_checking) |
2821 | bb_predictions->traverse<void *, assert_is_empty> (NULL); | |
2822 | ||
06ecf488 | 2823 | delete bb_predictions; |
b3723726 | 2824 | bb_predictions = NULL; |
2825 | ||
5a5f50e9 | 2826 | if (!dry_run) |
2827 | estimate_bb_frequencies (false); | |
4ee9c684 | 2828 | free_dominance_info (CDI_POST_DOMINATORS); |
41d24834 | 2829 | remove_fake_exit_edges (); |
675d86b2 | 2830 | } |
fc935416 | 2831 | |
2832 | /* Set edge->probability for each successor edge of BB. */ | |
2833 | void | |
2834 | tree_guess_outgoing_edge_probabilities (basic_block bb) | |
2835 | { | |
2836 | bb_predictions = new hash_map<const_basic_block, edge_prediction *>; | |
2837 | tree_estimate_probability_bb (bb, true); | |
2838 | combine_predictions_for_bb (bb, false); | |
2839 | if (flag_checking) | |
2840 | bb_predictions->traverse<void *, assert_is_empty> (NULL); | |
2841 | delete bb_predictions; | |
2842 | bb_predictions = NULL; | |
2843 | } | |
89cfe6e5 | 2844 | \f |
f0b5f617 | 2845 | /* Predict edges to successors of CUR whose sources are not postdominated | |
d704ea82 | 2846 | by BB, using PRED, and recurse to all postdominators. */ | |
f816ec49 | 2847 | |
2848 | static void | |
d704ea82 | 2849 | predict_paths_for_bb (basic_block cur, basic_block bb, |
2850 | enum br_predictor pred, | |
d3443011 | 2851 | enum prediction taken, |
e09883e4 | 2852 | bitmap visited, struct loop *in_loop = NULL) |
f816ec49 | 2853 | { |
2854 | edge e; | |
cd665a06 | 2855 | edge_iterator ei; |
d704ea82 | 2856 | basic_block son; |
f816ec49 | 2857 | |
e09883e4 | 2858 | /* If we exited the loop or CUR is unconditional in the loop, there is |
2859 | nothing to do. */ | |
2860 | if (in_loop | |
2861 | && (!flow_bb_inside_loop_p (in_loop, cur) | |
2862 | || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur))) | |
2863 | return; | |
2864 | ||
d704ea82 | 2865 | /* We are looking for all edges forming the edge cut induced by | |
2866 | the set of all blocks postdominated by BB. */ | |
2867 | FOR_EACH_EDGE (e, ei, cur->preds) | |
2868 | if (e->src->index >= NUM_FIXED_BLOCKS | |
2869 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb)) | |
f816ec49 | 2870 | { |
f1d5a92b | 2871 | edge e2; |
2872 | edge_iterator ei2; | |
2873 | bool found = false; | |
2874 | ||
5707768a | 2875 | /* Ignore fake edges and EH edges; we predict them as not taken anyway. */ | |
f08c22c4 | 2876 | if (unlikely_executed_edge_p (e)) |
f1d5a92b | 2877 | continue; |
d704ea82 | 2878 | gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb)); |
f1d5a92b | 2879 | |
d3443011 | 2880 | /* See if there is an edge from e->src that is not abnormal, | |
e09883e4 | 2881 | does not lead to BB and does not exit the loop. */ | |
f1d5a92b | 2882 | FOR_EACH_EDGE (e2, ei2, e->src->succs) |
2883 | if (e2 != e | |
f08c22c4 | 2884 | && !unlikely_executed_edge_p (e2) |
e09883e4 | 2885 | && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb) |
2886 | && (!in_loop || !loop_exit_edge_p (in_loop, e2))) | |
f1d5a92b | 2887 | { |
2888 | found = true; | |
2889 | break; | |
2890 | } | |
2891 | ||
2892 | /* If there is a non-abnormal path leaving e->src, predict the edge | |
2893 | using the predictor. Otherwise we need to look for paths | |
d3443011 | 2894 | leading to e->src. | |
2895 | ||
2896 | The second case may lead to an infinite loop if we are predicting | |
2897 | regions that are only reachable by abnormal edges. We simply | |
2898 | prevent visiting a given BB twice. */ | |
f1d5a92b | 2899 | if (found) |
f6e0b8d0 | 2900 | { |
2901 | if (!edge_predicted_by_p (e, pred, taken)) | |
2902 | predict_edge_def (e, pred, taken); | |
2903 | } | |
6e3803fb | 2904 | else if (bitmap_set_bit (visited, e->src->index)) |
e09883e4 | 2905 | predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop); |
f816ec49 | 2906 | } |
d704ea82 | 2907 | for (son = first_dom_son (CDI_POST_DOMINATORS, cur); |
2908 | son; | |
2909 | son = next_dom_son (CDI_POST_DOMINATORS, son)) | |
e09883e4 | 2910 | predict_paths_for_bb (son, bb, pred, taken, visited, in_loop); |
d704ea82 | 2911 | } |
f816ec49 | 2912 | |
d704ea82 | 2913 | /* Sets branch probabilities according to PREDiction and | |
2914 | TAKEN. */ | |
f816ec49 | 2915 | |
d704ea82 | 2916 | static void |
2917 | predict_paths_leading_to (basic_block bb, enum br_predictor pred, | |
e09883e4 | 2918 | enum prediction taken, struct loop *in_loop) |
d704ea82 | 2919 | { |
035def86 | 2920 | predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop); |
f816ec49 | 2921 | } |
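
/* Illustrative sketch (not part of the original source): given

       A
      / \
     B   C
     |   |
   abort return

   with BB being the block B that contains a noreturn call,
   predict_paths_leading_to (B, PRED_NORETURN, NOT_TAKEN, NULL) walks
   the edge cut around the region postdominated by B rather than just
   B's incoming edges: A->B is predicted not taken because A still has
   the alternative non-abnormal path A->C.  */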

/* Like predict_paths_leading_to but take edge instead of basic block.  */

static void
predict_paths_leading_to_edge (edge e, enum br_predictor pred,
                               enum prediction taken, struct loop *in_loop)
{
  bool has_nonloop_edge = false;
  edge_iterator ei;
  edge e2;

  basic_block bb = e->src;
  FOR_EACH_EDGE (e2, ei, bb->succs)
    if (e2->dest != e->src && e2->dest != e->dest
        && !unlikely_executed_edge_p (e2)
        && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
      {
        has_nonloop_edge = true;
        break;
      }
  if (!has_nonloop_edge)
    {
      predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
    }
  else
    predict_edge_def (e, pred, taken);
}
\f
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct block_info
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
};

/* Similar information for edges.  */
struct edge_prob_info
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached provided that the header is.  The estimated number
     of iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
};
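
/* Worked example (illustrative only, not from the original source):
   a loop whose back edge is taken with back_edge_prob == 0.9 is
   expected to iterate 1 / (1 - 0.9) == 10 times per entry, matching
   the geometric series 1 + p + p^2 + ... == 1 / (1 - p) that
   propagate_freq below relies on.  */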

#define BLOCK_INFO(B) ((block_info *) (B)->aux)
#undef EDGE_INFO
#define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      bb = BASIC_BLOCK_FOR_FN (cfun, i);

      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          bool visit = bitmap_bit_p (tovisit, e->src->index);

          if (visit && !(e->flags & EDGE_DFS_BACK))
            count++;
          else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
            fprintf (dump_file,
                     "Irreducible region hit, ignoring edge %i->%i\n",
                     e->src->index, bb->index);
        }
      BLOCK_INFO (bb)->npredecessors = count;
      /* When function never returns, we will never process exit block.  */
      if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          bb->count = profile_count::zero ();
          bb->frequency = 0;
        }
    }

  BLOCK_INFO (head)->frequency = 1;
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability = 0;
      sreal frequency = 0;

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
          if (flag_checking)
            FOR_EACH_EDGE (e, ei, bb->preds)
              gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
                          || (e->flags & EDGE_DFS_BACK));

          FOR_EACH_EDGE (e, ei, bb->preds)
            if (EDGE_INFO (e)->back_edge)
              cyclic_probability += EDGE_INFO (e)->back_edge_prob;
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                /* frequency += (e->probability
                                 * BLOCK_INFO (e->src)->frequency /
                                 REG_BR_PROB_BASE);  */

                sreal tmp = e->probability.to_reg_br_prob_base ();
                tmp *= BLOCK_INFO (e->src)->frequency;
                tmp *= real_inv_br_prob_base;
                frequency += tmp;
              }

          if (cyclic_probability == 0)
            BLOCK_INFO (bb)->frequency = frequency;
          else
            {
              if (cyclic_probability > real_almost_one)
                cyclic_probability = real_almost_one;

              /* BLOCK_INFO (bb)->frequency = frequency
                                              / (1 - cyclic_probability) */

              cyclic_probability = sreal (1) - cyclic_probability;
              BLOCK_INFO (bb)->frequency = frequency / cyclic_probability;
            }
        }

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
        {
          /* EDGE_INFO (e)->back_edge_prob
             = ((e->probability * BLOCK_INFO (bb)->frequency)
                / REG_BR_PROB_BASE);  */

          sreal tmp = e->probability.to_reg_br_prob_base ();
          tmp *= BLOCK_INFO (bb)->frequency;
          EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base;
        }

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}
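
/* Worked example (illustrative only): in an if/else diamond where the
   condition block A has frequency 1 and branches to B with probability
   0.3 and to C with probability 0.7, the loop above computes
   frequency (B) == 0.3 and frequency (C) == 0.7, and the join block D,
   processed once both predecessors are done, gets 0.3 + 0.7 == 1.
   With a back edge of probability p into A, A's frequency would
   instead be divided by (1 - p) in the cyclic_probability branch.  */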

/* Estimate frequencies in loops at the same nest level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      auto_bitmap tovisit;

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
    }
}

/* Propagate frequencies through the structure of loops.  */

static void
estimate_loops (void)
{
  auto_bitmap tovisit;
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (number_of_loops (cfun) > 1)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      bitmap_set_bit (tovisit, bb->index);
    }
  propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
}
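
/* Note (illustrative, not from the original source): because inner
   loops are processed before their parents, expected iteration counts
   compose multiplicatively.  E.g. an inner loop expected to iterate 10
   times whose header sits in an outer loop iterating 5 times ends up
   with a header frequency of roughly 50 relative to the function
   entry, before the final scaling to BB_FREQ_MAX.  */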

/* Drop the profile for NODE to guessed, and update its frequency based on
   whether it is expected to be hot given the CALL_COUNT.  */

static void
drop_profile (struct cgraph_node *node, profile_count call_count)
{
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
  /* In the case where this was called by another function with a
     dropped profile, call_count will be 0.  Since there are no
     non-zero call counts to this function, we don't know for sure
     whether it is hot, and therefore it will be marked normal below.  */
  bool hot = maybe_hot_count_p (NULL, call_count);

  if (dump_file)
    fprintf (dump_file,
             "Dropping 0 profile for %s. %s based on calls.\n",
             node->dump_name (),
             hot ? "Function is hot" : "Function is normal");
  /* We only expect to miss profiles for functions that are reached
     via non-zero call edges in cases where the function may have
     been linked from another module or library (COMDATs and extern
     templates).  See the comments below for handle_missing_profiles.
     Also, only warn in cases where the missing counts exceed the
     number of training runs.  In certain cases with an execv followed
     by a no-return call the profile for the no-return call is not
     dumped and there can be a mismatch.  */
  if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
      && call_count > profile_info->runs)
    {
      if (flag_profile_correction)
        {
          if (dump_file)
            fprintf (dump_file,
                     "Missing counts for called function %s\n",
                     node->dump_name ());
        }
      else
        warning (0, "Missing counts for called function %s",
                 node->dump_name ());
    }

  basic_block bb;
  FOR_ALL_BB_FN (bb, fn)
    {
      bb->count = profile_count::uninitialized ();

      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->preds)
        e->count = profile_count::uninitialized ();
    }

  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    {
      e->count = profile_count::uninitialized ();
      e->frequency = compute_call_stmt_bb_frequency (e->caller->decl,
                                                     gimple_bb (e->call_stmt));
    }
  node->count = profile_count::uninitialized ();

  profile_status_for_fn (fn)
    = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
  node->frequency
    = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
}

/* In the case of COMDAT routines, multiple object files will contain the same
   function and the linker will select one for the binary.  In that case
   all the other copies from the profile instrument binary will be missing
   profile counts.  Look for cases where this happened, due to non-zero
   call counts going to 0-count functions, and drop the profile to guessed
   so that we can use the estimated probabilities and avoid optimizing only
   for size.

   The other case where the profile may be missing is when the routine
   is not going to be emitted to the object file, e.g. for "extern template"
   class methods.  Those will be marked DECL_EXTERNAL.  Emit a warning in
   all other cases of non-zero calls to 0-count functions.  */

void
handle_missing_profiles (void)
{
  struct cgraph_node *node;
  int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
  auto_vec<struct cgraph_node *, 64> worklist;

  /* See if a 0-count function has non-0 count callers.  In this case we
     lost some profile.  Drop its function profile to PROFILE_GUESSED.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct cgraph_edge *e;
      profile_count call_count = profile_count::zero ();
      gcov_type max_tp_first_run = 0;
      struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

      if (!(node->count == profile_count::zero ()))
        continue;
      for (e = node->callers; e; e = e->next_caller)
        if (e->count.initialized_p () && e->count > 0)
          {
            call_count = call_count + e->count;

            if (e->caller->tp_first_run > max_tp_first_run)
              max_tp_first_run = e->caller->tp_first_run;
          }

      /* If time profile is missing, let's assign the maximum that comes
         from caller functions.  */
      if (!node->tp_first_run && max_tp_first_run)
        node->tp_first_run = max_tp_first_run + 1;

      if (call_count > 0
          && fn && fn->cfg
          && (call_count.apply_scale (unlikely_count_fraction, 1)
              >= profile_info->runs))
        {
          drop_profile (node, call_count);
          worklist.safe_push (node);
        }
    }

  /* Propagate the profile dropping to other 0-count COMDATs that are
     potentially called by COMDATs we already dropped the profile on.  */
  while (worklist.length () > 0)
    {
      struct cgraph_edge *e;

      node = worklist.pop ();
      for (e = node->callees; e; e = e->next_callee)
        {
          struct cgraph_node *callee = e->callee;
          struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);

          if (callee->count > 0)
            continue;
          if ((DECL_COMDAT (callee->decl) || DECL_EXTERNAL (callee->decl))
              && fn && fn->cfg
              && profile_status_for_fn (fn) == PROFILE_READ)
            {
              drop_profile (node, profile_count::zero ());
              worklist.safe_push (callee);
            }
        }
    }
}
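
/* Numeric example (illustrative only): assuming an
   UNLIKELY_BB_COUNT_FRACTION of 20 and profile_info->runs == 100, a
   0-count function whose incoming call edges sum to call_count == 5
   satisfies 5 * 20 >= 100 and has its profile dropped to guessed,
   while one called only 4 times keeps its read (all-zero) profile.  */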

/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

bool
counts_to_freqs (void)
{
  gcov_type count_max;
  profile_count true_count_max = profile_count::zero ();
  basic_block bb;

  /* Don't overwrite the estimated frequencies when the profile for
     the function is missing.  We may drop this function to
     PROFILE_GUESSED later in drop_profile ().  */
  if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ()
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())
    return false;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    if (bb->count > true_count_max)
      true_count_max = bb->count;

  /* If we have no counts to base frequencies on, keep those that are
     already there.  */
  if (!(true_count_max > 0))
    return false;

  count_max = true_count_max.to_gcov_type ();

  FOR_ALL_BB_FN (bb, cfun)
    if (bb->count.initialized_p ())
      bb->frequency = RDIV (bb->count.to_gcov_type () * BB_FREQ_MAX, count_max);

  return true;
}
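
/* Worked example (illustrative only): with BB_FREQ_MAX == 10000, a
   block executed 250 times in a function whose hottest block ran 1000
   times gets bb->frequency = RDIV (250 * 10000, 1000) == 2500; i.e.
   counts are rescaled so that the hottest block maps to BB_FREQ_MAX.  */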

/* Return true if function is likely to be expensive, so there is no point
   to optimize performance of prologue, epilogue or do inlining at the
   expense of code size growth.  THRESHOLD is the limit of number of
   instructions function can execute on average to be still considered
   not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We cannot compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      FOR_BB_INSNS (bb, insn)
        if (active_insn_p (insn))
          {
            sum += bb->frequency;
            if (sum > limit)
              return true;
          }
    }

  return false;
}
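
/* Worked example (illustrative only): with an entry block frequency of
   1000 and THRESHOLD == 50, limit == 50000.  Adding bb->frequency once
   per active insn approximates the number of instructions executed per
   invocation relative to the entry frequency, so exceeding the limit
   means the function executes more than THRESHOLD instructions on
   average and is reported as expensive.  */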

/* All basic blocks that are reachable only from unlikely basic blocks are
   unlikely.  */

void
propagate_unlikely_bbs_forward (void)
{
  auto_vec<basic_block, 64> worklist;
  basic_block bb;
  edge_iterator ei;
  edge e;

  if (!(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()))
    {
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(size_t) 1;
      worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));

      while (worklist.length () > 0)
        {
          bb = worklist.pop ();
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->count == profile_count::zero ())
                && !(e->dest->count == profile_count::zero ())
                && !e->dest->aux)
              {
                e->dest->aux = (void *)(size_t) 1;
                worklist.safe_push (e->dest);
              }
        }
    }

  FOR_ALL_BB_FN (bb, cfun)
    {
      if (!bb->aux)
        {
          if (!(bb->count == profile_count::zero ())
              && (dump_file && (dump_flags & TDF_DETAILS)))
            fprintf (dump_file,
                     "Basic block %i is marked unlikely by forward prop\n",
                     bb->index);
          bb->count = profile_count::zero ();
          bb->frequency = 0;
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->count = profile_count::zero ();
        }
      else
        bb->aux = NULL;
    }
}

/* Determine basic blocks/edges that are known to be unlikely executed and
   set their counters to zero.
   This is done by first identifying obviously unlikely BBs/edges and then
   propagating in both directions.  */

static void
determine_unlikely_bbs ()
{
  basic_block bb;
  auto_vec<basic_block, 64> worklist;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (!(bb->count == profile_count::zero ())
          && unlikely_executed_bb_p (bb))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Basic block %i is locally unlikely\n",
                     bb->index);
          bb->count = profile_count::zero ();
        }

      if (bb->count == profile_count::zero ())
        {
          bb->frequency = 0;
          FOR_EACH_EDGE (e, ei, bb->preds)
            e->count = profile_count::zero ();
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->count == profile_count::zero ())
            && unlikely_executed_edge_p (e))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Edge %i->%i is locally unlikely\n",
                       bb->index, e->dest->index);
            e->count = profile_count::zero ();
          }

      gcc_checking_assert (!bb->aux);
    }

  auto_vec<int, 64> nsuccs;
  nsuccs.safe_grow_cleared (last_basic_block_for_fn (cfun));
  FOR_ALL_BB_FN (bb, cfun)
    if (!(bb->count == profile_count::zero ())
        && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
      {
        nsuccs[bb->index] = 0;
        FOR_EACH_EDGE (e, ei, bb->succs)
          if (!(e->count == profile_count::zero ()))
            nsuccs[bb->index]++;
        if (!nsuccs[bb->index])
          worklist.safe_push (bb);
      }
  while (worklist.length () > 0)
    {
      bb = worklist.pop ();
      if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bool found = false;
          for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
               !gsi_end_p (gsi); gsi_next (&gsi))
            if (stmt_can_terminate_bb_p (gsi_stmt (gsi))
                /* stmt_can_terminate_bb_p special cases noreturns because it
                   assumes that fake edges are created.  We want to know that
                   noreturn alone does not imply BB to be unlikely.  */
                || (is_gimple_call (gsi_stmt (gsi))
                    && (gimple_call_flags (gsi_stmt (gsi)) & ECF_NORETURN)))
              {
                found = true;
                break;
              }
          if (found)
            continue;
        }
      if (!(bb->count == profile_count::zero ())
          && (dump_file && (dump_flags & TDF_DETAILS)))
        fprintf (dump_file,
                 "Basic block %i is marked unlikely by backward prop\n",
                 bb->index);
      bb->count = profile_count::zero ();
      bb->frequency = 0;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!(e->count == profile_count::zero ()))
          {
            e->count = profile_count::zero ();
            if (!(e->src->count == profile_count::zero ()))
              {
                nsuccs[e->src->index]--;
                if (!nsuccs[e->src->index])
                  worklist.safe_push (e->src);
              }
          }
    }
}
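
/* Illustrative sketch (not from the original source): the backward
   walk treats edge counts as flow.  Once every successor edge of a
   block has been zeroed (its nsuccs entry drops to 0) and the block
   contains no statement that could terminate it some other way, no
   execution can leave the block, so the block cannot execute either;
   its count is zeroed and the effect ripples to its predecessors via
   the worklist.  */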

/* Estimate and propagate basic block frequencies using the given branch
   probabilities.  If FORCE is true, the frequencies are used to estimate
   the counts even when there are already non-zero profile counts.  */

void
estimate_bb_frequencies (bool force)
{
  basic_block bb;
  sreal freq_max;

  determine_unlikely_bbs ();

  if (force || profile_status_for_fn (cfun) != PROFILE_READ
      || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
        {
          real_values_initialized = 1;
          real_br_prob_base = REG_BR_PROB_BASE;
          real_bb_freq_max = BB_FREQ_MAX;
          real_one_half = sreal (1, -1);
          real_inv_br_prob_base = sreal (1) / real_br_prob_base;
          real_almost_one = sreal (1) - real_inv_br_prob_base;
        }

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
         profile_probability::always ();

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (block_info));
      alloc_aux_for_edges (sizeof (edge_prob_info));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
        {
          edge e;
          edge_iterator ei;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              EDGE_INFO (e)->back_edge_prob
                 = e->probability.to_reg_br_prob_base ();
              EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base;
            }
        }

      /* First compute frequencies locally for each loop from innermost
         to outermost to examine frequencies for back edges.  */
      estimate_loops ();

      freq_max = 0;
      FOR_EACH_BB_FN (bb, cfun)
        if (freq_max < BLOCK_INFO (bb)->frequency)
          freq_max = BLOCK_INFO (bb)->frequency;

      freq_max = real_bb_freq_max / freq_max;
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
        {
          sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half;
          bb->frequency = tmp.to_int ();
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
}
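
/* Scaling example (illustrative only): if the largest propagated sreal
   frequency in the function is 25.0 and BB_FREQ_MAX is 10000, freq_max
   becomes 10000 / 25 == 400, and a block with propagated frequency 2.5
   gets bb->frequency == 2.5 * 400 == 1000; adding real_one_half before
   truncation implements rounding to the nearest integer.  */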
/* Decide whether function is hot, cold or unlikely executed.  */
void
compute_function_frequency (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node::get (current_function_decl);

  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      || MAIN_NAME_P (DECL_NAME (current_function_decl)))
    node->only_called_at_startup = true;
  if (DECL_STATIC_DESTRUCTOR (current_function_decl))
    node->only_called_at_exit = true;

  if (profile_status_for_fn (cfun) != PROFILE_READ)
    {
      int flags = flags_from_decl_or_type (current_function_decl);
      if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()
          || lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
             != NULL)
        node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
               != NULL)
        node->frequency = NODE_FREQUENCY_HOT;
      else if (flags & ECF_NORETURN)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
               || DECL_STATIC_DESTRUCTOR (current_function_decl))
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      return;
    }

  /* Only the first time do we try to drop the function into the
     unlikely-executed category; after inlining the roundoff errors may
     confuse us.  The ipa-profile pass will drop functions only called
     from unlikely functions to unlikely, and that is most of what we
     care about.  */
  if (!cfun->after_inlining)
    node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (maybe_hot_bb_p (cfun, bb))
        {
          node->frequency = NODE_FREQUENCY_HOT;
          return;
        }
      if (!probably_never_executed_bb_p (cfun, bb))
        node->frequency = NODE_FREQUENCY_NORMAL;
    }
}

/* Build PREDICT_EXPR.  */
tree
build_predict_expr (enum br_predictor predictor, enum prediction taken)
{
  tree t = build1 (PREDICT_EXPR, void_type_node,
                   build_int_cst (integer_type_node, predictor));
  SET_PREDICT_EXPR_OUTCOME (t, taken);
  return t;
}

/* Return the name of PREDICTOR.  */

const char *
predictor_name (enum br_predictor predictor)
{
  return predictor_info[predictor].name;
}
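
/* Usage sketch (illustrative only, not from the original source): a
   front end lowering a hint similar to __builtin_expect could emit

     tree hint = build_predict_expr (PRED_BUILTIN_EXPECT, TAKEN);

   and insert it into the statement stream; the profile_estimate pass
   consumes such hints and pass_strip_predict_hints below removes them
   once edge probabilities have been set.  */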

/* Predict branch probabilities and estimate profile of the tree CFG.  */

namespace {

const pass_data pass_data_profile =
{
  GIMPLE_PASS, /* type */
  "profile_estimate", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_profile : public gimple_opt_pass
{
public:
  pass_profile (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_profile, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_guess_branch_prob; }
  virtual unsigned int execute (function *);

}; // class pass_profile

unsigned int
pass_profile::execute (function *fun)
{
  unsigned nb_loops;

  if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
    return 0;

  loop_optimizer_init (LOOPS_NORMAL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops (fun);
  if (nb_loops > 1)
    scev_initialize ();

  tree_estimate_probability (false);

  if (nb_loops > 1)
    scev_finalize ();

  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags);
  if (profile_status_for_fn (fun) == PROFILE_ABSENT)
    profile_status_for_fn (fun) = PROFILE_GUESSED;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      struct loop *loop;
      FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
        if (loop->header->frequency)
          fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n",
                   loop->num,
                   (int)expected_loop_iterations_unbounded (loop));
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_profile (gcc::context *ctxt)
{
  return new pass_profile (ctxt);
}

namespace {

const pass_data pass_data_strip_predict_hints =
{
  GIMPLE_PASS, /* type */
  "*strip_predict_hints", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_BRANCH_PROB, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strip_predict_hints : public gimple_opt_pass
{
public:
  pass_strip_predict_hints (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_strip_predict_hints

/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
   we no longer need.  */
unsigned int
pass_strip_predict_hints::execute (function *fun)
{
  basic_block bb;
  gimple *ass_stmt;
  tree var;
  bool changed = false;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator bi;
      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
        {
          gimple *stmt = gsi_stmt (bi);

          if (gimple_code (stmt) == GIMPLE_PREDICT)
            {
              gsi_remove (&bi, true);
              changed = true;
              continue;
            }
          else if (is_gimple_call (stmt))
            {
              tree fndecl = gimple_call_fndecl (stmt);

              if ((fndecl
                   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
                   && gimple_call_num_args (stmt) == 2)
                  || (gimple_call_internal_p (stmt)
                      && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT))
                {
                  var = gimple_call_lhs (stmt);
                  changed = true;
                  if (var)
                    {
                      ass_stmt
                        = gimple_build_assign (var, gimple_call_arg (stmt, 0));
                      gsi_replace (&bi, ass_stmt, true);
                    }
                  else
                    {
                      gsi_remove (&bi, true);
                      continue;
                    }
                }
            }
          gsi_next (&bi);
        }
    }
  return changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strip_predict_hints (gcc::context *ctxt)
{
  return new pass_strip_predict_hints (ctxt);
}
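
/* Before/after sketch (illustrative GIMPLE, not real dump output):

     tmp = __builtin_expect (cond, 1);   -->   tmp = cond;
     GIMPLE_PREDICT <hint>               -->   (removed)

   The hint has already been folded into edge probabilities by this
   point, so only the plain value of the first argument is kept.  */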

/* Rebuild function frequencies.  Passes are in general expected to
   maintain profile by hand, however in some cases this is not possible:
   for example when inlining several functions with loops, frequencies
   might run out of scale and thus need to be recomputed.  */

void
rebuild_frequencies (void)
{
  timevar_push (TV_REBUILD_FREQUENCIES);

  /* When the max bb count in the function is small, there is a higher
     chance that there were truncation errors in the integer scaling
     of counts by inlining and other optimizations.  This could lead
     to incorrect classification of code as being cold when it isn't.
     In that case, force the estimation of bb counts/frequencies from the
     branch probabilities, rather than computing frequencies from counts,
     which may also lead to frequencies incorrectly reduced to 0.  There
     is less precision in the probabilities, so we only do this for small
     max counts.  */
  profile_count count_max = profile_count::zero ();
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    if (bb->count > count_max)
      count_max = bb->count;

  if (profile_status_for_fn (cfun) == PROFILE_GUESSED
      || (!flag_auto_profile && profile_status_for_fn (cfun) == PROFILE_READ
          && count_max < REG_BR_PROB_BASE / 10))
    {
      loop_optimizer_init (0);
      add_noreturn_fake_exit_edges ();
      mark_irreducible_loops ();
      connect_infinite_loops_to_exit ();
      estimate_bb_frequencies (true);
      remove_fake_exit_edges ();
      loop_optimizer_finalize ();
    }
  else if (profile_status_for_fn (cfun) == PROFILE_READ)
    counts_to_freqs ();
  else
    gcc_unreachable ();
  timevar_pop (TV_REBUILD_FREQUENCIES);
}

/* Perform a dry run of the branch prediction pass and report comparison of
   the predicted and real profile into the dump file.  */

void
report_predictor_hitrates (void)
{
  unsigned nb_loops;

  loop_optimizer_init (LOOPS_NORMAL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops (cfun);
  if (nb_loops > 1)
    scev_initialize ();

  tree_estimate_probability (true);

  if (nb_loops > 1)
    scev_finalize ();

  loop_optimizer_finalize ();
}

/* Force edge E to be cold.
   If IMPOSSIBLE is true, force the edge to have count and probability 0;
   otherwise keep a low probability to represent possible error in a guess.
   This is used e.g. when we predict a loop to likely iterate a given
   number of times but we are not 100% sure.

   This function locally updates the profile without attempting to keep
   global consistency, which cannot be reached in full generality without
   a full profile rebuild from probabilities alone.  Doing so is not
   necessarily a good idea because frequencies and counts may be more
   realistic than probabilities.

   In some cases (such as for elimination of early exits during full loop
   unrolling) the caller can ensure that the profile will get consistent
   afterwards.  */

void
force_edge_cold (edge e, bool impossible)
{
  profile_count count_sum = profile_count::zero ();
  profile_probability prob_sum = profile_probability::never ();
  edge_iterator ei;
  edge e2;
  profile_count old_count = e->count;
  profile_probability old_probability = e->probability;
  bool uninitialized_exit = false;

  profile_probability goal = (impossible ? profile_probability::never ()
                              : profile_probability::very_unlikely ());

  /* If the edge is already improbable or cold, just return.  */
  if (e->probability <= goal
      && (!impossible || e->count == profile_count::zero ()))
    return;
  FOR_EACH_EDGE (e2, ei, e->src->succs)
    if (e2 != e)
      {
        if (e2->count.initialized_p ())
          count_sum += e2->count;
        else
          uninitialized_exit = true;
        if (e2->probability.initialized_p ())
          prob_sum += e2->probability;
      }

  /* If there are other edges out of e->src, redistribute the probability
     there.  */
  if (prob_sum > profile_probability::never ())
    {
      if (!(e->probability < goal))
        e->probability = goal;
      if (impossible)
        e->count = profile_count::zero ();
      else if (old_probability > profile_probability::never ())
        e->count = e->count.apply_probability (e->probability
                                               / old_probability);
      else
        e->count = e->count.apply_scale (1, REG_BR_PROB_BASE);

      profile_probability prob_comp = prob_sum / e->probability.invert ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Making edge %i->%i %s by redistributing "
                 "probability to other edges.\n",
                 e->src->index, e->dest->index,
                 impossible ? "impossible" : "cold");
      profile_count count_sum2 = count_sum + old_count - e->count;
      FOR_EACH_EDGE (e2, ei, e->src->succs)
        if (e2 != e)
          {
            if (count_sum > 0)
              e2->count = e2->count.apply_scale (count_sum2, count_sum);
            e2->probability /= prob_comp;
          }
      if (current_ir_type () != IR_GIMPLE
          && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        update_br_prob_note (e->src);
    }
  /* If all edges out of e->src are unlikely, the basic block itself
     is unlikely.  */
  else
    {
      if (prob_sum == profile_probability::never ())
        e->probability = profile_probability::always ();
      else
        {
          if (impossible)
            e->probability = profile_probability::never ();
          /* If BB has some edges out that are not impossible, we cannot
             assume that BB itself is.  */
          impossible = false;
        }
      if (current_ir_type () != IR_GIMPLE
          && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        update_br_prob_note (e->src);
      if (e->src->count == profile_count::zero ())
        return;
      if (count_sum == profile_count::zero () && !uninitialized_exit
          && impossible)
        {
          bool found = false;
          if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            ;
          else if (current_ir_type () == IR_GIMPLE)
            for (gimple_stmt_iterator gsi = gsi_start_bb (e->src);
                 !gsi_end_p (gsi); gsi_next (&gsi))
              {
                if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
                  {
                    found = true;
                    break;
                  }
              }
          /* FIXME: Implement RTL path.  */
          else
            found = true;
          if (!found)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "Making bb %i impossible and dropping count to 0.\n",
                         e->src->index);
              e->count = profile_count::zero ();
              e->src->count = profile_count::zero ();
              FOR_EACH_EDGE (e2, ei, e->src->preds)
                force_edge_cold (e2, impossible);
              return;
            }
        }

      /* If we did no adjusting, the source basic block has no likely edges
         leaving in the other direction.  In that case force that bb cold,
         too.  This in general is a difficult task to do, but handle the
         special case when BB has only one predecessor.  This is the common
         case when we are updating after loop transforms.  */
      if (!(prob_sum > profile_probability::never ())
          && count_sum == profile_count::zero ()
          && single_pred_p (e->src) && e->src->frequency > (impossible ? 0 : 1))
        {
          int old_frequency = e->src->frequency;
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Making bb %i %s.\n", e->src->index,
                     impossible ? "impossible" : "cold");
          e->src->frequency = MIN (e->src->frequency, impossible ? 0 : 1);
          if (impossible)
            e->src->count = e->count = profile_count::zero ();
          else
            e->src->count = e->count = e->count.apply_scale (e->src->frequency,
                                                             old_frequency);
          force_edge_cold (single_pred_edge (e->src), impossible);
        }
      else if (dump_file && (dump_flags & TDF_DETAILS)
               && maybe_hot_bb_p (cfun, e->src))
        fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index,
                 impossible ? "impossible" : "cold");
    }
}
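
/* Numeric example (illustrative only): suppose e->src has two outgoing
   edges, E with probability 40% and E2 with probability 60%, and E is
   forced impossible.  E's probability becomes 0%, prob_comp
   == 60% / (100% - 0%) == 0.6, and dividing E2's probability by it
   yields 100%.  With counts 40 and 60, count_sum2 == 60 + 40 - 0
   == 100 and E2's count is scaled by 100/60 to 100, so the block's
   outgoing flow is preserved.  */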

#if CHECKING_P

namespace selftest {

/* Test that value range of predictor values defined in predict.def is
   within range (50, 100].  */

struct branch_predictor
{
  const char *name;
  unsigned probability;
};

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) { NAME, HITRATE },

static void
test_prediction_value_range ()
{
  branch_predictor predictors[] = {
#include "predict.def"
    {NULL, -1U}
  };

  for (unsigned i = 0; predictors[i].name != NULL; i++)
    {
      unsigned p = 100 * predictors[i].probability / REG_BR_PROB_BASE;
      ASSERT_TRUE (p > 50 && p <= 100);
    }
}

#undef DEF_PREDICTOR

/* Run all of the selftests within this file.  */

void
predict_c_tests ()
{
  test_prediction_value_range ();
}

} // namespace selftest
#endif /* CHECKING_P.  */