Commit | Line | Data |
---|---|---|
f1ebdfc5 | 1 | /* Branch prediction routines for the GNU compiler. |
85ec4feb | 2 | Copyright (C) 2000-2018 Free Software Foundation, Inc. |
f1ebdfc5 | 3 | |
bfdade77 | 4 | This file is part of GCC. |
f1ebdfc5 | 5 | |
bfdade77 RK |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 8 | Software Foundation; either version 3, or (at your option) any later |
bfdade77 | 9 | version. |
f1ebdfc5 | 10 | |
bfdade77 RK |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
f1ebdfc5 | 15 | |
bfdade77 | 16 | You should have received a copy of the GNU General Public License |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
f1ebdfc5 JE |
19 | |
20 | /* References: | |
21 | ||
22 | [1] "Branch Prediction for Free" | |
23 | Ball and Larus; PLDI '93. | |
24 | [2] "Static Branch Frequency and Program Profile Analysis" | |
25 | Wu and Larus; MICRO-27. | |
26 | [3] "Corpus-based Static Branch Prediction" | |
3ef42a0c | 27 | Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */ |
f1ebdfc5 JE |
28 | |
29 | ||
30 | #include "config.h" | |
31 | #include "system.h" | |
4977bab6 | 32 | #include "coretypes.h" |
c7131fb2 | 33 | #include "backend.h" |
957060b5 | 34 | #include "rtl.h" |
f1ebdfc5 | 35 | #include "tree.h" |
c7131fb2 | 36 | #include "gimple.h" |
957060b5 AM |
37 | #include "cfghooks.h" |
38 | #include "tree-pass.h" | |
c7131fb2 | 39 | #include "ssa.h" |
4d0cdd0c | 40 | #include "memmodel.h" |
957060b5 | 41 | #include "emit-rtl.h" |
957060b5 AM |
42 | #include "cgraph.h" |
43 | #include "coverage.h" | |
44 | #include "diagnostic-core.h" | |
45 | #include "gimple-predict.h" | |
40e23961 | 46 | #include "fold-const.h" |
d8a2d370 | 47 | #include "calls.h" |
60393bbc | 48 | #include "cfganal.h" |
59f2e9d8 | 49 | #include "profile.h" |
ac5e69da | 50 | #include "sreal.h" |
194734e9 | 51 | #include "params.h" |
3d436d2a | 52 | #include "cfgloop.h" |
5be5c238 | 53 | #include "gimple-iterator.h" |
442b4905 | 54 | #include "tree-cfg.h" |
e28030cf | 55 | #include "tree-ssa-loop-niter.h" |
442b4905 | 56 | #include "tree-ssa-loop.h" |
b6acab32 | 57 | #include "tree-scalar-evolution.h" |
888ed1a3 | 58 | #include "ipa-utils.h" |
9bb86f40 | 59 | #include "gimple-pretty-print.h" |
d8838217 | 60 | #include "selftest.h" |
357067f2 | 61 | #include "cfgrtl.h" |
314e6352 ML |
62 | #include "stringpool.h" |
63 | #include "attribs.h" | |
8aa18a7d | 64 | |
e49efc14 ML |
65 | /* Enum with reasons why a predictor is ignored. */ |
66 | ||
67 | enum predictor_reason | |
68 | { | |
46c1cff6 ML |
69 | REASON_NONE, |
70 | REASON_IGNORED, | |
71 | REASON_SINGLE_EDGE_DUPLICATE, | |
72 | REASON_EDGE_PAIR_DUPLICATE | |
e49efc14 ML |
73 | }; |
74 | ||
75 | /* String messages for the aforementioned enum. */ | |
76 | ||
77 | static const char *reason_messages[] = {"", " (ignored)", | |
78 | " (single edge duplicate)", " (edge pair duplicate)"}; | |
79 | ||
fbe3b30b SB |
80 | /* real constants: 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
81 | 1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX. */ |
fd27ffab | 82 | static sreal real_almost_one, real_br_prob_base, |
ac5e69da | 83 | real_inv_br_prob_base, real_one_half, real_bb_freq_max; |
f1ebdfc5 | 84 | |
9f215bf5 | 85 | static void combine_predictions_for_insn (rtx_insn *, basic_block); |
e49efc14 ML |
86 | static void dump_prediction (FILE *, enum br_predictor, int, basic_block, |
87 | enum predictor_reason, edge); | |
7805417a JH |
88 | static void predict_paths_leading_to (basic_block, enum br_predictor, |
89 | enum prediction, | |
90 | struct loop *in_loop = NULL); | |
91 | static void predict_paths_leading_to_edge (edge, enum br_predictor, | |
92 | enum prediction, | |
93 | struct loop *in_loop = NULL); | |
9f215bf5 | 94 | static bool can_predict_insn_p (const rtx_insn *); |
ee92cb46 | 95 | |
4db384c9 JH |
96 | /* Information we hold about each branch predictor. |
97 | Filled using information from predict.def. */ | |
bfdade77 | 98 | |
4db384c9 | 99 | struct predictor_info |
ee92cb46 | 100 | { |
8b60264b KG |
101 | const char *const name; /* Name used in the debugging dumps. */ |
102 | const int hitrate; /* Expected hitrate used by | |
103 | predict_insn_def call. */ | |
104 | const int flags; | |
4db384c9 | 105 | }; |
ee92cb46 | 106 | |
134d3a2e JH |
107 | /* Use given predictor without Dempster-Shafer theory if it matches
108 | using first_match heuristics. */ | |
109 | #define PRED_FLAG_FIRST_MATCH 1 | |
110 | ||
111 | /* Convert a hit rate given in percent to our representation. */ |
112 | ||
bfdade77 | 113 | #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100) |
134d3a2e JH |
114 | |
115 | #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS}, | |
bfdade77 | 116 | static const struct predictor_info predictor_info[]= { |
4db384c9 JH |
117 | #include "predict.def" |
118 | ||
dc297297 | 119 | /* Upper bound on predictors. */ |
134d3a2e | 120 | {NULL, 0, 0} |
4db384c9 JH |
121 | }; |
122 | #undef DEF_PREDICTOR | |
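The HITRATE macro above converts the per-predictor hit rates listed in predict.def from percent to the fixed-point REG_BR_PROB_BASE scale, rounding to the nearest unit; the predictor_info table is then filled by expanding DEF_PREDICTOR once per predict.def entry. A minimal stand-alone sketch of the conversion (not part of predict.c; REG_BR_PROB_BASE = 10000 is assumed from GCC's defaults):

```cpp
#include <cstdio>

/* Stand-alone illustration of the HITRATE macro.  REG_BR_PROB_BASE is
   assumed to be 10000, as in GCC's defaults; this file is not part of
   predict.c.  */
#define REG_BR_PROB_BASE 10000
#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)

int main ()
{
  /* A predictor listed with a 90% hit rate in predict.def is stored as
     9000 on the REG_BR_PROB_BASE scale; the "+ 50" rounds to nearest.  */
  printf ("HITRATE(90) = %d\n", HITRATE (90));   /* prints 9000 */
  printf ("HITRATE(54) = %d\n", HITRATE (54));   /* prints 5400 */
  return 0;
}
```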
194734e9 | 123 | |
0208f7da JH |
124 | static gcov_type min_count = -1; |
125 | ||
126 | /* Determine the threshold for hot BB counts. */ | |
127 | ||
128 | gcov_type | |
129 | get_hot_bb_threshold () | |
130 | { | |
131 | gcov_working_set_t *ws; | |
132 | if (min_count == -1) | |
133 | { | |
134 | ws = find_working_set (PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE)); | |
135 | gcc_assert (ws); | |
136 | min_count = ws->min_counter; | |
137 | } | |
138 | return min_count; | |
139 | } | |
140 | ||
141 | /* Set the threshold for hot BB counts. */ | |
142 | ||
143 | void | |
144 | set_hot_bb_threshold (gcov_type min) | |
145 | { | |
146 | min_count = min; | |
147 | } | |
148 | ||
fb2fed03 JH |
149 | /* Return TRUE if profile count COUNT in function FUN is considered to be hot. */
150 | ||
be3c16c4 | 151 | bool |
e7a74006 | 152 | maybe_hot_count_p (struct function *fun, profile_count count) |
fb2fed03 | 153 | { |
3995f3a2 | 154 | if (!count.initialized_p ()) |
fb2fed03 | 155 | return true; |
e7a74006 JH |
156 | if (count.ipa () == profile_count::zero ()) |
157 | return false; | |
fc06ae0d JH |
158 | if (!count.ipa_p ()) |
159 | { | |
160 | struct cgraph_node *node = cgraph_node::get (fun->decl); | |
161 | if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ) | |
162 | { | |
163 | if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED) | |
164 | return false; | |
165 | if (node->frequency == NODE_FREQUENCY_HOT) | |
166 | return true; | |
167 | } | |
168 | if (profile_status_for_fn (fun) == PROFILE_ABSENT) | |
169 | return true; | |
170 | if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE | |
171 | && count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3))) | |
172 | return false; | |
173 | if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0) | |
174 | return false; | |
175 | if (count.apply_scale (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION), 1) | |
176 | < ENTRY_BLOCK_PTR_FOR_FN (fun)->count) | |
177 | return false; | |
178 | return true; | |
179 | } | |
fb2fed03 | 180 | /* Code executed at most once is not hot. */ |
3995f3a2 | 181 | if (count <= MAX (profile_info ? profile_info->runs : 1, 1)) |
fb2fed03 | 182 | return false; |
3995f3a2 | 183 | return (count.to_gcov_type () >= get_hot_bb_threshold ()); |
fb2fed03 JH |
184 | } |
185 | ||
194734e9 | 186 | /* Return true in case BB can be CPU intensive and should be optimized |
d55d8fc7 | 187 | for maximal performance. */ |
194734e9 JH |
188 | |
189 | bool | |
2eb712b4 | 190 | maybe_hot_bb_p (struct function *fun, const_basic_block bb) |
194734e9 | 191 | { |
2eb712b4 | 192 | gcc_checking_assert (fun); |
e7a74006 | 193 | return maybe_hot_count_p (fun, bb->count); |
3250d724 JH |
194 | } |
195 | ||
196 | /* Return true in case edge E can be CPU intensive and should be optimized
197 | for maximal performance. */ | |
198 | ||
199 | bool | |
200 | maybe_hot_edge_p (edge e) | |
201 | { | |
e7a74006 | 202 | return maybe_hot_count_p (cfun, e->count ()); |
194734e9 JH |
203 | } |
204 | ||
79221839 TJ |
205 | /* Return true if profile COUNT and FREQUENCY, or function FUN static |
206 | node frequency reflects never being executed. */ | |
207 | ||
208 | static bool | |
209 | probably_never_executed (struct function *fun, | |
e7a74006 | 210 | profile_count count) |
194734e9 | 211 | { |
2eb712b4 | 212 | gcc_checking_assert (fun); |
b69d9ac6 JH |
213 | if (count == profile_count::zero ()) |
214 | return true; | |
3995f3a2 | 215 | if (count.initialized_p () && profile_status_for_fn (fun) == PROFILE_READ) |
61a74079 | 216 | { |
3995f3a2 JH |
217 | int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION); |
218 | if (count.apply_scale (unlikely_count_fraction, 1) >= profile_info->runs) | |
61a74079 | 219 | return false; |
61a74079 JH |
220 | return true; |
221 | } | |
ee4e85b7 | 222 | if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ) |
d52f5295 | 223 | && (cgraph_node::get (fun->decl)->frequency |
581985d7 | 224 | == NODE_FREQUENCY_UNLIKELY_EXECUTED)) |
52bf96d2 | 225 | return true; |
194734e9 JH |
226 | return false; |
227 | } | |
228 | ||
600b5b1d | 229 | |
79221839 TJ |
230 | /* Return true in case BB is probably never executed. */ |
231 | ||
232 | bool | |
233 | probably_never_executed_bb_p (struct function *fun, const_basic_block bb) | |
234 | { | |
e7a74006 | 235 | return probably_never_executed (fun, bb->count); |
79221839 TJ |
236 | } |
237 | ||
238 | ||
5591f0dd JH |
239 | /* Return true if E is unlikely executed for obvious reasons. */ |
240 | ||
241 | static bool | |
242 | unlikely_executed_edge_p (edge e) | |
243 | { | |
ef30ab83 | 244 | return (e->count () == profile_count::zero () |
af2bbc51 | 245 | || e->probability == profile_probability::never ()) |
5591f0dd JH |
246 | || (e->flags & (EDGE_EH | EDGE_FAKE)); |
247 | } | |
248 | ||
600b5b1d TJ |
249 | /* Return true in case edge E is probably never executed. */ |
250 | ||
251 | bool | |
252 | probably_never_executed_edge_p (struct function *fun, edge e) | |
253 | { | |
af2bbc51 JH |
254 | if (unlikely_executed_edge_p (e)) |
255 | return true; | |
e7a74006 | 256 | return probably_never_executed (fun, e->count ()); |
600b5b1d TJ |
257 | } |
258 | ||
e6416b30 JH |
259 | /* Return true when current function should always be optimized for size. */ |
260 | ||
261 | bool | |
262 | optimize_function_for_size_p (struct function *fun) | |
263 | { | |
e6416b30 | 264 | if (!fun || !fun->decl) |
7525bb7d | 265 | return optimize_size; |
d52f5295 ML |
266 | cgraph_node *n = cgraph_node::get (fun->decl); |
267 | return n && n->optimize_for_size_p (); | |
e6416b30 JH |
268 | } |
269 | ||
3debdc1e JH |
270 | /* Return true when current function should always be optimized for speed. */ |
271 | ||
272 | bool | |
273 | optimize_function_for_speed_p (struct function *fun) | |
274 | { | |
275 | return !optimize_function_for_size_p (fun); | |
bf08ebeb JH |
276 | } |
277 | ||
d95ab70a RS |
278 | /* Return the optimization type that should be used for the function FUN. */ |
279 | ||
280 | optimization_type | |
281 | function_optimization_type (struct function *fun) | |
282 | { | |
283 | return (optimize_function_for_speed_p (fun) | |
284 | ? OPTIMIZE_FOR_SPEED | |
285 | : OPTIMIZE_FOR_SIZE); | |
286 | } | |
287 | ||
bf08ebeb JH |
288 | /* Return TRUE when BB should be optimized for size. */ |
289 | ||
290 | bool | |
cc870036 | 291 | optimize_bb_for_size_p (const_basic_block bb) |
bf08ebeb | 292 | { |
fef5a0d9 RB |
293 | return (optimize_function_for_size_p (cfun) |
294 | || (bb && !maybe_hot_bb_p (cfun, bb))); | |
bf08ebeb JH |
295 | } |
296 | ||
297 | /* Return TRUE when BB should be optimized for speed. */ | |
298 | ||
299 | bool | |
cc870036 | 300 | optimize_bb_for_speed_p (const_basic_block bb) |
bf08ebeb JH |
301 | { |
302 | return !optimize_bb_for_size_p (bb); | |
303 | } | |
304 | ||
d95ab70a RS |
305 | /* Return the optimization type that should be used for block BB. */ |
306 | ||
307 | optimization_type | |
308 | bb_optimization_type (const_basic_block bb) | |
309 | { | |
310 | return (optimize_bb_for_speed_p (bb) | |
311 | ? OPTIMIZE_FOR_SPEED | |
312 | : OPTIMIZE_FOR_SIZE); | |
313 | } | |
314 | ||
bf08ebeb JH |
315 | /* Return TRUE when edge E should be optimized for size. */
316 | ||
317 | bool | |
318 | optimize_edge_for_size_p (edge e) | |
319 | { | |
3debdc1e | 320 | return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e); |
bf08ebeb JH |
321 | } |
322 | ||
323 | /* Return TRUE when edge E should be optimized for speed. */
324 | ||
325 | bool | |
326 | optimize_edge_for_speed_p (edge e) | |
327 | { | |
328 | return !optimize_edge_for_size_p (e); | |
329 | } | |
330 | ||
331 | /* Return TRUE when the current instruction should be optimized for size. */
332 | ||
333 | bool | |
334 | optimize_insn_for_size_p (void) | |
335 | { | |
3debdc1e | 336 | return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p; |
bf08ebeb JH |
337 | } |
338 | ||
339 | /* Return TRUE when the current instruction should be optimized for speed. */
340 | ||
341 | bool | |
342 | optimize_insn_for_speed_p (void) | |
343 | { | |
344 | return !optimize_insn_for_size_p (); | |
345 | } | |
346 | ||
cc870036 JH |
347 | /* Return TRUE when LOOP should be optimized for size. */ |
348 | ||
349 | bool | |
350 | optimize_loop_for_size_p (struct loop *loop) | |
351 | { | |
352 | return optimize_bb_for_size_p (loop->header); | |
353 | } | |
354 | ||
355 | /* Return TRUE when LOOP should be optimized for speed. */ | |
356 | ||
357 | bool | |
358 | optimize_loop_for_speed_p (struct loop *loop) | |
359 | { | |
360 | return optimize_bb_for_speed_p (loop->header); | |
361 | } | |
362 | ||
efd8f750 JH |
363 | /* Return TRUE when LOOP nest should be optimized for speed. */ |
364 | ||
365 | bool | |
366 | optimize_loop_nest_for_speed_p (struct loop *loop) | |
367 | { | |
368 | struct loop *l = loop; | |
369 | if (optimize_loop_for_speed_p (loop)) | |
370 | return true; | |
371 | l = loop->inner; | |
c16eb95f | 372 | while (l && l != loop) |
efd8f750 JH |
373 | { |
374 | if (optimize_loop_for_speed_p (l)) | |
375 | return true; | |
376 | if (l->inner) | |
377 | l = l->inner; | |
378 | else if (l->next) | |
379 | l = l->next; | |
380 | else | |
8bcf15f6 JH |
381 | { |
382 | while (l != loop && !l->next) | |
383 | l = loop_outer (l); | |
384 | if (l != loop) | |
385 | l = l->next; | |
386 | } | |
efd8f750 JH |
387 | } |
388 | return false; | |
389 | } | |
390 | ||
391 | /* Return TRUE when LOOP nest should be optimized for size. */ | |
392 | ||
393 | bool | |
394 | optimize_loop_nest_for_size_p (struct loop *loop) | |
395 | { | |
396 | return !optimize_loop_nest_for_speed_p (loop); | |
397 | } | |
398 | ||
3a4fd356 JH |
399 | /* Return true when edge E is likely to be well predictable by branch |
400 | predictor. */ | |
401 | ||
402 | bool | |
403 | predictable_edge_p (edge e) | |
404 | { | |
357067f2 | 405 | if (!e->probability.initialized_p ()) |
3a4fd356 | 406 | return false; |
357067f2 | 407 | if ((e->probability.to_reg_br_prob_base () |
3a4fd356 | 408 | <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100) |
357067f2 | 409 | || (REG_BR_PROB_BASE - e->probability.to_reg_br_prob_base () |
3a4fd356 JH |
410 | <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)) |
411 | return true; | |
412 | return false; | |
413 | } | |
414 | ||
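predictable_edge_p above calls an edge well predictable when its probability lies within PARAM_PREDICTABLE_BRANCH_OUTCOME percent of either never or always taken. A stand-alone restatement of that comparison on plain integers (a hypothetical helper, not part of predict.c; the default parameter value of 2 percent is an assumption):

```cpp
#include <cstdio>

/* Hypothetical re-statement of the predictable_edge_p test; not part of
   predict.c.  REG_BR_PROB_BASE = 10000 and a predictable-branch-outcome
   parameter of 2 (percent) are assumed defaults.  */
static const int REG_BR_PROB_BASE = 10000;
static const int PREDICTABLE_BRANCH_OUTCOME = 2;

static bool
edge_is_predictable (int prob /* on the REG_BR_PROB_BASE scale */)
{
  int cutoff = PREDICTABLE_BRANCH_OUTCOME * REG_BR_PROB_BASE / 100;
  /* Predictable when the edge is taken almost never or almost always.  */
  return prob <= cutoff || REG_BR_PROB_BASE - prob <= cutoff;
}

int main ()
{
  printf ("%d\n", edge_is_predictable (150));   /* 1: 1.5%, nearly never */
  printf ("%d\n", edge_is_predictable (5000));  /* 0: a 50/50 branch     */
  printf ("%d\n", edge_is_predictable (9900));  /* 1: 99%, nearly always */
  return 0;
}
```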
415 | ||
bf08ebeb JH |
416 | /* Set RTL expansion for BB profile. */ |
417 | ||
418 | void | |
419 | rtl_profile_for_bb (basic_block bb) | |
420 | { | |
2eb712b4 | 421 | crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb); |
bf08ebeb JH |
422 | } |
423 | ||
424 | /* Set RTL expansion for edge profile. */ | |
425 | ||
426 | void | |
427 | rtl_profile_for_edge (edge e) | |
428 | { | |
429 | crtl->maybe_hot_insn_p = maybe_hot_edge_p (e); | |
430 | } | |
431 | ||
432 | /* Set RTL expansion to default mode (i.e. when profile info is not known). */ | |
433 | void | |
434 | default_rtl_profile (void) | |
435 | { | |
436 | crtl->maybe_hot_insn_p = true; | |
437 | } | |
438 | ||
969d70ca JH |
439 | /* Return true if one of the outgoing edges is already predicted by
440 | PREDICTOR. */ | |
441 | ||
6de9cd9a | 442 | bool |
9678086d | 443 | rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor) |
969d70ca JH |
444 | { |
445 | rtx note; | |
a813c111 | 446 | if (!INSN_P (BB_END (bb))) |
969d70ca | 447 | return false; |
a813c111 | 448 | for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1)) |
969d70ca JH |
449 | if (REG_NOTE_KIND (note) == REG_BR_PRED |
450 | && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor) | |
451 | return true; | |
452 | return false; | |
453 | } | |
ee92cb46 | 454 | |
fba4cb03 LB |
455 | /* Structure representing predictions at the tree level. */
456 | ||
457 | struct edge_prediction { | |
458 | struct edge_prediction *ep_next; | |
459 | edge ep_edge; | |
460 | enum br_predictor ep_predictor; | |
461 | int ep_probability; | |
462 | }; | |
463 | ||
b787e7a2 TS |
464 | /* This map contains for a basic block the list of predictions for the |
465 | outgoing edges. */ | |
466 | ||
467 | static hash_map<const_basic_block, edge_prediction *> *bb_predictions; | |
468 | ||
6de9cd9a DN |
469 | /* Return true if one of the outgoing edges is already predicted by
470 | PREDICTOR. */ | |
471 | ||
472 | bool | |
726a989a | 473 | gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor) |
6de9cd9a | 474 | { |
4aab792d | 475 | struct edge_prediction *i; |
b787e7a2 | 476 | edge_prediction **preds = bb_predictions->get (bb); |
f06b0a10 ZD |
477 | |
478 | if (!preds) | |
479 | return false; | |
b8698a0f | 480 | |
b787e7a2 | 481 | for (i = *preds; i; i = i->ep_next) |
59ced947 | 482 | if (i->ep_predictor == predictor) |
6de9cd9a DN |
483 | return true; |
484 | return false; | |
485 | } | |
486 | ||
5aabc487 JH |
487 | /* Return true if one of the outgoing edges is already predicted by
488 | PREDICTOR for edge E predicted as TAKEN. */ | |
489 | ||
490 | bool | |
491 | edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken) | |
492 | { | |
493 | struct edge_prediction *i; | |
494 | basic_block bb = e->src; | |
495 | edge_prediction **preds = bb_predictions->get (bb); | |
496 | if (!preds) | |
497 | return false; | |
498 | ||
499 | int probability = predictor_info[(int) predictor].hitrate; | |
500 | ||
501 | if (taken != TAKEN) | |
502 | probability = REG_BR_PROB_BASE - probability; | |
503 | ||
504 | for (i = *preds; i; i = i->ep_next) | |
505 | if (i->ep_predictor == predictor | |
506 | && i->ep_edge == e | |
507 | && i->ep_probability == probability) | |
508 | return true; | |
509 | return false; | |
510 | } | |
511 | ||
2c9e13f3 JH |
512 | /* Same predicate as above, working on edges. */ |
513 | bool | |
ed7a4b4b | 514 | edge_probability_reliable_p (const_edge e) |
2c9e13f3 | 515 | { |
5fa396ad | 516 | return e->probability.probably_reliable_p (); |
2c9e13f3 JH |
517 | } |
518 | ||
519 | /* Same predicate as edge_probability_reliable_p, working on notes. */ | |
520 | bool | |
ed7a4b4b | 521 | br_prob_note_reliable_p (const_rtx note) |
2c9e13f3 JH |
522 | { |
523 | gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB); | |
5fa396ad JH |
524 | return profile_probability::from_reg_br_prob_note |
525 | (XINT (note, 0)).probably_reliable_p (); | |
2c9e13f3 JH |
526 | } |
527 | ||
7d6d381a | 528 | static void |
9f215bf5 | 529 | predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability) |
4db384c9 | 530 | { |
e16acfcd | 531 | gcc_assert (any_condjump_p (insn)); |
d50672ef JH |
532 | if (!flag_guess_branch_prob) |
533 | return; | |
bfdade77 | 534 | |
65c5f2a6 ILT |
535 | add_reg_note (insn, REG_BR_PRED, |
536 | gen_rtx_CONCAT (VOIDmode, | |
537 | GEN_INT ((int) predictor), | |
538 | GEN_INT ((int) probability))); | |
4db384c9 JH |
539 | } |
540 | ||
541 | /* Predict insn by given predictor. */ | |
bfdade77 | 542 | |
4db384c9 | 543 | void |
9f215bf5 | 544 | predict_insn_def (rtx_insn *insn, enum br_predictor predictor, |
79a490a9 | 545 | enum prediction taken) |
4db384c9 JH |
546 | { |
547 | int probability = predictor_info[(int) predictor].hitrate; | |
bfdade77 | 548 | |
4db384c9 JH |
549 | if (taken != TAKEN) |
550 | probability = REG_BR_PROB_BASE - probability; | |
bfdade77 | 551 | |
4db384c9 | 552 | predict_insn (insn, predictor, probability); |
ee92cb46 JH |
553 | } |
554 | ||
555 | /* Predict edge E with given probability if possible. */ | |
bfdade77 | 556 | |
4db384c9 | 557 | void |
6de9cd9a | 558 | rtl_predict_edge (edge e, enum br_predictor predictor, int probability) |
ee92cb46 | 559 | { |
9f215bf5 | 560 | rtx_insn *last_insn; |
a813c111 | 561 | last_insn = BB_END (e->src); |
ee92cb46 JH |
562 | |
563 | /* We can store the branch prediction information only about | |
564 | conditional jumps. */ | |
565 | if (!any_condjump_p (last_insn)) | |
566 | return; | |
567 | ||
568 | /* We always store probability of branching. */ | |
569 | if (e->flags & EDGE_FALLTHRU) | |
570 | probability = REG_BR_PROB_BASE - probability; | |
571 | ||
4db384c9 JH |
572 | predict_insn (last_insn, predictor, probability); |
573 | } | |
574 | ||
6de9cd9a DN |
575 | /* Predict edge E with the given PROBABILITY. */ |
576 | void | |
726a989a | 577 | gimple_predict_edge (edge e, enum br_predictor predictor, int probability) |
6de9cd9a | 578 | { |
1428d9e0 JH |
579 | if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) |
580 | && EDGE_COUNT (e->src->succs) > 1 | |
581 | && flag_guess_branch_prob | |
582 | && optimize) | |
e0342c26 | 583 | { |
f06b0a10 | 584 | struct edge_prediction *i = XNEW (struct edge_prediction); |
b787e7a2 | 585 | edge_prediction *&preds = bb_predictions->get_or_insert (e->src); |
6de9cd9a | 586 | |
b787e7a2 TS |
587 | i->ep_next = preds; |
588 | preds = i; | |
59ced947 RÁE |
589 | i->ep_probability = probability; |
590 | i->ep_predictor = predictor; | |
591 | i->ep_edge = e; | |
e0342c26 | 592 | } |
6de9cd9a DN |
593 | } |
594 | ||
28cd6814 ML |
595 | /* Filter edge predictions PREDS by a function FILTER. DATA is passed
596 | to the filter function. */ | |
597 | ||
3809e990 | 598 | void |
28cd6814 ML |
599 | filter_predictions (edge_prediction **preds, |
600 | bool (*filter) (edge_prediction *, void *), void *data) | |
3809e990 | 601 | { |
f06b0a10 ZD |
602 | if (!bb_predictions) |
603 | return; | |
604 | ||
f06b0a10 | 605 | if (preds) |
3809e990 | 606 | { |
b787e7a2 | 607 | struct edge_prediction **prediction = preds; |
f06b0a10 ZD |
608 | struct edge_prediction *next; |
609 | ||
3809e990 JH |
610 | while (*prediction) |
611 | { | |
28cd6814 ML |
612 | if ((*filter) (*prediction, data)) |
613 | prediction = &((*prediction)->ep_next); | |
614 | else | |
f06b0a10 ZD |
615 | { |
616 | next = (*prediction)->ep_next; | |
617 | free (*prediction); | |
618 | *prediction = next; | |
619 | } | |
3809e990 JH |
620 | } |
621 | } | |
622 | } | |
623 | ||
28cd6814 ML |
624 | /* Filter function predicate that returns true for an edge prediction P
625 | if its edge is equal to DATA. */ | |
626 | ||
627 | bool | |
628 | equal_edge_p (edge_prediction *p, void *data) | |
629 | { | |
630 | return p->ep_edge == (edge)data; | |
631 | } | |
632 | ||
633 | /* Remove all predictions on given basic block that are attached | |
634 | to edge E. */ | |
635 | void | |
636 | remove_predictions_associated_with_edge (edge e) | |
637 | { | |
638 | if (!bb_predictions) | |
639 | return; | |
640 | ||
641 | edge_prediction **preds = bb_predictions->get (e->src); | |
642 | filter_predictions (preds, equal_edge_p, e); | |
643 | } | |
644 | ||
f06b0a10 ZD |
645 | /* Clears the list of predictions stored for BB. */ |
646 | ||
647 | static void | |
648 | clear_bb_predictions (basic_block bb) | |
649 | { | |
b787e7a2 | 650 | edge_prediction **preds = bb_predictions->get (bb); |
f06b0a10 ZD |
651 | struct edge_prediction *pred, *next; |
652 | ||
653 | if (!preds) | |
654 | return; | |
655 | ||
b787e7a2 | 656 | for (pred = *preds; pred; pred = next) |
f06b0a10 ZD |
657 | { |
658 | next = pred->ep_next; | |
659 | free (pred); | |
660 | } | |
661 | *preds = NULL; | |
662 | } | |
663 | ||
2ffa9932 JH |
664 | /* Return true when we can store prediction on insn INSN. |
665 | At the moment we represent predictions only on conditional | |
666 | jumps, not at computed jumps or other complicated cases. */
667 | static bool | |
9f215bf5 | 668 | can_predict_insn_p (const rtx_insn *insn) |
2ffa9932 | 669 | { |
4b4bf941 | 670 | return (JUMP_P (insn) |
2ffa9932 | 671 | && any_condjump_p (insn) |
628f6a4e | 672 | && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2); |
2ffa9932 JH |
673 | } |
674 | ||
4db384c9 | 675 | /* Predict edge E by given predictor if possible. */ |
bfdade77 | 676 | |
4db384c9 | 677 | void |
79a490a9 AJ |
678 | predict_edge_def (edge e, enum br_predictor predictor, |
679 | enum prediction taken) | |
4db384c9 JH |
680 | { |
681 | int probability = predictor_info[(int) predictor].hitrate; | |
682 | ||
683 | if (taken != TAKEN) | |
684 | probability = REG_BR_PROB_BASE - probability; | |
bfdade77 | 685 | |
4db384c9 JH |
686 | predict_edge (e, predictor, probability); |
687 | } | |
688 | ||
689 | /* Invert all branch predictions or probability notes in the INSN. This needs | |
690 | to be done each time we invert the condition used by the jump. */ | |
bfdade77 | 691 | |
4db384c9 | 692 | void |
79a490a9 | 693 | invert_br_probabilities (rtx insn) |
4db384c9 | 694 | { |
bfdade77 RK |
695 | rtx note; |
696 | ||
697 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
698 | if (REG_NOTE_KIND (note) == REG_BR_PROB) | |
5fa396ad JH |
699 | XINT (note, 0) = profile_probability::from_reg_br_prob_note |
700 | (XINT (note, 0)).invert ().to_reg_br_prob_note (); | |
bfdade77 RK |
701 | else if (REG_NOTE_KIND (note) == REG_BR_PRED) |
702 | XEXP (XEXP (note, 0), 1) | |
703 | = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1))); | |
4db384c9 JH |
704 | } |
705 | ||
706 | /* Dump information about the branch prediction to the output file. */ | |
bfdade77 | 707 | |
4db384c9 | 708 | static void |
6de9cd9a | 709 | dump_prediction (FILE *file, enum br_predictor predictor, int probability, |
46c1cff6 | 710 | basic_block bb, enum predictor_reason reason = REASON_NONE, |
e49efc14 | 711 | edge ep_edge = NULL) |
4db384c9 | 712 | { |
e49efc14 | 713 | edge e = ep_edge; |
628f6a4e | 714 | edge_iterator ei; |
4db384c9 | 715 | |
6de9cd9a | 716 | if (!file) |
4db384c9 JH |
717 | return; |
718 | ||
e49efc14 ML |
719 | if (e == NULL) |
720 | FOR_EACH_EDGE (e, ei, bb->succs) | |
721 | if (! (e->flags & EDGE_FALLTHRU)) | |
722 | break; | |
723 | ||
724 | char edge_info_str[128]; | |
725 | if (ep_edge) | |
726 | sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index, | |
727 | ep_edge->dest->index); | |
728 | else | |
729 | edge_info_str[0] = '\0'; | |
4db384c9 | 730 | |
e49efc14 | 731 | fprintf (file, " %s heuristics%s%s: %.1f%%", |
4db384c9 | 732 | predictor_info[predictor].name, |
e49efc14 ML |
733 | edge_info_str, reason_messages[reason], |
734 | probability * 100.0 / REG_BR_PROB_BASE); | |
4db384c9 | 735 | |
3995f3a2 | 736 | if (bb->count.initialized_p ()) |
25c3a4ef | 737 | { |
3995f3a2 JH |
738 | fprintf (file, " exec "); |
739 | bb->count.dump (file); | |
fbc2782e DD |
740 | if (e) |
741 | { | |
3995f3a2 | 742 | fprintf (file, " hit "); |
ef30ab83 JH |
743 | e->count ().dump (file); |
744 | fprintf (file, " (%.1f%%)", e->count ().to_gcov_type() * 100.0 | |
3995f3a2 | 745 | / bb->count.to_gcov_type ()); |
fbc2782e | 746 | } |
25c3a4ef | 747 | } |
bfdade77 | 748 | |
6de9cd9a | 749 | fprintf (file, "\n"); |
d1b9a572 ML |
750 | |
751 | /* Print output that can be easily read by the analyze_brprob.py script. We are
752 | interested only in counts that are read from GCDA files. */ | |
753 | if (dump_file && (dump_flags & TDF_DETAILS) | |
754 | && bb->count.precise_p () | |
755 | && reason == REASON_NONE) | |
756 | { | |
757 | gcc_assert (e->count ().precise_p ()); | |
758 | fprintf (file, ";;heuristics;%s;%" PRId64 ";%" PRId64 ";%.1f;\n", | |
759 | predictor_info[predictor].name, | |
760 | bb->count.to_gcov_type (), e->count ().to_gcov_type (), | |
761 | probability * 100.0 / REG_BR_PROB_BASE); | |
762 | } | |
4db384c9 JH |
763 | } |
764 | ||
b69d9ac6 JH |
765 | /* Return true if STMT is known to be unlikely executed. */ |
766 | ||
767 | static bool | |
768 | unlikely_executed_stmt_p (gimple *stmt) | |
769 | { | |
770 | if (!is_gimple_call (stmt)) | |
771 | return false; | |
d225aa74 | 772 | /* NORETURN attribute alone is not strong enough: exit() may be quite |
b69d9ac6 JH |
773 | likely executed once during program run. */ |
774 | if (gimple_call_fntype (stmt) | |
775 | && lookup_attribute ("cold", | |
776 | TYPE_ATTRIBUTES (gimple_call_fntype (stmt))) | |
777 | && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))) | |
778 | return true; | |
779 | tree decl = gimple_call_fndecl (stmt); | |
780 | if (!decl) | |
781 | return false; | |
782 | if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl)) | |
783 | && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))) | |
784 | return true; | |
785 | ||
786 | cgraph_node *n = cgraph_node::get (decl); | |
787 | if (!n) | |
788 | return false; | |
e813ee34 JH |
789 | |
790 | availability avail; | |
b69d9ac6 JH |
791 | n = n->ultimate_alias_target (&avail); |
792 | if (avail < AVAIL_AVAILABLE) | |
e813ee34 | 793 | return false; |
b69d9ac6 JH |
794 | if (!n->analyzed |
795 | || n->decl == current_function_decl) | |
796 | return false; | |
797 | return n->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED; | |
798 | } | |
799 | ||
800 | /* Return true if BB is unlikely executed. */ | |
801 | ||
802 | static bool | |
803 | unlikely_executed_bb_p (basic_block bb) | |
804 | { | |
805 | if (bb->count == profile_count::zero ()) | |
806 | return true; | |
807 | if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) || bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
808 | return false; | |
809 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); | |
810 | !gsi_end_p (gsi); gsi_next (&gsi)) | |
811 | { | |
812 | if (unlikely_executed_stmt_p (gsi_stmt (gsi))) | |
813 | return true; | |
814 | if (stmt_can_terminate_bb_p (gsi_stmt (gsi))) | |
815 | return false; | |
816 | } | |
817 | return false; | |
818 | } | |
819 | ||
229031d0 | 820 | /* We cannot predict the probabilities of the outgoing edges of BB. Set them
5d9da222 ML |
821 | evenly and hope for the best. If UNLIKELY_EDGES is not null, distribute
822 | the probability evenly among all edges not mentioned in the set; the edges
823 | in the set are given PROB_VERY_UNLIKELY probability. */
824 | ||
87022a6b | 825 | static void |
5d9da222 ML |
826 | set_even_probabilities (basic_block bb, |
827 | hash_set<edge> *unlikely_edges = NULL) | |
87022a6b | 828 | { |
33e9feb5 | 829 | unsigned nedges = 0, unlikely_count = 0; |
eb0f8780 | 830 | edge e = NULL; |
628f6a4e | 831 | edge_iterator ei; |
33e9feb5 | 832 | profile_probability all = profile_probability::always (); |
87022a6b | 833 | |
628f6a4e | 834 | FOR_EACH_EDGE (e, ei, bb->succs) |
33e9feb5 JH |
835 | if (e->probability.initialized_p ()) |
836 | all -= e->probability; | |
837 | else if (!unlikely_executed_edge_p (e)) | |
838 | { | |
839 | nedges ++; | |
840 | if (unlikely_edges != NULL && unlikely_edges->contains (e)) | |
841 | { | |
842 | all -= profile_probability::very_unlikely (); | |
843 | unlikely_count++; | |
844 | } | |
845 | } | |
5d9da222 ML |
846 | |
847 | /* Make the distribution even if all edges are unlikely. */ | |
5d9da222 ML |
848 | if (unlikely_count == nedges) |
849 | { | |
850 | unlikely_edges = NULL; | |
851 | unlikely_count = 0; | |
852 | } | |
853 | ||
854 | unsigned c = nedges - unlikely_count; | |
855 | ||
628f6a4e | 856 | FOR_EACH_EDGE (e, ei, bb->succs) |
33e9feb5 JH |
857 | if (e->probability.initialized_p ()) |
858 | ; | |
859 | else if (!unlikely_executed_edge_p (e)) | |
5d9da222 ML |
860 | { |
861 | if (unlikely_edges != NULL && unlikely_edges->contains (e)) | |
357067f2 | 862 | e->probability = profile_probability::very_unlikely (); |
5d9da222 | 863 | else |
33e9feb5 | 864 | e->probability = all.apply_scale (1, c).guessed (); |
5d9da222 | 865 | } |
87022a6b | 866 | else |
357067f2 | 867 | e->probability = profile_probability::never (); |
87022a6b JH |
868 | } |
869 | ||
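set_even_probabilities above subtracts the mass of the already-initialized edges and of the edges listed in UNLIKELY_EDGES from 100% and splits the remainder evenly over the other successors. A purely illustrative arithmetic sketch of that split (made-up numbers, not part of predict.c):

```cpp
#include <cstdio>

/* Illustrative arithmetic only: split the remaining probability mass evenly
   among the "normal" successor edges, as set_even_probabilities does.
   REG_BR_PROB_BASE = 10000 is assumed and the scenario is made up.  */
int main ()
{
  const int base = 10000;
  const int very_unlikely = base / 2000; /* rough stand-in for PROB_VERY_UNLIKELY */
  int nedges = 3;     /* successors without an initialized probability */
  int unlikely = 1;   /* ... of which one was marked unlikely by a predictor */

  int all = base - unlikely * very_unlikely;   /* mass left for normal edges */
  int per_edge = all / (nedges - unlikely);

  printf ("unlikely edge: %d, each remaining edge: %d (of %d)\n",
          very_unlikely, per_edge, base);
  return 0;
}
```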
5fa396ad JH |
870 | /* Add REG_BR_PROB note to JUMP with PROB. */ |
871 | ||
872 | void | |
873 | add_reg_br_prob_note (rtx_insn *jump, profile_probability prob) | |
874 | { | |
875 | gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0)); | |
876 | add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ()); | |
877 | } | |
878 | ||
4db384c9 JH |
879 | /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB |
880 | note if not already present. Remove now useless REG_BR_PRED notes. */ | |
bfdade77 | 881 | |
4db384c9 | 882 | static void |
9f215bf5 | 883 | combine_predictions_for_insn (rtx_insn *insn, basic_block bb) |
4db384c9 | 884 | { |
87022a6b JH |
885 | rtx prob_note; |
886 | rtx *pnote; | |
bfdade77 | 887 | rtx note; |
4db384c9 | 888 | int best_probability = PROB_EVEN; |
bbbbb16a | 889 | enum br_predictor best_predictor = END_PREDICTORS; |
134d3a2e JH |
890 | int combined_probability = REG_BR_PROB_BASE / 2; |
891 | int d; | |
d195b46f JH |
892 | bool first_match = false; |
893 | bool found = false; | |
4db384c9 | 894 | |
87022a6b JH |
895 | if (!can_predict_insn_p (insn)) |
896 | { | |
897 | set_even_probabilities (bb); | |
898 | return; | |
899 | } | |
900 | ||
901 | prob_note = find_reg_note (insn, REG_BR_PROB, 0); | |
902 | pnote = &REG_NOTES (insn); |
c263766c RH |
903 | if (dump_file) |
904 | fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn), | |
0b17ab2f | 905 | bb->index); |
4db384c9 JH |
906 | |
907 | /* We implement "first match" heuristics and use probability guessed | |
6de9cd9a | 908 | by predictor with smallest index. */ |
bfdade77 RK |
909 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
910 | if (REG_NOTE_KIND (note) == REG_BR_PRED) | |
911 | { | |
81f40b79 ILT |
912 | enum br_predictor predictor = ((enum br_predictor) |
913 | INTVAL (XEXP (XEXP (note, 0), 0))); | |
bfdade77 RK |
914 | int probability = INTVAL (XEXP (XEXP (note, 0), 1)); |
915 | ||
916 | found = true; | |
c9f4fe73 ML |
917 | if (best_predictor > predictor |
918 | && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH) | |
bfdade77 RK |
919 | best_probability = probability, best_predictor = predictor; |
920 | ||
921 | d = (combined_probability * probability | |
922 | + (REG_BR_PROB_BASE - combined_probability) | |
923 | * (REG_BR_PROB_BASE - probability)); | |
924 | ||
925 | /* Use FP math to avoid overflows of 32bit integers. */ | |
571a03b8 JJ |
926 | if (d == 0) |
927 | /* If one probability is 0% and one 100%, avoid division by zero. */ | |
928 | combined_probability = REG_BR_PROB_BASE / 2; | |
929 | else | |
930 | combined_probability = (((double) combined_probability) * probability | |
931 | * REG_BR_PROB_BASE / d + 0.5); | |
bfdade77 RK |
932 | } |
933 | ||
934 | /* Decide which heuristic to use. In case we didn't match anything, | |
935 | use the no_prediction heuristic; in case we did match, use either
d195b46f JH |
936 | first match or Dempster-Shafer theory depending on the flags. */
937 | ||
c9f4fe73 | 938 | if (best_predictor != END_PREDICTORS) |
d195b46f JH |
939 | first_match = true; |
940 | ||
941 | if (!found) | |
6de9cd9a | 942 | dump_prediction (dump_file, PRED_NO_PREDICTION, |
e49efc14 | 943 | combined_probability, bb); |
d195b46f JH |
944 | else |
945 | { | |
c9f4fe73 ML |
946 | if (!first_match) |
947 | dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, | |
948 | bb, !first_match ? REASON_NONE : REASON_IGNORED); | |
949 | else | |
950 | dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, | |
951 | bb, first_match ? REASON_NONE : REASON_IGNORED); | |
d195b46f JH |
952 | } |
953 | ||
954 | if (first_match) | |
134d3a2e | 955 | combined_probability = best_probability; |
e49efc14 | 956 | dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb); |
d195b46f JH |
957 | |
958 | while (*pnote) | |
959 | { | |
960 | if (REG_NOTE_KIND (*pnote) == REG_BR_PRED) | |
961 | { | |
81f40b79 ILT |
962 | enum br_predictor predictor = ((enum br_predictor) |
963 | INTVAL (XEXP (XEXP (*pnote, 0), 0))); | |
d195b46f JH |
964 | int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1)); |
965 | ||
6de9cd9a | 966 | dump_prediction (dump_file, predictor, probability, bb, |
e49efc14 | 967 | (!first_match || best_predictor == predictor) |
46c1cff6 | 968 | ? REASON_NONE : REASON_IGNORED); |
6a4d6760 | 969 | *pnote = XEXP (*pnote, 1); |
d195b46f JH |
970 | } |
971 | else | |
6a4d6760 | 972 | pnote = &XEXP (*pnote, 1); |
d195b46f | 973 | } |
bfdade77 | 974 | |
4db384c9 JH |
975 | if (!prob_note) |
976 | { | |
5fa396ad JH |
977 | profile_probability p |
978 | = profile_probability::from_reg_br_prob_base (combined_probability); | |
979 | add_reg_br_prob_note (insn, p); | |
bfdade77 | 980 | |
134d3a2e JH |
981 | /* Save the prediction into CFG in case we are seeing non-degenerated |
982 | conditional jump. */ | |
c5cbcccf | 983 | if (!single_succ_p (bb)) |
134d3a2e | 984 | { |
5fa396ad | 985 | BRANCH_EDGE (bb)->probability = p; |
bfdade77 | 986 | FALLTHRU_EDGE (bb)->probability |
357067f2 | 987 | = BRANCH_EDGE (bb)->probability.invert (); |
134d3a2e | 988 | } |
4db384c9 | 989 | } |
c5cbcccf | 990 | else if (!single_succ_p (bb)) |
e53de54d | 991 | { |
5fa396ad JH |
992 | profile_probability prob = profile_probability::from_reg_br_prob_note |
993 | (XINT (prob_note, 0)); | |
e53de54d | 994 | |
5fa396ad JH |
995 | BRANCH_EDGE (bb)->probability = prob; |
996 | FALLTHRU_EDGE (bb)->probability = prob.invert (); | |
e53de54d JH |
997 | } |
998 | else | |
357067f2 | 999 | single_succ_edge (bb)->probability = profile_probability::always (); |
ee92cb46 JH |
1000 | } |
1001 | ||
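combine_predictions_for_insn above (and combine_predictions_for_bb further below) fold the individual REG_BR_PRED notes into one probability with a Dempster-Shafer style update, combined' = combined * p * REG_BR_PROB_BASE / (combined * p + (REG_BR_PROB_BASE - combined) * (REG_BR_PROB_BASE - p)), unless a first-match predictor overrides the result. A stand-alone sketch of that update with a worked example (not part of predict.c; REG_BR_PROB_BASE = 10000 assumed):

```cpp
#include <cstdio>

/* Stand-alone sketch of the Dempster-Shafer style combination used by
   combine_predictions_for_insn/bb; not part of predict.c.
   REG_BR_PROB_BASE = 10000 is assumed.  */
static const int REG_BR_PROB_BASE = 10000;

static int
ds_combine (int combined, int probability)
{
  double d = (double) combined * probability
             + (double) (REG_BR_PROB_BASE - combined)
               * (REG_BR_PROB_BASE - probability);
  if (d == 0)
    return REG_BR_PROB_BASE / 2;   /* one 0% and one 100% prediction */
  return (int) ((double) combined * probability * REG_BR_PROB_BASE / d + 0.5);
}

int main ()
{
  int combined = REG_BR_PROB_BASE / 2;      /* start from an even 50/50 prior */
  /* Two independent predictors agreeing on ~80% reinforce each other.  */
  combined = ds_combine (combined, 8000);   /* -> 8000                 */
  combined = ds_combine (combined, 8000);   /* -> about 9412, i.e. ~94% */
  printf ("combined = %d\n", combined);
  return 0;
}
```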
e49efc14 ML |
1002 | /* Edge prediction hash traits. */ |
1003 | ||
1004 | struct predictor_hash: pointer_hash <edge_prediction> | |
1005 | { | |
1006 | ||
1007 | static inline hashval_t hash (const edge_prediction *); | |
1008 | static inline bool equal (const edge_prediction *, const edge_prediction *); | |
1009 | }; | |
1010 | ||
1011 | /* Calculate hash value of an edge prediction P based on predictor and | |
1012 | normalized probability. */ | |
1013 | ||
1014 | inline hashval_t | |
1015 | predictor_hash::hash (const edge_prediction *p) | |
1016 | { | |
1017 | inchash::hash hstate; | |
1018 | hstate.add_int (p->ep_predictor); | |
1019 | ||
1020 | int prob = p->ep_probability; | |
1021 | if (prob > REG_BR_PROB_BASE / 2) | |
1022 | prob = REG_BR_PROB_BASE - prob; | |
1023 | ||
1024 | hstate.add_int (prob); | |
1025 | ||
1026 | return hstate.end (); | |
1027 | } | |
1028 | ||
1029 | /* Return true when edge predictions P1 and P2 use the same predictor and
1030 | have equal (or opposite) probabilities. */
1031 | ||
1032 | inline bool | |
1033 | predictor_hash::equal (const edge_prediction *p1, const edge_prediction *p2) | |
1034 | { | |
1035 | return (p1->ep_predictor == p2->ep_predictor | |
1036 | && (p1->ep_probability == p2->ep_probability | |
1037 | || p1->ep_probability == REG_BR_PROB_BASE - p2->ep_probability)); | |
1038 | } | |
1039 | ||
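The hash above folds a probability and its complement REG_BR_PROB_BASE - p onto the same key, so that a prediction on one outgoing edge and the opposite prediction on the other edge land in the same bucket and can then be recognized by predictor_hash::equal. A tiny illustration of just that normalization step (not part of predict.c; REG_BR_PROB_BASE = 10000 assumed):

```cpp
#include <cstdio>

/* Illustration of the probability normalization used by predictor_hash::hash;
   not part of predict.c.  REG_BR_PROB_BASE = 10000 is assumed.  */
static const int REG_BR_PROB_BASE = 10000;

static int
normalized_prob (int prob)
{
  return prob > REG_BR_PROB_BASE / 2 ? REG_BR_PROB_BASE - prob : prob;
}

int main ()
{
  /* 9000 (90% taken) and 1000 (90% not taken) normalize identically, so
     predictions that differ only in direction hash to the same bucket.  */
  printf ("%d %d\n", normalized_prob (9000), normalized_prob (1000));
  return 0;
}
```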
1040 | struct predictor_hash_traits: predictor_hash, | |
1041 | typed_noop_remove <edge_prediction *> {}; | |
1042 | ||
1043 | /* Return true if edge prediction P is not in DATA hash set. */ | |
1044 | ||
1045 | static bool | |
1046 | not_removed_prediction_p (edge_prediction *p, void *data) | |
1047 | { | |
1048 | hash_set<edge_prediction *> *remove = (hash_set<edge_prediction *> *) data; | |
1049 | return !remove->contains (p); | |
1050 | } | |
1051 | ||
1052 | /* Prune predictions for a basic block BB. Currently we do the following
1053 | clean-up steps: | |
1054 | ||
1055 | 1) remove duplicate prediction that is guessed with the same probability | |
1056 | (different from 1/2) to both edges
1057 | 2) remove duplicates for a prediction that belongs with the same probability | |
1058 | to a single edge | |
1059 | ||
1060 | */ | |
1061 | ||
1062 | static void | |
1063 | prune_predictions_for_bb (basic_block bb) | |
1064 | { | |
1065 | edge_prediction **preds = bb_predictions->get (bb); | |
1066 | ||
1067 | if (preds) | |
1068 | { | |
1069 | hash_table <predictor_hash_traits> s (13); | |
1070 | hash_set <edge_prediction *> remove; | |
1071 | ||
1072 | /* Step 1: identify predictors that should be removed. */ | |
1073 | for (edge_prediction *pred = *preds; pred; pred = pred->ep_next) | |
1074 | { | |
1075 | edge_prediction *existing = s.find (pred); | |
1076 | if (existing) | |
1077 | { | |
1078 | if (pred->ep_edge == existing->ep_edge | |
1079 | && pred->ep_probability == existing->ep_probability) | |
1080 | { | |
1081 | /* Remove a duplicate predictor. */ | |
1082 | dump_prediction (dump_file, pred->ep_predictor, | |
1083 | pred->ep_probability, bb, | |
46c1cff6 | 1084 | REASON_SINGLE_EDGE_DUPLICATE, pred->ep_edge); |
e49efc14 ML |
1085 | |
1086 | remove.add (pred); | |
1087 | } | |
1088 | else if (pred->ep_edge != existing->ep_edge | |
1089 | && pred->ep_probability == existing->ep_probability | |
1090 | && pred->ep_probability != REG_BR_PROB_BASE / 2) | |
1091 | { | |
1092 | /* Remove both predictors as they predict the same | |
1093 | for both edges. */ | |
1094 | dump_prediction (dump_file, existing->ep_predictor, | |
1095 | pred->ep_probability, bb, | |
46c1cff6 | 1096 | REASON_EDGE_PAIR_DUPLICATE, |
e49efc14 ML |
1097 | existing->ep_edge); |
1098 | dump_prediction (dump_file, pred->ep_predictor, | |
1099 | pred->ep_probability, bb, | |
46c1cff6 | 1100 | REASON_EDGE_PAIR_DUPLICATE, |
e49efc14 ML |
1101 | pred->ep_edge); |
1102 | ||
1103 | remove.add (existing); | |
1104 | remove.add (pred); | |
1105 | } | |
1106 | } | |
1107 | ||
1108 | edge_prediction **slot2 = s.find_slot (pred, INSERT); | |
1109 | *slot2 = pred; | |
1110 | } | |
1111 | ||
1112 | /* Step 2: Remove predictors. */ | |
1113 | filter_predictions (preds, not_removed_prediction_p, &remove); | |
1114 | } | |
1115 | } | |
1116 | ||
6de9cd9a | 1117 | /* Combine predictions into single probability and store them into CFG. |
460545e8 JH |
1118 | Remove now useless prediction entries. |
1119 | If DRY_RUN is set, only produce dumps and do not modify profile. */ | |
f1ebdfc5 | 1120 | |
6de9cd9a | 1121 | static void |
460545e8 | 1122 | combine_predictions_for_bb (basic_block bb, bool dry_run) |
f1ebdfc5 | 1123 | { |
6de9cd9a | 1124 | int best_probability = PROB_EVEN; |
bbbbb16a | 1125 | enum br_predictor best_predictor = END_PREDICTORS; |
6de9cd9a DN |
1126 | int combined_probability = REG_BR_PROB_BASE / 2; |
1127 | int d; | |
1128 | bool first_match = false; | |
1129 | bool found = false; | |
1130 | struct edge_prediction *pred; | |
1131 | int nedges = 0; | |
1132 | edge e, first = NULL, second = NULL; | |
628f6a4e | 1133 | edge_iterator ei; |
b00ff621 JH |
1134 | int nzero = 0; |
1135 | int nunknown = 0; | |
f1ebdfc5 | 1136 | |
628f6a4e | 1137 | FOR_EACH_EDGE (e, ei, bb->succs) |
b00ff621 JH |
1138 | { |
1139 | if (!unlikely_executed_edge_p (e)) | |
1140 | { | |
1141 | nedges ++; | |
1142 | if (first && !second) | |
1143 | second = e; | |
1144 | if (!first) | |
1145 | first = e; | |
1146 | } | |
1147 | else if (!e->probability.initialized_p ()) | |
1148 | e->probability = profile_probability::never (); | |
1149 | if (!e->probability.initialized_p ()) | |
1150 | nunknown++; | |
1151 | else if (e->probability == profile_probability::never ()) | |
1152 | nzero++; | |
1153 | } | |
6de9cd9a | 1154 | |
b8698a0f | 1155 | /* When there is no successor or only one choice, prediction is easy. |
6de9cd9a | 1156 | |
5d9da222 ML |
1157 | When we have a basic block with more than 2 successors, the situation |
1158 | is more complicated as DS theory cannot be used literally. | |
1159 | More precisely, let's assume we predicted edge e1 with probability p1, | |
1160 | thus: m1({b1}) = p1. As we're going to combine more than 2 edges, we | |
1161 | need to find probability of e.g. m1({b2}), which we don't know. | |
1162 | The only approximation is to equally distribute 1-p1 to all edges | |
1163 | different from b1. | |
1164 | ||
1165 | According to numbers we've got from the SPEC2006 benchmark, there's only
1166 | one interesting reliable predictor (noreturn call), which can be | |
1167 | handled with a somewhat simpler approach. */
6de9cd9a DN |
1168 | if (nedges != 2) |
1169 | { | |
5d9da222 ML |
1170 | hash_set<edge> unlikely_edges (4); |
1171 | ||
1172 | /* Identify all edges that have a probability close to very unlikely. | |
1173 | It is not worth doing the same for the very likely case, as
1174 | there's no such probability in the SPEC2006 benchmark. */
1175 | edge_prediction **preds = bb_predictions->get (bb); | |
1176 | if (preds) | |
1177 | for (pred = *preds; pred; pred = pred->ep_next) | |
1178 | if (pred->ep_probability <= PROB_VERY_UNLIKELY) | |
1179 | unlikely_edges.add (pred->ep_edge); | |
1180 | ||
33e9feb5 | 1181 | if (!dry_run) |
5d9da222 | 1182 | set_even_probabilities (bb, &unlikely_edges); |
f06b0a10 | 1183 | clear_bb_predictions (bb); |
10d22567 | 1184 | if (dump_file) |
5d9da222 ML |
1185 | { |
1186 | fprintf (dump_file, "Predictions for bb %i\n", bb->index); | |
1187 | if (unlikely_edges.elements () == 0) | |
1188 | fprintf (dump_file, | |
1189 | "%i edges in bb %i predicted to even probabilities\n", | |
1190 | nedges, bb->index); | |
1191 | else | |
1192 | { | |
1193 | fprintf (dump_file, | |
1194 | "%i edges in bb %i predicted with some unlikely edges\n", | |
1195 | nedges, bb->index); | |
1196 | FOR_EACH_EDGE (e, ei, bb->succs) | |
b69d9ac6 | 1197 | if (!unlikely_executed_edge_p (e)) |
357067f2 JH |
1198 | dump_prediction (dump_file, PRED_COMBINED, |
1199 | e->probability.to_reg_br_prob_base (), bb, REASON_NONE, e); | |
5d9da222 ML |
1200 | } |
1201 | } | |
6de9cd9a DN |
1202 | return; |
1203 | } | |
1204 | ||
10d22567 ZD |
1205 | if (dump_file) |
1206 | fprintf (dump_file, "Predictions for bb %i\n", bb->index); | |
6de9cd9a | 1207 | |
e49efc14 ML |
1208 | prune_predictions_for_bb (bb); |
1209 | ||
b787e7a2 | 1210 | edge_prediction **preds = bb_predictions->get (bb); |
e49efc14 | 1211 | |
f06b0a10 | 1212 | if (preds) |
6de9cd9a | 1213 | { |
f06b0a10 ZD |
1214 | /* We implement "first match" heuristics and use probability guessed |
1215 | by predictor with smallest index. */ | |
b787e7a2 | 1216 | for (pred = *preds; pred; pred = pred->ep_next) |
f06b0a10 | 1217 | { |
bbbbb16a | 1218 | enum br_predictor predictor = pred->ep_predictor; |
f06b0a10 | 1219 | int probability = pred->ep_probability; |
6de9cd9a | 1220 | |
f06b0a10 ZD |
1221 | if (pred->ep_edge != first) |
1222 | probability = REG_BR_PROB_BASE - probability; | |
6de9cd9a | 1223 | |
f06b0a10 | 1224 | found = true; |
c0ee0021 JH |
1225 | /* First match heuristics would be wildly confused if we predicted
1226 | both directions. */ | |
c9f4fe73 ML |
1227 | if (best_predictor > predictor |
1228 | && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH) | |
c0ee0021 JH |
1229 | { |
1230 | struct edge_prediction *pred2; | |
1231 | int prob = probability; | |
1232 | ||
ed9c79e1 JJ |
1233 | for (pred2 = (struct edge_prediction *) *preds; |
1234 | pred2; pred2 = pred2->ep_next) | |
c0ee0021 JH |
1235 | if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor) |
1236 | { | |
f3c5ecc2 | 1237 | int probability2 = pred2->ep_probability; |
c0ee0021 JH |
1238 | |
1239 | if (pred2->ep_edge != first) | |
1240 | probability2 = REG_BR_PROB_BASE - probability2; | |
1241 | ||
b8698a0f | 1242 | if ((probability < REG_BR_PROB_BASE / 2) != |
c0ee0021 JH |
1243 | (probability2 < REG_BR_PROB_BASE / 2)) |
1244 | break; | |
1245 | ||
1246 | /* If the same predictor later gave better result, go for it! */ | |
1247 | if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability)) | |
1248 | || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability))) | |
1249 | prob = probability2; | |
1250 | } | |
1251 | if (!pred2) | |
1252 | best_probability = prob, best_predictor = predictor; | |
1253 | } | |
6de9cd9a | 1254 | |
f06b0a10 ZD |
1255 | d = (combined_probability * probability |
1256 | + (REG_BR_PROB_BASE - combined_probability) | |
1257 | * (REG_BR_PROB_BASE - probability)); | |
6de9cd9a | 1258 | |
f06b0a10 ZD |
1259 | /* Use FP math to avoid overflows of 32bit integers. */ |
1260 | if (d == 0) | |
1261 | /* If one probability is 0% and one 100%, avoid division by zero. */ | |
1262 | combined_probability = REG_BR_PROB_BASE / 2; | |
1263 | else | |
1264 | combined_probability = (((double) combined_probability) | |
1265 | * probability | |
1266 | * REG_BR_PROB_BASE / d + 0.5); | |
1267 | } | |
6de9cd9a DN |
1268 | } |
1269 | ||
1270 | /* Decide which heuristic to use. In case we didn't match anything, | |
1271 | use the no_prediction heuristic; in case we did match, use either
1272 | first match or Dempster-Shafer theory depending on the flags. */
1273 | ||
c9f4fe73 | 1274 | if (best_predictor != END_PREDICTORS) |
6de9cd9a DN |
1275 | first_match = true; |
1276 | ||
1277 | if (!found) | |
e49efc14 | 1278 | dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb); |
6de9cd9a DN |
1279 | else |
1280 | { | |
c9f4fe73 ML |
1281 | if (!first_match) |
1282 | dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb, | |
1283 | !first_match ? REASON_NONE : REASON_IGNORED); | |
1284 | else | |
1285 | dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb, | |
1286 | first_match ? REASON_NONE : REASON_IGNORED); | |
6de9cd9a DN |
1287 | } |
1288 | ||
1289 | if (first_match) | |
1290 | combined_probability = best_probability; | |
e49efc14 | 1291 | dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb); |
6de9cd9a | 1292 | |
f06b0a10 | 1293 | if (preds) |
6de9cd9a | 1294 | { |
d3bfe4de | 1295 | for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next) |
f06b0a10 | 1296 | { |
bbbbb16a | 1297 | enum br_predictor predictor = pred->ep_predictor; |
f06b0a10 | 1298 | int probability = pred->ep_probability; |
6de9cd9a | 1299 | |
f06b0a10 | 1300 | dump_prediction (dump_file, predictor, probability, bb, |
e49efc14 | 1301 | (!first_match || best_predictor == predictor) |
46c1cff6 | 1302 | ? REASON_NONE : REASON_IGNORED, pred->ep_edge); |
f06b0a10 | 1303 | } |
6de9cd9a | 1304 | } |
f06b0a10 | 1305 | clear_bb_predictions (bb); |
6de9cd9a | 1306 | |
b00ff621 JH |
1307 | |
1308 | /* If we have only one successor which is unknown, we can compute missing | |
1309 | probability. */
1310 | if (nunknown == 1) | |
1311 | { | |
1312 | profile_probability prob = profile_probability::always (); | |
1313 | edge missing = NULL; | |
1314 | ||
1315 | FOR_EACH_EDGE (e, ei, bb->succs) | |
1316 | if (e->probability.initialized_p ()) | |
1317 | prob -= e->probability; | |
1318 | else if (missing == NULL) | |
1319 | missing = e; | |
1320 | else | |
1321 | gcc_unreachable (); | |
1322 | missing->probability = prob; | |
1323 | } | |
1324 | /* If nothing is unknown, we have nothing to update. */ | |
1325 | else if (!nunknown && nzero != (int)EDGE_COUNT (bb->succs)) | |
1326 | ; | |
1327 | else if (!dry_run) | |
87022a6b | 1328 | { |
357067f2 JH |
1329 | first->probability |
1330 | = profile_probability::from_reg_br_prob_base (combined_probability); | |
1331 | second->probability = first->probability.invert (); | |
87022a6b | 1332 | } |
6de9cd9a DN |
1333 | } |
1334 | ||
9c04723a DC |
1335 | /* Check if T1 and T2 satisfy the IV_COMPARE condition. |
1336 | Return the SSA_NAME if the condition satisfies, NULL otherwise. | |
1337 | ||
1338 | T1 and T2 should be one of the following cases: | |
1339 | 1. T1 is SSA_NAME, T2 is NULL | |
1340 | 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4] | |
1341 | 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */ | |
1342 | ||
1343 | static tree | |
1344 | strips_small_constant (tree t1, tree t2) | |
1345 | { | |
1346 | tree ret = NULL; | |
1347 | int value = 0; | |
1348 | ||
1349 | if (!t1) | |
1350 | return NULL; | |
1351 | else if (TREE_CODE (t1) == SSA_NAME) | |
1352 | ret = t1; | |
9541ffee | 1353 | else if (tree_fits_shwi_p (t1)) |
9439e9a1 | 1354 | value = tree_to_shwi (t1); |
9c04723a DC |
1355 | else |
1356 | return NULL; | |
1357 | ||
1358 | if (!t2) | |
1359 | return ret; | |
9541ffee | 1360 | else if (tree_fits_shwi_p (t2)) |
9439e9a1 | 1361 | value = tree_to_shwi (t2); |
9c04723a DC |
1362 | else if (TREE_CODE (t2) == SSA_NAME) |
1363 | { | |
1364 | if (ret) | |
1365 | return NULL; | |
1366 | else | |
1367 | ret = t2; | |
1368 | } | |
1369 | ||
1370 | if (value <= 4 && value >= -4) | |
1371 | return ret; | |
1372 | else | |
1373 | return NULL; | |
1374 | } | |
1375 | ||
1376 | /* Return the SSA_NAME in T or T's operands. | |
1377 | Return NULL if SSA_NAME cannot be found. */ | |
1378 | ||
1379 | static tree | |
1380 | get_base_value (tree t) | |
1381 | { | |
1382 | if (TREE_CODE (t) == SSA_NAME) | |
1383 | return t; | |
1384 | ||
1385 | if (!BINARY_CLASS_P (t)) | |
1386 | return NULL; | |
1387 | ||
1388 | switch (TREE_OPERAND_LENGTH (t)) | |
1389 | { | |
1390 | case 1: | |
1391 | return strips_small_constant (TREE_OPERAND (t, 0), NULL); | |
1392 | case 2: | |
1393 | return strips_small_constant (TREE_OPERAND (t, 0), | |
1394 | TREE_OPERAND (t, 1)); | |
1395 | default: | |
1396 | return NULL; | |
1397 | } | |
1398 | } | |
1399 | ||
1400 | /* Check the compare STMT in LOOP. If it compares an induction | |
1401 | variable to a loop invariant, return true, and save | |
1402 | LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP. | |
1403 | Otherwise return false and set LOOP_INVARIANT to NULL. */
1404 | ||
1405 | static bool | |
538dd0b7 | 1406 | is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop, |
9c04723a DC |
1407 | tree *loop_invariant, |
1408 | enum tree_code *compare_code, | |
ecd4f20a | 1409 | tree *loop_step, |
9c04723a DC |
1410 | tree *loop_iv_base) |
1411 | { | |
1412 | tree op0, op1, bound, base; | |
1413 | affine_iv iv0, iv1; | |
1414 | enum tree_code code; | |
ecd4f20a | 1415 | tree step; |
9c04723a DC |
1416 | |
1417 | code = gimple_cond_code (stmt); | |
1418 | *loop_invariant = NULL; | |
1419 | ||
1420 | switch (code) | |
1421 | { | |
1422 | case GT_EXPR: | |
1423 | case GE_EXPR: | |
1424 | case NE_EXPR: | |
1425 | case LT_EXPR: | |
1426 | case LE_EXPR: | |
1427 | case EQ_EXPR: | |
1428 | break; | |
1429 | ||
1430 | default: | |
1431 | return false; | |
1432 | } | |
1433 | ||
1434 | op0 = gimple_cond_lhs (stmt); | |
1435 | op1 = gimple_cond_rhs (stmt); | |
1436 | ||
1437 | if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST) | |
1438 | || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST)) | |
1439 | return false; | |
1440 | if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true)) | |
1441 | return false; | |
1442 | if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true)) | |
1443 | return false; | |
1444 | if (TREE_CODE (iv0.step) != INTEGER_CST | |
1445 | || TREE_CODE (iv1.step) != INTEGER_CST) | |
1446 | return false; | |
1447 | if ((integer_zerop (iv0.step) && integer_zerop (iv1.step)) | |
1448 | || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step))) | |
1449 | return false; | |
1450 | ||
1451 | if (integer_zerop (iv0.step)) | |
1452 | { | |
1453 | if (code != NE_EXPR && code != EQ_EXPR) | |
1454 | code = invert_tree_comparison (code, false); | |
1455 | bound = iv0.base; | |
1456 | base = iv1.base; | |
9541ffee | 1457 | if (tree_fits_shwi_p (iv1.step)) |
ecd4f20a | 1458 | step = iv1.step; |
9c04723a DC |
1459 | else |
1460 | return false; | |
1461 | } | |
1462 | else | |
1463 | { | |
1464 | bound = iv1.base; | |
1465 | base = iv0.base; | |
9541ffee | 1466 | if (tree_fits_shwi_p (iv0.step)) |
ecd4f20a | 1467 | step = iv0.step; |
9c04723a DC |
1468 | else |
1469 | return false; | |
1470 | } | |
1471 | ||
1472 | if (TREE_CODE (bound) != INTEGER_CST) | |
1473 | bound = get_base_value (bound); | |
1474 | if (!bound) | |
1475 | return false; | |
1476 | if (TREE_CODE (base) != INTEGER_CST) | |
1477 | base = get_base_value (base); | |
1478 | if (!base) | |
1479 | return false; | |
1480 | ||
1481 | *loop_invariant = bound; | |
1482 | *compare_code = code; | |
1483 | *loop_step = step; | |
1484 | *loop_iv_base = base; | |
1485 | return true; | |
1486 | } | |
1487 | ||
1488 | /* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */ | |
1489 | ||
1490 | static bool | |
1491 | expr_coherent_p (tree t1, tree t2) | |
1492 | { | |
355fe088 | 1493 | gimple *stmt; |
9c04723a DC |
1494 | tree ssa_name_1 = NULL; |
1495 | tree ssa_name_2 = NULL; | |
1496 | ||
1497 | gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST); | |
1498 | gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST); | |
1499 | ||
1500 | if (t1 == t2) | |
1501 | return true; | |
1502 | ||
1503 | if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST) | |
1504 | return true; | |
1505 | if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST) | |
1506 | return false; | |
1507 | ||
1508 | /* Check to see if t1 is expressed/defined with t2. */ | |
1509 | stmt = SSA_NAME_DEF_STMT (t1); | |
1510 | gcc_assert (stmt != NULL); | |
1511 | if (is_gimple_assign (stmt)) | |
1512 | { | |
1513 | ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE); | |
1514 | if (ssa_name_1 && ssa_name_1 == t2) | |
1515 | return true; | |
1516 | } | |
1517 | ||
1518 | /* Check to see if t2 is expressed/defined with t1. */ | |
1519 | stmt = SSA_NAME_DEF_STMT (t2); | |
1520 | gcc_assert (stmt != NULL); | |
1521 | if (is_gimple_assign (stmt)) | |
1522 | { | |
1523 | ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE); | |
1524 | if (ssa_name_2 && ssa_name_2 == t1) | |
1525 | return true; | |
1526 | } | |
1527 | ||
1528 | /* Compare if t1 and t2's def_stmts are identical. */ | |
1529 | if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2) | |
1530 | return true; | |
1531 | else | |
1532 | return false; | |
1533 | } | |
1534 | ||
429d2750 JH |
1535 | /* Return true if BB is predicted by one of the loop heuristics. */ | 
1536 | ||
1537 | static bool | |
1538 | predicted_by_loop_heuristics_p (basic_block bb) | |
1539 | { | |
1540 | struct edge_prediction *i; | |
1541 | edge_prediction **preds = bb_predictions->get (bb); | |
1542 | ||
1543 | if (!preds) | |
1544 | return false; | |
1545 | ||
1546 | for (i = *preds; i; i = i->ep_next) | |
1547 | if (i->ep_predictor == PRED_LOOP_ITERATIONS_GUESSED | |
1548 | || i->ep_predictor == PRED_LOOP_ITERATIONS_MAX | |
1549 | || i->ep_predictor == PRED_LOOP_ITERATIONS | |
1550 | || i->ep_predictor == PRED_LOOP_EXIT | |
9bb86f40 | 1551 | || i->ep_predictor == PRED_LOOP_EXIT_WITH_RECURSION |
429d2750 JH |
1552 | || i->ep_predictor == PRED_LOOP_EXTRA_EXIT) |
1553 | return true; | |
1554 | return false; | |
1555 | } | |
1556 | ||
9c04723a DC |
1557 | /* Predict the branch probability of BB when BB contains a branch that compares | 
1558 | an induction variable in LOOP (with base LOOP_IV_BASE_VAR) to LOOP_BOUND_VAR. | 
1559 | The loop exit is compared using LOOP_BOUND_CODE, with a step of LOOP_BOUND_STEP. | 
1560 | ||
1561 | E.g. | |
1562 | for (int i = 0; i < bound; i++) { | |
1563 | if (i < bound - 2) | |
1564 | computation_1(); | |
1565 | else | |
1566 | computation_2(); | |
1567 | } | |
1568 | ||
1569 | In this loop, we will predict the branch inside the loop to be taken. */ | |
1570 | ||
1571 | static void | |
1572 | predict_iv_comparison (struct loop *loop, basic_block bb, | |
1573 | tree loop_bound_var, | |
1574 | tree loop_iv_base_var, | |
1575 | enum tree_code loop_bound_code, | |
1576 | int loop_bound_step) | |
1577 | { | |
355fe088 | 1578 | gimple *stmt; |
9c04723a DC |
1579 | tree compare_var, compare_base; |
1580 | enum tree_code compare_code; | |
ecd4f20a | 1581 | tree compare_step_var; |
9c04723a DC |
1582 | edge then_edge; |
1583 | edge_iterator ei; | |
1584 | ||
429d2750 | 1585 | if (predicted_by_loop_heuristics_p (bb)) |
9c04723a DC |
1586 | return; |
1587 | ||
1588 | stmt = last_stmt (bb); | |
1589 | if (!stmt || gimple_code (stmt) != GIMPLE_COND) | |
1590 | return; | |
538dd0b7 DM |
1591 | if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt), |
1592 | loop, &compare_var, | |
9c04723a | 1593 | &compare_code, |
ecd4f20a | 1594 | &compare_step_var, |
9c04723a DC |
1595 | &compare_base)) |
1596 | return; | |
1597 | ||
1598 | /* Find the taken edge. */ | |
1599 | FOR_EACH_EDGE (then_edge, ei, bb->succs) | |
1600 | if (then_edge->flags & EDGE_TRUE_VALUE) | |
1601 | break; | |
1602 | ||
1603 | /* When comparing an IV to a loop invariant, NE is more likely to be | |
1604 | taken while EQ is more likely to be not-taken. */ | |
1605 | if (compare_code == NE_EXPR) | |
1606 | { | |
1607 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1608 | return; | |
1609 | } | |
1610 | else if (compare_code == EQ_EXPR) | |
1611 | { | |
1612 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1613 | return; | |
1614 | } | |
1615 | ||
1616 | if (!expr_coherent_p (loop_iv_base_var, compare_base)) | |
1617 | return; | |
1618 | ||
1619 | /* If loop bound, base and compare bound are all constants, we can | |
1620 | calculate the probability directly. */ | |
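      /* A rough worked example: for

	   for (i = 0; i < 100; i++)
	     if (i < 30)
	       ...

	 loop_count is (100 - 0) / 1 == 100 and compare_count is
	 (30 - 0) / 1 == 30, so the THEN edge is predicted with a
	 probability of about 30 * REG_BR_PROB_BASE / 100, i.e. 30%.
	 (Illustrative numbers; the code below also adjusts for LE/GE
	 comparisons and clamps negative counts to zero.)  */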
9541ffee RS |
1621 | if (tree_fits_shwi_p (loop_bound_var) |
1622 | && tree_fits_shwi_p (compare_var) | |
1623 | && tree_fits_shwi_p (compare_base)) | |
9c04723a DC |
1624 | { |
1625 | int probability; | |
807e902e KZ |
1626 | bool overflow, overall_overflow = false; |
1627 | widest_int compare_count, tem; | |
ecd4f20a MP |
1628 | |
1629 | /* (loop_bound - base) / compare_step */ | |
807e902e KZ |
1630 | tem = wi::sub (wi::to_widest (loop_bound_var), |
1631 | wi::to_widest (compare_base), SIGNED, &overflow); | |
1632 | overall_overflow |= overflow; | |
1633 | widest_int loop_count = wi::div_trunc (tem, | |
1634 | wi::to_widest (compare_step_var), | |
1635 | SIGNED, &overflow); | |
1636 | overall_overflow |= overflow; | |
1637 | ||
1638 | if (!wi::neg_p (wi::to_widest (compare_step_var)) | |
9c04723a | 1639 | ^ (compare_code == LT_EXPR || compare_code == LE_EXPR)) |
ecd4f20a MP |
1640 | { |
1641 | /* (loop_bound - compare_bound) / compare_step */ | |
807e902e KZ |
1642 | tem = wi::sub (wi::to_widest (loop_bound_var), |
1643 | wi::to_widest (compare_var), SIGNED, &overflow); | |
1644 | overall_overflow |= overflow; | |
1645 | compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var), | |
1646 | SIGNED, &overflow); | |
1647 | overall_overflow |= overflow; | |
ecd4f20a | 1648 | } |
9c04723a | 1649 | else |
ecd4f20a MP |
1650 | { |
1651 | /* (compare_bound - base) / compare_step */ | |
807e902e KZ |
1652 | tem = wi::sub (wi::to_widest (compare_var), |
1653 | wi::to_widest (compare_base), SIGNED, &overflow); | |
1654 | overall_overflow |= overflow; | |
1655 | compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var), | |
1656 | SIGNED, &overflow); | |
1657 | overall_overflow |= overflow; | |
ecd4f20a | 1658 | } |
9c04723a | 1659 | if (compare_code == LE_EXPR || compare_code == GE_EXPR) |
ecd4f20a | 1660 | ++compare_count; |
9c04723a | 1661 | if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR) |
ecd4f20a | 1662 | ++loop_count; |
807e902e KZ |
1663 | if (wi::neg_p (compare_count)) |
1664 | compare_count = 0; | |
1665 | if (wi::neg_p (loop_count)) | |
1666 | loop_count = 0; | |
1667 | if (loop_count == 0) | |
9c04723a | 1668 | probability = 0; |
807e902e | 1669 | else if (wi::cmps (compare_count, loop_count) == 1) |
9c04723a DC |
1670 | probability = REG_BR_PROB_BASE; |
1671 | else | |
ecd4f20a | 1672 | { |
807e902e KZ |
1673 | tem = compare_count * REG_BR_PROB_BASE; |
1674 | tem = wi::udiv_trunc (tem, loop_count); | |
ecd4f20a MP |
1675 | probability = tem.to_uhwi (); |
1676 | } | |
1677 | ||
d1fcc2bd | 1678 | /* FIXME: The branch prediction seems broken. It has only 20% hitrate. */ |
807e902e | 1679 | if (!overall_overflow) |
ecd4f20a MP |
1680 | predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability); |
1681 | ||
9c04723a DC |
1682 | return; |
1683 | } | |
1684 | ||
1685 | if (expr_coherent_p (loop_bound_var, compare_var)) | |
1686 | { | |
1687 | if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR) | |
1688 | && (compare_code == LT_EXPR || compare_code == LE_EXPR)) | |
1689 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1690 | else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR) | |
1691 | && (compare_code == GT_EXPR || compare_code == GE_EXPR)) | |
1692 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1693 | else if (loop_bound_code == NE_EXPR) | |
1694 | { | |
1695 | /* If the loop backedge condition is "(i != bound)", we do | |
1696 | the comparison based on the step of IV: | |
1697 | * step < 0 : backedge condition is like (i > bound) | |
1698 | * step > 0 : backedge condition is like (i < bound) */ | |
1699 | gcc_assert (loop_bound_step != 0); | |
1700 | if (loop_bound_step > 0 | |
1701 | && (compare_code == LT_EXPR | |
1702 | || compare_code == LE_EXPR)) | |
1703 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1704 | else if (loop_bound_step < 0 | |
1705 | && (compare_code == GT_EXPR | |
1706 | || compare_code == GE_EXPR)) | |
1707 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1708 | else | |
1709 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1710 | } | |
1711 | else | |
1712 | /* The branch is predicted not-taken if loop_bound_code is | 
1713 | the opposite of compare_code. */ | 
1714 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1715 | } | |
1716 | else if (expr_coherent_p (loop_iv_base_var, compare_var)) | |
1717 | { | |
1718 | /* For cases like: | |
1719 | for (i = s; i < h; i++) | |
1720 | if (i > s + 2) .... | |
1721 | The branch should be predicted taken. */ | |
1722 | if (loop_bound_step > 0 | |
1723 | && (compare_code == GT_EXPR || compare_code == GE_EXPR)) | |
1724 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1725 | else if (loop_bound_step < 0 | |
1726 | && (compare_code == LT_EXPR || compare_code == LE_EXPR)) | |
1727 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN); | |
1728 | else | |
1729 | predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN); | |
1730 | } | |
1731 | } | |
16fdb75f DC |
1732 | |
1733 | /* Predict for extra loop exits that will lead to EXIT_EDGE. The extra loop | |
1734 | exits result from short-circuit conditions that will generate an | 
1735 | if_tmp. E.g.: | |
1736 | ||
1737 | if (foo() || global > 10) | |
1738 | break; | |
1739 | ||
1740 | This will be translated into: | |
1741 | ||
1742 | BB3: | |
1743 | loop header... | |
1744 | BB4: | |
1745 | if foo() goto BB6 else goto BB5 | |
1746 | BB5: | |
1747 | if global > 10 goto BB6 else goto BB7 | |
1748 | BB6: | |
1749 | goto BB7 | |
1750 | BB7: | |
1751 | iftmp = (PHI 0(BB5), 1(BB6)) | |
1752 | if iftmp == 1 goto BB8 else goto BB3 | |
1753 | BB8: | |
1754 | outside of the loop... | |
1755 | ||
1756 | The edge BB7->BB8 is loop exit because BB8 is outside of the loop. | |
1757 | From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop | |
1758 | exits. This function takes BB7->BB8 as input, and finds out the extra loop | |
050fb209 | 1759 | exits to predict them using PRED_LOOP_EXTRA_EXIT. */ |
16fdb75f DC |
1760 | |
1761 | static void | |
1762 | predict_extra_loop_exits (edge exit_edge) | |
1763 | { | |
1764 | unsigned i; | |
1765 | bool check_value_one; | |
355fe088 | 1766 | gimple *lhs_def_stmt; |
538dd0b7 | 1767 | gphi *phi_stmt; |
16fdb75f | 1768 | tree cmp_rhs, cmp_lhs; |
355fe088 | 1769 | gimple *last; |
538dd0b7 | 1770 | gcond *cmp_stmt; |
16fdb75f | 1771 | |
538dd0b7 DM |
1772 | last = last_stmt (exit_edge->src); |
1773 | if (!last) | |
1774 | return; | |
1775 | cmp_stmt = dyn_cast <gcond *> (last); | |
1776 | if (!cmp_stmt) | |
16fdb75f | 1777 | return; |
538dd0b7 | 1778 | |
16fdb75f DC |
1779 | cmp_rhs = gimple_cond_rhs (cmp_stmt); |
1780 | cmp_lhs = gimple_cond_lhs (cmp_stmt); | |
1781 | if (!TREE_CONSTANT (cmp_rhs) | |
1782 | || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs))) | |
1783 | return; | |
1784 | if (TREE_CODE (cmp_lhs) != SSA_NAME) | |
1785 | return; | |
1786 | ||
1787 | /* If check_value_one is true, only the phi_args with value '1' will lead | |
1788 | to loop exit. Otherwise, only the phi_args with value '0' will lead to | |
1789 | loop exit. */ | |
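  /* Worked example: for "if (iftmp_1 != 0) goto <exit>" where the exit
     edge is the TRUE edge, cmp_rhs is 0, the comparison code is NE_EXPR
     and EDGE_TRUE_VALUE is set, so check_value_one evaluates to true and
     the PHI arguments equal to 1 are the ones routed towards the exit
     (iftmp_1 is a hypothetical temporary).  */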
1790 | check_value_one = (((integer_onep (cmp_rhs)) | |
1791 | ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR)) | |
1792 | ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0)); | |
1793 | ||
538dd0b7 DM |
1794 | lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs); |
1795 | if (!lhs_def_stmt) | |
1796 | return; | |
1797 | ||
1798 | phi_stmt = dyn_cast <gphi *> (lhs_def_stmt); | |
1799 | if (!phi_stmt) | |
16fdb75f DC |
1800 | return; |
1801 | ||
1802 | for (i = 0; i < gimple_phi_num_args (phi_stmt); i++) | |
1803 | { | |
1804 | edge e1; | |
1805 | edge_iterator ei; | |
1806 | tree val = gimple_phi_arg_def (phi_stmt, i); | |
1807 | edge e = gimple_phi_arg_edge (phi_stmt, i); | |
1808 | ||
1809 | if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val))) | |
1810 | continue; | |
1811 | if ((check_value_one ^ integer_onep (val)) == 1) | |
1812 | continue; | |
1813 | if (EDGE_COUNT (e->src->succs) != 1) | |
1814 | { | |
050fb209 | 1815 | predict_paths_leading_to_edge (e, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN); |
16fdb75f DC |
1816 | continue; |
1817 | } | |
1818 | ||
1819 | FOR_EACH_EDGE (e1, ei, e->src->preds) | |
050fb209 | 1820 | predict_paths_leading_to_edge (e1, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN); |
16fdb75f DC |
1821 | } |
1822 | } | |
1823 | ||
429d2750 | 1824 | |
d73be268 ZD |
1825 | /* Predict edge probabilities by exploiting loop structure. */ |
1826 | ||
6de9cd9a | 1827 | static void |
d73be268 | 1828 | predict_loops (void) |
6de9cd9a | 1829 | { |
42fd6772 | 1830 | struct loop *loop; |
9bb86f40 JH |
1831 | basic_block bb; |
1832 | hash_set <struct loop *> with_recursion(10); | |
1833 | ||
1834 | FOR_EACH_BB_FN (bb, cfun) | |
1835 | { | |
1836 | gimple_stmt_iterator gsi; | |
1837 | tree decl; | |
1838 | ||
1839 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1840 | if (is_gimple_call (gsi_stmt (gsi)) | |
1841 | && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL | |
1842 | && recursive_call_p (current_function_decl, decl)) | |
1843 | { | |
1844 | loop = bb->loop_father; | |
1845 | while (loop && !with_recursion.add (loop)) | |
1846 | loop = loop_outer (loop); | |
1847 | } | |
1848 | } | |
0b92ff33 | 1849 | |
65169dcf JE |
1850 | /* Try to predict out blocks in a loop that are not part of a |
1851 | natural loop. */ | |
429d2750 | 1852 | FOR_EACH_LOOP (loop, LI_FROM_INNERMOST) |
f1ebdfc5 | 1853 | { |
2ecfd709 | 1854 | basic_block bb, *bbs; |
429d2750 | 1855 | unsigned j, n_exits = 0; |
9771b263 | 1856 | vec<edge> exits; |
992c31e6 | 1857 | struct tree_niter_desc niter_desc; |
ca83d385 | 1858 | edge ex; |
9c04723a DC |
1859 | struct nb_iter_bound *nb_iter; |
1860 | enum tree_code loop_bound_code = ERROR_MARK; | |
ecd4f20a | 1861 | tree loop_bound_step = NULL; |
9c04723a DC |
1862 | tree loop_bound_var = NULL; |
1863 | tree loop_iv_base = NULL; | |
538dd0b7 | 1864 | gcond *stmt = NULL; |
9bb86f40 | 1865 | bool recursion = with_recursion.contains (loop); |
f1ebdfc5 | 1866 | |
ca83d385 | 1867 | exits = get_loop_exit_edges (loop); |
429d2750 | 1868 | FOR_EACH_VEC_ELT (exits, j, ex) |
b69d9ac6 | 1869 | if (!unlikely_executed_edge_p (ex) && !(ex->flags & EDGE_ABNORMAL_CALL)) |
429d2750 | 1870 | n_exits ++; |
f481cd49 JH |
1871 | if (!n_exits) |
1872 | { | |
9771b263 | 1873 | exits.release (); |
f481cd49 JH |
1874 | continue; |
1875 | } | |
0dd0e980 | 1876 | |
9bb86f40 JH |
1877 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1878 | fprintf (dump_file, "Predicting loop %i%s with %i exits.\n", | |
1879 | loop->num, recursion ? " (with recursion)":"", n_exits); | |
1880 | if (dump_file && (dump_flags & TDF_DETAILS) | |
1881 | && max_loop_iterations_int (loop) >= 0) | |
1882 | { | |
1883 | fprintf (dump_file, | |
1884 | "Loop %d iterates at most %i times.\n", loop->num, | |
1885 | (int)max_loop_iterations_int (loop)); | |
1886 | } | |
1887 | if (dump_file && (dump_flags & TDF_DETAILS) | |
1888 | && likely_max_loop_iterations_int (loop) >= 0) | |
1889 | { | |
1890 | fprintf (dump_file, "Loop %d likely iterates at most %i times.\n", | |
1891 | loop->num, (int)likely_max_loop_iterations_int (loop)); | |
1892 | } | |
1893 | ||
9771b263 | 1894 | FOR_EACH_VEC_ELT (exits, j, ex) |
b6acab32 | 1895 | { |
992c31e6 | 1896 | tree niter = NULL; |
4839cb59 ZD |
1897 | HOST_WIDE_INT nitercst; |
1898 | int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS); | |
1899 | int probability; | |
1900 | enum br_predictor predictor; | |
429d2750 | 1901 | widest_int nit; |
b6acab32 | 1902 | |
b69d9ac6 JH |
1903 | if (unlikely_executed_edge_p (ex) |
1904 | || (ex->flags & EDGE_ABNORMAL_CALL)) | |
429d2750 JH |
1905 | continue; |
1906 | /* Loop heuristics do not expect the exit conditional to be inside | 
1907 | an inner loop. We predict from the innermost to the outermost loop. */ | 
1908 | if (predicted_by_loop_heuristics_p (ex->src)) | |
9bb86f40 JH |
1909 | { |
1910 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1911 | fprintf (dump_file, "Skipping exit %i->%i because " | |
1912 | "it is already predicted.\n", | |
1913 | ex->src->index, ex->dest->index); | |
1914 | continue; | |
1915 | } | |
16fdb75f DC |
1916 | predict_extra_loop_exits (ex); |
1917 | ||
46deac6c | 1918 | if (number_of_iterations_exit (loop, ex, &niter_desc, false, false)) |
992c31e6 JH |
1919 | niter = niter_desc.niter; |
1920 | if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST) | |
ca83d385 | 1921 | niter = loop_niter_by_eval (loop, ex); |
9bb86f40 JH |
1922 | if (dump_file && (dump_flags & TDF_DETAILS) |
1923 | && TREE_CODE (niter) == INTEGER_CST) | |
1924 | { | |
1925 | fprintf (dump_file, "Exit %i->%i %d iterates ", | |
1926 | ex->src->index, ex->dest->index, | |
1927 | loop->num); | |
1928 | print_generic_expr (dump_file, niter, TDF_SLIM); | |
1929 | fprintf (dump_file, " times.\n"); | |
1930 | } | |
b6acab32 | 1931 | |
992c31e6 JH |
1932 | if (TREE_CODE (niter) == INTEGER_CST) |
1933 | { | |
cc269bb6 | 1934 | if (tree_fits_uhwi_p (niter) |
2aa579ad JJ |
1935 | && max |
1936 | && compare_tree_int (niter, max - 1) == -1) | |
ae7e9ddd | 1937 | nitercst = tree_to_uhwi (niter) + 1; |
992c31e6 | 1938 | else |
4839cb59 ZD |
1939 | nitercst = max; |
1940 | predictor = PRED_LOOP_ITERATIONS; | |
1941 | } | |
1942 | /* If we have just one exit and we can derive some information about | |
1943 | the number of iterations of the loop from the statements inside | |
1944 | the loop, use it to predict this exit. */ | |
429d2750 JH |
1945 | else if (n_exits == 1 |
1946 | && estimated_stmt_executions (loop, &nit)) | |
4839cb59 | 1947 | { |
429d2750 | 1948 | if (wi::gtu_p (nit, max)) |
4839cb59 | 1949 | nitercst = max; |
429d2750 JH |
1950 | else |
1951 | nitercst = nit.to_shwi (); | |
4839cb59 | 1952 | predictor = PRED_LOOP_ITERATIONS_GUESSED; |
992c31e6 | 1953 | } |
429d2750 JH |
1954 | /* If we have a likely upper bound, trust it for very small iteration | 
1955 | counts. Such loops would otherwise get mispredicted by standard | |
1956 | LOOP_EXIT heuristics. */ | |
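	  /* "Very small" means fewer iterations than roughly
	     1 / (1 - hitrate of the LOOP_EXIT predictor); e.g. with a
	     90% hitrate only bounds below about 10 iterations qualify
	     (the actual hitrate comes from predict.def; the numbers here
	     are purely illustrative).  */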
1957 | else if (n_exits == 1 | |
1958 | && likely_max_stmt_executions (loop, &nit) | |
1959 | && wi::ltu_p (nit, | |
1960 | RDIV (REG_BR_PROB_BASE, | |
1961 | REG_BR_PROB_BASE | |
1962 | - predictor_info | |
9bb86f40 JH |
1963 | [recursion |
1964 | ? PRED_LOOP_EXIT_WITH_RECURSION | |
1965 | : PRED_LOOP_EXIT].hitrate))) | |
429d2750 JH |
1966 | { |
1967 | nitercst = nit.to_shwi (); | |
1968 | predictor = PRED_LOOP_ITERATIONS_MAX; | |
1969 | } | |
4839cb59 | 1970 | else |
9bb86f40 JH |
1971 | { |
1972 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1973 | fprintf (dump_file, "Nothing known about exit %i->%i.\n", | |
1974 | ex->src->index, ex->dest->index); | |
1975 | continue; | |
1976 | } | |
4839cb59 | 1977 | |
9bb86f40 JH |
1978 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1979 | fprintf (dump_file, "Recording prediction to %i iterations by %s.\n", | |
1980 | (int)nitercst, predictor_info[predictor].name); | |
60fa7862 ML |
1981 | /* If the prediction for number of iterations is zero, do not |
1982 | predict the exit edges. */ | |
1983 | if (nitercst == 0) | |
1984 | continue; | |
1985 | ||
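	  /* E.g. an exit from a loop estimated to iterate 10 times gets
	     probability REG_BR_PROB_BASE / 10, i.e. roughly a 10% chance
	     of being taken on any given iteration (illustrative).  */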
429d2750 | 1986 | probability = RDIV (REG_BR_PROB_BASE, nitercst); |
4839cb59 | 1987 | predict_edge (ex, predictor, probability); |
b6acab32 | 1988 | } |
9771b263 | 1989 | exits.release (); |
3d436d2a | 1990 | |
9c04723a DC |
1991 | /* Find information about loop bound variables. */ |
1992 | for (nb_iter = loop->bounds; nb_iter; | |
1993 | nb_iter = nb_iter->next) | |
1994 | if (nb_iter->stmt | |
1995 | && gimple_code (nb_iter->stmt) == GIMPLE_COND) | |
1996 | { | |
538dd0b7 | 1997 | stmt = as_a <gcond *> (nb_iter->stmt); |
9c04723a DC |
1998 | break; |
1999 | } | |
2000 | if (!stmt && last_stmt (loop->header) | |
2001 | && gimple_code (last_stmt (loop->header)) == GIMPLE_COND) | |
538dd0b7 | 2002 | stmt = as_a <gcond *> (last_stmt (loop->header)); |
9c04723a DC |
2003 | if (stmt) |
2004 | is_comparison_with_loop_invariant_p (stmt, loop, | |
2005 | &loop_bound_var, | |
2006 | &loop_bound_code, | |
2007 | &loop_bound_step, | |
2008 | &loop_iv_base); | |
2009 | ||
2ecfd709 | 2010 | bbs = get_loop_body (loop); |
6de9cd9a | 2011 | |
2ecfd709 ZD |
2012 | for (j = 0; j < loop->num_nodes; j++) |
2013 | { | |
2ecfd709 | 2014 | edge e; |
628f6a4e | 2015 | edge_iterator ei; |
2ecfd709 ZD |
2016 | |
2017 | bb = bbs[j]; | |
bfdade77 | 2018 | |
969d70ca JH |
2019 | /* Bypass loop heuristics on continue statement. These |
2020 | statements construct loops via "non-loop" constructs | |
2021 | in the source language and are better handled | 
2022 | separately. */ | 
992c31e6 | 2023 | if (predicted_by_p (bb, PRED_CONTINUE)) |
9bb86f40 JH |
2024 | { |
2025 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2026 | fprintf (dump_file, "BB %i predicted by continue.\n", | |
2027 | bb->index); | |
2028 | continue; | |
2029 | } | |
969d70ca | 2030 | |
9bb86f40 JH |
2031 | /* If we already used more reliable loop exit predictors, do not |
2032 | bother with PRED_LOOP_EXIT. */ | |
2033 | if (!predicted_by_loop_heuristics_p (bb)) | |
2c9e13f3 JH |
2034 | { |
2035 | /* For a loop with many exits we don't want to predict all exits | 
2036 | with a pretty large probability, because if all exits are | 
2037 | considered in a row, the loop would be predicted to iterate | 
2038 | almost never. The code to divide probability by number of | 
2039 | exits is very rough. It should compute the number of exits | 
2040 | taken in each path through the function (not the overall number | 
2041 | of exits that might be a lot higher for loops with wide switch | 
2042 | statements in them) and compute the n-th square root. | 
2043 | ||
2044 | We limit the minimal probability by 2% to avoid | |
2045 | EDGE_PROBABILITY_RELIABLE from trusting the branch prediction | |
2046 | as this was causing regression in perl benchmark containing such | |
2047 | a wide loop. */ | |
b8698a0f | 2048 | |
2c9e13f3 | 2049 | int probability = ((REG_BR_PROB_BASE |
9bb86f40 JH |
2050 | - predictor_info |
2051 | [recursion | |
2052 | ? PRED_LOOP_EXIT_WITH_RECURSION | |
2053 | : PRED_LOOP_EXIT].hitrate) | |
2c9e13f3 JH |
2054 | / n_exits); |
2055 | if (probability < HITRATE (2)) | |
2056 | probability = HITRATE (2); | |
2057 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2058 | if (e->dest->index < NUM_FIXED_BLOCKS | |
2059 | || !flow_bb_inside_loop_p (loop, e->dest)) | |
9bb86f40 JH |
2060 | { |
2061 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2062 | fprintf (dump_file, | |
2063 | "Predicting exit %i->%i with prob %i.\n", | |
2064 | e->src->index, e->dest->index, probability); | |
2065 | predict_edge (e, | |
2066 | recursion ? PRED_LOOP_EXIT_WITH_RECURSION | |
2067 | : PRED_LOOP_EXIT, probability); | |
2068 | } | |
2c9e13f3 | 2069 | } |
9c04723a DC |
2070 | if (loop_bound_var) |
2071 | predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base, | |
2072 | loop_bound_code, | |
9439e9a1 | 2073 | tree_to_shwi (loop_bound_step)); |
2ecfd709 | 2074 | } |
b8698a0f | 2075 | |
7805417a JH |
2076 | /* In the following code |
2077 | for (loop1) | |
2078 | if (cond) | |
2079 | for (loop2) | |
2080 | body; | |
2081 | guess that cond is unlikely. */ | |
2082 | if (loop_outer (loop)->num) | |
2083 | { | |
2084 | basic_block bb = NULL; | |
2085 | edge preheader_edge = loop_preheader_edge (loop); | |
2086 | ||
2087 | if (single_pred_p (preheader_edge->src) | |
2088 | && single_succ_p (preheader_edge->src)) | |
2089 | preheader_edge = single_pred_edge (preheader_edge->src); | |
2090 | ||
2091 | gimple *stmt = last_stmt (preheader_edge->src); | |
2092 | /* Pattern match fortran loop preheader: | |
2093 | _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER); | |
2094 | _17 = (logical(kind=4)) _16; | |
2095 | if (_17 != 0) | |
2096 | goto <bb 11>; | |
2097 | else | |
2098 | goto <bb 13>; | |
2099 | ||
2100 | Loop guard branch prediction says nothing about duplicated loop | |
2101 | headers produced by fortran frontend and in this case we want | |
2102 | to predict paths leading to this preheader. */ | |
2103 | ||
2104 | if (stmt | |
2105 | && gimple_code (stmt) == GIMPLE_COND | |
2106 | && gimple_cond_code (stmt) == NE_EXPR | |
2107 | && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME | |
2108 | && integer_zerop (gimple_cond_rhs (stmt))) | |
2109 | { | |
2110 | gimple *call_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt)); | |
2111 | if (gimple_code (call_stmt) == GIMPLE_ASSIGN | |
2112 | && gimple_expr_code (call_stmt) == NOP_EXPR | |
2113 | && TREE_CODE (gimple_assign_rhs1 (call_stmt)) == SSA_NAME) | |
2114 | call_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt)); | |
8e4284d0 | 2115 | if (gimple_call_internal_p (call_stmt, IFN_BUILTIN_EXPECT) |
7805417a JH |
2116 | && TREE_CODE (gimple_call_arg (call_stmt, 2)) == INTEGER_CST |
2117 | && tree_fits_uhwi_p (gimple_call_arg (call_stmt, 2)) | |
2118 | && tree_to_uhwi (gimple_call_arg (call_stmt, 2)) | |
2119 | == PRED_FORTRAN_LOOP_PREHEADER) | |
2120 | bb = preheader_edge->src; | |
2121 | } | |
2122 | if (!bb) | |
2123 | { | |
2124 | if (!dominated_by_p (CDI_DOMINATORS, | |
2125 | loop_outer (loop)->latch, loop->header)) | |
2126 | predict_paths_leading_to_edge (loop_preheader_edge (loop), | |
9bb86f40 JH |
2127 | recursion |
2128 | ? PRED_LOOP_GUARD_WITH_RECURSION | |
2129 | : PRED_LOOP_GUARD, | |
7805417a JH |
2130 | NOT_TAKEN, |
2131 | loop_outer (loop)); | |
2132 | } | |
2133 | else | |
2134 | { | |
2135 | if (!dominated_by_p (CDI_DOMINATORS, | |
2136 | loop_outer (loop)->latch, bb)) | |
2137 | predict_paths_leading_to (bb, | |
9bb86f40 JH |
2138 | recursion |
2139 | ? PRED_LOOP_GUARD_WITH_RECURSION | |
2140 | : PRED_LOOP_GUARD, | |
7805417a JH |
2141 | NOT_TAKEN, |
2142 | loop_outer (loop)); | |
2143 | } | |
2144 | } | |
2145 | ||
e0a21ab9 | 2146 | /* Free basic blocks from get_loop_body. */ |
36579663 | 2147 | free (bbs); |
f1ebdfc5 | 2148 | } |
6de9cd9a DN |
2149 | } |
2150 | ||
87022a6b JH |
2151 | /* Attempt to predict probabilities of BB outgoing edges using local |
2152 | properties. */ | |
2153 | static void | |
2154 | bb_estimate_probability_locally (basic_block bb) | |
2155 | { | |
9f215bf5 | 2156 | rtx_insn *last_insn = BB_END (bb); |
87022a6b JH |
2157 | rtx cond; |
2158 | ||
2159 | if (! can_predict_insn_p (last_insn)) | |
2160 | return; | |
2161 | cond = get_condition (last_insn, NULL, false, false); | |
2162 | if (! cond) | |
2163 | return; | |
2164 | ||
2165 | /* Try "pointer heuristic." | |
2166 | A comparison ptr == 0 is predicted as false. | |
2167 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
2168 | if (COMPARISON_P (cond) | |
2169 | && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0))) | |
2170 | || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1))))) | |
2171 | { | |
2172 | if (GET_CODE (cond) == EQ) | |
2173 | predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN); | |
2174 | else if (GET_CODE (cond) == NE) | |
2175 | predict_insn_def (last_insn, PRED_POINTER, TAKEN); | |
2176 | } | |
2177 | else | |
2178 | ||
2179 | /* Try "opcode heuristic." | |
2180 | EQ tests are usually false and NE tests are usually true. Also, | |
2181 | most quantities are positive, so we can make the appropriate guesses | |
2182 | about signed comparisons against zero. */ | |
2183 | switch (GET_CODE (cond)) | |
2184 | { | |
2185 | case CONST_INT: | |
2186 | /* Unconditional branch. */ | |
2187 | predict_insn_def (last_insn, PRED_UNCONDITIONAL, | |
2188 | cond == const0_rtx ? NOT_TAKEN : TAKEN); | |
2189 | break; | |
2190 | ||
2191 | case EQ: | |
2192 | case UNEQ: | |
2193 | /* Floating point comparisons appear to behave in a very | 
2194 | unpredictable way because of the special role of = tests in | 
2195 | FP code. */ | 
2196 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
2197 | ; | |
2198 | /* Comparisons with 0 are often used for booleans and there is | |
2199 | nothing useful to predict about them. */ | |
2200 | else if (XEXP (cond, 1) == const0_rtx | |
2201 | || XEXP (cond, 0) == const0_rtx) | |
2202 | ; | |
2203 | else | |
2204 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN); | |
2205 | break; | |
2206 | ||
2207 | case NE: | |
2208 | case LTGT: | |
2209 | /* Floating point comparisons appear to behave in a very | 
2210 | unpredictable way because of the special role of = tests in | 
2211 | FP code. */ | 
2212 | if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0)))) | |
2213 | ; | |
2214 | /* Comparisons with 0 are often used for booleans and there is | |
2215 | nothing useful to predict about them. */ | |
2216 | else if (XEXP (cond, 1) == const0_rtx | |
2217 | || XEXP (cond, 0) == const0_rtx) | |
2218 | ; | |
2219 | else | |
2220 | predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN); | |
2221 | break; | |
2222 | ||
2223 | case ORDERED: | |
2224 | predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN); | |
2225 | break; | |
2226 | ||
2227 | case UNORDERED: | |
2228 | predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN); | |
2229 | break; | |
2230 | ||
2231 | case LE: | |
2232 | case LT: | |
2233 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
2234 | || XEXP (cond, 1) == constm1_rtx) | |
2235 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN); | |
2236 | break; | |
2237 | ||
2238 | case GE: | |
2239 | case GT: | |
2240 | if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx | |
2241 | || XEXP (cond, 1) == constm1_rtx) | |
2242 | predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN); | |
2243 | break; | |
2244 | ||
2245 | default: | |
2246 | break; | |
2247 | } | |
2248 | } | |
2249 | ||
229031d0 | 2250 | /* Set edge->probability for each successor edge of BB. */ |
87022a6b JH |
2251 | void |
2252 | guess_outgoing_edge_probabilities (basic_block bb) | |
2253 | { | |
2254 | bb_estimate_probability_locally (bb); | |
2255 | combine_predictions_for_insn (BB_END (bb), bb); | |
2256 | } | |
6de9cd9a | 2257 | \f |
ed9c79e1 | 2258 | static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor); |
726a989a RB |
2259 | |
2260 | /* Helper function for expr_expected_value. */ | |
42f97fd2 JH |
2261 | |
2262 | static tree | |
c08472ea | 2263 | expr_expected_value_1 (tree type, tree op0, enum tree_code code, |
ed9c79e1 | 2264 | tree op1, bitmap visited, enum br_predictor *predictor) |
42f97fd2 | 2265 | { |
355fe088 | 2266 | gimple *def; |
726a989a | 2267 | |
ed9c79e1 JJ |
2268 | if (predictor) |
2269 | *predictor = PRED_UNCONDITIONAL; | |
2270 | ||
726a989a | 2271 | if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS) |
42f97fd2 | 2272 | { |
726a989a RB |
2273 | if (TREE_CONSTANT (op0)) |
2274 | return op0; | |
2275 | ||
849a76a5 JJ |
2276 | if (code == IMAGPART_EXPR) |
2277 | { | |
2278 | if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME) | |
2279 | { | |
2280 | def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0)); | |
2281 | if (is_gimple_call (def) | |
2282 | && gimple_call_internal_p (def) | |
2283 | && (gimple_call_internal_fn (def) | |
2284 | == IFN_ATOMIC_COMPARE_EXCHANGE)) | |
2285 | { | |
2286 | /* Assume that any given atomic operation has low contention, | |
2287 | and thus the compare-and-swap operation succeeds. */ | |
2288 | if (predictor) | |
2289 | *predictor = PRED_COMPARE_AND_SWAP; | |
2290 | return build_one_cst (TREE_TYPE (op0)); | |
2291 | } | |
2292 | } | |
2293 | } | |
2294 | ||
726a989a RB |
2295 | if (code != SSA_NAME) |
2296 | return NULL_TREE; | |
2297 | ||
2298 | def = SSA_NAME_DEF_STMT (op0); | |
42f97fd2 JH |
2299 | |
2300 | /* If we were already here, break the infinite cycle. */ | |
fcaa4ca4 | 2301 | if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0))) |
42f97fd2 | 2302 | return NULL; |
42f97fd2 | 2303 | |
726a989a | 2304 | if (gimple_code (def) == GIMPLE_PHI) |
42f97fd2 JH |
2305 | { |
2306 | /* All the arguments of the PHI node must have the same expected | 
2307 | constant value. */ | 
726a989a | 2308 | int i, n = gimple_phi_num_args (def); |
42f97fd2 | 2309 | tree val = NULL, new_val; |
6de9cd9a | 2310 | |
726a989a | 2311 | for (i = 0; i < n; i++) |
42f97fd2 JH |
2312 | { |
2313 | tree arg = PHI_ARG_DEF (def, i); | |
ed9c79e1 | 2314 | enum br_predictor predictor2; |
42f97fd2 JH |
2315 | |
2316 | /* If this PHI has itself as an argument, we cannot | 
2317 | determine the expected value of this argument. However, | 
1f838355 | 2318 | if we can find an expected constant value for the other |
42f97fd2 JH |
2319 | PHI args then we can still be sure that this is |
2320 | likely a constant. So be optimistic and just | |
2321 | continue with the next argument. */ | |
2322 | if (arg == PHI_RESULT (def)) | |
2323 | continue; | |
2324 | ||
ed9c79e1 JJ |
2325 | new_val = expr_expected_value (arg, visited, &predictor2); |
2326 | ||
2327 | /* It is difficult to combine value predictors. Simply assume | |
2328 | that later predictor is weaker and take its prediction. */ | |
2329 | if (predictor && *predictor < predictor2) | |
2330 | *predictor = predictor2; | |
42f97fd2 JH |
2331 | if (!new_val) |
2332 | return NULL; | |
2333 | if (!val) | |
2334 | val = new_val; | |
2335 | else if (!operand_equal_p (val, new_val, false)) | |
2336 | return NULL; | |
2337 | } | |
2338 | return val; | |
2339 | } | |
726a989a | 2340 | if (is_gimple_assign (def)) |
42f97fd2 | 2341 | { |
726a989a RB |
2342 | if (gimple_assign_lhs (def) != op0) |
2343 | return NULL; | |
42f97fd2 | 2344 | |
726a989a RB |
2345 | return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)), |
2346 | gimple_assign_rhs1 (def), | |
2347 | gimple_assign_rhs_code (def), | |
2348 | gimple_assign_rhs2 (def), | |
ed9c79e1 | 2349 | visited, predictor); |
726a989a RB |
2350 | } |
2351 | ||
2352 | if (is_gimple_call (def)) | |
2353 | { | |
2354 | tree decl = gimple_call_fndecl (def); | |
2355 | if (!decl) | |
ed9c79e1 JJ |
2356 | { |
2357 | if (gimple_call_internal_p (def) | |
2358 | && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT) | |
2359 | { | |
2360 | gcc_assert (gimple_call_num_args (def) == 3); | |
2361 | tree val = gimple_call_arg (def, 0); | |
2362 | if (TREE_CONSTANT (val)) | |
2363 | return val; | |
2364 | if (predictor) | |
2365 | { | |
ed9c79e1 JJ |
2366 | tree val2 = gimple_call_arg (def, 2); |
2367 | gcc_assert (TREE_CODE (val2) == INTEGER_CST | |
2368 | && tree_fits_uhwi_p (val2) | |
2369 | && tree_to_uhwi (val2) < END_PREDICTORS); | |
2370 | *predictor = (enum br_predictor) tree_to_uhwi (val2); | |
2371 | } | |
2372 | return gimple_call_arg (def, 1); | |
2373 | } | |
2374 | return NULL; | |
2375 | } | |
c08472ea RH |
2376 | if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL) |
2377 | switch (DECL_FUNCTION_CODE (decl)) | |
2378 | { | |
2379 | case BUILT_IN_EXPECT: | |
2380 | { | |
2381 | tree val; | |
2382 | if (gimple_call_num_args (def) != 2) | |
2383 | return NULL; | |
2384 | val = gimple_call_arg (def, 0); | |
2385 | if (TREE_CONSTANT (val)) | |
2386 | return val; | |
ed9c79e1 JJ |
2387 | if (predictor) |
2388 | *predictor = PRED_BUILTIN_EXPECT; | |
c08472ea RH |
2389 | return gimple_call_arg (def, 1); |
2390 | } | |
726a989a | 2391 | |
c08472ea RH |
2392 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N: |
2393 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1: | |
2394 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2: | |
2395 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4: | |
2396 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8: | |
2397 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16: | |
2398 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE: | |
2399 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N: | |
2400 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: | |
2401 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: | |
2402 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: | |
2403 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: | |
2404 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: | |
2405 | /* Assume that any given atomic operation has low contention, | |
2406 | and thus the compare-and-swap operation succeeds. */ | |
ed9c79e1 JJ |
2407 | if (predictor) |
2408 | *predictor = PRED_COMPARE_AND_SWAP; | |
c08472ea | 2409 | return boolean_true_node; |
083e891e MP |
2410 | default: |
2411 | break; | |
726a989a | 2412 | } |
42f97fd2 | 2413 | } |
726a989a RB |
2414 | |
2415 | return NULL; | |
42f97fd2 | 2416 | } |
726a989a RB |
2417 | |
2418 | if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS) | |
42f97fd2 | 2419 | { |
726a989a | 2420 | tree res; |
ed9c79e1 JJ |
2421 | enum br_predictor predictor2; |
2422 | op0 = expr_expected_value (op0, visited, predictor); | |
42f97fd2 JH |
2423 | if (!op0) |
2424 | return NULL; | |
ed9c79e1 JJ |
2425 | op1 = expr_expected_value (op1, visited, &predictor2); |
2426 | if (predictor && *predictor < predictor2) | |
2427 | *predictor = predictor2; | |
42f97fd2 JH |
2428 | if (!op1) |
2429 | return NULL; | |
726a989a | 2430 | res = fold_build2 (code, type, op0, op1); |
42f97fd2 JH |
2431 | if (TREE_CONSTANT (res)) |
2432 | return res; | |
2433 | return NULL; | |
2434 | } | |
726a989a | 2435 | if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS) |
42f97fd2 | 2436 | { |
726a989a | 2437 | tree res; |
ed9c79e1 | 2438 | op0 = expr_expected_value (op0, visited, predictor); |
42f97fd2 JH |
2439 | if (!op0) |
2440 | return NULL; | |
726a989a | 2441 | res = fold_build1 (code, type, op0); |
42f97fd2 JH |
2442 | if (TREE_CONSTANT (res)) |
2443 | return res; | |
2444 | return NULL; | |
2445 | } | |
2446 | return NULL; | |
2447 | } | |
726a989a | 2448 | |
b8698a0f | 2449 | /* Return the constant EXPR will likely have at execution time, or NULL if unknown. | 
726a989a RB |
2450 | The function is used by builtin_expect branch predictor so the evidence |
2451 | must come from this construct and additional possible constant folding. | |
b8698a0f | 2452 | |
726a989a RB |
2453 | We may want to implement a more involved value guess (such as value range | 
2454 | propagation based prediction), but such tricks shall go to a new | 
2455 | implementation. */ | 
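/* A typical use, for illustration only:

     if (__builtin_expect (ptr != NULL, 1))
       ...

   Here the tested value is expected to be 1, so the THEN edge is
   predicted taken with the probability given by the
   builtin-expect-probability parameter.  */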
2456 | ||
2457 | static tree | |
ed9c79e1 JJ |
2458 | expr_expected_value (tree expr, bitmap visited, |
2459 | enum br_predictor *predictor) | |
726a989a RB |
2460 | { |
2461 | enum tree_code code; | |
2462 | tree op0, op1; | |
2463 | ||
2464 | if (TREE_CONSTANT (expr)) | |
ed9c79e1 JJ |
2465 | { |
2466 | if (predictor) | |
2467 | *predictor = PRED_UNCONDITIONAL; | |
2468 | return expr; | |
2469 | } | |
726a989a RB |
2470 | |
2471 | extract_ops_from_tree (expr, &code, &op0, &op1); | |
2472 | return expr_expected_value_1 (TREE_TYPE (expr), | |
ed9c79e1 | 2473 | op0, code, op1, visited, predictor); |
726a989a | 2474 | } |
42f97fd2 | 2475 | \f |
6de9cd9a DN |
2476 | /* Predict using opcode of the last statement in basic block. */ |
2477 | static void | |
2478 | tree_predict_by_opcode (basic_block bb) | |
2479 | { | |
355fe088 | 2480 | gimple *stmt = last_stmt (bb); |
6de9cd9a | 2481 | edge then_edge; |
726a989a | 2482 | tree op0, op1; |
6de9cd9a | 2483 | tree type; |
42f97fd2 | 2484 | tree val; |
726a989a | 2485 | enum tree_code cmp; |
628f6a4e | 2486 | edge_iterator ei; |
ed9c79e1 | 2487 | enum br_predictor predictor; |
6de9cd9a | 2488 | |
726a989a | 2489 | if (!stmt || gimple_code (stmt) != GIMPLE_COND) |
6de9cd9a | 2490 | return; |
628f6a4e | 2491 | FOR_EACH_EDGE (then_edge, ei, bb->succs) |
6de9cd9a | 2492 | if (then_edge->flags & EDGE_TRUE_VALUE) |
628f6a4e | 2493 | break; |
726a989a RB |
2494 | op0 = gimple_cond_lhs (stmt); |
2495 | op1 = gimple_cond_rhs (stmt); | |
2496 | cmp = gimple_cond_code (stmt); | |
6de9cd9a | 2497 | type = TREE_TYPE (op0); |
0e3de1d4 | 2498 | val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (), |
ed9c79e1 | 2499 | &predictor); |
ed9c79e1 | 2500 | if (val && TREE_CODE (val) == INTEGER_CST) |
42f97fd2 | 2501 | { |
ed9c79e1 JJ |
2502 | if (predictor == PRED_BUILTIN_EXPECT) |
2503 | { | |
2504 | int percent = PARAM_VALUE (BUILTIN_EXPECT_PROBABILITY); | |
942df739 | 2505 | |
ed9c79e1 JJ |
2506 | gcc_assert (percent >= 0 && percent <= 100); |
2507 | if (integer_zerop (val)) | |
2508 | percent = 100 - percent; | |
2509 | predict_edge (then_edge, PRED_BUILTIN_EXPECT, HITRATE (percent)); | |
2510 | } | |
2511 | else | |
75dc52c6 ML |
2512 | predict_edge_def (then_edge, predictor, |
2513 | integer_zerop (val) ? NOT_TAKEN : TAKEN); | |
42f97fd2 | 2514 | } |
6de9cd9a DN |
2515 | /* Try "pointer heuristic." |
2516 | A comparison ptr == 0 is predicted as false. | |
2517 | Similarly, a comparison ptr1 == ptr2 is predicted as false. */ | |
2518 | if (POINTER_TYPE_P (type)) | |
2519 | { | |
726a989a | 2520 | if (cmp == EQ_EXPR) |
6de9cd9a | 2521 | predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN); |
726a989a | 2522 | else if (cmp == NE_EXPR) |
6de9cd9a DN |
2523 | predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN); |
2524 | } | |
2525 | else | |
2526 | ||
2527 | /* Try "opcode heuristic." | |
2528 | EQ tests are usually false and NE tests are usually true. Also, | |
2529 | most quantities are positive, so we can make the appropriate guesses | |
2530 | about signed comparisons against zero. */ | |
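  /* E.g. "if (a == b)" (with neither operand zero) is guessed not taken,
     "if (a != b)" is guessed taken, and "if (x < 0)" is guessed not taken
     because most quantities are positive (illustrative examples).  */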
726a989a | 2531 | switch (cmp) |
6de9cd9a DN |
2532 | { |
2533 | case EQ_EXPR: | |
2534 | case UNEQ_EXPR: | |
2535 | /* Floating point comparisons appear to behave in a very | 
2536 | unpredictable way because of the special role of = tests in | 
2537 | FP code. */ | 
2538 | if (FLOAT_TYPE_P (type)) | |
2539 | ; | |
2540 | /* Comparisons with 0 are often used for booleans and there is | |
2541 | nothing useful to predict about them. */ | |
726a989a | 2542 | else if (integer_zerop (op0) || integer_zerop (op1)) |
6de9cd9a DN |
2543 | ; |
2544 | else | |
2545 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN); | |
2546 | break; | |
2547 | ||
2548 | case NE_EXPR: | |
d1a7edaf | 2549 | case LTGT_EXPR: |
6de9cd9a DN |
2550 | /* Floating point comparisons appear to behave in a very | 
2551 | unpredictable way because of the special role of = tests in | 
2552 | FP code. */ | 
2553 | if (FLOAT_TYPE_P (type)) | |
2554 | ; | |
2555 | /* Comparisons with 0 are often used for booleans and there is | |
2556 | nothing useful to predict about them. */ | |
2557 | else if (integer_zerop (op0) | |
726a989a | 2558 | || integer_zerop (op1)) |
6de9cd9a DN |
2559 | ; |
2560 | else | |
2561 | predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN); | |
2562 | break; | |
2563 | ||
2564 | case ORDERED_EXPR: | |
2565 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN); | |
2566 | break; | |
2567 | ||
2568 | case UNORDERED_EXPR: | |
2569 | predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN); | |
2570 | break; | |
2571 | ||
2572 | case LE_EXPR: | |
2573 | case LT_EXPR: | |
726a989a RB |
2574 | if (integer_zerop (op1) |
2575 | || integer_onep (op1) | |
2576 | || integer_all_onesp (op1) | |
2577 | || real_zerop (op1) | |
2578 | || real_onep (op1) | |
2579 | || real_minus_onep (op1)) | |
6de9cd9a DN |
2580 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN); |
2581 | break; | |
2582 | ||
2583 | case GE_EXPR: | |
2584 | case GT_EXPR: | |
726a989a RB |
2585 | if (integer_zerop (op1) |
2586 | || integer_onep (op1) | |
2587 | || integer_all_onesp (op1) | |
2588 | || real_zerop (op1) | |
2589 | || real_onep (op1) | |
2590 | || real_minus_onep (op1)) | |
6de9cd9a DN |
2591 | predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN); |
2592 | break; | |
2593 | ||
2594 | default: | |
2595 | break; | |
2596 | } | |
2597 | } | |
2598 | ||
e25a366f AP |
2599 | /* Return true if STMT is an exit(0)-like statement. */ | 
2600 | ||
2601 | static bool | |
2602 | is_exit_with_zero_arg (const gimple *stmt) | |
2603 | { | |
2604 | /* This is not exit, _exit or _Exit. */ | |
2605 | if (!gimple_call_builtin_p (stmt, BUILT_IN_EXIT) | |
2606 | && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT) | |
2607 | && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT2)) | |
2608 | return false; | |
2609 | ||
2610 | /* Argument is an integer zero. */ | 
2611 | return integer_zerop (gimple_call_arg (stmt, 0)); | |
2612 | } | |
2613 | ||
bb033fd8 | 2614 | /* Try to guess whether the return value indicates an error code. */ | 
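/* For instance, in a hypothetical function

     char *find (int key)
     {
       if (key < 0)
	 return NULL;
       return buf + key;
     }

   the path producing "return NULL" is guessed unlikely via
   PRED_NULL_RETURN, and a constant negative integer return value would
   similarly be guessed unlikely via PRED_NEGATIVE_RETURN ("buf" is an
   assumed buffer; example only).  */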
726a989a | 2615 | |
bb033fd8 JH |
2616 | static enum br_predictor |
2617 | return_prediction (tree val, enum prediction *prediction) | |
2618 | { | |
2619 | /* VOID. */ | |
2620 | if (!val) | |
2621 | return PRED_NO_PREDICTION; | |
2622 | /* Different heuristics for pointers and scalars. */ | |
2623 | if (POINTER_TYPE_P (TREE_TYPE (val))) | |
2624 | { | |
2625 | /* NULL is usually not returned. */ | |
2626 | if (integer_zerop (val)) | |
2627 | { | |
2628 | *prediction = NOT_TAKEN; | |
2629 | return PRED_NULL_RETURN; | |
2630 | } | |
2631 | } | |
2632 | else if (INTEGRAL_TYPE_P (TREE_TYPE (val))) | |
2633 | { | |
2634 | /* Negative return values are often used to indicate | |
2635 | errors. */ | |
2636 | if (TREE_CODE (val) == INTEGER_CST | |
2637 | && tree_int_cst_sgn (val) < 0) | |
2638 | { | |
2639 | *prediction = NOT_TAKEN; | |
2640 | return PRED_NEGATIVE_RETURN; | |
2641 | } | |
2642 | /* Constant return values seem to be commonly taken. | 
2643 | Zero/one often represent booleans so exclude them from the | |
2644 | heuristics. */ | |
2645 | if (TREE_CONSTANT (val) | |
2646 | && (!integer_zerop (val) && !integer_onep (val))) | |
2647 | { | |
d1fcc2bd | 2648 | *prediction = NOT_TAKEN; |
75b6bb62 | 2649 | return PRED_CONST_RETURN; |
bb033fd8 JH |
2650 | } |
2651 | } | |
2652 | return PRED_NO_PREDICTION; | |
2653 | } | |
2654 | ||
97202774 JJ |
2655 | /* Return zero if phi result could have values other than -1, 0 or 1, |
2656 | otherwise return a bitmask, with bits 0, 1 and 2 set if -1, 0 and 1 | |
2657 | values are used or likely. */ | |
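/* E.g. a PHI merging only the constants -1 and 1 yields 1 | 4 == 5,
   while a PHI that may also carry the value 2 makes the function
   return 0 (worked example).  */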
2658 | ||
2659 | static int | |
2660 | zero_one_minusone (gphi *phi, int limit) | |
2661 | { | |
2662 | int phi_num_args = gimple_phi_num_args (phi); | |
2663 | int ret = 0; | |
2664 | for (int i = 0; i < phi_num_args; i++) | |
2665 | { | |
2666 | tree t = PHI_ARG_DEF (phi, i); | |
2667 | if (TREE_CODE (t) != INTEGER_CST) | |
2668 | continue; | |
2669 | wide_int w = wi::to_wide (t); | |
2670 | if (w == -1) | |
2671 | ret |= 1; | |
2672 | else if (w == 0) | |
2673 | ret |= 2; | |
2674 | else if (w == 1) | |
2675 | ret |= 4; | |
2676 | else | |
2677 | return 0; | |
2678 | } | |
2679 | for (int i = 0; i < phi_num_args; i++) | |
2680 | { | |
2681 | tree t = PHI_ARG_DEF (phi, i); | |
2682 | if (TREE_CODE (t) == INTEGER_CST) | |
2683 | continue; | |
2684 | if (TREE_CODE (t) != SSA_NAME) | |
2685 | return 0; | |
2686 | gimple *g = SSA_NAME_DEF_STMT (t); | |
2687 | if (gimple_code (g) == GIMPLE_PHI && limit > 0) | |
2688 | if (int r = zero_one_minusone (as_a <gphi *> (g), limit - 1)) | |
2689 | { | |
2690 | ret |= r; | |
2691 | continue; | |
2692 | } | |
2693 | if (!is_gimple_assign (g)) | |
2694 | return 0; | |
2695 | if (gimple_assign_cast_p (g)) | |
2696 | { | |
2697 | tree rhs1 = gimple_assign_rhs1 (g); | |
2698 | if (TREE_CODE (rhs1) != SSA_NAME | |
2699 | || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2700 | || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1 | |
2701 | || !TYPE_UNSIGNED (TREE_TYPE (rhs1))) | |
2702 | return 0; | |
2703 | ret |= (2 | 4); | |
2704 | continue; | |
2705 | } | |
2706 | if (TREE_CODE_CLASS (gimple_assign_rhs_code (g)) != tcc_comparison) | |
2707 | return 0; | |
2708 | ret |= (2 | 4); | |
2709 | } | |
2710 | return ret; | |
2711 | } | |
2712 | ||
bb033fd8 JH |
2713 | /* Find the basic block with the return expression and look for a possible | 
2714 | return value, trying to apply the RETURN_PREDICTION heuristics. */ | 
2715 | static void | |
3e4b9ad0 | 2716 | apply_return_prediction (void) |
bb033fd8 | 2717 | { |
538dd0b7 | 2718 | greturn *return_stmt = NULL; |
bb033fd8 JH |
2719 | tree return_val; |
2720 | edge e; | |
538dd0b7 | 2721 | gphi *phi; |
bb033fd8 JH |
2722 | int phi_num_args, i; |
2723 | enum br_predictor pred; | |
2724 | enum prediction direction; | |
628f6a4e | 2725 | edge_iterator ei; |
bb033fd8 | 2726 | |
fefa31b5 | 2727 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
bb033fd8 | 2728 | { |
355fe088 | 2729 | gimple *last = last_stmt (e->src); |
538dd0b7 DM |
2730 | if (last |
2731 | && gimple_code (last) == GIMPLE_RETURN) | |
2732 | { | |
2733 | return_stmt = as_a <greturn *> (last); | |
2734 | break; | |
2735 | } | |
bb033fd8 JH |
2736 | } |
2737 | if (!e) | |
2738 | return; | |
726a989a | 2739 | return_val = gimple_return_retval (return_stmt); |
bb033fd8 JH |
2740 | if (!return_val) |
2741 | return; | |
bb033fd8 JH |
2742 | if (TREE_CODE (return_val) != SSA_NAME |
2743 | || !SSA_NAME_DEF_STMT (return_val) | |
726a989a | 2744 | || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI) |
bb033fd8 | 2745 | return; |
538dd0b7 | 2746 | phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val)); |
726a989a | 2747 | phi_num_args = gimple_phi_num_args (phi); |
bb033fd8 JH |
2748 | pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction); |
2749 | ||
97202774 JJ |
2750 | /* Avoid the case where the function returns -1, 0 and 1 values and |
2751 | nothing else. Those could be qsort etc. comparison functions | |
2752 | where the negative return isn't less probable than positive. | |
2753 | For this require that the function returns at least -1 or 1 | |
2754 | or -1 and a boolean value or comparison result, so that functions | |
2755 | returning just -1 and 0 are treated as if -1 represents error value. */ | |
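  /* E.g. a qsort-style comparator whose return value is a PHI of the
     constants -1, 0 and 1 yields zero_one_minusone == (1 | 2 | 4), the
     test below fires and no return-value prediction is made
     (illustrative).  */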
2756 | if (INTEGRAL_TYPE_P (TREE_TYPE (return_val)) | |
2757 | && !TYPE_UNSIGNED (TREE_TYPE (return_val)) | |
2758 | && TYPE_PRECISION (TREE_TYPE (return_val)) > 1) | |
2759 | if (int r = zero_one_minusone (phi, 3)) | |
2760 | if ((r & (1 | 4)) == (1 | 4)) | |
2761 | return; | |
2762 | ||
bb033fd8 JH |
2763 | /* Avoid the degenerate case where all return values from the function | 
2764 | belong to the same category (i.e. they are all positive constants), | 
2765 | so we can hardly say anything about them. */ | 
2766 | for (i = 1; i < phi_num_args; i++) | |
2767 | if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction)) | |
2768 | break; | |
2769 | if (i != phi_num_args) | |
2770 | for (i = 0; i < phi_num_args; i++) | |
2771 | { | |
2772 | pred = return_prediction (PHI_ARG_DEF (phi, i), &direction); | |
2773 | if (pred != PRED_NO_PREDICTION) | |
5210bbc5 JH |
2774 | predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred, |
2775 | direction); | |
bb033fd8 JH |
2776 | } |
2777 | } | |
2778 | ||
2779 | /* Look for basic blocks that contain events unlikely to happen | 
2780 | (such as noreturn calls) and mark all paths leading to execution | 
2781 | of these basic blocks as unlikely. */ | 
2782 | ||
2783 | static void | |
2784 | tree_bb_level_predictions (void) | |
2785 | { | |
2786 | basic_block bb; | |
c0ee0021 JH |
2787 | bool has_return_edges = false; |
2788 | edge e; | |
2789 | edge_iterator ei; | |
2790 | ||
fefa31b5 | 2791 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
b69d9ac6 | 2792 | if (!unlikely_executed_edge_p (e) && !(e->flags & EDGE_ABNORMAL_CALL)) |
c0ee0021 JH |
2793 | { |
2794 | has_return_edges = true; | |
2795 | break; | |
2796 | } | |
bb033fd8 | 2797 | |
3e4b9ad0 | 2798 | apply_return_prediction (); |
bb033fd8 | 2799 | |
11cd3bed | 2800 | FOR_EACH_BB_FN (bb, cfun) |
bb033fd8 | 2801 | { |
726a989a | 2802 | gimple_stmt_iterator gsi; |
bb033fd8 | 2803 | |
7299cb99 | 2804 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
bb033fd8 | 2805 | { |
355fe088 | 2806 | gimple *stmt = gsi_stmt (gsi); |
52bf96d2 | 2807 | tree decl; |
daac0317 | 2808 | |
726a989a | 2809 | if (is_gimple_call (stmt)) |
bb033fd8 | 2810 | { |
e25a366f AP |
2811 | if (gimple_call_noreturn_p (stmt) |
2812 | && has_return_edges | |
2813 | && !is_exit_with_zero_arg (stmt)) | |
726a989a RB |
2814 | predict_paths_leading_to (bb, PRED_NORETURN, |
2815 | NOT_TAKEN); | |
2816 | decl = gimple_call_fndecl (stmt); | |
2817 | if (decl | |
2818 | && lookup_attribute ("cold", | |
2819 | DECL_ATTRIBUTES (decl))) | |
2820 | predict_paths_leading_to (bb, PRED_COLD_FUNCTION, | |
2821 | NOT_TAKEN); | |
888ed1a3 JH |
2822 | if (decl && recursive_call_p (current_function_decl, decl)) |
2823 | predict_paths_leading_to (bb, PRED_RECURSIVE_CALL, | |
2824 | NOT_TAKEN); | |
bb033fd8 | 2825 | } |
726a989a RB |
2826 | else if (gimple_code (stmt) == GIMPLE_PREDICT) |
2827 | { | |
2828 | predict_paths_leading_to (bb, gimple_predict_predictor (stmt), | |
2829 | gimple_predict_outcome (stmt)); | |
7299cb99 JH |
2830 | /* Keep GIMPLE_PREDICT around so early inlining will propagate |
2831 | hints to callers. */ | |
726a989a | 2832 | } |
bb033fd8 JH |
2833 | } |
2834 | } | |
bb033fd8 JH |
2835 | } |
2836 | ||
b787e7a2 | 2837 | /* Callback for hash_map::traverse, asserts that the pointer map is |
f06b0a10 ZD |
2838 | empty. */ |
2839 | ||
b787e7a2 TS |
2840 | bool |
2841 | assert_is_empty (const_basic_block const &, edge_prediction *const &value, | |
2842 | void *) | |
f06b0a10 | 2843 | { |
b787e7a2 | 2844 | gcc_assert (!value); |
f06b0a10 ZD |
2845 | return false; |
2846 | } | |
f06b0a10 | 2847 | |
52261a21 JH |
2848 | /* Predict branch probabilities and estimate profile for basic block BB. |
2849 | When LOCAL_ONLY is set do not use any global properties of CFG. */ | |
8e88f9fd SP |
2850 | |
2851 | static void | |
52261a21 | 2852 | tree_estimate_probability_bb (basic_block bb, bool local_only) |
8e88f9fd SP |
2853 | { |
2854 | edge e; | |
2855 | edge_iterator ei; | |
8e88f9fd SP |
2856 | |
2857 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2858 | { | |
8e88f9fd SP |
2859 | /* Look for block we are guarding (ie we dominate it, |
2860 | but it doesn't postdominate us). */ | |
fefa31b5 | 2861 | if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb |
52261a21 | 2862 | && !local_only |
8e88f9fd SP |
2863 | && dominated_by_p (CDI_DOMINATORS, e->dest, e->src) |
2864 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest)) | |
2865 | { | |
2866 | gimple_stmt_iterator bi; | |
2867 | ||
2868 | /* The call heuristic claims that a guarded function call | |
2869 | is improbable. This is because such calls are often used | |
2870 | to signal exceptional situations such as printing error | |
2871 | messages. */ | |
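	  /* E.g. in

	       if (fd < 0)
		 perror ("open");

	     the guarded call to perror makes the error branch be
	     predicted not taken (illustrative; fd is a hypothetical
	     file descriptor).  */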
2872 | for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi); | |
2873 | gsi_next (&bi)) | |
2874 | { | |
355fe088 | 2875 | gimple *stmt = gsi_stmt (bi); |
8e88f9fd | 2876 | if (is_gimple_call (stmt) |
888ed1a3 | 2877 | && !gimple_inexpensive_call_p (as_a <gcall *> (stmt)) |
8e88f9fd SP |
2878 | /* Constant and pure calls are hardly used to signalize |
2879 | something exceptional. */ | |
2880 | && gimple_has_side_effects (stmt)) | |
2881 | { | |
2c512374 JH |
2882 | if (gimple_call_fndecl (stmt)) |
2883 | predict_edge_def (e, PRED_CALL, NOT_TAKEN); | |
2884 | else if (virtual_method_call_p (gimple_call_fn (stmt))) | |
350de209 | 2885 | predict_edge_def (e, PRED_POLYMORPHIC_CALL, NOT_TAKEN); |
2c512374 JH |
2886 | else |
2887 | predict_edge_def (e, PRED_INDIR_CALL, TAKEN); | |
8e88f9fd SP |
2888 | break; |
2889 | } | |
2890 | } | |
2891 | } | |
2892 | } | |
2893 | tree_predict_by_opcode (bb); | |
2894 | } | |
2895 | ||
2896 | /* Predict branch probabilities and estimate profile of the tree CFG. | |
2897 | This function can be called from the loop optimizers to recompute | |
460545e8 JH |
2898 | the profile information. |
2899 | If DRY_RUN is set, do not modify CFG and only produce dump files. */ | |
8e88f9fd SP |
2900 | |
2901 | void | |
460545e8 | 2902 | tree_estimate_probability (bool dry_run) |
6de9cd9a DN |
2903 | { |
2904 | basic_block bb; | |
6de9cd9a | 2905 | |
bb033fd8 | 2906 | add_noreturn_fake_exit_edges (); |
6de9cd9a | 2907 | connect_infinite_loops_to_exit (); |
c7b852c8 ZD |
2908 | /* We use loop_niter_by_eval, which requires that the loops have |
2909 | preheaders. */ | |
2910 | create_preheaders (CP_SIMPLE_PREHEADERS); | |
6de9cd9a DN |
2911 | calculate_dominance_info (CDI_POST_DOMINATORS); |
2912 | ||
b787e7a2 | 2913 | bb_predictions = new hash_map<const_basic_block, edge_prediction *>; |
bb033fd8 | 2914 | tree_bb_level_predictions (); |
4839cb59 | 2915 | record_loop_exits (); |
8e88f9fd | 2916 | |
0fc822d0 | 2917 | if (number_of_loops (cfun) > 1) |
d73be268 | 2918 | predict_loops (); |
6de9cd9a | 2919 | |
11cd3bed | 2920 | FOR_EACH_BB_FN (bb, cfun) |
52261a21 | 2921 | tree_estimate_probability_bb (bb, false); |
6de9cd9a | 2922 | |
11cd3bed | 2923 | FOR_EACH_BB_FN (bb, cfun) |
460545e8 | 2924 | combine_predictions_for_bb (bb, dry_run); |
861f9cd0 | 2925 | |
b2b29377 MM |
2926 | if (flag_checking) |
2927 | bb_predictions->traverse<void *, assert_is_empty> (NULL); | |
2928 | ||
b787e7a2 | 2929 | delete bb_predictions; |
f06b0a10 ZD |
2930 | bb_predictions = NULL; |
2931 | ||
460545e8 JH |
2932 | if (!dry_run) |
2933 | estimate_bb_frequencies (false); | |
6de9cd9a | 2934 | free_dominance_info (CDI_POST_DOMINATORS); |
6809cbf9 | 2935 | remove_fake_exit_edges (); |
8e88f9fd | 2936 | } |
52261a21 JH |
2937 | |
2938 | /* Set edge->probability for each successor edge of BB. */ | |
2939 | void | |
2940 | tree_guess_outgoing_edge_probabilities (basic_block bb) | |
2941 | { | |
2942 | bb_predictions = new hash_map<const_basic_block, edge_prediction *>; | |
2943 | tree_estimate_probability_bb (bb, true); | |
2944 | combine_predictions_for_bb (bb, false); | |
2945 | if (flag_checking) | |
2946 | bb_predictions->traverse<void *, assert_is_empty> (NULL); | |
2947 | delete bb_predictions; | |
2948 | bb_predictions = NULL; | |
2949 | } | |
994a57cd | 2950 | \f |
fa10beec | 2951 | /* Predict, with predictor PRED, the edges to successors of CUR whose sources
3e4b9ad0 | 2952 | are not postdominated by BB, and recurse to all postdominators. */
bb033fd8 JH |
2953 | |
2954 | static void | |
3e4b9ad0 JH |
2955 | predict_paths_for_bb (basic_block cur, basic_block bb, |
2956 | enum br_predictor pred, | |
0f3b7e9a | 2957 | enum prediction taken, |
7805417a | 2958 | bitmap visited, struct loop *in_loop = NULL) |
bb033fd8 JH |
2959 | { |
2960 | edge e; | |
628f6a4e | 2961 | edge_iterator ei; |
3e4b9ad0 | 2962 | basic_block son; |
bb033fd8 | 2963 | |
7805417a JH |
2964 | /* If we exited the loop or CUR is unconditional in the loop, there is |
2965 | nothing to do. */ | |
2966 | if (in_loop | |
2967 | && (!flow_bb_inside_loop_p (in_loop, cur) | |
2968 | || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur))) | |
2969 | return; | |
2970 | ||
3e4b9ad0 JH |
2971 | /* We are looking for all edges forming edge cut induced by |
2972 | set of all blocks postdominated by BB. */ | |
2973 | FOR_EACH_EDGE (e, ei, cur->preds) | |
2974 | if (e->src->index >= NUM_FIXED_BLOCKS | |
2975 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb)) | |
bb033fd8 | 2976 | { |
450997ef JH |
2977 | edge e2; |
2978 | edge_iterator ei2; | |
2979 | bool found = false; | |
2980 | ||
5210bbc5 | 2981 | /* Ignore fake edges and eh, we predict them as not taken anyway. */ |
b69d9ac6 | 2982 | if (unlikely_executed_edge_p (e)) |
450997ef | 2983 | continue; |
3e4b9ad0 | 2984 | gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb)); |
450997ef | 2985 | |
0f3b7e9a | 2986 | /* See if there is an edge from e->src that is not abnormal |
7805417a | 2987 | and does not lead to BB and does not exit the loop. */ |
450997ef JH |
2988 | FOR_EACH_EDGE (e2, ei2, e->src->succs) |
2989 | if (e2 != e | |
b69d9ac6 | 2990 | && !unlikely_executed_edge_p (e2) |
7805417a JH |
2991 | && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb) |
2992 | && (!in_loop || !loop_exit_edge_p (in_loop, e2))) | |
450997ef JH |
2993 | { |
2994 | found = true; | |
2995 | break; | |
2996 | } | |
2997 | ||
2998 | /* If there is non-abnormal path leaving e->src, predict edge | |
2999 | using predictor. Otherwise we need to look for paths | |
0f3b7e9a JH |
3000 | leading to e->src. |
3001 | ||
3002 | The second may lead to an infinite loop in the case we are predicting
3003 | regions that are only reachable by abnormal edges. We simply
3004 | prevent visiting a given BB twice. */
450997ef | 3005 | if (found) |
5aabc487 JH |
3006 | { |
3007 | if (!edge_predicted_by_p (e, pred, taken)) | |
3008 | predict_edge_def (e, pred, taken); | |
3009 | } | |
993716bd | 3010 | else if (bitmap_set_bit (visited, e->src->index)) |
7805417a | 3011 | predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop); |
bb033fd8 | 3012 | } |
3e4b9ad0 JH |
3013 | for (son = first_dom_son (CDI_POST_DOMINATORS, cur); |
3014 | son; | |
3015 | son = next_dom_son (CDI_POST_DOMINATORS, son)) | |
7805417a | 3016 | predict_paths_for_bb (son, bb, pred, taken, visited, in_loop); |
3e4b9ad0 | 3017 | } |
bb033fd8 | 3018 | |
3e4b9ad0 JH |
3019 | /* Set branch probabilities on paths leading to BB according to the
3020 | PREDictor and the TAKEN outcome. */
bb033fd8 | 3021 | |
3e4b9ad0 JH |
3022 | static void |
3023 | predict_paths_leading_to (basic_block bb, enum br_predictor pred, | |
7805417a | 3024 | enum prediction taken, struct loop *in_loop) |
3e4b9ad0 | 3025 | { |
0e3de1d4 | 3026 | predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop); |
bb033fd8 | 3027 | } |
5210bbc5 JH |
3028 | |
3029 | /* Like predict_paths_leading_to but take edge instead of basic block. */ | |
3030 | ||
3031 | static void | |
3032 | predict_paths_leading_to_edge (edge e, enum br_predictor pred, | |
7805417a | 3033 | enum prediction taken, struct loop *in_loop) |
5210bbc5 JH |
3034 | { |
3035 | bool has_nonloop_edge = false; | |
3036 | edge_iterator ei; | |
3037 | edge e2; | |
3038 | ||
3039 | basic_block bb = e->src; | |
3040 | FOR_EACH_EDGE (e2, ei, bb->succs) | |
3041 | if (e2->dest != e->src && e2->dest != e->dest | |
b69d9ac6 | 3042 | && !unlikely_executed_edge_p (e) |
5210bbc5 JH |
3043 | && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest)) |
3044 | { | |
3045 | has_nonloop_edge = true; | |
3046 | break; | |
3047 | } | |
3048 | if (!has_nonloop_edge) | |
0f3b7e9a | 3049 | { |
0e3de1d4 | 3050 | predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop); |
0f3b7e9a | 3051 | } |
5210bbc5 JH |
3052 | else |
3053 | predict_edge_def (e, pred, taken); | |
3054 | } | |
969d70ca | 3055 | \f |
57cb6d52 | 3056 | /* This is used to carry information about basic blocks. It is |
861f9cd0 JH |
3057 | attached to the AUX field of the standard CFG block. */ |
3058 | ||
11478306 | 3059 | struct block_info |
861f9cd0 JH |
3060 | { |
3061 | /* Estimated frequency of execution of basic_block. */ | |
ac5e69da | 3062 | sreal frequency; |
861f9cd0 JH |
3063 | |
3064 | /* To keep queue of basic blocks to process. */ | |
3065 | basic_block next; | |
3066 | ||
eaec9b3d | 3067 | /* Number of predecessors we need to visit first. */ |
754d9299 | 3068 | int npredecessors; |
11478306 | 3069 | }; |
861f9cd0 JH |
3070 | |
3071 | /* Similar information for edges. */ | |
11478306 | 3072 | struct edge_prob_info |
861f9cd0 | 3073 | { |
569b7f6a | 3074 | /* In case the edge is a loopback edge, the probability that the edge will be
861f9cd0 | 3075 | reached given that the header is. The estimated number of loop iterations
8aa18a7d | 3076 | can then be computed as 1 / (1 - back_edge_prob). */
ac5e69da | 3077 | sreal back_edge_prob; |
569b7f6a | 3078 | /* True if the edge is a loopback edge in the natural loop. */ |
2c45a16a | 3079 | unsigned int back_edge:1; |
11478306 | 3080 | }; |
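For intuition about back_edge_prob: if the back edge is taken with probability p each time the header executes, the expected iteration count 1 / (1 - p) works out to, for example, 10 iterations for p = 0.9 and 100 iterations for p = 0.99.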
861f9cd0 | 3081 | |
11478306 | 3082 | #define BLOCK_INFO(B) ((block_info *) (B)->aux) |
59f2e9d8 | 3083 | #undef EDGE_INFO |
11478306 | 3084 | #define EDGE_INFO(E) ((edge_prob_info *) (E)->aux) |
861f9cd0 JH |
3085 | |
3086 | /* Helper function for estimate_bb_frequencies. | |
598ec7bd ZD |
3087 | Propagate the frequencies in blocks marked in |
3088 | TOVISIT, starting in HEAD. */ | |
bfdade77 | 3089 | |
861f9cd0 | 3090 | static void |
598ec7bd | 3091 | propagate_freq (basic_block head, bitmap tovisit) |
861f9cd0 | 3092 | { |
e0082a72 ZD |
3093 | basic_block bb; |
3094 | basic_block last; | |
b9af0016 | 3095 | unsigned i; |
861f9cd0 JH |
3096 | edge e; |
3097 | basic_block nextbb; | |
8a998e0c | 3098 | bitmap_iterator bi; |
247a370b | 3099 | |
eaec9b3d | 3100 | /* For each basic block we need to visit, count the number of its predecessors
247a370b | 3101 | that we need to visit first. */
8a998e0c | 3102 | EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi) |
247a370b | 3103 | { |
8a998e0c JL |
3104 | edge_iterator ei; |
3105 | int count = 0; | |
3106 | ||
06e28de2 | 3107 | bb = BASIC_BLOCK_FOR_FN (cfun, i); |
bfdade77 | 3108 | |
8a998e0c JL |
3109 | FOR_EACH_EDGE (e, ei, bb->preds) |
3110 | { | |
3111 | bool visit = bitmap_bit_p (tovisit, e->src->index); | |
3112 | ||
3113 | if (visit && !(e->flags & EDGE_DFS_BACK)) | |
3114 | count++; | |
3115 | else if (visit && dump_file && !EDGE_INFO (e)->back_edge) | |
3116 | fprintf (dump_file, | |
3117 | "Irreducible region hit, ignoring edge to %i->%i\n", | |
3118 | e->src->index, bb->index); | |
247a370b | 3119 | } |
b9af0016 | 3120 | BLOCK_INFO (bb)->npredecessors = count; |
b35366ce | 3121 | /* When function never returns, we will never process exit block. */ |
fefa31b5 | 3122 | if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
e7a74006 | 3123 | bb->count = profile_count::zero (); |
247a370b | 3124 | } |
861f9cd0 | 3125 | |
fd27ffab | 3126 | BLOCK_INFO (head)->frequency = 1; |
e0082a72 ZD |
3127 | last = head; |
3128 | for (bb = head; bb; bb = nextbb) | |
861f9cd0 | 3129 | { |
628f6a4e | 3130 | edge_iterator ei; |
fd27ffab ML |
3131 | sreal cyclic_probability = 0; |
3132 | sreal frequency = 0; | |
861f9cd0 JH |
3133 | |
3134 | nextbb = BLOCK_INFO (bb)->next; | |
3135 | BLOCK_INFO (bb)->next = NULL; | |
3136 | ||
3137 | /* Compute frequency of basic block. */ | |
3138 | if (bb != head) | |
3139 | { | |
b2b29377 MM |
3140 | if (flag_checking) |
3141 | FOR_EACH_EDGE (e, ei, bb->preds) | |
3142 | gcc_assert (!bitmap_bit_p (tovisit, e->src->index) | |
3143 | || (e->flags & EDGE_DFS_BACK)); | |
861f9cd0 | 3144 | |
628f6a4e | 3145 | FOR_EACH_EDGE (e, ei, bb->preds) |
861f9cd0 | 3146 | if (EDGE_INFO (e)->back_edge) |
8aa18a7d | 3147 | { |
618b7f29 | 3148 | cyclic_probability += EDGE_INFO (e)->back_edge_prob; |
8aa18a7d | 3149 | } |
247a370b | 3150 | else if (!(e->flags & EDGE_DFS_BACK)) |
8aa18a7d | 3151 | { |
8aa18a7d JH |
3152 | /* frequency += (e->probability |
3153 | * BLOCK_INFO (e->src)->frequency / | |
3154 | REG_BR_PROB_BASE); */ | |
3155 | ||
e7a74006 JH |
3156 | /* FIXME: Graphite is producing edges with no profile. Once |
3157 | this is fixed, drop this. */ | |
3158 | sreal tmp = e->probability.initialized_p () ? | |
3159 | e->probability.to_reg_br_prob_base () : 0; | |
618b7f29 TS |
3160 | tmp *= BLOCK_INFO (e->src)->frequency; |
3161 | tmp *= real_inv_br_prob_base; | |
3162 | frequency += tmp; | |
8aa18a7d JH |
3163 | } |
3164 | ||
fd27ffab | 3165 | if (cyclic_probability == 0) |
ac5e69da | 3166 | { |
618b7f29 | 3167 | BLOCK_INFO (bb)->frequency = frequency; |
ac5e69da | 3168 | } |
fbe3b30b SB |
3169 | else |
3170 | { | |
618b7f29 TS |
3171 | if (cyclic_probability > real_almost_one) |
3172 | cyclic_probability = real_almost_one; | |
861f9cd0 | 3173 | |
79a490a9 | 3174 | /* BLOCK_INFO (bb)->frequency = frequency |
ac5e69da | 3175 | / (1 - cyclic_probability) */ |
861f9cd0 | 3176 | |
fd27ffab | 3177 | cyclic_probability = sreal (1) - cyclic_probability; |
618b7f29 | 3178 | BLOCK_INFO (bb)->frequency = frequency / cyclic_probability; |
fbe3b30b | 3179 | } |
861f9cd0 JH |
3180 | } |
3181 | ||
8a998e0c | 3182 | bitmap_clear_bit (tovisit, bb->index); |
861f9cd0 | 3183 | |
9ff3d2de JL |
3184 | e = find_edge (bb, head); |
3185 | if (e) | |
3186 | { | |
9ff3d2de JL |
3187 | /* EDGE_INFO (e)->back_edge_prob |
3188 | = ((e->probability * BLOCK_INFO (bb)->frequency) | |
3189 | / REG_BR_PROB_BASE); */ | |
b8698a0f | 3190 | |
e7a74006 JH |
3191 | /* FIXME: Graphite is producing edges with no profile. Once |
3192 | this is fixed, drop this. */ | |
3193 | sreal tmp = e->probability.initialized_p () ? | |
3194 | e->probability.to_reg_br_prob_base () : 0; | |
618b7f29 TS |
3195 | tmp *= BLOCK_INFO (bb)->frequency; |
3196 | EDGE_INFO (e)->back_edge_prob = tmp * real_inv_br_prob_base; | |
9ff3d2de | 3197 | } |
861f9cd0 | 3198 | |
57cb6d52 | 3199 | /* Propagate to successor blocks. */ |
628f6a4e | 3200 | FOR_EACH_EDGE (e, ei, bb->succs) |
247a370b | 3201 | if (!(e->flags & EDGE_DFS_BACK) |
754d9299 | 3202 | && BLOCK_INFO (e->dest)->npredecessors) |
861f9cd0 | 3203 | { |
754d9299 JM |
3204 | BLOCK_INFO (e->dest)->npredecessors--; |
3205 | if (!BLOCK_INFO (e->dest)->npredecessors) | |
247a370b JH |
3206 | { |
3207 | if (!nextbb) | |
3208 | nextbb = e->dest; | |
3209 | else | |
3210 | BLOCK_INFO (last)->next = e->dest; | |
b8698a0f | 3211 | |
247a370b JH |
3212 | last = e->dest; |
3213 | } | |
628f6a4e | 3214 | } |
861f9cd0 JH |
3215 | } |
3216 | } | |
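The loop above implements a feedback recurrence: each block's frequency is the probability-weighted sum of its non-back-edge predecessors, and for a loop header the sum is additionally divided by (1 - cyclic_probability). A rough scalar sketch of that per-block step (a hypothetical helper on plain doubles rather than sreal, not part of predict.c):

/* Illustration only: one propagate_freq step on plain doubles.  */
static double
illustrate_propagated_frequency (const double *pred_freq,
                                 const double *pred_prob, int n_preds,
                                 double cyclic_probability)
{
  double frequency = 0.0;
  for (int i = 0; i < n_preds; i++)
    frequency += pred_prob[i] * pred_freq[i];
  /* Cap the cyclic probability below 1, as the real code does with
     real_almost_one, so the division stays finite.  */
  if (cyclic_probability > 0.9999)
    cyclic_probability = 0.9999;
  return frequency / (1.0 - cyclic_probability);
}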
3217 | ||
67fa7880 | 3218 | /* Estimate frequencies in loops at the same nest level. */
bfdade77 | 3219 | |
861f9cd0 | 3220 | static void |
598ec7bd | 3221 | estimate_loops_at_level (struct loop *first_loop) |
861f9cd0 | 3222 | { |
2ecfd709 | 3223 | struct loop *loop; |
861f9cd0 JH |
3224 | |
3225 | for (loop = first_loop; loop; loop = loop->next) | |
3226 | { | |
861f9cd0 | 3227 | edge e; |
2ecfd709 | 3228 | basic_block *bbs; |
3d436d2a | 3229 | unsigned i; |
0e3de1d4 | 3230 | auto_bitmap tovisit; |
861f9cd0 | 3231 | |
598ec7bd | 3232 | estimate_loops_at_level (loop->inner); |
79a490a9 | 3233 | |
598ec7bd ZD |
3234 | /* Find current loop back edge and mark it. */ |
3235 | e = loop_latch_edge (loop); | |
3236 | EDGE_INFO (e)->back_edge = 1; | |
2ecfd709 ZD |
3237 | |
3238 | bbs = get_loop_body (loop); | |
3239 | for (i = 0; i < loop->num_nodes; i++) | |
8a998e0c | 3240 | bitmap_set_bit (tovisit, bbs[i]->index); |
2ecfd709 | 3241 | free (bbs); |
598ec7bd | 3242 | propagate_freq (loop->header, tovisit); |
861f9cd0 JH |
3243 | } |
3244 | } | |
3245 | ||
2f8e468b | 3246 | /* Propagates frequencies through structure of loops. */ |
598ec7bd ZD |
3247 | |
3248 | static void | |
d73be268 | 3249 | estimate_loops (void) |
598ec7bd | 3250 | { |
0e3de1d4 | 3251 | auto_bitmap tovisit; |
598ec7bd ZD |
3252 | basic_block bb; |
3253 | ||
3254 | /* Start by estimating the frequencies in the loops. */ | |
0fc822d0 | 3255 | if (number_of_loops (cfun) > 1) |
d73be268 | 3256 | estimate_loops_at_level (current_loops->tree_root->inner); |
598ec7bd ZD |
3257 | |
3258 | /* Now propagate the frequencies through all the blocks. */ | |
04a90bec | 3259 | FOR_ALL_BB_FN (bb, cfun) |
598ec7bd ZD |
3260 | { |
3261 | bitmap_set_bit (tovisit, bb->index); | |
3262 | } | |
fefa31b5 | 3263 | propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit); |
598ec7bd ZD |
3264 | } |
3265 | ||
eb4b92c1 | 3266 | /* Drop the profile for NODE to guessed, and update its frequency based on |
4c7d0777 | 3267 | whether it is expected to be hot given the CALL_COUNT. */ |
eb4b92c1 TJ |
3268 | |
3269 | static void | |
3995f3a2 | 3270 | drop_profile (struct cgraph_node *node, profile_count call_count) |
eb4b92c1 TJ |
3271 | { |
3272 | struct function *fn = DECL_STRUCT_FUNCTION (node->decl); | |
4c7d0777 TJ |
3273 | /* In the case where this was called by another function with a |
3274 | dropped profile, call_count will be 0. Since there are no | |
3275 | non-zero call counts to this function, we don't know for sure | |
3276 | whether it is hot, and therefore it will be marked normal below. */ | |
3277 | bool hot = maybe_hot_count_p (NULL, call_count); | |
eb4b92c1 TJ |
3278 | |
3279 | if (dump_file) | |
3280 | fprintf (dump_file, | |
464d0118 ML |
3281 | "Dropping 0 profile for %s. %s based on calls.\n", |
3282 | node->dump_name (), | |
3283 | hot ? "Function is hot" : "Function is normal"); | |
eb4b92c1 TJ |
3284 | /* We only expect to miss profiles for functions that are reached |
3285 | via non-zero call edges in cases where the function may have | |
3286 | been linked from another module or library (COMDATs and extern | |
4c7d0777 TJ |
3287 | templates). See the comments below for handle_missing_profiles. |
3288 | Also, only warn in cases where the missing counts exceed the | |
3289 | number of training runs. In certain cases with an execv followed | |
3290 | by a no-return call the profile for the no-return call is not | |
3291 | dumped and there can be a mismatch. */ | |
3292 | if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl) | |
3293 | && call_count > profile_info->runs) | |
eb4b92c1 TJ |
3294 | { |
3295 | if (flag_profile_correction) | |
3296 | { | |
3297 | if (dump_file) | |
3298 | fprintf (dump_file, | |
464d0118 ML |
3299 | "Missing counts for called function %s\n", |
3300 | node->dump_name ()); | |
eb4b92c1 TJ |
3301 | } |
3302 | else | |
464d0118 ML |
3303 | warning (0, "Missing counts for called function %s", |
3304 | node->dump_name ()); | |
eb4b92c1 TJ |
3305 | } |
3306 | ||
e525ddf4 | 3307 | basic_block bb; |
e7a74006 JH |
3308 | push_cfun (DECL_STRUCT_FUNCTION (node->decl)); |
3309 | if (flag_guess_branch_prob) | |
3310 | { | |
3311 | bool clear_zeros | |
3312 | = ENTRY_BLOCK_PTR_FOR_FN | |
3313 | (DECL_STRUCT_FUNCTION (node->decl))->count.nonzero_p (); | |
3314 | FOR_ALL_BB_FN (bb, fn) | |
3315 | if (clear_zeros || !(bb->count == profile_count::zero ())) | |
3316 | bb->count = bb->count.guessed_local (); | |
3317 | DECL_STRUCT_FUNCTION (node->decl)->cfg->count_max = | |
3318 | DECL_STRUCT_FUNCTION (node->decl)->cfg->count_max.guessed_local (); | |
3319 | } | |
3320 | else | |
e525ddf4 | 3321 | { |
e7a74006 JH |
3322 | FOR_ALL_BB_FN (bb, fn) |
3323 | bb->count = profile_count::uninitialized (); | |
3324 | DECL_STRUCT_FUNCTION (node->decl)->cfg->count_max | |
3325 | = profile_count::uninitialized (); | |
e525ddf4 | 3326 | } |
e7a74006 | 3327 | pop_cfun (); |
e525ddf4 JH |
3328 | |
3329 | struct cgraph_edge *e; | |
1bad9c18 JH |
3330 | for (e = node->callees; e; e = e->next_callee) |
3331 | e->count = gimple_bb (e->call_stmt)->count; | |
3332 | for (e = node->indirect_calls; e; e = e->next_callee) | |
3333 | e->count = gimple_bb (e->call_stmt)->count; | |
e525ddf4 | 3334 | |
ea19eb9f | 3335 | profile_status_for_fn (fn) |
eb4b92c1 TJ |
3336 | = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT); |
3337 | node->frequency | |
3338 | = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL; | |
3339 | } | |
3340 | ||
3341 | /* In the case of COMDAT routines, multiple object files will contain the same | |
3342 | function and the linker will select one for the binary. In that case | |
3343 | all the other copies from the profile-instrumented binary will be missing
3344 | profile counts. Look for cases where this happened, due to non-zero | |
3345 | call counts going to 0-count functions, and drop the profile to guessed | |
3346 | so that we can use the estimated probabilities and avoid optimizing only | |
3347 | for size. | |
3348 | ||
3349 | The other case where the profile may be missing is when the routine | |
3350 | is not going to be emitted to the object file, e.g. for "extern template" | |
3351 | class methods. Those will be marked DECL_EXTERNAL. Emit a warning in | |
3352 | all other cases of non-zero calls to 0-count functions. */ | |
3353 | ||
3354 | void | |
3355 | handle_missing_profiles (void) | |
3356 | { | |
3357 | struct cgraph_node *node; | |
3358 | int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION); | |
8c681247 | 3359 | auto_vec<struct cgraph_node *, 64> worklist; |
eb4b92c1 TJ |
3360 | |
3361 | /* See if 0 count function has non-0 count callers. In this case we | |
3362 | lost some profile. Drop its function profile to PROFILE_GUESSED. */ | |
3363 | FOR_EACH_DEFINED_FUNCTION (node) | |
3364 | { | |
3365 | struct cgraph_edge *e; | |
3995f3a2 | 3366 | profile_count call_count = profile_count::zero (); |
9cec31f4 | 3367 | gcov_type max_tp_first_run = 0; |
eb4b92c1 TJ |
3368 | struct function *fn = DECL_STRUCT_FUNCTION (node->decl); |
3369 | ||
3995f3a2 | 3370 | if (!(node->count == profile_count::zero ())) |
eb4b92c1 TJ |
3371 | continue; |
3372 | for (e = node->callers; e; e = e->next_caller) | |
e525ddf4 | 3373 | if (e->count.initialized_p () && e->count > 0) |
3995f3a2 JH |
3374 | { |
3375 | call_count = call_count + e->count; | |
9cec31f4 | 3376 | |
3995f3a2 JH |
3377 | if (e->caller->tp_first_run > max_tp_first_run) |
3378 | max_tp_first_run = e->caller->tp_first_run; | |
3379 | } | |
9cec31f4 ML |
3380 | |
3381 | /* If the time profile is missing, assign the maximum that comes from the
3382 | caller functions. */
3383 | if (!node->tp_first_run && max_tp_first_run) | |
3384 | node->tp_first_run = max_tp_first_run + 1; | |
3385 | ||
3995f3a2 | 3386 | if (call_count > 0 |
eb4b92c1 | 3387 | && fn && fn->cfg |
e525ddf4 JH |
3388 | && (call_count.apply_scale (unlikely_count_fraction, 1) |
3389 | >= profile_info->runs)) | |
eb4b92c1 | 3390 | { |
4c7d0777 | 3391 | drop_profile (node, call_count); |
eb4b92c1 TJ |
3392 | worklist.safe_push (node); |
3393 | } | |
3394 | } | |
3395 | ||
3396 | /* Propagate the profile dropping to other 0-count COMDATs that are | |
3397 | potentially called by COMDATs we already dropped the profile on. */ | |
3398 | while (worklist.length () > 0) | |
3399 | { | |
3400 | struct cgraph_edge *e; | |
3401 | ||
3402 | node = worklist.pop (); | |
3403 | for (e = node->callees; e; e = e->next_callee)
3404 | { | |
3405 | struct cgraph_node *callee = e->callee; | |
3406 | struct function *fn = DECL_STRUCT_FUNCTION (callee->decl); | |
3407 | ||
3408 | if (callee->count > 0) | |
3409 | continue; | |
e525ddf4 JH |
3410 | if ((DECL_COMDAT (callee->decl) || DECL_EXTERNAL (callee->decl)) |
3411 | && fn && fn->cfg | |
ea19eb9f | 3412 | && profile_status_for_fn (fn) == PROFILE_READ) |
eb4b92c1 | 3413 | { |
3995f3a2 | 3414 | drop_profile (node, profile_count::zero ()); |
eb4b92c1 TJ |
3415 | worklist.safe_push (callee); |
3416 | } | |
3417 | } | |
3418 | } | |
eb4b92c1 TJ |
3419 | } |
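A concrete reading of the drop condition above: the profile is dropped when call_count * unlikely-bb-count-fraction >= runs. Assuming the parameter is left at its usual default of 20, a 0-count function trained over 1000 runs is switched to a guessed profile once the counts on its incoming call edges sum to at least 1000 / 20 = 50.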
3420 | ||
02307675 R |
3421 | /* Convert counts measured by profile driven feedback to frequencies. |
3422 | Return nonzero iff there was any nonzero execution count. */ | |
bfdade77 | 3423 | |
3995f3a2 | 3424 | bool |
fc06ae0d | 3425 | update_max_bb_count (void) |
861f9cd0 | 3426 | { |
e7a74006 | 3427 | profile_count true_count_max = profile_count::uninitialized (); |
e0082a72 | 3428 | basic_block bb; |
0b17ab2f | 3429 | |
fefa31b5 | 3430 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
8e7d1486 | 3431 | true_count_max = true_count_max.max (bb->count); |
3995f3a2 | 3432 | |
e7a74006 | 3433 | cfun->cfg->count_max = true_count_max; |
e525ddf4 | 3434 | |
fc06ae0d | 3435 | return true_count_max.ipa ().nonzero_p (); |
861f9cd0 JH |
3436 | } |
3437 | ||
bfdade77 RK |
3438 | /* Return true if the function is likely to be expensive, so there is no point
3439 | in optimizing performance of the prologue, epilogue or doing inlining at the
d55d8fc7 | 3440 | expense of code size growth. THRESHOLD is the limit on the number of instructions
bfdade77 RK |
3441 | the function can execute on average to still be considered not expensive. */
3442 | ||
6ab16dd9 | 3443 | bool |
79a490a9 | 3444 | expensive_function_p (int threshold) |
6ab16dd9 | 3445 | { |
e0082a72 | 3446 | basic_block bb; |
6ab16dd9 | 3447 | |
fc06ae0d JH |
3448 | /* If the profile was scaled in a way that the entry block has count 0, then the
3449 | function is definitely taking a lot of time. */
3450 | if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ()) | |
6ab16dd9 | 3451 | return true; |
6a4d6760 | 3452 | |
fc06ae0d JH |
3453 | profile_count limit = ENTRY_BLOCK_PTR_FOR_FN |
3454 | (cfun)->count.apply_scale (threshold, 1); | |
3455 | profile_count sum = profile_count::zero (); | |
11cd3bed | 3456 | FOR_EACH_BB_FN (bb, cfun) |
6ab16dd9 | 3457 | { |
9f215bf5 | 3458 | rtx_insn *insn; |
6ab16dd9 | 3459 | |
fc06ae0d JH |
3460 | if (!bb->count.initialized_p ()) |
3461 | { | |
3462 | if (dump_file) | |
3463 | fprintf (dump_file, "Function is considered expensive because" | |
3464 | " count of bb %i is not initialized\n", bb->index); | |
3465 | return true; | |
3466 | } | |
3467 | ||
39718607 | 3468 | FOR_BB_INSNS (bb, insn) |
bfdade77 RK |
3469 | if (active_insn_p (insn)) |
3470 | { | |
fc06ae0d | 3471 | sum += bb->count; |
bfdade77 RK |
3472 | if (sum > limit) |
3473 | return true; | |
6ab16dd9 JH |
3474 | } |
3475 | } | |
bfdade77 | 3476 | |
6ab16dd9 JH |
3477 | return false; |
3478 | } | |
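Put differently, the walk above compares entry_count * THRESHOLD against the sum of bb->count accumulated once per active insn, so the function is reported expensive as soon as its estimated dynamic instruction count per invocation exceeds THRESHOLD.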
3479 | ||
95a60a5c JH |
3480 | /* All basic blocks that are reachable only from unlikely basic blocks are |
3481 | unlikely. */ | |
3482 | ||
3483 | void | |
3484 | propagate_unlikely_bbs_forward (void) | |
3485 | { | |
3486 | auto_vec<basic_block, 64> worklist; | |
3487 | basic_block bb; | |
3488 | edge_iterator ei; | |
3489 | edge e; | |
3490 | ||
3491 | if (!(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())) | |
3492 | { | |
3493 | ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(size_t) 1; | |
3494 | worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | |
3495 | ||
3496 | while (worklist.length () > 0) | |
3497 | { | |
3498 | bb = worklist.pop (); | |
3499 | FOR_EACH_EDGE (e, ei, bb->succs) | |
ef30ab83 | 3500 | if (!(e->count () == profile_count::zero ()) |
95a60a5c JH |
3501 | && !(e->dest->count == profile_count::zero ()) |
3502 | && !e->dest->aux) | |
3503 | { | |
3504 | e->dest->aux = (void *)(size_t) 1; | |
3505 | worklist.safe_push (e->dest); | |
3506 | } | |
3507 | } | |
3508 | } | |
3509 | ||
3510 | FOR_ALL_BB_FN (bb, cfun) | |
3511 | { | |
3512 | if (!bb->aux) | |
3513 | { | |
3514 | if (!(bb->count == profile_count::zero ()) | |
3515 | && (dump_file && (dump_flags & TDF_DETAILS))) | |
3516 | fprintf (dump_file, | |
3517 | "Basic block %i is marked unlikely by forward prop\n", | |
3518 | bb->index); | |
3519 | bb->count = profile_count::zero (); | |
95a60a5c JH |
3520 | } |
3521 | else | |
3522 | bb->aux = NULL; | |
3523 | } | |
3524 | } | |
3525 | ||
b69d9ac6 JH |
3526 | /* Determine basic blocks/edges that are known to be unlikely executed and set |
3527 | their counters to zero. | |
3528 | This is done by first identifying obviously unlikely BBs/edges and then
3529 | propagating in both directions. */ | |
3530 | ||
3531 | static void | |
3532 | determine_unlikely_bbs () | |
3533 | { | |
3534 | basic_block bb; | |
3535 | auto_vec<basic_block, 64> worklist; | |
3536 | edge_iterator ei; | |
3537 | edge e; | |
3538 | ||
3539 | FOR_EACH_BB_FN (bb, cfun) | |
3540 | { | |
3541 | if (!(bb->count == profile_count::zero ()) | |
3542 | && unlikely_executed_bb_p (bb)) | |
3543 | { | |
3544 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3545 | fprintf (dump_file, "Basic block %i is locally unlikely\n", | |
3546 | bb->index); | |
3547 | bb->count = profile_count::zero (); | |
3548 | } | |
3549 | ||
b69d9ac6 | 3550 | FOR_EACH_EDGE (e, ei, bb->succs) |
ef30ab83 | 3551 | if (!(e->probability == profile_probability::never ()) |
b69d9ac6 JH |
3552 | && unlikely_executed_edge_p (e)) |
3553 | { | |
3554 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3555 | fprintf (dump_file, "Edge %i->%i is locally unlikely\n", | |
3556 | bb->index, e->dest->index); | |
ef30ab83 | 3557 | e->probability = profile_probability::never (); |
b69d9ac6 JH |
3558 | } |
3559 | ||
3560 | gcc_checking_assert (!bb->aux); | |
3561 | } | |
b00ff621 | 3562 | propagate_unlikely_bbs_forward (); |
b69d9ac6 | 3563 | |
b69d9ac6 JH |
3564 | auto_vec<int, 64> nsuccs; |
3565 | nsuccs.safe_grow_cleared (last_basic_block_for_fn (cfun)); | |
3566 | FOR_ALL_BB_FN (bb, cfun) | |
3567 | if (!(bb->count == profile_count::zero ()) | |
3568 | && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
3569 | { | |
3570 | nsuccs[bb->index] = 0; | |
3571 | FOR_EACH_EDGE (e, ei, bb->succs) | |
ef30ab83 JH |
3572 | if (!(e->probability == profile_probability::never ()) |
3573 | && !(e->dest->count == profile_count::zero ())) | |
b69d9ac6 JH |
3574 | nsuccs[bb->index]++; |
3575 | if (!nsuccs[bb->index]) | |
3576 | worklist.safe_push (bb); | |
3577 | } | |
3578 | while (worklist.length () > 0) | |
3579 | { | |
3580 | bb = worklist.pop (); | |
f36180f4 JH |
3581 | if (bb->count == profile_count::zero ()) |
3582 | continue; | |
b69d9ac6 JH |
3583 | if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
3584 | { | |
3585 | bool found = false; | |
3586 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); | |
3587 | !gsi_end_p (gsi); gsi_next (&gsi)) | |
3588 | if (stmt_can_terminate_bb_p (gsi_stmt (gsi)) | |
3589 | /* stmt_can_terminate_bb_p special cases noreturns because it | |
3590 | assumes that fake edges are created. We want to know that | |
3591 | noreturn alone does not imply BB to be unlikely. */ | |
3592 | || (is_gimple_call (gsi_stmt (gsi)) | |
3593 | && (gimple_call_flags (gsi_stmt (gsi)) & ECF_NORETURN))) | |
3594 | { | |
3595 | found = true; | |
3596 | break; | |
3597 | } | |
3598 | if (found) | |
3599 | continue; | |
3600 | } | |
f36180f4 | 3601 | if (dump_file && (dump_flags & TDF_DETAILS)) |
b69d9ac6 JH |
3602 | fprintf (dump_file, |
3603 | "Basic block %i is marked unlikely by backward prop\n", | |
3604 | bb->index); | |
3605 | bb->count = profile_count::zero (); | |
b69d9ac6 | 3606 | FOR_EACH_EDGE (e, ei, bb->preds) |
ef30ab83 | 3607 | if (!(e->probability == profile_probability::never ())) |
b69d9ac6 | 3608 | { |
b69d9ac6 JH |
3609 | if (!(e->src->count == profile_count::zero ())) |
3610 | { | |
f36180f4 | 3611 | gcc_checking_assert (nsuccs[e->src->index] > 0); |
b69d9ac6 JH |
3612 | nsuccs[e->src->index]--; |
3613 | if (!nsuccs[e->src->index]) | |
3614 | worklist.safe_push (e->src); | |
3615 | } | |
3616 | } | |
3617 | } | |
b00ff621 JH |
3618 | /* Finally all edges from non-0 regions to 0 are unlikely. */ |
3619 | FOR_ALL_BB_FN (bb, cfun) | |
3620 | if (!(bb->count == profile_count::zero ())) | |
3621 | FOR_EACH_EDGE (e, ei, bb->succs) | |
3622 | if (!(e->probability == profile_probability::never ()) | |
3623 | && e->dest->count == profile_count::zero ()) | |
3624 | { | |
3625 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3626 | fprintf (dump_file, "Edge %i->%i is unlikely because " | |
3627 | "it enters unlikely block\n", | |
3628 | bb->index, e->dest->index); | |
3629 | e->probability = profile_probability::never (); | |
3630 | } | |
650fe732 JH |
3631 | if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()) |
3632 | cgraph_node::get (current_function_decl)->count = profile_count::zero (); | |
b69d9ac6 JH |
3633 | } |
3634 | ||
67fa7880 TJ |
3635 | /* Estimate and propagate basic block frequencies using the given branch |
3636 | probabilities. If FORCE is true, the frequencies are used to estimate | |
3637 | the counts even when there are already non-zero profile counts. */ | |
bfdade77 | 3638 | |
45a80bb9 | 3639 | void |
67fa7880 | 3640 | estimate_bb_frequencies (bool force) |
861f9cd0 | 3641 | { |
e0082a72 | 3642 | basic_block bb; |
ac5e69da | 3643 | sreal freq_max; |
8aa18a7d | 3644 | |
b69d9ac6 JH |
3645 | determine_unlikely_bbs (); |
3646 | ||
3647 | if (force || profile_status_for_fn (cfun) != PROFILE_READ | |
fc06ae0d | 3648 | || !update_max_bb_count ()) |
194734e9 | 3649 | { |
c4f6b78e RE |
3650 | static int real_values_initialized = 0; |
3651 | ||
3652 | if (!real_values_initialized) | |
3653 | { | |
85bb9c2a | 3654 | real_values_initialized = 1; |
fd27ffab | 3655 | real_br_prob_base = REG_BR_PROB_BASE; |
650fe732 JH |
3656 | /* Scaling frequencies up to maximal profile count may result in |
3657 | frequent overflows especially when inlining loops. | |
3658 | Small scaling results in unnecessary precision loss. Stay in
3659 | the half of the (exponential) range. */ | |
3660 | real_bb_freq_max = (uint64_t)1 << (profile_count::n_bits / 2); | |
618b7f29 | 3661 | real_one_half = sreal (1, -1); |
fd27ffab ML |
3662 | real_inv_br_prob_base = sreal (1) / real_br_prob_base; |
3663 | real_almost_one = sreal (1) - real_inv_br_prob_base; | |
c4f6b78e | 3664 | } |
861f9cd0 | 3665 | |
194734e9 | 3666 | mark_dfs_back_edges (); |
194734e9 | 3667 | |
fefa31b5 | 3668 | single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability = |
357067f2 | 3669 | profile_probability::always (); |
194734e9 JH |
3670 | |
3671 | /* Set up block info for each basic block. */ | |
11478306 JH |
3672 | alloc_aux_for_blocks (sizeof (block_info)); |
3673 | alloc_aux_for_edges (sizeof (edge_prob_info)); | |
fefa31b5 | 3674 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
861f9cd0 | 3675 | { |
861f9cd0 | 3676 | edge e; |
628f6a4e | 3677 | edge_iterator ei; |
194734e9 | 3678 | |
628f6a4e | 3679 | FOR_EACH_EDGE (e, ei, bb->succs) |
861f9cd0 | 3680 | { |
e7a74006 JH |
3681 | /* FIXME: Graphite is producing edges with no profile. Once |
3682 | this is fixed, drop this. */ | |
3683 | if (e->probability.initialized_p ()) | |
3684 | EDGE_INFO (e)->back_edge_prob | |
3685 | = e->probability.to_reg_br_prob_base (); | |
3686 | else | |
3687 | EDGE_INFO (e)->back_edge_prob = REG_BR_PROB_BASE / 2; | |
618b7f29 | 3688 | EDGE_INFO (e)->back_edge_prob *= real_inv_br_prob_base; |
861f9cd0 | 3689 | } |
861f9cd0 | 3690 | } |
bfdade77 | 3691 | |
67fa7880 TJ |
3692 | /* First compute frequencies locally for each loop from innermost |
3693 | to outermost to examine frequencies for back edges. */ | |
d73be268 | 3694 | estimate_loops (); |
861f9cd0 | 3695 | |
fd27ffab | 3696 | freq_max = 0; |
11cd3bed | 3697 | FOR_EACH_BB_FN (bb, cfun) |
618b7f29 TS |
3698 | if (freq_max < BLOCK_INFO (bb)->frequency) |
3699 | freq_max = BLOCK_INFO (bb)->frequency; | |
fbe3b30b | 3700 | |
618b7f29 | 3701 | freq_max = real_bb_freq_max / freq_max; |
650fe732 JH |
3702 | if (freq_max < 16) |
3703 | freq_max = 16; | |
517048ce | 3704 | profile_count ipa_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa (); |
e7a74006 | 3705 | cfun->cfg->count_max = profile_count::uninitialized (); |
fefa31b5 | 3706 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
8aa18a7d | 3707 | { |
618b7f29 | 3708 | sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + real_one_half; |
e7a74006 JH |
3709 | profile_count count = profile_count::from_gcov_type (tmp.to_int ()); |
3710 | ||
3711 | /* If we have profile feedback in which this function was never | |
3712 | executed, then preserve this info. */ | |
517048ce JH |
3713 | if (!(bb->count == profile_count::zero ())) |
3714 | bb->count = count.guessed_local ().combine_with_ipa_count (ipa_count); | |
e7a74006 | 3715 | cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count); |
194734e9 | 3716 | } |
bfdade77 | 3717 | |
194734e9 JH |
3718 | free_aux_for_blocks (); |
3719 | free_aux_for_edges (); | |
3720 | } | |
3721 | compute_function_frequency (); | |
194734e9 | 3722 | } |
861f9cd0 | 3723 | |
194734e9 | 3724 | /* Decide whether function is hot, cold or unlikely executed. */ |
965b98d0 | 3725 | void |
79a490a9 | 3726 | compute_function_frequency (void) |
194734e9 | 3727 | { |
e0082a72 | 3728 | basic_block bb; |
d52f5295 | 3729 | struct cgraph_node *node = cgraph_node::get (current_function_decl); |
daf5c770 | 3730 | |
844db5d0 JH |
3731 | if (DECL_STATIC_CONSTRUCTOR (current_function_decl) |
3732 | || MAIN_NAME_P (DECL_NAME (current_function_decl))) | |
3733 | node->only_called_at_startup = true; | |
3734 | if (DECL_STATIC_DESTRUCTOR (current_function_decl)) | |
3735 | node->only_called_at_exit = true; | |
e0082a72 | 3736 | |
0a6a6ac9 | 3737 | if (profile_status_for_fn (cfun) != PROFILE_READ) |
52bf96d2 | 3738 | { |
5fefcf92 | 3739 | int flags = flags_from_decl_or_type (current_function_decl); |
e7a74006 JH |
3740 | if ((ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa_p () |
3741 | && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa() == profile_count::zero ()) | |
b69d9ac6 JH |
3742 | || lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)) |
3743 | != NULL) | |
12b9f3ac JH |
3744 | { |
3745 | node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED; | |
3746 | warn_function_cold (current_function_decl); | |
3747 | } | |
52bf96d2 JH |
3748 | else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl)) |
3749 | != NULL) | |
5fefcf92 JH |
3750 | node->frequency = NODE_FREQUENCY_HOT; |
3751 | else if (flags & ECF_NORETURN) | |
3752 | node->frequency = NODE_FREQUENCY_EXECUTED_ONCE; | |
3753 | else if (MAIN_NAME_P (DECL_NAME (current_function_decl))) | |
3754 | node->frequency = NODE_FREQUENCY_EXECUTED_ONCE; | |
3755 | else if (DECL_STATIC_CONSTRUCTOR (current_function_decl) | |
3756 | || DECL_STATIC_DESTRUCTOR (current_function_decl)) | |
3757 | node->frequency = NODE_FREQUENCY_EXECUTED_ONCE; | |
52bf96d2 JH |
3758 | return; |
3759 | } | |
daf5c770 JH |
3760 | |
3761 | /* Only the first time do we try to drop the function into unlikely executed.
3762 | After inlining, the roundoff errors may confuse us.
3763 | The ipa-profile pass will drop functions only called from unlikely
3764 | functions to unlikely, and that is most of what we care about. */
3765 | if (!cfun->after_inlining) | |
12b9f3ac JH |
3766 | { |
3767 | node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED; | |
3768 | warn_function_cold (current_function_decl); | |
3769 | } | |
11cd3bed | 3770 | FOR_EACH_BB_FN (bb, cfun) |
861f9cd0 | 3771 | { |
2eb712b4 | 3772 | if (maybe_hot_bb_p (cfun, bb)) |
194734e9 | 3773 | { |
5fefcf92 | 3774 | node->frequency = NODE_FREQUENCY_HOT; |
194734e9 JH |
3775 | return; |
3776 | } | |
2eb712b4 | 3777 | if (!probably_never_executed_bb_p (cfun, bb)) |
5fefcf92 | 3778 | node->frequency = NODE_FREQUENCY_NORMAL; |
861f9cd0 | 3779 | } |
194734e9 | 3780 | } |
861f9cd0 | 3781 | |
2e28e797 JH |
3782 | /* Build PREDICT_EXPR. */ |
3783 | tree | |
3784 | build_predict_expr (enum br_predictor predictor, enum prediction taken) | |
3785 | { | |
9d7e5c4d | 3786 | tree t = build1 (PREDICT_EXPR, void_type_node, |
9f616812 | 3787 | build_int_cst (integer_type_node, predictor)); |
bbbbb16a | 3788 | SET_PREDICT_EXPR_OUTCOME (t, taken); |
2e28e797 JH |
3789 | return t; |
3790 | } | |
3791 | ||
3792 | const char * | |
3793 | predictor_name (enum br_predictor predictor) | |
3794 | { | |
3795 | return predictor_info[predictor].name; | |
3796 | } | |
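A hypothetical front-end-side sketch of emitting the hint built above; append_to_statement_list comes from tree-iterator.h, and the local statement list is purely an assumption of the example:

/* Illustration only: queue a "branch is taken" hint for later passes.  */
tree stmts = NULL_TREE;
tree hint = build_predict_expr (PRED_BUILTIN_EXPECT, TAKEN);
append_to_statement_list (hint, &stmts);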
3797 | ||
be55bfe6 TS |
3798 | /* Predict branch probabilities and estimate profile of the tree CFG. */ |
3799 | ||
27a4cd48 DM |
3800 | namespace { |
3801 | ||
3802 | const pass_data pass_data_profile = | |
3803 | { | |
3804 | GIMPLE_PASS, /* type */ | |
3805 | "profile_estimate", /* name */ | |
3806 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
3807 | TV_BRANCH_PROB, /* tv_id */ |
3808 | PROP_cfg, /* properties_required */ | |
3809 | 0, /* properties_provided */ | |
3810 | 0, /* properties_destroyed */ | |
3811 | 0, /* todo_flags_start */ | |
3bea341f | 3812 | 0, /* todo_flags_finish */ |
6de9cd9a | 3813 | }; |
7299cb99 | 3814 | |
27a4cd48 DM |
3815 | class pass_profile : public gimple_opt_pass |
3816 | { | |
3817 | public: | |
c3284718 RS |
3818 | pass_profile (gcc::context *ctxt) |
3819 | : gimple_opt_pass (pass_data_profile, ctxt) | |
27a4cd48 DM |
3820 | {} |
3821 | ||
3822 | /* opt_pass methods: */ | |
1a3d085c | 3823 | virtual bool gate (function *) { return flag_guess_branch_prob; } |
be55bfe6 | 3824 | virtual unsigned int execute (function *); |
27a4cd48 DM |
3825 | |
3826 | }; // class pass_profile | |
3827 | ||
be55bfe6 TS |
3828 | unsigned int |
3829 | pass_profile::execute (function *fun) | |
3830 | { | |
3831 | unsigned nb_loops; | |
3832 | ||
10881cff JH |
3833 | if (profile_status_for_fn (cfun) == PROFILE_GUESSED) |
3834 | return 0; | |
3835 | ||
be55bfe6 TS |
3836 | loop_optimizer_init (LOOPS_NORMAL); |
3837 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3838 | flow_loops_dump (dump_file, NULL, 0); | |
3839 | ||
3840 | mark_irreducible_loops (); | |
3841 | ||
3842 | nb_loops = number_of_loops (fun); | |
3843 | if (nb_loops > 1) | |
3844 | scev_initialize (); | |
3845 | ||
460545e8 | 3846 | tree_estimate_probability (false); |
be55bfe6 TS |
3847 | |
3848 | if (nb_loops > 1) | |
3849 | scev_finalize (); | |
3850 | ||
3851 | loop_optimizer_finalize (); | |
3852 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3853 | gimple_dump_cfg (dump_file, dump_flags); | |
3854 | if (profile_status_for_fn (fun) == PROFILE_ABSENT) | |
3855 | profile_status_for_fn (fun) = PROFILE_GUESSED; | |
9bb86f40 JH |
3856 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3857 | { | |
3858 | struct loop *loop; | |
3859 | FOR_EACH_LOOP (loop, LI_FROM_INNERMOST) | |
e7a74006 | 3860 | if (loop->header->count.initialized_p ()) |
9bb86f40 JH |
3861 | fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n", |
3862 | loop->num, | |
3863 | (int)expected_loop_iterations_unbounded (loop)); | |
3864 | } | |
be55bfe6 TS |
3865 | return 0; |
3866 | } | |
3867 | ||
27a4cd48 DM |
3868 | } // anon namespace |
3869 | ||
3870 | gimple_opt_pass * | |
3871 | make_pass_profile (gcc::context *ctxt) | |
3872 | { | |
3873 | return new pass_profile (ctxt); | |
3874 | } | |
3875 | ||
3876 | namespace { | |
3877 | ||
3878 | const pass_data pass_data_strip_predict_hints = | |
3879 | { | |
3880 | GIMPLE_PASS, /* type */ | |
3881 | "*strip_predict_hints", /* name */ | |
3882 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
3883 | TV_BRANCH_PROB, /* tv_id */ |
3884 | PROP_cfg, /* properties_required */ | |
3885 | 0, /* properties_provided */ | |
3886 | 0, /* properties_destroyed */ | |
3887 | 0, /* todo_flags_start */ | |
3bea341f | 3888 | 0, /* todo_flags_finish */ |
7299cb99 | 3889 | }; |
b35366ce | 3890 | |
27a4cd48 DM |
3891 | class pass_strip_predict_hints : public gimple_opt_pass |
3892 | { | |
3893 | public: | |
c3284718 RS |
3894 | pass_strip_predict_hints (gcc::context *ctxt) |
3895 | : gimple_opt_pass (pass_data_strip_predict_hints, ctxt) | |
27a4cd48 DM |
3896 | {} |
3897 | ||
3898 | /* opt_pass methods: */ | |
65d3284b | 3899 | opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); } |
be55bfe6 | 3900 | virtual unsigned int execute (function *); |
27a4cd48 DM |
3901 | |
3902 | }; // class pass_strip_predict_hints | |
3903 | ||
be55bfe6 TS |
3904 | /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements |
3905 | we no longer need. */ | |
3906 | unsigned int | |
3907 | pass_strip_predict_hints::execute (function *fun) | |
3908 | { | |
3909 | basic_block bb; | |
355fe088 | 3910 | gimple *ass_stmt; |
be55bfe6 | 3911 | tree var; |
cd1d802a | 3912 | bool changed = false; |
be55bfe6 TS |
3913 | |
3914 | FOR_EACH_BB_FN (bb, fun) | |
3915 | { | |
3916 | gimple_stmt_iterator bi; | |
3917 | for (bi = gsi_start_bb (bb); !gsi_end_p (bi);) | |
3918 | { | |
355fe088 | 3919 | gimple *stmt = gsi_stmt (bi); |
be55bfe6 TS |
3920 | |
3921 | if (gimple_code (stmt) == GIMPLE_PREDICT) | |
3922 | { | |
3923 | gsi_remove (&bi, true); | |
cd1d802a | 3924 | changed = true; |
be55bfe6 TS |
3925 | continue; |
3926 | } | |
3927 | else if (is_gimple_call (stmt)) | |
3928 | { | |
3929 | tree fndecl = gimple_call_fndecl (stmt); | |
3930 | ||
3931 | if ((fndecl | |
3932 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
3933 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT | |
3934 | && gimple_call_num_args (stmt) == 2) | |
3935 | || (gimple_call_internal_p (stmt) | |
3936 | && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)) | |
3937 | { | |
3938 | var = gimple_call_lhs (stmt); | |
cd1d802a | 3939 | changed = true; |
be55bfe6 TS |
3940 | if (var) |
3941 | { | |
3942 | ass_stmt | |
3943 | = gimple_build_assign (var, gimple_call_arg (stmt, 0)); | |
3944 | gsi_replace (&bi, ass_stmt, true); | |
3945 | } | |
3946 | else | |
3947 | { | |
3948 | gsi_remove (&bi, true); | |
3949 | continue; | |
3950 | } | |
3951 | } | |
3952 | } | |
3953 | gsi_next (&bi); | |
3954 | } | |
3955 | } | |
cd1d802a | 3956 | return changed ? TODO_cleanup_cfg : 0; |
be55bfe6 TS |
3957 | } |
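A source-level sketch of what this pass cleans up (hypothetical snippet; handle_error is a placeholder). By this point the hint has already been consumed by profile estimation, so only the plumbing is removed:

/* Before stripping: the branch carries an explicit "unlikely" hint.  */
if (__builtin_expect (ptr == NULL, 0))
  handle_error ();

/* After *strip_predict_hints: the __builtin_expect call is replaced by its
   first argument and leftover GIMPLE_PREDICT statements are removed, leaving
   the plain test; the edge probabilities guessed earlier are untouched.  */
if (ptr == NULL)
  handle_error ();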
3958 | ||
27a4cd48 DM |
3959 | } // anon namespace |
3960 | ||
3961 | gimple_opt_pass * | |
3962 | make_pass_strip_predict_hints (gcc::context *ctxt) | |
3963 | { | |
3964 | return new pass_strip_predict_hints (ctxt); | |
3965 | } | |
3966 | ||
b35366ce JH |
3967 | /* Rebuild function frequencies. Passes are in general expected to |
3968 | maintain profile by hand, however in some cases this is not possible: | |
3969 | for example when inlining several functions with loops freuqencies might run | |
3970 | out of scale and thus needs to be recomputed. */ | |
3971 | ||
3972 | void | |
3973 | rebuild_frequencies (void) | |
3974 | { | |
a222c01a | 3975 | timevar_push (TV_REBUILD_FREQUENCIES); |
67fa7880 TJ |
3976 | |
3977 | /* When the max bb count in the function is small, there is a higher | |
3978 | chance that there were truncation errors in the integer scaling | |
3979 | of counts by inlining and other optimizations. This could lead | |
3980 | to incorrect classification of code as being cold when it isn't. | |
3981 | In that case, force the estimation of bb counts/frequencies from the | |
3982 | branch probabilities, rather than computing frequencies from counts, | |
3983 | which may also lead to frequencies incorrectly reduced to 0. There | |
3984 | is less precision in the probabilities, so we only do this for small | |
3985 | max counts. */ | |
e7a74006 | 3986 | cfun->cfg->count_max = profile_count::uninitialized (); |
67fa7880 | 3987 | basic_block bb; |
fefa31b5 | 3988 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb) |
e7a74006 | 3989 | cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count); |
67fa7880 | 3990 | |
e7a74006 | 3991 | if (profile_status_for_fn (cfun) == PROFILE_GUESSED) |
b35366ce JH |
3992 | { |
3993 | loop_optimizer_init (0); | |
3994 | add_noreturn_fake_exit_edges (); | |
3995 | mark_irreducible_loops (); | |
3996 | connect_infinite_loops_to_exit (); | |
67fa7880 | 3997 | estimate_bb_frequencies (true); |
b35366ce JH |
3998 | remove_fake_exit_edges (); |
3999 | loop_optimizer_finalize (); | |
4000 | } | |
0a6a6ac9 | 4001 | else if (profile_status_for_fn (cfun) == PROFILE_READ) |
fc06ae0d | 4002 | update_max_bb_count (); |
b35366ce JH |
4003 | else |
4004 | gcc_unreachable (); | |
a222c01a | 4005 | timevar_pop (TV_REBUILD_FREQUENCIES); |
b35366ce | 4006 | } |
460545e8 JH |
4007 | |
4008 | /* Perform a dry run of the branch prediction pass and report a comparison of
4009 | the predicted and real profile in the dump file. */
4010 | ||
4011 | void | |
4012 | report_predictor_hitrates (void) | |
4013 | { | |
4014 | unsigned nb_loops; | |
4015 | ||
4016 | loop_optimizer_init (LOOPS_NORMAL); | |
4017 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4018 | flow_loops_dump (dump_file, NULL, 0); | |
4019 | ||
4020 | mark_irreducible_loops (); | |
4021 | ||
4022 | nb_loops = number_of_loops (cfun); | |
4023 | if (nb_loops > 1) | |
4024 | scev_initialize (); | |
4025 | ||
4026 | tree_estimate_probability (true); | |
4027 | ||
4028 | if (nb_loops > 1) | |
4029 | scev_finalize (); | |
4030 | ||
4031 | loop_optimizer_finalize (); | |
4032 | } | |
4033 | ||
98bdbb39 JH |
4034 | /* Force edge E to be cold. |
4035 | If IMPOSSIBLE is true, force the edge to have count and probability 0; otherwise
4036 | keep a low probability to represent a possible error in a guess. This is used,
4037 | e.g., in case we predict a loop to likely iterate a given number of times but
4038 | we are not 100% sure.
4039 | ||
4040 | This function locally updates the profile without attempting to keep global
4041 | consistency, which cannot be reached in full generality without a full profile
4042 | rebuild from probabilities alone. Doing so is not necessarily a good idea
4043 | because frequencies and counts may be more realistic than probabilities.
4044 | ||
4045 | In some cases (such as for elimination of early exits during full loop | |
4046 | unrolling) the caller can ensure that the profile will become consistent
4047 | afterwards. */
4048 | ||
4049 | void | |
4050 | force_edge_cold (edge e, bool impossible) | |
4051 | { | |
3995f3a2 | 4052 | profile_count count_sum = profile_count::zero (); |
357067f2 | 4053 | profile_probability prob_sum = profile_probability::never (); |
98bdbb39 JH |
4054 | edge_iterator ei; |
4055 | edge e2; | |
8d71d7cd | 4056 | bool uninitialized_exit = false; |
98bdbb39 | 4057 | |
d89f01a0 JH |
4058 | /* When branch probability guesses are not known, then do nothing. */ |
4059 | if (!impossible && !e->count ().initialized_p ()) | |
4060 | return; | |
4061 | ||
357067f2 JH |
4062 | profile_probability goal = (impossible ? profile_probability::never () |
4063 | : profile_probability::very_unlikely ()); | |
4064 | ||
98bdbb39 | 4065 | /* If edge is already improbably or cold, just return. */ |
357067f2 | 4066 | if (e->probability <= goal |
ef30ab83 | 4067 | && (!impossible || e->count () == profile_count::zero ())) |
98bdbb39 JH |
4068 | return; |
4069 | FOR_EACH_EDGE (e2, ei, e->src->succs) | |
4070 | if (e2 != e) | |
4071 | { | |
d89f01a0 JH |
4072 | if (e->flags & EDGE_FAKE) |
4073 | continue; | |
ef30ab83 JH |
4074 | if (e2->count ().initialized_p ()) |
4075 | count_sum += e2->count (); | |
357067f2 JH |
4076 | if (e2->probability.initialized_p ()) |
4077 | prob_sum += e2->probability; | |
d89f01a0 JH |
4078 | else |
4079 | uninitialized_exit = true; | |
98bdbb39 JH |
4080 | } |
4081 | ||
d89f01a0 JH |
4082 | /* If we are not guessing profiles but have some other edges out, |
4083 | just assume the control flow goes elsewhere. */ | |
4084 | if (uninitialized_exit) | |
4085 | e->probability = goal; | |
98bdbb39 JH |
4086 | /* If there are other edges out of e->src, redistribute probability
4087 | there. */ | |
d89f01a0 | 4088 | else if (prob_sum > profile_probability::never ()) |
98bdbb39 | 4089 | { |
357067f2 JH |
4090 | if (!(e->probability < goal)) |
4091 | e->probability = goal; | |
98bdbb39 | 4092 | |
357067f2 JH |
4093 | profile_probability prob_comp = prob_sum / e->probability.invert (); |
4094 | ||
98bdbb39 JH |
4095 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4096 | fprintf (dump_file, "Making edge %i->%i %s by redistributing " | |
4097 | "probability to other edges.\n", | |
4098 | e->src->index, e->dest->index, | |
2c40d563 | 4099 | impossible ? "impossible" : "cold"); |
98bdbb39 JH |
4100 | FOR_EACH_EDGE (e2, ei, e->src->succs) |
4101 | if (e2 != e) | |
4102 | { | |
357067f2 | 4103 | e2->probability /= prob_comp; |
98bdbb39 | 4104 | } |
2f70a979 JH |
4105 | if (current_ir_type () != IR_GIMPLE |
4106 | && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)) | |
357067f2 | 4107 | update_br_prob_note (e->src); |
98bdbb39 JH |
4108 | } |
4109 | /* If all edges out of e->src are unlikely, the basic block itself | |
4110 | is unlikely. */ | |
4111 | else | |
4112 | { | |
2f70a979 JH |
4113 | if (prob_sum == profile_probability::never ()) |
4114 | e->probability = profile_probability::always (); | |
4115 | else | |
4116 | { | |
4117 | if (impossible) | |
4118 | e->probability = profile_probability::never (); | |
4119 | /* If BB has some edges out that are not impossible, we can not | |
4120 | assume that BB itself is. */ | |
4121 | impossible = false; | |
4122 | } | |
4123 | if (current_ir_type () != IR_GIMPLE | |
4124 | && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)) | |
357067f2 | 4125 | update_br_prob_note (e->src); |
8d71d7cd JH |
4126 | if (e->src->count == profile_count::zero ()) |
4127 | return; | |
d89f01a0 | 4128 | if (count_sum == profile_count::zero () && impossible) |
8d71d7cd JH |
4129 | { |
4130 | bool found = false; | |
357067f2 JH |
4131 | if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
4132 | ; | |
4133 | else if (current_ir_type () == IR_GIMPLE) | |
4134 | for (gimple_stmt_iterator gsi = gsi_start_bb (e->src); | |
4135 | !gsi_end_p (gsi); gsi_next (&gsi)) | |
4136 | { | |
4137 | if (stmt_can_terminate_bb_p (gsi_stmt (gsi))) | |
4138 | { | |
4139 | found = true; | |
4140 | break; | |
4141 | } | |
4142 | } | |
4143 | /* FIXME: Implement RTL path. */ | |
4144 | else | |
4145 | found = true; | |
8d71d7cd JH |
4146 | if (!found) |
4147 | { | |
4148 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4149 | fprintf (dump_file, | |
4150 | "Making bb %i impossible and dropping count to 0.\n", | |
4151 | e->src->index); | |
8d71d7cd JH |
4152 | e->src->count = profile_count::zero (); |
4153 | FOR_EACH_EDGE (e2, ei, e->src->preds) | |
4154 | force_edge_cold (e2, impossible); | |
4155 | return; | |
4156 | } | |
4157 | } | |
98bdbb39 JH |
4158 | |
4159 | /* If we made no adjustment, the source basic block has no likely edges
4160 | leaving in the other direction. In that case force that bb cold, too.
4161 | This is in general a difficult task to do, but handle the special case when
4162 | BB has only one predecessor. This is the common case when we are updating
4163 | after loop transforms. */
357067f2 JH |
4164 | if (!(prob_sum > profile_probability::never ()) |
4165 | && count_sum == profile_count::zero () | |
e7a74006 JH |
4166 | && single_pred_p (e->src) && e->src->count.to_frequency (cfun) |
4167 | > (impossible ? 0 : 1)) | |
98bdbb39 | 4168 | { |
e7a74006 | 4169 | int old_frequency = e->src->count.to_frequency (cfun); |
98bdbb39 JH |
4170 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4171 | fprintf (dump_file, "Making bb %i %s.\n", e->src->index, | |
2c40d563 | 4172 | impossible ? "impossible" : "cold"); |
e7a74006 JH |
4173 | int new_frequency = MIN (e->src->count.to_frequency (cfun), |
4174 | impossible ? 0 : 1); | |
3995f3a2 | 4175 | if (impossible) |
ef30ab83 | 4176 | e->src->count = profile_count::zero (); |
3995f3a2 | 4177 | else |
e7a74006 | 4178 | e->src->count = e->count ().apply_scale (new_frequency, |
ef30ab83 | 4179 | old_frequency); |
98bdbb39 JH |
4180 | force_edge_cold (single_pred_edge (e->src), impossible); |
4181 | } | |
4182 | else if (dump_file && (dump_flags & TDF_DETAILS) | |
4183 | && maybe_hot_bb_p (cfun, e->src)) | |
4184 | fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index, | |
2c40d563 | 4185 | impossible ? "impossible" : "cold"); |
98bdbb39 JH |
4186 | } |
4187 | } | |
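A hypothetical caller-side sketch of the full-unrolling scenario mentioned above, assuming the standard single_exit helper from cfgloop.h: once an exit is proven dead, marking it impossible lets force_edge_cold zero the local counts.

/* Illustration only: mark a proven-dead loop exit as impossible.  */
static void
illustrate_kill_dead_exit (struct loop *loop)
{
  edge exit = single_exit (loop);  /* Assumes a single-exit loop.  */
  if (exit)
    force_edge_cold (exit, /*impossible=*/true);
}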
d8838217 ML |
4188 | |
4189 | #if CHECKING_P | |
4190 | ||
4191 | namespace selftest { | |
4192 | ||
4193 | /* Test that value range of predictor values defined in predict.def is | |
4194 | within range (50, 100]. */ | |
4195 | ||
4196 | struct branch_predictor | |
4197 | { | |
4198 | const char *name; | |
4199 | unsigned probability; | |
4200 | }; | |
4201 | ||
4202 | #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) { NAME, HITRATE }, | |
4203 | ||
4204 | static void | |
4205 | test_prediction_value_range () | |
4206 | { | |
4207 | branch_predictor predictors[] = { | |
4208 | #include "predict.def" | |
f6b3ca5a | 4209 | {NULL, -1U} |
d8838217 ML |
4210 | }; |
4211 | ||
4212 | for (unsigned i = 0; predictors[i].name != NULL; i++) | |
4213 | { | |
4214 | unsigned p = 100 * predictors[i].probability / REG_BR_PROB_BASE; | |
4215 | ASSERT_TRUE (p > 50 && p <= 100); | |
4216 | } | |
4217 | } | |
4218 | ||
4219 | #undef DEF_PREDICTOR | |
4220 | ||
4221 | /* Run all of the selfests within this file. */ | |
4222 | ||
4223 | void | |
4224 | predict_c_tests () | |
4225 | { | |
4226 | test_prediction_value_range (); | |
4227 | } | |
4228 | ||
4229 | } // namespace selftest | |
4230 | #endif /* CHECKING_P. */ |