/* CFG cleanup for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "gimple-match.h"
#include "gimple-fold.h"
#include "tree-ssa-loop-niter.h"

/* The set of blocks in which at least one of the following changes happened:
     -- the statement at the end of the block was changed
     -- the block was newly created
     -- the set of the predecessors of the block changed
     -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
   what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;

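/* The bits set here are consumed by the worklist loop in
   cleanup_tree_cfg_1 below; entries for blocks that get deleted in the
   meantime are simply skipped when the worklist is drained.  */
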
/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        if (e->flags & EDGE_COMPLEX)
          e->flags &= ~EDGE_FALLTHRU;
        else
          remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}

/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi,
                            bool first_p)
{
  edge taken_edge;
  bool retval = false;
  gimple *stmt = gsi_stmt (gsi);

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      tree val = NULL_TREE;

      fold_defer_overflow_warnings ();
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          /* During a first iteration on the CFG only remove trivially
             dead edges but mark other conditions for re-evaluation.  */
          if (first_p)
            {
              val = const_binop (gimple_cond_code (stmt), boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
              if (! val)
                bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
            }
          else
            {
              code_helper rcode;
              tree ops[3] = {};
              if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges,
                                   no_follow_ssa_edges)
                  && rcode == INTEGER_CST)
                val = ops[0];
            }
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (as_a <gswitch *> (stmt));
          break;

        default:
          ;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}

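/* A sketch, for illustration, of the transform cleanup_control_expr_graph
   performs once a condition has become a compile-time constant:

     <bb 2>:
     if (0 != 0) goto <bb 3>; else goto <bb 4>;

   The edge to <bb 3> (and any blocks dominated solely by it) is removed,
   the GIMPLE_COND is deleted, and the surviving edge to <bb 4> becomes a
   plain fallthru accumulating the removed edge's probability and count.  */
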
/* Cleanup the GF_CALL_CTRL_ALTERING flag according to the updated
   gimple_call_flags.  */

static void
cleanup_call_ctrl_altering_flag (gimple *bb_end)
{
  if (!is_gimple_call (bb_end)
      || !gimple_call_ctrl_altering_p (bb_end))
    return;

  int flags = gimple_call_flags (bb_end);
  if (((flags & (ECF_CONST | ECF_PURE))
       && !(flags & ECF_LOOPING_CONST_OR_PURE))
      || (flags & ECF_LEAF))
    gimple_call_set_ctrl_altering (bb_end, false);
}

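/* For illustration: an indirect call conservatively marked as control
   altering may later be resolved to a non-looping const/pure or leaf
   function; since such a call can no longer alter control flow, the
   flag is cleared so the block can be cleaned up like any other.  */
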
/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb, bool first_p)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple *stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune the CFG.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  /* Try to clean up the ctrl altering flag for the call which ends BB.  */
  cleanup_call_ctrl_altering_flag (stmt);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      retval |= cleanup_control_expr_graph (bb, gsi, first_p);
    }
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      if (DECL_CONTEXT (label) != cfun->decl)
        return retval;
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt))
    {
      /* If there are debug stmts after the noreturn call, remove them
         now, they should be all unreachable anyway.  */
      for (gsi_next (&gsi); !gsi_end_p (gsi); )
        gsi_remove (&gsi, true);
      if (remove_fallthru_edge (bb->succs))
        retval = true;
    }

  return retval;
}

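/* For illustration, the computed-goto case above fires on code like

     void *p = &&lab; ... goto *p;

   once propagation has turned the goto destination into &lab: all
   outgoing edges except the one to lab's block are removed, that edge
   becomes a normal fallthru, and the goto statement itself is deleted
   because the CFG now carries all the information.  */
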
/* Return true if basic block BB does nothing except pass control
   flow to another block and we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   the entry block.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of the exit block.  */
      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop headers.  */
      if (bb_loop_header_p (bb))
        return false;

      dest = EDGE_SUCC (bb, 0)->dest;
      /* Protect loop preheaders and latches if requested.  */
      if (dest->loop_father->header == dest)
        {
          if (bb->loop_father == dest->loop_father)
            {
              if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
                return false;
              /* If bb doesn't have a single predecessor we'd make this
                 loop have multiple latches.  Don't do that if that
                 would in turn require disambiguating them.  */
              return (single_pred_p (bb)
                      || loops_state_satisfies_p
                           (LOOPS_MAY_HAVE_MULTIPLE_LATCHES));
            }
          else if (bb->loop_father == loop_outer (dest->loop_father))
            return !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS);
          /* Always preserve other edges into loop headers that are
             not simple latches or preheaders.  */
          return false;
        }
    }

  return true;
}

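/* For illustration, a typical forwarder block accepted above is

     <bb 5>:
     # (possibly artificial labels and debug stmts only)
     goto <bb 7>;

   i.e. a block that merely passes control through; its callers then
   redirect all incoming edges straight to the successor and delete it.  */
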
/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}

/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple *label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label)
    if (glabel *label_stmt = dyn_cast <glabel *> (label))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
          || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0)
        return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node when
     going out of SSA due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and the rest of the exception handling
     code does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gphi_iterator psi = gsi_start_phis (dest);
               !gsi_end_p (psi);
               gsi_next (&psi))
            {
              gphi *phi = psi.phi ();
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (as_a <glabel *> (label));
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple *debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}

/* STMT is a call that has been discovered noreturn.  Split the
   block to prepare fixing up the CFG and remove LHS.
   Return true if cleanup-cfg needs to run.  */

bool
fixup_noreturn_call (gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    {
      if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
        {
          /* Don't split if there are only debug stmts
             after stmt, that can result in -fcompare-debug
             failures.  Remove the debug stmts instead,
             they should be all unreachable anyway.  */
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          for (gsi_next (&gsi); !gsi_end_p (gsi); )
            gsi_remove (&gsi, true);
        }
      else
        {
          split_block (bb, stmt);
          changed = true;
        }
    }

  /* If there is an LHS, remove it, but only if its type has fixed size.
     The LHS will need to be recreated during RTL expansion and creating
     temporaries of variable-sized types is not supported.  Also don't
     do this with TREE_ADDRESSABLE types, as assign_temp will abort.
     Drop LHS regardless of TREE_ADDRESSABLE, if the function call
     has been changed into a call that does not return a value, like
     __builtin_unreachable or __cxa_pure_virtual.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (should_remove_lhs_p (lhs)
          || VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))))
    {
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to fix up the SSA name to avoid checking errors.  */
      if (TREE_CODE (lhs) == SSA_NAME)
        {
          tree new_var = create_tmp_reg (TREE_TYPE (lhs));
          SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, new_var);
          SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
          set_ssa_default_def (cfun, new_var, lhs);
        }

      update_stmt (stmt);
    }

  /* Mark the call as altering control flow.  */
  if (!gimple_call_ctrl_altering_p (stmt))
    {
      gimple_call_set_ctrl_altering (stmt, true);
      changed = true;
    }

  return changed;
}

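/* For illustration: if in

     x_1 = foo ();
     bar (x_1);

   foo is discovered to be noreturn, the block is split after the call
   (the tail becomes unreachable and is pruned by the next CFG cleanup),
   x_1 is dropped from the call, and a fresh default definition is
   created for it so the SSA form stays valid.  */
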
/* Tries to clean up the CFG in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* If there is a merge opportunity with the predecessor
     do nothing now but wait until we process the predecessor.
     This happens when we visit BBs in a non-optimal order and
     avoids quadratic behavior with adjusting the stmts' BB pointer.  */
  if (single_pred_p (bb)
      && can_merge_blocks_p (single_pred (bb), bb))
    /* But make sure we _do_ visit it.  When we remove unreachable paths
       ending in a backedge we fail to mark the destination's predecessors
       as changed.  */
    bitmap_set_bit (cfgcleanup_altered_bbs, single_pred (bb)->index);

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  else if (single_succ_p (bb)
           && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return false;
}

/* Iterate the cfg cleanups while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* We cannot use FOR_EACH_BB_FN for the BB iterations below
     since the basic blocks may get removed.  */

  /* Start by iterating over all basic blocks looking for edge removal
     opportunities.  Do this first because incoming SSA form may be
     invalid and we want to avoid performing SSA related tasks such
     as propagating out a PHI node during BB merging in that state.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        retval |= cleanup_control_flow_bb (bb, true);
    }

  /* After doing the above SSA form should be valid (or an update SSA
     should be required).  */

  /* Continue by iterating over all basic blocks looking for BB merging
     opportunities.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (!bb)
        continue;

      retval |= cleanup_control_flow_bb (bb, false);
      retval |= cleanup_tree_cfg_bb (bb);
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}

static bool
mfb_keep_latches (edge e)
{
  return ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest);
}

/* Remove unreachable blocks and do other miscellaneous cleanup work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
      checking_verify_dominators (CDI_DOMINATORS);
      changed = false;
    }

  /* Ensure that we have single entries into loop headers.  Otherwise
     if one of the entries is becoming a latch due to CFG cleanup
     (from formerly being part of an irreducible region) then we mess
     up loop fixup and associate the old loop with a different region
     which makes niter upper bounds invalid.  See for example PR80549.
     This needs to be done before we remove trivially dead edges as
     we need to capture the dominance state before the pending transform.  */
  if (current_loops)
    {
      loop_p loop;
      unsigned i;
      FOR_EACH_VEC_ELT (*get_loops (cfun), i, loop)
        if (loop && loop->header)
          {
            basic_block bb = loop->header;
            edge_iterator ei;
            edge e;
            bool found_latch = false;
            bool any_abnormal = false;
            unsigned n = 0;
            /* We are only interested in preserving existing loops, but
               we need to check whether they are still real and of course
               if we need to add a preheader at all.  */
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
                if (e->flags & EDGE_ABNORMAL)
                  {
                    any_abnormal = true;
                    break;
                  }
                if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
                  {
                    found_latch = true;
                    continue;
                  }
                n++;
              }
            /* If we have more than one entry to the loop header
               create a forwarder.  */
            if (found_latch && ! any_abnormal && n > 1)
              {
                edge fallthru = make_forwarder_block (bb, mfb_keep_latches,
                                                      NULL);
                loop->header = fallthru->dest;
                if (! loops_state_satisfies_p (LOOPS_NEED_FIXUP))
                  {
                    /* The loop updating from the CFG hook is incomplete
                       when we have multiple latches, fixup manually.  */
                    remove_bb_from_loops (fallthru->src);
                    loop_p cloop = loop;
                    FOR_EACH_EDGE (e, ei, fallthru->src->preds)
                      cloop = find_common_loop (cloop, e->src->loop_father);
                    add_bb_to_loop (fallthru->src, cloop);
                  }
              }
          }
    }

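  /* The transform above, sketched for illustration: a header H with
     latch edge L->H and outside entries E1->H and E2->H becomes

       E1 -> F, E2 -> F, F -> H, L -> H

     with the new forwarder F as the only non-latch entry, because
     mfb_keep_latches rejects redirecting the dominated (latch) edge.  */
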
  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

  checking_verify_flow_info ();

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

  checking_verify_loop_structure ();
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Cleanup cfg and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}

/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  */

static bool
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple *label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* Removal of forwarders may expose new natural loops and thus
     a block may turn into a loop header.  */
  if (current_loops && bb_loop_header_p (bb))
    return false;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label)
    if (glabel *label_stmt = dyn_cast <glabel *> (label))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
        return false;

  /* Record BB's single pred in case we need to update the father
     loop's latch information later.  */
  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gphi_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }
      else
        {
          /* If we merge the forwarder into a loop header verify if we
             are creating another loop latch edge.  If so, reset
             number of iteration information of the loop.  */
          if (dest->loop_father->header == dest
              && dominated_by_p (CDI_DOMINATORS, e->src, dest))
            {
              dest->loop_father->any_upper_bound = false;
              dest->loop_father->any_likely_upper_bound = false;
              free_numbers_of_iterations_estimates_loop (dest->loop_father);
            }
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              vec<edge_var_map> *head = redirect_edge_var_map_vector (e);
              size_t length = head ? head->length () : 0;
              for (size_t i = 0; i < length; i++)
                {
                  edge_var_map *vm = &(*head)[i];
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);

  return true;
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

   goto <bb 9> (<L9>);

   <L8>:;
   tem_17 = foo ();

   # tem_6 = PHI <tem_17(8), tem_23(7)>;
   <L9>:;

   # tem_3 = PHI <tem_6(9), tem_2(5)>;
   <L10>:;

   Then we merge the first PHI node into the second one like so:

   goto <bb 9> (<L10>);

   <L8>:;
   tem_17 = foo ();

   # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
   <L10>:;
*/

namespace {

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_merge_phi

unsigned int
pass_merge_phi::execute (function *fun)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (fun));
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gphi_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gphi *phi = gsi.phi ();
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple *use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
  if (changed
      && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}

/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = execute_fixup_cfg ();
  if (cleanup_tree_cfg ())
    {
      todo &= ~TODO_cleanup_cfg;
      todo |= TODO_update_ssa;
    }
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  group_case_labels ();
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
        {
          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
      else
        {
          int save_unnumbered = flag_dump_unnumbered;
          int save_noaddr = flag_dump_noaddr;

          flag_dump_noaddr = flag_dump_unnumbered = 1;
          fprintf (final_output, "\n");
          dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
          flag_dump_noaddr = save_noaddr;
          flag_dump_unnumbered = save_unnumbered;
          if (fclose (final_output))
            {
              error ("could not close final insn dump file %qs: %m",
                     flag_dump_final_insns);
              flag_dump_final_insns = NULL;
            }
        }
    }
  return todo;
}

namespace {

const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    return execute_cleanup_cfg_post_optimizing ();
  }

}; // class pass_cleanup_cfg_post_optimizing

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}