/* CFG cleanup for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "cfgloop.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"

/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;

/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        if (e->flags & EDGE_COMPLEX)
          e->flags &= ~EDGE_FALLTHRU;
        else
          remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}


/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      location_t loc;

      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          val = fold_binary_loc (loc, gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (stmt);
          break;

        default:
          val = NULL_TREE;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}

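/* Illustrative sketch (hypothetical block numbers, not taken from a real
   dump): if BB ends in

     if (0 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

   fold_binary_loc reduces the condition to boolean_false_node, so
   find_taken_edge returns the edge to <bb 4>.  The edge to <bb 3> is
   removed together with any blocks dominated solely through it, the
   GIMPLE_COND itself is deleted, and the surviving edge is marked
   EDGE_FALLTHRU.  */
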
/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt)
           && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}

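/* For instance (a hypothetical sketch): once propagation has turned a
   computed goto's destination into the address of a known label, say

     goto &L2;

   the destination is an ADDR_EXPR of a LABEL_DECL, so the branch above
   resolves it with label_to_block, keeps only the successor edge leading
   to L2's block (clearing EDGE_ABNORMAL and setting EDGE_FALLTHRU on it),
   and deletes the GIMPLE_GOTO; the remaining CFG edge carries all the
   information the goto expressed.  */
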
/* Return true if basic block BB does nothing except pass control
   flow to another block and that we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop latches, headers and preheaders.  */
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;

      if (dest->loop_father->header == dest)
        return false;
    }
  return true;
}

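/* A forwarder block, in the sense checked above, contains nothing but
   labels and debug statements and passes control to a single successor,
   e.g. (hypothetical dump fragment):

     <bb 5>:
     L3:
       (no other statements; single fallthru edge to <bb 7>)

   With PHI_WANTED false this predicate accepts such a <bb 5> so that
   remove_forwarder_block can splice it out; with PHI_WANTED true it is
   used by merge_phi_nodes below to find forwarders whose only real
   content is PHI nodes.  */
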
/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}

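/* Example (hypothetical SSA names and edges): if DEST starts with

     # x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>
     # y_4 = PHI <c_5(E1), c_5(E2), c_5(E3)>

   then the alternatives for E1 and E2 agree in every PHI and the function
   returns true; if the first PHI instead carried b_3 on E2, the
   operand_equal_for_phi_arg_p check would fail and it would return
   false.  */
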
/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && (DECL_NONLOCAL (gimple_label_label (label))
          || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at the
     out-of-ssa stage due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and the rest of the exception handling
     code does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gsi = gsi_start_phis (dest);
               !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (label);
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}

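/* Sketch of the transformation (hypothetical block numbers): given

     <bb 3> --> <bb 5> --> <bb 7>

   where <bb 5> is a forwarder accepted by tree_forwarder_block_p, every
   predecessor edge of <bb 5> (here the one from <bb 3>) is redirected
   straight to <bb 7>, missing PHI arguments in <bb 7> are copied from the
   old <bb 5> -> <bb 7> edge, labels and movable debug statements are
   transferred, the immediate dominator of <bb 7> is recomputed, and
   <bb 5> is deleted.  */
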
/* STMT is a call that has been discovered noreturn.  Fix up the CFG
   and remove the LHS.  Return true if something changed.  */

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split the basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    split_block (bb, stmt);

  changed |= remove_fallthru_edge (bb->succs);

  /* If there is an LHS, remove it.  */
  if (gimple_call_lhs (stmt))
    {
      tree op = gimple_call_lhs (stmt);
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to remove the SSA name to avoid checking errors.
         All uses are dominated by the noreturn and thus will
         be removed afterwards.
         We proactively remove affected non-PHI statements to keep
         fixup_cfg from trying to update them and crashing.  */
      if (TREE_CODE (op) == SSA_NAME)
        {
          use_operand_p use_p;
          imm_use_iterator iter;
          gimple use_stmt;
          bitmap_iterator bi;
          unsigned int bb_index;

          bitmap blocks = BITMAP_ALLOC (NULL);

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
            {
              if (gimple_code (use_stmt) != GIMPLE_PHI)
                bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
              else
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, error_mark_node);
            }
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
            delete_basic_block (BASIC_BLOCK (bb_index));
          BITMAP_FREE (blocks);
          release_ssa_name (op);
        }
      update_stmt (stmt);
      changed = true;
    }
  /* Similarly remove the VDEF if there is any.  */
  else if (gimple_vdef (stmt))
    update_stmt (stmt);
  return changed;
}


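/* Illustrative scenario (hypothetical names): if propagation or inlining
   reveals that foo does not return, a sequence such as

     i_2 = foo (x_1);
     j_3 = i_2 + 1;

   is handled by splitting the block after the call, removing the now
   impossible fallthru edge, dropping the lhs i_2, deleting the blocks
   that contain its non-PHI uses (like the assignment to j_3), marking
   its PHI uses with error_mark_node, and releasing the SSA name.  */
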
/* Split basic blocks on calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  */

static bool
split_bbs_on_noreturn_calls (void)
{
  bool changed = false;
  gimple stmt;
  basic_block bb;

  /* Detect cases where a mid-block call is now known not to return.  */
  if (cfun->gimple_df)
    while (vec_safe_length (MODIFIED_NORETURN_CALLS (cfun)))
      {
        stmt = MODIFIED_NORETURN_CALLS (cfun)->pop ();
        bb = gimple_bb (stmt);
        /* BB might be deleted at this point, so verify first that
           BB is present in the cfg.  */
        if (bb == NULL
            || bb->index < NUM_FIXED_BLOCKS
            || bb->index >= last_basic_block
            || BASIC_BLOCK (bb->index) != bb
            || !gimple_call_noreturn_p (stmt))
          continue;

        changed |= fixup_noreturn_call (stmt);
      }

  return changed;
}

/* Tries to clean up the cfg in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}

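/* Worked mini-example (hypothetical): suppose <bb 6> ends in
   "if (1 != 0) goto <bb 8>; else goto <bb 9>;".  cleanup_control_flow_bb
   folds the condition and leaves a single fallthru edge to <bb 8>;
   <bb 6> is then often a forwarder and is removed, or, failing that,
   can_merge_blocks_p lets it be merged with <bb 8>, which in turn may
   reduce a PHI in <bb 8> to a single argument that folds on a later
   visit of the altered blocks.  */
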
/* Iterate the cfg cleanups while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  retval |= split_bbs_on_noreturn_calls ();

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB,
     since the basic blocks may get removed.  */
  n = last_basic_block;
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK (i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK (i);
      if (!bb)
        continue;

      retval |= cleanup_tree_cfg_bb (bb);

      /* Rerun split_bbs_on_noreturn_calls, in case we have altered any
         noreturn calls.  */
      retval |= split_bbs_on_noreturn_calls ();
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}


/* Remove unreachable blocks and do other miscellaneous cleanup work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of the cfg that
     originally were inside a loop get out of it due to edge removal
     (since they become unreachable by back edges from the latch).
     Also a former irreducible loop can become reducible - in this case
     force a full rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Clean up the cfg and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}

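/* Usage sketch (illustrative, not part of this file): a GIMPLE pass that
   removes edges or statements would typically finish with

     if (cleanup_tree_cfg ())
       todo |= TODO_update_ssa;

   exactly as execute_cleanup_cfg_post_optimizing does further below; when
   loop structures are live, cleanup_tree_cfg also calls
   repair_loop_structures whenever LOOPS_NEED_FIXUP was set.  */
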
/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  */

static bool
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (label)))
    return false;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gimple_stmt_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              edge_var_map_vector *head;
              edge_var_map *vm;
              size_t i;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              head = redirect_edge_var_map_vector (e);
              FOR_EACH_VEC_SAFE_ELT (head, i, vm)
                {
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);

  return true;
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
  if (changed
      && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return 0;
}

static bool
gate_merge_phi (void)
{
  return 1;
}

namespace {

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  bool gate () { return gate_merge_phi (); }
  unsigned int execute () { return merge_phi_nodes (); }

}; // class pass_merge_phi

} // anon namespace

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}

/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = 0;
  if (cleanup_tree_cfg ())
    todo |= TODO_update_ssa;
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  group_case_labels ();
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
        {
          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
      else
        {
          int save_unnumbered = flag_dump_unnumbered;
          int save_noaddr = flag_dump_noaddr;

          flag_dump_noaddr = flag_dump_unnumbered = 1;
          fprintf (final_output, "\n");
          dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
          flag_dump_noaddr = save_noaddr;
          flag_dump_unnumbered = save_unnumbered;
          if (fclose (final_output))
            {
              error ("could not close final insn dump file %qs: %m",
                     flag_dump_final_insns);
              flag_dump_final_insns = NULL;
            }
        }
    }
  return todo;
}

namespace {

const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return execute_cleanup_cfg_post_optimizing ();
  }

}; // class pass_cleanup_cfg_post_optimizing

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}