]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-cfgcleanup.c
dojump.h: New header file.
[thirdparty/gcc.git] / gcc / tree-cfgcleanup.c
CommitLineData
c9784e6d 1/* CFG cleanup for trees.
5624e564 2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
c9784e6d
KH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
9dcd6f09 8the Free Software Foundation; either version 3, or (at your option)
c9784e6d
KH
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
c9784e6d
KH
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
40e23961
MC
24#include "hash-set.h"
25#include "machmode.h"
26#include "vec.h"
27#include "double-int.h"
28#include "input.h"
29#include "alias.h"
30#include "symtab.h"
31#include "wide-int.h"
32#include "inchash.h"
c9784e6d 33#include "tree.h"
40e23961 34#include "fold-const.h"
c9784e6d 35#include "tm_p.h"
60393bbc 36#include "predict.h"
83685514 37#include "hard-reg-set.h"
c9784e6d 38#include "function.h"
60393bbc
AM
39#include "dominance.h"
40#include "cfg.h"
41#include "cfganal.h"
42#include "cfgcleanup.h"
43#include "basic-block.h"
44#include "diagnostic-core.h"
45#include "flags.h"
c9784e6d 46#include "langhooks.h"
2fb9a547
AM
47#include "tree-ssa-alias.h"
48#include "internal-fn.h"
49#include "tree-eh.h"
50#include "gimple-expr.h"
51#include "is-a.h"
18f429e2 52#include "gimple.h"
45b0be94 53#include "gimplify.h"
5be5c238 54#include "gimple-iterator.h"
442b4905
AM
55#include "gimple-ssa.h"
56#include "tree-cfg.h"
57#include "tree-phinodes.h"
58#include "ssa-iterators.h"
d8a2d370 59#include "stringpool.h"
442b4905 60#include "tree-ssanames.h"
e28030cf 61#include "tree-ssa-loop-manip.h"
36566b39
PK
62#include "hashtab.h"
63#include "rtl.h"
64#include "statistics.h"
65#include "real.h"
66#include "fixed-value.h"
67#include "insn-config.h"
68#include "expmed.h"
69#include "dojump.h"
70#include "explow.h"
71#include "calls.h"
72#include "emit-rtl.h"
73#include "varasm.h"
74#include "stmt.h"
d8a2d370 75#include "expr.h"
442b4905 76#include "tree-dfa.h"
7a300452 77#include "tree-ssa.h"
c9784e6d 78#include "tree-pass.h"
c9784e6d
KH
79#include "except.h"
80#include "cfgloop.h"
c9784e6d 81#include "tree-ssa-propagate.h"
17684618 82#include "tree-scalar-evolution.h"
c9784e6d 83
672987e8
ZD
/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
   what cleanups it makes sense to try on the block.

   Allocated at the start of cleanup_tree_cfg_1 and freed at its end; acts
   as the worklist driving the iterative cleanup.  */
bitmap cfgcleanup_altered_bbs;
92
c9784e6d
KH
93/* Remove any fallthru edge from EV. Return true if an edge was removed. */
94
95static bool
9771b263 96remove_fallthru_edge (vec<edge, va_gc> *ev)
c9784e6d
KH
97{
98 edge_iterator ei;
99 edge e;
100
101 FOR_EACH_EDGE (e, ei, ev)
102 if ((e->flags & EDGE_FALLTHRU) != 0)
103 {
0107dca2
RB
104 if (e->flags & EDGE_COMPLEX)
105 e->flags &= ~EDGE_FALLTHRU;
106 else
107 remove_edge_and_dominated_blocks (e);
c9784e6d
KH
108 return true;
109 }
110 return false;
111}
112
726a989a 113
c9784e6d
KH
/* Disconnect an unreachable block in the control expression starting
   at block BB.  GSI points at the control statement (GIMPLE_COND or
   GIMPLE_SWITCH) terminating BB; the statement is removed and the one
   surviving edge is turned into a fallthru.  Returns true if any edge
   was removed.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      location_t loc;

      /* Folding the condition may emit strict-overflow warnings; defer
	 them so they are only issued if an edge is actually removed.  */
      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  val = fold_binary_loc (loc, gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));
	  break;

	case GIMPLE_SWITCH:
	  val = gimple_switch_index (as_a <gswitch *> (stmt));
	  break;

	default:
	  val = NULL_TREE;
	}
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
	{
	  fold_undefer_and_ignore_overflow_warnings ();
	  return false;
	}

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e != taken_edge)
	    {
	      /* Issue the deferred overflow warning once, on the first
		 edge removal.  */
	      if (!warned)
		{
		  fold_undefer_overflow_warnings
		    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
		  warned = true;
		}

	      /* Fold the removed edge's profile into the surviving one.  */
	      taken_edge->probability += e->probability;
	      taken_edge->count += e->count;
	      remove_edge_and_dominated_blocks (e);
	      retval = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (!warned)
	fold_undefer_and_ignore_overflow_warnings ();
      /* Accumulated probabilities may exceed the maximum; clamp.  */
      if (taken_edge->probability > REG_BR_PROB_BASE)
	taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  /* The control statement itself is now redundant; the CFG edge carries
     all the information.  */
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
192
58041fe6
MJ
193/* Cleanup the GF_CALL_CTRL_ALTERING flag according to
194 to updated gimple_call_flags. */
195
196static void
197cleanup_call_ctrl_altering_flag (gimple bb_end)
198{
199 if (!is_gimple_call (bb_end)
200 || !gimple_call_ctrl_altering_p (bb_end))
201 return;
202
203 int flags = gimple_call_flags (bb_end);
204 if (((flags & (ECF_CONST | ECF_PURE))
205 && !(flags & ECF_LOOPING_CONST_OR_PURE))
206 || (flags & ECF_LEAF))
207 gimple_call_set_ctrl_altering (bb_end, false);
208}
209
672987e8
ZD
/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  Handles: dead EH edges, foldable conditions
   and switches, computed gotos with a compile-time destination, and calls
   that have become noreturn.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  /* Try to cleanup ctrl altering flag for call which ends bb.  */
  cleanup_call_ctrl_altering_flag (stmt);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
	   && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
	       == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
	 destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
	 edges which do not go to the right block.  For the one
	 edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->dest != target_block)
	    remove_edge_and_dominated_blocks (e);
	  else
	    {
	      /* Turn off the EDGE_ABNORMAL flag.  */
	      e->flags &= ~EDGE_ABNORMAL;

	      /* And set EDGE_FALLTHRU.  */
	      e->flags |= EDGE_FALLTHRU;
	      ei_next (&ei);
	    }
	}

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
	 relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
	   && gimple_call_noreturn_p (stmt)
	   && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}
286
/* Return true if basic block BB does nothing except pass control
   flow to another block and that we can safely insert a label at
   the start of the successor block.

   If PHI_WANTED is true, BB is expected to contain PHI nodes (used by
   the mergephi pass); if false, BB must contain none (used by plain
   forwarder-block removal).

   As a precondition, we require that BB be not equal to
   the entry block.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
	 Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of the exit block.  */
      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
	return false;
      /* If goto_locus of any of the edges differs, prevent removing
	 the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
	return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
	{
	case GIMPLE_LABEL:
	  if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	    return false;
	  /* At -O0, keep blocks whose label locations differ from the
	     outgoing edge so debug info stays accurate.  */
	  if (optimize == 0 && gimple_location (stmt) != locus)
	    return false;
	  break;

	/* ??? For now, hope there's a corresponding debug
	   assignment at the destination.  */
	case GIMPLE_DEBUG:
	  break;

	default:
	  return false;
	}
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop headers.  */
      if (bb->loop_father->header == bb)
	return false;

      dest = EDGE_SUCC (bb, 0)->dest;
      /* Protect loop preheaders and latches if requested.  */
      if (dest->loop_father->header == dest)
	{
	  if (bb->loop_father == dest->loop_father)
	    {
	      /* BB is a latch (its successor is its own loop's header).  */
	      if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
		return false;
	      /* If bb doesn't have a single predecessor we'd make this
		 loop have multiple latches.  Don't do that if that
		 would in turn require disambiguating them.  */
	      return (single_pred_p (bb)
		      || loops_state_satisfies_p
			   (LOOPS_MAY_HAVE_MULTIPLE_LATCHES));
	    }
	  else if (bb->loop_father == loop_outer (dest->loop_father))
	    /* BB is a preheader of DEST's loop.  */
	    return !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS);
	  /* Always preserve other edges into loop headers that are
	     not simple latches or preheaders.  */
	  return false;
	}
    }

  return true;
}
388
c9784e6d
KH
389/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
390 those alternatives are equal in each of the PHI nodes, then return
391 true, else return false. */
392
393static bool
394phi_alternatives_equal (basic_block dest, edge e1, edge e2)
395{
396 int n1 = e1->dest_idx;
397 int n2 = e2->dest_idx;
538dd0b7 398 gphi_iterator gsi;
c9784e6d 399
726a989a 400 for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
c9784e6d 401 {
538dd0b7 402 gphi *phi = gsi.phi ();
726a989a
RB
403 tree val1 = gimple_phi_arg_def (phi, n1);
404 tree val2 = gimple_phi_arg_def (phi, n2);
c9784e6d
KH
405
406 gcc_assert (val1 != NULL_TREE);
407 gcc_assert (val2 != NULL_TREE);
408
409 if (!operand_equal_for_phi_arg_p (val1, val2))
410 return false;
411 }
412
413 return true;
414}
415
/* Removes forwarder block BB (a block containing nothing but a jump to its
   single successor DEST).  All incoming edges are redirected to DEST,
   labels and debug statements are migrated, and dominator/loop data is
   updated.  Returns false if removal is not possible or safe.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is a
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label)
    if (glabel *label_stmt = dyn_cast <glabel *> (label))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0)
	return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
	  || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  s = find_edge (e->src, dest);
	  if (!s)
	    continue;

	  if (!phi_alternatives_equal (dest, succ, s))
	    return false;
	}
    }

  /* Debug stmts can only be moved when DEST has a unique predecessor,
     otherwise their bindings would be ambiguous.  */
  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  /* Remember BB's single predecessor (if any) before edges are torn
     down; needed below for the loop-latch update.  */
  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
	{
	  /* If there is an abnormal edge, redirect it anyway, and
	     move the labels to the new block to make it legal.  */
	  s = redirect_edge_succ_nodup (e, dest);
	}
      else
	s = redirect_edge_and_branch (e, dest);

      if (s == e)
	{
	  /* Create arguments for the phi nodes, since the edge was not
	     here before.  */
	  for (gphi_iterator psi = gsi_start_phis (dest);
	       !gsi_end_p (psi);
	       gsi_next (&psi))
	    {
	      gphi *phi = psi.phi ();
	      source_location l = gimple_phi_arg_location_from_edge (phi, succ);
	      tree def = gimple_phi_arg_def (phi, succ->dest_idx);
	      add_phi_arg (phi, unshare_expr (def), s, l);
	    }
	}
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
	break;
      decl = gimple_label_label (as_a <glabel *> (label));
      if (EH_LANDING_PAD_NR (decl) != 0
	  || DECL_NONLOCAL (decl)
	  || FORCED_LABEL (decl)
	  || !DECL_ARTIFICIAL (decl))
	{
	  gsi_remove (&gsi, false);
	  gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
	}
      else
	gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
	{
	  gimple debug = gsi_stmt (gsi);
	  if (!is_gimple_debug (debug))
	    break;
	  gsi_remove (&gsi, false);
	  gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
	}
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
	{
	  /* Shortcut to avoid calling (relatively expensive)
	     nearest_common_dominator unless necessary.  */
	  dom = dombb;
	}
      else
	dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* Adjust latch infomation of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
581
566d09ef
JH
/* STMT is a call that has been discovered noreturn.  Fixup the CFG
   and remove LHS.  Return true if something changed.  */

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);

  /* __builtin_return is special: leave it alone.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    {
      if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
	{
	  /* Don't split if there are only debug stmts
	     after stmt, that can result in -fcompare-debug
	     failures.  Remove the debug stmts instead,
	     they should be all unreachable anyway.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  for (gsi_next (&gsi); !gsi_end_p (gsi); )
	    gsi_remove (&gsi, true);
	}
      else
	split_block (bb, stmt);
    }

  /* If there is an LHS, remove it.  A noreturn call never produces a
     usable value.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to fix up the SSA name to avoid checking errors.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Re-home the now-undefined SSA name on a fresh temporary as
	     its default definition so SSA verification stays happy.  */
	  tree new_var = create_tmp_reg (TREE_TYPE (lhs));
	  SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, new_var);
	  SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
	  set_ssa_default_def (cfun, new_var, lhs);
	}

      update_stmt (stmt);
    }

  /* Code after a noreturn call is unreachable; drop the fallthru edge.  */
  return remove_fallthru_edge (bb->succs);
}
630
631
672987e8
ZD
/* Split basic blocks on calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  Operates on a
   single block BB; returns true if anything changed.  */

static bool
split_bb_on_noreturn_calls (basic_block bb)
{
  bool changed = false;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (!is_gimple_call (stmt))
	continue;

      if (gimple_call_noreturn_p (stmt))
	changed |= fixup_noreturn_call (stmt);
    }

  /* Queue BB for another cleanup round if anything changed.  */
  if (changed)
    bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  return changed;
}
656
672987e8
ZD
/* Tries to cleanup cfg in basic block BB.  Returns true if anything
   changes.  Applies, in order: control-flow simplification, forwarder
   block removal, and merging with the single successor.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}
681
/* Iterate the cfg cleanups, while anything changes.  First sweeps all
   blocks once, then drains the cfgcleanup_altered_bbs worklist until it
   is empty.  Returns true if the CFG was modified.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB_FN,
     since the basic blocks may get removed.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  retval |= cleanup_tree_cfg_bb (bb);
	  retval |= split_bb_on_noreturn_calls (bb);
	}
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      /* Entry/exit blocks are never cleaned up.  */
      if (i < NUM_FIXED_BLOCKS)
	continue;

      /* The block may have been deleted since it was queued.  */
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (!bb)
	continue;

      retval |= cleanup_tree_cfg_bb (bb);

      /* Rerun split_bb_on_noreturn_calls, in case we have altered any noreturn
	 calls.  */
      retval |= split_bb_on_noreturn_calls (bb);
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}
735
736
e3594cb3
DN
/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if the flowgraph was modified, false otherwise.  Does not
   repair loop structures; callers needing that use cleanup_tree_cfg.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  /* CFG changes can invalidate the loop tree; mark it for fixup.  */
  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}
781
/* Repairs loop structures after CFG cleanup invalidated them (see
   LOOPS_NEED_FIXUP).  Recomputes dominators, fixes the loop tree, and
   rewrites into loop-closed SSA where required.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
				  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  /* Scalar evolution caches refer to the old loop tree; discard them.  */
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}
814
815/* Cleanup cfg and repair loop structures. */
816
817bool
818cleanup_tree_cfg (void)
819{
820 bool changed = cleanup_tree_cfg_noloop ();
821
822 if (current_loops != NULL
f87000d0 823 && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
592c303d
ZD
824 repair_loop_structures ();
825
1994bfea 826 return changed;
c9784e6d
KH
827}
828
a9e0d843
RB
/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  Unlike remove_forwarder_block, BB is
   expected to contain PHI nodes; their arguments are folded into the
   successor's PHIs via the edge-var-map recorded during redirection.  */

static bool
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label)
    if (glabel *label_stmt = dyn_cast <glabel *> (label))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Record BB's single pred in case we need to update the father
     loop's latch information later.  */
  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gphi_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
	{
	  /* We already have an edge S from E->src to DEST.  If S and
	     E->dest's sole successor edge have the same PHI arguments
	     at DEST, redirect S to DEST.  */
	  if (phi_alternatives_equal (dest, s, succ))
	    {
	      e = redirect_edge_and_branch (e, dest);
	      redirect_edge_var_map_clear (e);
	      continue;
	    }

	  /* PHI arguments are different.  Create a forwarder block by
	     splitting E so that we can merge PHI arguments on E to
	     DEST.  */
	  e = single_succ_edge (split_edge (e));
	}

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
	 destination of E.  */
      for (gsi = gsi_start_phis (dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree def = gimple_phi_arg_def (phi, succ->dest_idx);
	  source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

	  if (TREE_CODE (def) == SSA_NAME)
	    {
	      /* If DEF is one of the results of PHI nodes removed during
		 redirection, replace it with the PHI argument that used
		 to be on E.  */
	      vec<edge_var_map> *head = redirect_edge_var_map_vector (e);
	      size_t length = head ? head->length () : 0;
	      for (size_t i = 0; i < length; i++)
		{
		  edge_var_map *vm = &(*head)[i];
		  tree old_arg = redirect_edge_var_map_result (vm);
		  tree new_arg = redirect_edge_var_map_def (vm);

		  if (def == old_arg)
		    {
		      def = new_arg;
		      locus = redirect_edge_var_map_location (vm);
		      break;
		    }
		}
	    }

	  add_phi_arg (phi, def, s, locus);
	}

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
	 nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Adjust latch infomation of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);

  return true;
}
953
954/* This pass merges PHI nodes if one feeds into another. For example,
955 suppose we have the following:
956
957 goto <bb 9> (<L9>);
958
959<L8>:;
960 tem_17 = foo ();
961
962 # tem_6 = PHI <tem_17(8), tem_23(7)>;
963<L9>:;
964
965 # tem_3 = PHI <tem_6(9), tem_2(5)>;
966<L10>:;
967
968 Then we merge the first PHI node into the second one like so:
969
970 goto <bb 9> (<L10>);
971
972<L8>:;
973 tem_17 = foo ();
974
975 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
976<L10>:;
977*/
978
be55bfe6
TS
979namespace {
980
/* Pass descriptor for the "mergephi" pass: merges PHI nodes of a
   forwarder block into its successor (see the example above).  Requires
   CFG and SSA; leaves no additional properties.  */

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The mergephi pass object; clonable so it can appear at several points
   in the pass pipeline.  */

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_merge_phi
1006
/* Main driver of the PHI-merging pass: collect every forwarder block
   whose PHIs can be folded into its single successor, then remove
   those blocks.  Collection and removal are two separate phases so
   that removing a block never invalidates the FOR_EACH_BB_FN walk.  */

unsigned int
pass_merge_phi::execute (function *fun)
{
  /* Worst case every basic block is a candidate, so size the
     worklist to the whole function.  CURRENT is the push pointer.  */
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (fun));
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
	continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
	 nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
	  /* We don't want to deal with a basic block with
	     abnormal edges.  */
	  || bb_has_abnormal_pred (bb))
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
	{
	  /* If BB does not dominate DEST, then the PHI nodes at
	     DEST must be the only users of the results of the PHI
	     nodes at BB.  */
	  *current++ = bb;
	}
      else
	{
	  gphi_iterator gsi;
	  unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

	  /* BB dominates DEST.  There may be many users of the PHI
	     nodes in BB.  However, there is still a trivial case we
	     can handle.  If the result of every PHI in BB is used
	     only by a PHI in DEST, then we can trivially merge the
	     PHI nodes from BB into DEST.  */
	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *phi = gsi.phi ();
	      tree result = gimple_phi_result (phi);
	      use_operand_p imm_use;
	      gimple use_stmt;

	      /* If the PHI's result is never used, then we can just
		 ignore it.  */
	      if (has_zero_uses (result))
		continue;

	      /* Get the single use of the result of this PHI node.
		 It must be a PHI in DEST, reached through the edge
		 BB->DEST (dest_idx); otherwise give up on BB.  */
	      if (!single_imm_use (result, &imm_use, &use_stmt)
		  || gimple_code (use_stmt) != GIMPLE_PHI
		  || gimple_bb (use_stmt) != dest
		  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
		break;
	    }

	  /* If the loop above iterated through all the PHI nodes
	     in BB, then we can merge the PHIs from BB into DEST.  */
	  if (gsi_end_p (gsi))
	    *current++ = bb;
	}
    }

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
  if (changed
      && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return 0;
}
1097
27a4cd48
DM
1098} // anon namespace
1099
1100gimple_opt_pass *
1101make_pass_merge_phi (gcc::context *ctxt)
1102{
1103 return new pass_merge_phi (ctxt);
1104}
4484a35a
AM
1105
1106/* Pass: cleanup the CFG just before expanding trees to RTL.
1107 This is just a round of label cleanups and case node grouping
1108 because after the tree optimizers have run such cleanups may
1109 be necessary. */
1110
1111static unsigned int
1112execute_cleanup_cfg_post_optimizing (void)
1113{
1114 unsigned int todo = 0;
1115 if (cleanup_tree_cfg ())
1116 todo |= TODO_update_ssa;
1117 maybe_remove_unreachable_handlers ();
1118 cleanup_dead_labels ();
1119 group_case_labels ();
1120 if ((flag_compare_debug_opt || flag_compare_debug)
1121 && flag_dump_final_insns)
1122 {
1123 FILE *final_output = fopen (flag_dump_final_insns, "a");
1124
1125 if (!final_output)
1126 {
1127 error ("could not open final insn dump file %qs: %m",
1128 flag_dump_final_insns);
1129 flag_dump_final_insns = NULL;
1130 }
1131 else
1132 {
1133 int save_unnumbered = flag_dump_unnumbered;
1134 int save_noaddr = flag_dump_noaddr;
1135
1136 flag_dump_noaddr = flag_dump_unnumbered = 1;
1137 fprintf (final_output, "\n");
1138 dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
1139 flag_dump_noaddr = save_noaddr;
1140 flag_dump_unnumbered = save_unnumbered;
1141 if (fclose (final_output))
1142 {
1143 error ("could not close final insn dump file %qs: %m",
1144 flag_dump_final_insns);
1145 flag_dump_final_insns = NULL;
1146 }
1147 }
1148 }
1149 return todo;
1150}
1151
1152namespace {
1153
/* Pass metadata for the "optimized" pass (the final pre-expand CFG
   cleanup).  Fields are positional, matching struct pass_data; it
   requires only a CFG and schedules removal of unused locals after
   it finishes.  */

const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};
1166
1167class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
1168{
1169public:
1170 pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
1171 : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
1172 {}
1173
1174 /* opt_pass methods: */
be55bfe6
TS
1175 virtual unsigned int execute (function *)
1176 {
1177 return execute_cleanup_cfg_post_optimizing ();
1178 }
4484a35a
AM
1179
1180}; // class pass_cleanup_cfg_post_optimizing
1181
1182} // anon namespace
1183
1184gimple_opt_pass *
1185make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
1186{
1187 return new pass_cleanup_cfg_post_optimizing (ctxt);
1188}
1189
1190