/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements that have no
   impact on the program's output: "dead statements" can be removed
   without changing observable behavior, while "necessary statements"
   may affect the output and must be preserved.

   The algorithm consists of three phases:
     1. Marking as necessary all statements known to be necessary,
        e.g. most function calls, writing a value to memory, etc;
     2. Propagating necessary statements, e.g., the statements
        giving values to operands in necessary statements; and
     3. Removing dead statements.  */
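
/* As a small illustrative example (not taken from this pass), consider:

     int
     f (int a, int b)
     {
       int unused = a * b;
       return a + b;
     }

   Phase 1 marks the return statement necessary; phase 2 then marks the
   statements computing the returned value necessary; phase 3 removes the
   assignment to 'unused', on which nothing necessary depends.  */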
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49
50 #include "tree.h"
51 #include "calls.h"
52 #include "gimple-pretty-print.h"
53 #include "basic-block.h"
54 #include "gimple.h"
55 #include "gimplify.h"
56 #include "gimple-iterator.h"
57 #include "gimple-ssa.h"
58 #include "tree-cfg.h"
59 #include "tree-phinodes.h"
60 #include "ssa-iterators.h"
61 #include "stringpool.h"
62 #include "tree-ssanames.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
65 #include "expr.h"
66 #include "tree-dfa.h"
67 #include "tree-pass.h"
68 #include "flags.h"
69 #include "cfgloop.h"
70 #include "tree-scalar-evolution.h"
71
72 static struct stmt_stats
73 {
74 int total;
75 int total_phis;
76 int removed;
77 int removed_phis;
78 } stats;
79
80 #define STMT_NECESSARY GF_PLF_1
81
82 static vec<gimple> worklist;
83
84 /* Vector indicating an SSA name has already been processed and marked
85 as necessary. */
86 static sbitmap processed;
87
88 /* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90 static sbitmap last_stmt_necessary;
91
92 /* Vector indicating that BB contains statements that are live. */
93 static sbitmap bb_contains_live_stmts;
94
95 /* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
97
98 We expect each block to be control dependent on very few edges so we
99 use a bitmap for each block recording its edges. An array holds the
100 bitmap. The Ith bit in the bitmap is set if that block is dependent
101 on the Ith edge. */
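
/* As an illustration (hypothetical block numbers, for exposition only):

     bb 2:  if (x_1 > 0) goto bb 3; else goto bb 4;
     bb 3:  y_2 = 1;
     bb 4:  ...

   Block 3 executes iff the edge 2->3 is taken, so block 3 is control
   dependent on edge 2->3, and the bit for that edge would be set in
   block 3's bitmap.  */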
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;


/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG cleanup can then remove the
     block and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all, function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const or
           pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
          && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise control dependence analysis is used to decide which
   control statements are necessary.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite, loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB (bb)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
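
/* For instance (illustrative only): for a reference a.b.c the loop above
   strips the handled components down to the base decl 'a', and for
   MEM[&a].b the MEM_REF case likewise resolves to 'a'.  The function
   then returns false only when that base is a decl that cannot be
   aliased, e.g. a local variable whose address is never taken.  */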

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */
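
/* As an illustrative sketch, for a walk started from a load of a
   non-aliased decl 'a':

     a = ...;        <- kill of 'a': marked necessary, walk stops here
     *p_1 = ...;     <- may-def: marked necessary, walk continues
     ... = a;        <- the load whose VUSE started the walk

   The walk proceeds upwards from the load, so the may-def through p_1
   is visited before the killing store.  */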

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one argument or with all arguments
   identical; such PHI nodes can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
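
/* For example (illustrative GIMPLE), the PHI node

     x_3 = PHI <x_1(2), x_1(4)>

   is degenerate because both arguments are x_1, whereas

     x_3 = PHI <x_1(2), x_2(4)>

   is not.  */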

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   If AGGRESSIVE is true, control dependences are taken into account.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             at the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain computations of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (which is inside the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent the removal of the empty loop in this case.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify what the
             control dependence would look like if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C; call the new block C'.  In the
             postdominance tree of the modified CFG, C' is always a child
             of C.  There are two cases for what the children of C' can
             look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as
                the control dependence of B in the original CFG, except
                for block B itself (since C' postdominates B in the
                modified CFG).

             Now, how do we decide which case happens?  There are two
             basic options:

             a) C postdominates B.  Then C immediately postdominates B,
                and case 2 happens iff there is no other way from B to C
                except the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this
                condition is somewhat expensive, because we need to
                iterate over all successors of B.  We are safe to assume
                that this does not happen: we will mark B as needed when
                processing the other path from B to C that is control
                dependent on B, and marking control dependencies of B
                itself is harmless because they will be processed anyway
                after processing the control statement in B.

             b) C does not postdominate B.  Then case 1 always happens,
                since there is a path from C to exit that does not go
                through B and thus also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free that is directly fed by an
             allocation function, do not mark it necessary through
             processing the argument.  */
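          /* E.g., for the (illustrative) sequence

               p_1 = __builtin_malloc (16);
               __builtin_free (p_1);

             we do not want the free to make the malloc necessary; if
             nothing else uses p_1, eliminate_unnecessary_stmts can then
             delete the pair.  */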
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                continue;
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, except the non-aliased ones handled
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_RETURN)
            {
              tree rhs = gimple_return_retval (stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_ASM)
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls
             back to super-linear complexity.  */
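          /* For example (hypothetical numbers): 200 walks that each
             visited 100 may-defs give total_chain == 20000, which
             exceeds 128 * 128 == 16384, 32 * longest_chain == 3200
             (for a longest walk of 100) and nr_walks * 32 == 6400,
             so we would drop to simple mode at that point.  */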
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple phi;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can pick any edge to duplicate
         PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gimple phi = gsi_stmt (gsi);
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (virtual_operand_p (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply a control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI, if not dead, can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
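  /* As an illustration (hypothetical block numbers): if bb 2 ends in a
     dead GIMPLE_COND guarding bb 3 and bb 4, and bb 5 is the immediate
     post-dominator of bb 2, the surviving edge out of bb 2 is redirected
     to bb 5 and becomes a fallthru edge; bb 3 and bb 4 then become
     unreachable and are cleaned up later.  */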
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gimple note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is an allocation
             function and that is not necessary itself.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              tree callee2;
              gimple def_stmt;
              if (TREE_CODE (ptr) != SSA_NAME)
                continue;
              def_stmt = SSA_NAME_DEF_STMT (ptr);
              if (!is_gimple_call (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY))
                continue;
              callee2 = gimple_call_fndecl (def_stmt);
              if (callee2 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL
                  || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC
                      && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC))
                continue;
              gimple_set_plf (stmt, STMT_NECESSARY, false);
            }

          /* If the statement is not necessary, remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (stmt);

              /* When the LHS of var = call (); is dead, simplify it into
                 call (); to save one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary; it would confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);
                }
            }
        }
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that
     we rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi_stmt (gsi)))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that loops
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents = sbitmap_alloc (last_basic_block);
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    return TODO_update_ssa | TODO_cleanup_cfg;
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_dce_loop =
{
  GIMPLE_PASS, /* type */
  "dceloop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_dce_loop : public gimple_opt_pass
{
public:
  pass_dce_loop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce_loop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce_loop (m_ctxt); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_dce_loop (); }

}; // class pass_dce_loop

} // anon namespace

gimple_opt_pass *
make_pass_dce_loop (gcc::context *ctxt)
{
  return new pass_dce_loop (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_ssa | TODO_verify_flow ), /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  bool gate () { return gate_dce (); }
  unsigned int execute () { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}