/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
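/* A minimal illustration of the three phases (the function and variable
   names below are purely illustrative, not taken from any testcase):

       int
       f (int b)
       {
         int a = b + 1;    <- nothing marked necessary ever uses 'a'
         return b * 2;     <- phase 1: the return is obviously necessary
       }

   Phase 2 then marks the computation of b * 2 because it feeds the
   return value, and phase 3 deletes the assignment to 'a', which was
   never marked necessary.  */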
#include "coretypes.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;
/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
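/* As an illustrative example (a hypothetical CFG, not from a testcase):

       if (p)          <- block A ends in the controlling GIMPLE_COND
         x_1 = 1;      <- block B executes only along the edge A->B
       x_2 = PHI <x_1, x_0>
       use (x_2);      <- block C post-dominates A

   B is control dependent on the edge A->B, so the bit for that edge is
   set in B's bitmap; C post-dominates A and is therefore control
   dependent on neither edge out of A.  */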
/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */
static void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
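/* To illustrate the propagation driven by the two helpers above
   (hypothetical GIMPLE, SSA names invented):

       x_1 = a_2 + b_3;
       y_4 = x_1 * 2;
       return y_4;

   Marking the return necessary makes operand y_4 necessary, which pushes
   its defining statement on the worklist; processing that statement in
   turn marks x_1's definition, and so on up the use-def chains.  */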
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all function calls are required.  Function calls that
           produce no result and have no side effects (i.e. const pure
           functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
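/* Illustration of "obviously necessary" statements (hypothetical code):

       volatile int flag;
       int g;

       void
       f (int x)
       {
         flag = 1;          <- volatile operand: marked immediately
         g = x;             <- may clobber global memory: marked
         int t = x + 1;     <- left unmarked here; it survives only if a
                               necessary statement turns out to use it
       }
 */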
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise the control dependence information computed by the
   caller is used to mark only the control statements that really are.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }
  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;

      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
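/* For example (illustrative only): given a local

       struct S { int a[4]; } s;

   whose address is never taken, a reference such as s.a[i] strips down
   to the DECL 's' and the function returns false; a reference through a
   pointer like *p_1, or to an addressable or global variable, yields
   true.  */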
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;

      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == refd->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs.  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;

  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}
static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one or identical arguments; such
   degenerate PHIs can be removed.  */

static bool
degenerate_phi_p (gimple *phi)
{
  unsigned i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
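/* E.g. (illustrative) the PHI

       x_3 = PHI <x_1(bb2), x_1(bb3)>

   is degenerate: every argument is x_1, so the PHI merely copies x_1 and
   carries no control-dependent information of its own.  */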
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");
  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }
      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }
          /* For PHI operands it matters from where the control flow arrives
             at the BB.  Consider the following example:

             a = exp1;
             b = exp2;
             if (test)
                ;
             else
                ;
             c = PHI (a, b)

             We need to mark the control dependence of the empty basic blocks,
             since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block is
             in a loop.  Consider:

              if (b)
                {
                  int i;
                  for (i = 0; i < 1000; ++i)
                    ;
                  j = 0;
                }
              return j;

             There is a PHI for J in the BB containing the return statement.
             In this case the control dependence of the predecessor block (that
             is within the empty loop) also contains the block determining the
             number of iterations of the loop, which would prevent removing the
             empty loop in this case.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look like if the edge was split.

             Consider the modified CFG created from the current CFG by splitting
             edge B->C.  In the postdominance tree of the modified CFG, C' is
             always a child of C.  There are two cases how the children of C'
             can look like:

             1) C' is a leaf.

                In this case the only basic block C' is control dependent on
                is B.

             2) C' has a single child that is B.

                In this case the control dependence of C' is the same as the
                control dependence of B in the original CFG except for block B
                itself (since C' postdominates B in the modified CFG).

             Now how do we decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor of B
                that is not postdominated by B.  Testing this condition is
                somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other path
                from B to C that is control dependent on B, and marking control
                dependencies of B itself is harmless because they will be
                processed anyway after processing the control statement in B.

             b) C does not postdominate B.  Case 1 always happens, since there
                is a path from C to exit that does not go through B and thus
                also not through C'.  */
          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple *bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check used
                     bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;
          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */
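          /* Illustrative example for the two cases above (names invented):
             for a load 'tmp_1 = local[i_2]' where 'local' is a non-aliased
             decl, case 1 walks the virtual use-def chain and can stop at
             the first dominating store that fully covers local[i_2]; for
             'tmp_3 = *p_4' or for a call argument, case 2 simply marks
             every may-definition on the chain, sharing the global visited
             bitmap across walks.  */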
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}
/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gphi_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);

  /* If edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing control dependent BB.
         This means that we can pick any edge to duplicate PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;

      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gphi *phi = gsi.phi ();
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (virtual_operand_p (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* The resulting PHI, if not dead, can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e2;
}
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If edge is already there, try to use it.  This avoids need to update
         PHI nodes.  Also watch for cases where the post dominator does not
         exist or is the exit block.  These can happen for infinite loops as
         we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }
  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
/* Helper for maybe_optimize_arith_overflow.  Find in *TP if there are any
   uses of DATA (an SSA_NAME) other than through a REALPART_EXPR referencing
   it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }
  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed
                 by allocation function to allow unnecessary
                 alloc-free sequence elimination.  For instrumented
                 calls it also means we did not mark the bounds producer
                 as necessary and it is time to do it in case the free
                 call is not removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple *bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }
          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers whose SSA operands are all still live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;

                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
))
1376 tree name
= gimple_call_lhs (stmt
);
1378 notice_special_calls (as_a
<gcall
*> (stmt
));
1380 /* When LHS of var = call (); is dead, simplify it into
1381 call (); saving one operand. */
1383 && TREE_CODE (name
) == SSA_NAME
1384 && !bitmap_bit_p (processed
, SSA_NAME_VERSION (name
))
1385 /* Avoid doing so for allocation calls which we
1386 did not mark as necessary, it will confuse the
1387 special logic we apply to malloc/free pair removal. */
1388 && (!(call
= gimple_call_fndecl (stmt
))
1389 || DECL_BUILT_IN_CLASS (call
) != BUILT_IN_NORMAL
1390 || (DECL_FUNCTION_CODE (call
) != BUILT_IN_ALIGNED_ALLOC
1391 && DECL_FUNCTION_CODE (call
) != BUILT_IN_MALLOC
1392 && DECL_FUNCTION_CODE (call
) != BUILT_IN_CALLOC
1393 && DECL_FUNCTION_CODE (call
) != BUILT_IN_ALLOCA
1394 && (DECL_FUNCTION_CODE (call
)
1395 != BUILT_IN_ALLOCA_WITH_ALIGN
)))
1396 /* Avoid doing so for bndret calls for the same reason. */
1397 && !chkp_gimple_call_builtin_p (stmt
, BUILT_IN_CHKP_BNDRET
))
1399 something_changed
= true;
1400 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1402 fprintf (dump_file
, "Deleting LHS of call: ");
1403 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1404 fprintf (dump_file
, "\n");
1407 gimple_call_set_lhs (stmt
, NULL_TREE
);
1408 maybe_clean_or_replace_eh_stmt (stmt
, stmt
);
1410 release_ssa_name (name
);
1412 /* GOMP_SIMD_LANE without lhs is not needed. */
1413 if (gimple_call_internal_p (stmt
)
1414 && gimple_call_internal_fn (stmt
) == IFN_GOMP_SIMD_LANE
)
1415 remove_dead_stmt (&gsi
, bb
);
1417 else if (gimple_call_internal_p (stmt
))
1418 switch (gimple_call_internal_fn (stmt
))
1420 case IFN_ADD_OVERFLOW
:
1421 maybe_optimize_arith_overflow (&gsi
, PLUS_EXPR
);
1423 case IFN_SUB_OVERFLOW
:
1424 maybe_optimize_arith_overflow (&gsi
, MINUS_EXPR
);
1426 case IFN_MUL_OVERFLOW
:
1427 maybe_optimize_arith_overflow (&gsi
, MULT_EXPR
);
  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}
1528 percg
= ((float) stats
.removed
/ (float) stats
.total
) * 100;
1529 fprintf (dump_file
, "Removed %d of %d statements (%d%%)\n",
1530 stats
.removed
, stats
.total
, (int) percg
);
1532 if (stats
.total_phis
== 0)
1535 percg
= ((float) stats
.removed_phis
/ (float) stats
.total_phis
) * 100;
1537 fprintf (dump_file
, "Removed %d of %d PHI nodes (%d%%)\n",
1538 stats
.removed_phis
, stats
.total_phis
, (int) percg
);
/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}
/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */
static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that the loop will
     be proven finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (scev_initialized_p ())
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */

static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}
const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}