/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2021 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
#include "tree-ssa.h"
72 static struct stmt_stats
80 #define STMT_NECESSARY GF_PLF_1
82 static vec
<gimple
*> worklist
;
84 /* Vector indicating an SSA name has already been processed and marked
86 static sbitmap processed
;
88 /* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90 static sbitmap last_stmt_necessary
;
92 /* Vector indicating that BB contains statements that are live. */
93 static sbitmap bb_contains_live_stmts
;
95 /* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
98 We expect each block to be control dependent on very few edges so we
99 use a bitmap for each block recording its edges. An array holds the
100 bitmap. The Ith bit in the bitmap is set if that block is dependent
102 static control_dependences
*cd
;
104 /* Vector indicating that a basic block has already had all the edges
105 processed that it is control dependent on. */
106 static sbitmap visited_control_parents
;
108 /* TRUE if this pass alters the CFG (by removing control statements).
111 If this pass alters the CFG, then it will arrange for the dominators
113 static bool cfg_altered
;
115 /* When non-NULL holds map from basic block index into the postorder. */
116 static int *bb_postorder
;
119 /* True if we should treat any stmt with a vdef as necessary. */
124 return optimize_debug
;
127 /* If STMT is not already marked necessary, mark it, and add it to the
128 worklist if ADD_TO_WORKLIST is true. */
131 mark_stmt_necessary (gimple
*stmt
, bool add_to_worklist
)
135 if (gimple_plf (stmt
, STMT_NECESSARY
))
138 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
140 fprintf (dump_file
, "Marking useful stmt: ");
141 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
142 fprintf (dump_file
, "\n");
145 gimple_set_plf (stmt
, STMT_NECESSARY
, true);
147 worklist
.safe_push (stmt
);
148 if (add_to_worklist
&& bb_contains_live_stmts
&& !is_gimple_debug (stmt
))
149 bitmap_set_bit (bb_contains_live_stmts
, gimple_bb (stmt
)->index
);
153 /* Mark the statement defining operand OP as necessary. */
156 mark_operand_necessary (tree op
)
163 ver
= SSA_NAME_VERSION (op
);
164 if (bitmap_bit_p (processed
, ver
))
166 stmt
= SSA_NAME_DEF_STMT (op
);
167 gcc_assert (gimple_nop_p (stmt
)
168 || gimple_plf (stmt
, STMT_NECESSARY
));
171 bitmap_set_bit (processed
, ver
);
173 stmt
= SSA_NAME_DEF_STMT (op
);
176 if (gimple_plf (stmt
, STMT_NECESSARY
) || gimple_nop_p (stmt
))
179 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
181 fprintf (dump_file
, "marking necessary through ");
182 print_generic_expr (dump_file
, op
);
183 fprintf (dump_file
, " stmt ");
184 print_gimple_stmt (dump_file
, stmt
, 0);
187 gimple_set_plf (stmt
, STMT_NECESSARY
, true);
188 if (bb_contains_live_stmts
)
189 bitmap_set_bit (bb_contains_live_stmts
, gimple_bb (stmt
)->index
);
190 worklist
.safe_push (stmt
);
194 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
195 it can make other statements necessary.
197 If AGGRESSIVE is false, control statements are conservatively marked as
201 mark_stmt_if_obviously_necessary (gimple
*stmt
, bool aggressive
)
203 /* Statements that are implicitly live. Most function calls, asm
204 and return statements are required. Labels and GIMPLE_BIND nodes
205 are kept because they are control flow, and we have no way of
206 knowing whether they can be removed. DCE can eliminate all the
207 other statements in a block, and CFG can then remove the block
209 switch (gimple_code (stmt
))
213 mark_stmt_necessary (stmt
, false);
219 mark_stmt_necessary (stmt
, true);
224 tree callee
= gimple_call_fndecl (stmt
);
225 if (callee
!= NULL_TREE
226 && fndecl_built_in_p (callee
, BUILT_IN_NORMAL
))
227 switch (DECL_FUNCTION_CODE (callee
))
229 case BUILT_IN_MALLOC
:
230 case BUILT_IN_ALIGNED_ALLOC
:
231 case BUILT_IN_CALLOC
:
232 CASE_BUILT_IN_ALLOCA
:
233 case BUILT_IN_STRDUP
:
234 case BUILT_IN_STRNDUP
:
235 case BUILT_IN_GOMP_ALLOC
:
241 if (callee
!= NULL_TREE
242 && flag_allocation_dce
243 && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee
))
246 /* IFN_GOACC_LOOP calls are necessary in that they are used to
247 represent parameter (i.e. step, bound) of a lowered OpenACC
248 partitioned loop. But this kind of partitioned loop might not
249 survive from aggressive loop removal for it has loop exit and
250 is assumed to be finite. Therefore, we need to explicitly mark
251 these calls. (An example is libgomp.oacc-c-c++-common/pr84955.c) */
252 if (gimple_call_internal_p (stmt
, IFN_GOACC_LOOP
))
254 mark_stmt_necessary (stmt
, true);
261 /* Debug temps without a value are not useful. ??? If we could
262 easily locate the debug temp bind stmt for a use thereof,
263 would could refrain from marking all debug temps here, and
264 mark them only if they're used. */
265 if (gimple_debug_nonbind_marker_p (stmt
)
266 || !gimple_debug_bind_p (stmt
)
267 || gimple_debug_bind_has_value_p (stmt
)
268 || TREE_CODE (gimple_debug_bind_get_var (stmt
)) != DEBUG_EXPR_DECL
)
269 mark_stmt_necessary (stmt
, false);
273 gcc_assert (!simple_goto_p (stmt
));
274 mark_stmt_necessary (stmt
, true);
278 gcc_assert (EDGE_COUNT (gimple_bb (stmt
)->succs
) == 2);
283 mark_stmt_necessary (stmt
, true);
287 if (gimple_clobber_p (stmt
))
295 /* If the statement has volatile operands, it needs to be preserved.
296 Same for statements that can alter control flow in unpredictable
298 if (gimple_has_side_effects (stmt
) || is_ctrl_altering_stmt (stmt
))
300 mark_stmt_necessary (stmt
, true);
304 /* If a statement could throw, it can be deemed necessary unless we
305 are allowed to remove dead EH. Test this after checking for
306 new/delete operators since we always elide their EH. */
307 if (!cfun
->can_delete_dead_exceptions
308 && stmt_could_throw_p (cfun
, stmt
))
310 mark_stmt_necessary (stmt
, true);
314 if ((gimple_vdef (stmt
) && keep_all_vdefs_p ())
315 || stmt_may_clobber_global_p (stmt
))
317 mark_stmt_necessary (stmt
, true);
325 /* Mark the last statement of BB as necessary. */
328 mark_last_stmt_necessary (basic_block bb
)
330 gimple
*stmt
= last_stmt (bb
);
332 bitmap_set_bit (last_stmt_necessary
, bb
->index
);
333 bitmap_set_bit (bb_contains_live_stmts
, bb
->index
);
335 /* We actually mark the statement only if it is a control statement. */
336 if (stmt
&& is_ctrl_stmt (stmt
))
337 mark_stmt_necessary (stmt
, true);
341 /* Mark control dependent edges of BB as necessary. We have to do this only
342 once for each basic block so we set the appropriate bit after we're done.
344 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
347 mark_control_dependent_edges_necessary (basic_block bb
, bool ignore_self
)
350 unsigned edge_number
;
351 bool skipped
= false;
353 gcc_assert (bb
!= EXIT_BLOCK_PTR_FOR_FN (cfun
));
355 if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
358 EXECUTE_IF_SET_IN_BITMAP (cd
->get_edges_dependent_on (bb
->index
),
361 basic_block cd_bb
= cd
->get_edge_src (edge_number
);
363 if (ignore_self
&& cd_bb
== bb
)
369 if (!bitmap_bit_p (last_stmt_necessary
, cd_bb
->index
))
370 mark_last_stmt_necessary (cd_bb
);
374 bitmap_set_bit (visited_control_parents
, bb
->index
);
378 /* Find obviously necessary statements. These are things like most function
379 calls, and stores to file level variables.
381 If EL is NULL, control statements are conservatively marked as
382 necessary. Otherwise it contains the list of edges used by control
383 dependence analysis. */
386 find_obviously_necessary_stmts (bool aggressive
)
389 gimple_stmt_iterator gsi
;
394 FOR_EACH_BB_FN (bb
, cfun
)
396 /* PHI nodes are never inherently necessary. */
397 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
399 phi
= gsi_stmt (gsi
);
400 gimple_set_plf (phi
, STMT_NECESSARY
, false);
403 /* Check all statements in the block. */
404 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
406 stmt
= gsi_stmt (gsi
);
407 gimple_set_plf (stmt
, STMT_NECESSARY
, false);
408 mark_stmt_if_obviously_necessary (stmt
, aggressive
);
412 /* Pure and const functions are finite and thus have no infinite loops in
414 flags
= flags_from_decl_or_type (current_function_decl
);
415 if ((flags
& (ECF_CONST
|ECF_PURE
)) && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
418 /* Prevent the empty possibly infinite loops from being removed. This is
419 needed to make the logic in remove_dead_stmt work to identify the
420 correct edge to keep when removing a controlling condition. */
423 if (mark_irreducible_loops ())
424 FOR_EACH_BB_FN (bb
, cfun
)
427 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
428 if ((e
->flags
& EDGE_DFS_BACK
)
429 && (e
->flags
& EDGE_IRREDUCIBLE_LOOP
))
432 fprintf (dump_file
, "Marking back edge of irreducible "
433 "loop %i->%i\n", e
->src
->index
, e
->dest
->index
);
434 mark_control_dependent_edges_necessary (e
->dest
, false);
438 for (auto loop
: loops_list (cfun
, 0))
439 /* For loops without an exit do not mark any condition. */
440 if (loop
->exits
->next
->e
&& !finite_loop_p (loop
))
443 fprintf (dump_file
, "cannot prove finiteness of loop %i\n",
445 mark_control_dependent_edges_necessary (loop
->latch
, false);
451 /* Return true if REF is based on an aliased base, otherwise false. */
454 ref_may_be_aliased (tree ref
)
456 gcc_assert (TREE_CODE (ref
) != WITH_SIZE_EXPR
);
457 while (handled_component_p (ref
))
458 ref
= TREE_OPERAND (ref
, 0);
459 if ((TREE_CODE (ref
) == MEM_REF
|| TREE_CODE (ref
) == TARGET_MEM_REF
)
460 && TREE_CODE (TREE_OPERAND (ref
, 0)) == ADDR_EXPR
)
461 ref
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
462 return !(DECL_P (ref
)
463 && !may_be_aliased (ref
));
466 static bitmap visited
= NULL
;
467 static unsigned int longest_chain
= 0;
468 static unsigned int total_chain
= 0;
469 static unsigned int nr_walks
= 0;
470 static bool chain_ovfl
= false;
472 /* Worker for the walker that marks reaching definitions of REF,
473 which is based on a non-aliased decl, necessary. It returns
474 true whenever the defining statement of the current VDEF is
475 a kill for REF, as no dominating may-defs are necessary for REF
476 anymore. DATA points to the basic-block that contains the
477 stmt that refers to REF. */
480 mark_aliased_reaching_defs_necessary_1 (ao_ref
*ref
, tree vdef
, void *data
)
482 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vdef
);
484 /* All stmts we visit are necessary. */
485 if (! gimple_clobber_p (def_stmt
))
486 mark_operand_necessary (vdef
);
488 /* If the stmt lhs kills ref, then we can stop walking. */
489 if (gimple_has_lhs (def_stmt
)
490 && TREE_CODE (gimple_get_lhs (def_stmt
)) != SSA_NAME
491 /* The assignment is not necessarily carried out if it can throw
492 and we can catch it in the current function where we could inspect
494 ??? We only need to care about the RHS throwing. For aggregate
495 assignments or similar calls and non-call exceptions the LHS
496 might throw as well. */
497 && !stmt_can_throw_internal (cfun
, def_stmt
))
499 tree base
, lhs
= gimple_get_lhs (def_stmt
);
500 poly_int64 size
, offset
, max_size
;
504 = get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
, &reverse
);
505 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
506 so base == refd->base does not always hold. */
507 if (base
== ref
->base
)
509 /* For a must-alias check we need to be able to constrain
510 the accesses properly. */
511 if (known_eq (size
, max_size
)
512 && known_subrange_p (ref
->offset
, ref
->max_size
, offset
, size
))
514 /* Or they need to be exactly the same. */
516 /* Make sure there is no induction variable involved
517 in the references (gcc.c-torture/execute/pr42142.c).
518 The simplest way is to check if the kill dominates
520 /* But when both are in the same block we cannot
521 easily tell whether we came from a backedge
522 unless we decide to compute stmt UIDs
524 && (basic_block
) data
!= gimple_bb (def_stmt
)
525 && dominated_by_p (CDI_DOMINATORS
, (basic_block
) data
,
526 gimple_bb (def_stmt
))
527 && operand_equal_p (ref
->ref
, lhs
, 0))
532 /* Otherwise keep walking. */
537 mark_aliased_reaching_defs_necessary (gimple
*stmt
, tree ref
)
539 /* Should have been caught before calling this function. */
540 gcc_checking_assert (!keep_all_vdefs_p ());
544 gcc_assert (!chain_ovfl
);
545 ao_ref_init (&refd
, ref
);
546 chain
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
),
547 mark_aliased_reaching_defs_necessary_1
,
548 gimple_bb (stmt
), NULL
);
549 if (chain
> longest_chain
)
550 longest_chain
= chain
;
551 total_chain
+= chain
;
555 /* Worker for the walker that marks reaching definitions of REF, which
556 is not based on a non-aliased decl. For simplicity we need to end
557 up marking all may-defs necessary that are not based on a non-aliased
558 decl. The only job of this walker is to skip may-defs based on
559 a non-aliased decl. */
562 mark_all_reaching_defs_necessary_1 (ao_ref
*ref ATTRIBUTE_UNUSED
,
563 tree vdef
, void *data ATTRIBUTE_UNUSED
)
565 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vdef
);
567 /* We have to skip already visited (and thus necessary) statements
568 to make the chaining work after we dropped back to simple mode. */
570 && bitmap_bit_p (processed
, SSA_NAME_VERSION (vdef
)))
572 gcc_assert (gimple_nop_p (def_stmt
)
573 || gimple_plf (def_stmt
, STMT_NECESSARY
));
577 /* We want to skip stores to non-aliased variables. */
579 && gimple_assign_single_p (def_stmt
))
581 tree lhs
= gimple_assign_lhs (def_stmt
);
582 if (!ref_may_be_aliased (lhs
))
586 /* We want to skip statments that do not constitute stores but have
587 a virtual definition. */
588 if (gcall
*call
= dyn_cast
<gcall
*> (def_stmt
))
590 tree callee
= gimple_call_fndecl (call
);
591 if (callee
!= NULL_TREE
592 && fndecl_built_in_p (callee
, BUILT_IN_NORMAL
))
593 switch (DECL_FUNCTION_CODE (callee
))
595 case BUILT_IN_MALLOC
:
596 case BUILT_IN_ALIGNED_ALLOC
:
597 case BUILT_IN_CALLOC
:
598 CASE_BUILT_IN_ALLOCA
:
600 case BUILT_IN_GOMP_ALLOC
:
601 case BUILT_IN_GOMP_FREE
:
607 if (callee
!= NULL_TREE
608 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee
)
609 || DECL_IS_OPERATOR_DELETE_P (callee
))
610 && gimple_call_from_new_or_delete (call
))
614 if (! gimple_clobber_p (def_stmt
))
615 mark_operand_necessary (vdef
);
621 mark_all_reaching_defs_necessary (gimple
*stmt
)
623 /* Should have been caught before calling this function. */
624 gcc_checking_assert (!keep_all_vdefs_p ());
625 walk_aliased_vdefs (NULL
, gimple_vuse (stmt
),
626 mark_all_reaching_defs_necessary_1
, NULL
, &visited
);
629 /* Return true for PHI nodes with one or identical arguments
632 degenerate_phi_p (gimple
*phi
)
635 tree op
= gimple_phi_arg_def (phi
, 0);
636 for (i
= 1; i
< gimple_phi_num_args (phi
); i
++)
637 if (gimple_phi_arg_def (phi
, i
) != op
)
642 /* Return that NEW_CALL and DELETE_CALL are a valid pair of new
643 and delete operators. */
646 valid_new_delete_pair_p (gimple
*new_call
, gimple
*delete_call
)
648 tree new_asm
= DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call
));
649 tree delete_asm
= DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call
));
650 return valid_new_delete_pair_p (new_asm
, delete_asm
);
653 /* Propagate necessity using the operands of necessary statements.
654 Process the uses on each statement in the worklist, and add all
655 feeding statements which contribute to the calculation of this
656 value to the worklist.
658 In conservative mode, EL is NULL. */
661 propagate_necessity (bool aggressive
)
665 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
666 fprintf (dump_file
, "\nProcessing worklist:\n");
668 while (worklist
.length () > 0)
670 /* Take STMT from worklist. */
671 stmt
= worklist
.pop ();
673 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
675 fprintf (dump_file
, "processing: ");
676 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
677 fprintf (dump_file
, "\n");
682 /* Mark the last statement of the basic blocks on which the block
683 containing STMT is control dependent, but only if we haven't
685 basic_block bb
= gimple_bb (stmt
);
686 if (bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
687 && !bitmap_bit_p (visited_control_parents
, bb
->index
))
688 mark_control_dependent_edges_necessary (bb
, false);
691 if (gimple_code (stmt
) == GIMPLE_PHI
692 /* We do not process virtual PHI nodes nor do we track their
694 && !virtual_operand_p (gimple_phi_result (stmt
)))
696 /* PHI nodes are somewhat special in that each PHI alternative has
697 data and control dependencies. All the statements feeding the
698 PHI node's arguments are always necessary. In aggressive mode,
699 we also consider the control dependent edges leading to the
700 predecessor block associated with each PHI alternative as
702 gphi
*phi
= as_a
<gphi
*> (stmt
);
705 for (k
= 0; k
< gimple_phi_num_args (stmt
); k
++)
707 tree arg
= PHI_ARG_DEF (stmt
, k
);
708 if (TREE_CODE (arg
) == SSA_NAME
)
709 mark_operand_necessary (arg
);
712 /* For PHI operands it matters from where the control flow arrives
713 to the BB. Consider the following example:
723 We need to mark control dependence of the empty basic blocks, since they
724 contains computation of PHI operands.
726 Doing so is too restrictive in the case the predecestor block is in
732 for (i = 0; i<1000; ++i)
738 There is PHI for J in the BB containing return statement.
739 In this case the control dependence of predecestor block (that is
740 within the empty loop) also contains the block determining number
741 of iterations of the block that would prevent removing of empty
744 This scenario can be avoided by splitting critical edges.
745 To save the critical edge splitting pass we identify how the control
746 dependence would look like if the edge was split.
748 Consider the modified CFG created from current CFG by splitting
749 edge B->C. In the postdominance tree of modified CFG, C' is
750 always child of C. There are two cases how chlids of C' can look
755 In this case the only basic block C' is control dependent on is B.
757 2) C' has single child that is B
759 In this case control dependence of C' is same as control
760 dependence of B in original CFG except for block B itself.
761 (since C' postdominate B in modified CFG)
763 Now how to decide what case happens? There are two basic options:
765 a) C postdominate B. Then C immediately postdominate B and
766 case 2 happens iff there is no other way from B to C except
769 There is other way from B to C iff there is succesor of B that
770 is not postdominated by B. Testing this condition is somewhat
771 expensive, because we need to iterate all succesors of B.
772 We are safe to assume that this does not happen: we will mark B
773 as needed when processing the other path from B to C that is
774 conrol dependent on B and marking control dependencies of B
775 itself is harmless because they will be processed anyway after
776 processing control statement in B.
778 b) C does not postdominate B. Always case 1 happens since there is
779 path from C to exit that does not go through B and thus also C'. */
781 if (aggressive
&& !degenerate_phi_p (stmt
))
783 for (k
= 0; k
< gimple_phi_num_args (stmt
); k
++)
785 basic_block arg_bb
= gimple_phi_arg_edge (phi
, k
)->src
;
788 != get_immediate_dominator (CDI_POST_DOMINATORS
, arg_bb
))
790 if (!bitmap_bit_p (last_stmt_necessary
, arg_bb
->index
))
791 mark_last_stmt_necessary (arg_bb
);
793 else if (arg_bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
794 && !bitmap_bit_p (visited_control_parents
,
796 mark_control_dependent_edges_necessary (arg_bb
, true);
802 /* Propagate through the operands. Examine all the USE, VUSE and
803 VDEF operands in this statement. Mark all the statements
804 which feed this statement's uses as necessary. */
808 /* If this is a call to free which is directly fed by an
809 allocation function do not mark that necessary through
810 processing the argument. */
811 bool is_delete_operator
812 = (is_gimple_call (stmt
)
813 && gimple_call_from_new_or_delete (as_a
<gcall
*> (stmt
))
814 && gimple_call_operator_delete_p (as_a
<gcall
*> (stmt
)));
815 if (is_delete_operator
816 || gimple_call_builtin_p (stmt
, BUILT_IN_FREE
)
817 || gimple_call_builtin_p (stmt
, BUILT_IN_GOMP_FREE
))
819 tree ptr
= gimple_call_arg (stmt
, 0);
822 /* If the pointer we free is defined by an allocation
823 function do not add the call to the worklist. */
824 if (TREE_CODE (ptr
) == SSA_NAME
825 && (def_stmt
= dyn_cast
<gcall
*> (SSA_NAME_DEF_STMT (ptr
)))
826 && (def_callee
= gimple_call_fndecl (def_stmt
))
827 && ((DECL_BUILT_IN_CLASS (def_callee
) == BUILT_IN_NORMAL
828 && (DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_ALIGNED_ALLOC
829 || DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_MALLOC
830 || DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_CALLOC
831 || DECL_FUNCTION_CODE (def_callee
) == BUILT_IN_GOMP_ALLOC
))
832 || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee
)
833 && gimple_call_from_new_or_delete (def_stmt
))))
835 if (is_delete_operator
836 && !valid_new_delete_pair_p (def_stmt
, stmt
))
837 mark_operand_necessary (gimple_call_arg (stmt
, 0));
839 /* Delete operators can have alignment and (or) size
840 as next arguments. When being a SSA_NAME, they
841 must be marked as necessary. Similarly GOMP_free. */
842 if (gimple_call_num_args (stmt
) >= 2)
843 for (unsigned i
= 1; i
< gimple_call_num_args (stmt
);
846 tree arg
= gimple_call_arg (stmt
, i
);
847 if (TREE_CODE (arg
) == SSA_NAME
)
848 mark_operand_necessary (arg
);
855 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
)
856 mark_operand_necessary (use
);
858 use
= gimple_vuse (stmt
);
862 /* No need to search for vdefs if we intrinsicly keep them all. */
863 if (keep_all_vdefs_p ())
866 /* If we dropped to simple mode make all immediately
867 reachable definitions necessary. */
870 mark_all_reaching_defs_necessary (stmt
);
874 /* For statements that may load from memory (have a VUSE) we
875 have to mark all reaching (may-)definitions as necessary.
876 We partition this task into two cases:
877 1) explicit loads based on decls that are not aliased
878 2) implicit loads (like calls) and explicit loads not
879 based on decls that are not aliased (like indirect
880 references or loads from globals)
881 For 1) we mark all reaching may-defs as necessary, stopping
882 at dominating kills. For 2) we want to mark all dominating
883 references necessary, but non-aliased ones which we handle
884 in 1). By keeping a global visited bitmap for references
885 we walk for 2) we avoid quadratic behavior for those. */
887 if (gcall
*call
= dyn_cast
<gcall
*> (stmt
))
889 tree callee
= gimple_call_fndecl (call
);
892 /* Calls to functions that are merely acting as barriers
893 or that only store to memory do not make any previous
895 if (callee
!= NULL_TREE
896 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
897 && (DECL_FUNCTION_CODE (callee
) == BUILT_IN_MEMSET
898 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_MEMSET_CHK
899 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_MALLOC
900 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_ALIGNED_ALLOC
901 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_CALLOC
902 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_FREE
903 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_VA_END
904 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee
))
905 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_STACK_SAVE
906 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_STACK_RESTORE
907 || DECL_FUNCTION_CODE (callee
) == BUILT_IN_ASSUME_ALIGNED
))
910 if (callee
!= NULL_TREE
911 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee
)
912 || DECL_IS_OPERATOR_DELETE_P (callee
))
913 && gimple_call_from_new_or_delete (call
))
916 /* Calls implicitly load from memory, their arguments
917 in addition may explicitly perform memory loads. */
918 mark_all_reaching_defs_necessary (call
);
919 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
921 tree arg
= gimple_call_arg (call
, i
);
922 if (TREE_CODE (arg
) == SSA_NAME
923 || is_gimple_min_invariant (arg
))
925 if (TREE_CODE (arg
) == WITH_SIZE_EXPR
)
926 arg
= TREE_OPERAND (arg
, 0);
927 if (!ref_may_be_aliased (arg
))
928 mark_aliased_reaching_defs_necessary (call
, arg
);
931 else if (gimple_assign_single_p (stmt
))
934 /* If this is a load mark things necessary. */
935 rhs
= gimple_assign_rhs1 (stmt
);
936 if (TREE_CODE (rhs
) != SSA_NAME
937 && !is_gimple_min_invariant (rhs
)
938 && TREE_CODE (rhs
) != CONSTRUCTOR
)
940 if (!ref_may_be_aliased (rhs
))
941 mark_aliased_reaching_defs_necessary (stmt
, rhs
);
943 mark_all_reaching_defs_necessary (stmt
);
946 else if (greturn
*return_stmt
= dyn_cast
<greturn
*> (stmt
))
948 tree rhs
= gimple_return_retval (return_stmt
);
949 /* A return statement may perform a load. */
951 && TREE_CODE (rhs
) != SSA_NAME
952 && !is_gimple_min_invariant (rhs
)
953 && TREE_CODE (rhs
) != CONSTRUCTOR
)
955 if (!ref_may_be_aliased (rhs
))
956 mark_aliased_reaching_defs_necessary (stmt
, rhs
);
958 mark_all_reaching_defs_necessary (stmt
);
961 else if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (stmt
))
964 mark_all_reaching_defs_necessary (stmt
);
965 /* Inputs may perform loads. */
966 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
968 tree op
= TREE_VALUE (gimple_asm_input_op (asm_stmt
, i
));
969 if (TREE_CODE (op
) != SSA_NAME
970 && !is_gimple_min_invariant (op
)
971 && TREE_CODE (op
) != CONSTRUCTOR
972 && !ref_may_be_aliased (op
))
973 mark_aliased_reaching_defs_necessary (stmt
, op
);
976 else if (gimple_code (stmt
) == GIMPLE_TRANSACTION
)
978 /* The beginning of a transaction is a memory barrier. */
979 /* ??? If we were really cool, we'd only be a barrier
980 for the memories touched within the transaction. */
981 mark_all_reaching_defs_necessary (stmt
);
986 /* If we over-used our alias oracle budget drop to simple
987 mode. The cost metric allows quadratic behavior
988 (number of uses times number of may-defs queries) up to
989 a constant maximal number of queries and after that falls back to
990 super-linear complexity. */
991 if (/* Constant but quadratic for small functions. */
992 total_chain
> 128 * 128
993 /* Linear in the number of may-defs. */
994 && total_chain
> 32 * longest_chain
995 /* Linear in the number of uses. */
996 && total_chain
> nr_walks
* 32)
1000 bitmap_clear (visited
);
1006 /* Remove dead PHI nodes from block BB. */
1009 remove_dead_phis (basic_block bb
)
1011 bool something_changed
= false;
1015 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);)
1020 /* We do not track necessity of virtual PHI nodes. Instead do
1021 very simple dead PHI removal here. */
1022 if (virtual_operand_p (gimple_phi_result (phi
)))
1024 /* Virtual PHI nodes with one or identical arguments
1026 if (degenerate_phi_p (phi
))
1028 tree vdef
= gimple_phi_result (phi
);
1029 tree vuse
= gimple_phi_arg_def (phi
, 0);
1031 use_operand_p use_p
;
1032 imm_use_iterator iter
;
1034 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, vdef
)
1035 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1036 SET_USE (use_p
, vuse
);
1037 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef
)
1038 && TREE_CODE (vuse
) == SSA_NAME
)
1039 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse
) = 1;
1042 gimple_set_plf (phi
, STMT_NECESSARY
, true);
1045 if (!gimple_plf (phi
, STMT_NECESSARY
))
1047 something_changed
= true;
1048 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1050 fprintf (dump_file
, "Deleting : ");
1051 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
1052 fprintf (dump_file
, "\n");
1055 remove_phi_node (&gsi
, true);
1056 stats
.removed_phis
++;
1062 return something_changed
;
1066 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1067 containing I so that we don't have to look it up. */
1070 remove_dead_stmt (gimple_stmt_iterator
*i
, basic_block bb
,
1071 vec
<edge
> &to_remove_edges
)
1073 gimple
*stmt
= gsi_stmt (*i
);
1075 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1077 fprintf (dump_file
, "Deleting : ");
1078 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1079 fprintf (dump_file
, "\n");
1084 /* If we have determined that a conditional branch statement contributes
1085 nothing to the program, then we not only remove it, but we need to update
1086 the CFG. We can chose any of edges out of BB as long as we are sure to not
1087 close infinite loops. This is done by always choosing the edge closer to
1088 exit in inverted_post_order_compute order. */
1089 if (is_ctrl_stmt (stmt
))
1094 /* See if there is only one non-abnormal edge. */
1095 if (single_succ_p (bb
))
1096 e
= single_succ_edge (bb
);
1097 /* Otherwise chose one that is closer to bb with live statement in it.
1098 To be able to chose one, we compute inverted post order starting from
1099 all BBs with live statements. */
1104 auto_vec
<int, 20> postorder
;
1105 inverted_post_order_compute (&postorder
,
1106 &bb_contains_live_stmts
);
1107 bb_postorder
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
1108 for (unsigned int i
= 0; i
< postorder
.length (); ++i
)
1109 bb_postorder
[postorder
[i
]] = i
;
1111 FOR_EACH_EDGE (e2
, ei
, bb
->succs
)
1112 if (!e
|| e2
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
1113 || bb_postorder
[e
->dest
->index
]
1114 < bb_postorder
[e2
->dest
->index
])
1118 e
->probability
= profile_probability::always ();
1120 /* The edge is no longer associated with a conditional, so it does
1121 not have TRUE/FALSE flags.
1122 We are also safe to drop EH/ABNORMAL flags and turn them into
1123 normal control flow, because we know that all the destinations (including
1124 those odd edges) are equivalent for program execution. */
1125 e
->flags
&= ~(EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
| EDGE_EH
| EDGE_ABNORMAL
);
1127 /* The lone outgoing edge from BB will be a fallthru edge. */
1128 e
->flags
|= EDGE_FALLTHRU
;
1130 /* Remove the remaining outgoing edges. */
1131 FOR_EACH_EDGE (e2
, ei
, bb
->succs
)
1134 /* If we made a BB unconditionally exit a loop or removed
1135 an entry into an irreducible region, then this transform
1136 alters the set of BBs in the loop. Schedule a fixup. */
1137 if (loop_exit_edge_p (bb
->loop_father
, e
)
1138 || (e2
->dest
->flags
& BB_IRREDUCIBLE_LOOP
))
1139 loops_state_set (LOOPS_NEED_FIXUP
);
1140 to_remove_edges
.safe_push (e2
);
1144 /* If this is a store into a variable that is being optimized away,
1145 add a debug bind stmt if possible. */
1146 if (MAY_HAVE_DEBUG_BIND_STMTS
1147 && gimple_assign_single_p (stmt
)
1148 && is_gimple_val (gimple_assign_rhs1 (stmt
)))
1150 tree lhs
= gimple_assign_lhs (stmt
);
1151 if ((VAR_P (lhs
) || TREE_CODE (lhs
) == PARM_DECL
)
1152 && !DECL_IGNORED_P (lhs
)
1153 && is_gimple_reg_type (TREE_TYPE (lhs
))
1154 && !is_global_var (lhs
)
1155 && !DECL_HAS_VALUE_EXPR_P (lhs
))
1157 tree rhs
= gimple_assign_rhs1 (stmt
);
1159 = gimple_build_debug_bind (lhs
, unshare_expr (rhs
), stmt
);
1160 gsi_insert_after (i
, note
, GSI_SAME_STMT
);
1164 unlink_stmt_vdef (stmt
);
1165 gsi_remove (i
, true);
1166 release_defs (stmt
);
1169 /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1170 uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1173 find_non_realpart_uses (tree
*tp
, int *walk_subtrees
, void *data
)
1175 if (TYPE_P (*tp
) || TREE_CODE (*tp
) == REALPART_EXPR
)
1177 if (*tp
== (tree
) data
)
1182 /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1183 but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1184 into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1188 maybe_optimize_arith_overflow (gimple_stmt_iterator
*gsi
,
1189 enum tree_code subcode
)
1191 gimple
*stmt
= gsi_stmt (*gsi
);
1192 tree lhs
= gimple_call_lhs (stmt
);
1194 if (lhs
== NULL
|| TREE_CODE (lhs
) != SSA_NAME
)
1197 imm_use_iterator imm_iter
;
1198 use_operand_p use_p
;
1199 bool has_debug_uses
= false;
1200 bool has_realpart_uses
= false;
1201 bool has_other_uses
= false;
1202 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, lhs
)
1204 gimple
*use_stmt
= USE_STMT (use_p
);
1205 if (is_gimple_debug (use_stmt
))
1206 has_debug_uses
= true;
1207 else if (is_gimple_assign (use_stmt
)
1208 && gimple_assign_rhs_code (use_stmt
) == REALPART_EXPR
1209 && TREE_OPERAND (gimple_assign_rhs1 (use_stmt
), 0) == lhs
)
1210 has_realpart_uses
= true;
1213 has_other_uses
= true;
1218 if (!has_realpart_uses
|| has_other_uses
)
1221 tree arg0
= gimple_call_arg (stmt
, 0);
1222 tree arg1
= gimple_call_arg (stmt
, 1);
1223 location_t loc
= gimple_location (stmt
);
1224 tree type
= TREE_TYPE (TREE_TYPE (lhs
));
1226 if (!TYPE_UNSIGNED (type
))
1227 utype
= build_nonstandard_integer_type (TYPE_PRECISION (type
), 1);
1228 tree result
= fold_build2_loc (loc
, subcode
, utype
,
1229 fold_convert_loc (loc
, utype
, arg0
),
1230 fold_convert_loc (loc
, utype
, arg1
));
1231 result
= fold_convert_loc (loc
, type
, result
);
1236 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, lhs
)
1238 if (!gimple_debug_bind_p (use_stmt
))
1240 tree v
= gimple_debug_bind_get_value (use_stmt
);
1241 if (walk_tree (&v
, find_non_realpart_uses
, lhs
, NULL
))
1243 gimple_debug_bind_reset_value (use_stmt
);
1244 update_stmt (use_stmt
);
1249 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
1250 result
= drop_tree_overflow (result
);
1251 tree overflow
= build_zero_cst (type
);
1252 tree ctype
= build_complex_type (type
);
1253 if (TREE_CODE (result
) == INTEGER_CST
)
1254 result
= build_complex (ctype
, result
, overflow
);
1256 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
1257 ctype
, result
, overflow
);
1259 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1261 fprintf (dump_file
, "Transforming call: ");
1262 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1263 fprintf (dump_file
, "because the overflow result is never used into: ");
1264 print_generic_stmt (dump_file
, result
, TDF_SLIM
);
1265 fprintf (dump_file
, "\n");
1268 gimplify_and_update_call_from_tree (gsi
, result
);
1271 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1272 contributes nothing to the program, and can be deleted. */
1275 eliminate_unnecessary_stmts (void)
1277 bool something_changed
= false;
1279 gimple_stmt_iterator gsi
, psi
;
1282 auto_vec
<edge
> to_remove_edges
;
1284 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1285 fprintf (dump_file
, "\nEliminating unnecessary statements:\n");
1287 clear_special_calls ();
1289 /* Walking basic blocks and statements in reverse order avoids
1290 releasing SSA names before any other DEFs that refer to them are
1291 released. This helps avoid loss of debug information, as we get
1292 a chance to propagate all RHSs of removed SSAs into debug uses,
1293 rather than only the latest ones. E.g., consider:
1299 If we were to release x_3 before a_5, when we reached a_5 and
1300 tried to substitute it into the debug stmt, we'd see x_3 there,
1301 but x_3's DEF, type, etc would have already been disconnected.
1302 By going backwards, the debug stmt first changes to:
1304 # DEBUG a => x_3 - b_4
1308 # DEBUG a => y_1 + z_2 - b_4
1311 gcc_assert (dom_info_available_p (CDI_DOMINATORS
));
1312 auto_vec
<basic_block
> h
;
1313 h
= get_all_dominated_blocks (CDI_DOMINATORS
,
1314 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
1320 /* Remove dead statements. */
1321 auto_bitmap debug_seen
;
1322 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi
= psi
)
1324 stmt
= gsi_stmt (gsi
);
1331 /* We can mark a call to free as not necessary if the
1332 defining statement of its argument is not necessary
1333 (and thus is getting removed). */
1334 if (gimple_plf (stmt
, STMT_NECESSARY
)
1335 && (gimple_call_builtin_p (stmt
, BUILT_IN_FREE
)
1336 || (is_gimple_call (stmt
)
1337 && gimple_call_from_new_or_delete (as_a
<gcall
*> (stmt
))
1338 && gimple_call_operator_delete_p (as_a
<gcall
*> (stmt
)))))
1340 tree ptr
= gimple_call_arg (stmt
, 0);
1341 if (TREE_CODE (ptr
) == SSA_NAME
)
1343 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ptr
);
1344 if (!gimple_nop_p (def_stmt
)
1345 && !gimple_plf (def_stmt
, STMT_NECESSARY
))
1346 gimple_set_plf (stmt
, STMT_NECESSARY
, false);
1350 /* If GSI is not necessary then remove it. */
1351 if (!gimple_plf (stmt
, STMT_NECESSARY
))
1353 /* Keep clobbers that we can keep live live. */
1354 if (gimple_clobber_p (stmt
))
1357 use_operand_p use_p
;
1359 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
1361 tree name
= USE_FROM_PTR (use_p
);
1362 if (!SSA_NAME_IS_DEFAULT_DEF (name
)
1363 && !bitmap_bit_p (processed
, SSA_NAME_VERSION (name
)))
1371 bitmap_clear (debug_seen
);
1375 if (!is_gimple_debug (stmt
))
1376 something_changed
= true;
1377 remove_dead_stmt (&gsi
, bb
, to_remove_edges
);
1380 else if (is_gimple_call (stmt
))
1382 tree name
= gimple_call_lhs (stmt
);
1384 notice_special_calls (as_a
<gcall
*> (stmt
));
1386 /* When LHS of var = call (); is dead, simplify it into
1387 call (); saving one operand. */
1389 && TREE_CODE (name
) == SSA_NAME
1390 && !bitmap_bit_p (processed
, SSA_NAME_VERSION (name
))
1391 /* Avoid doing so for allocation calls which we
1392 did not mark as necessary, it will confuse the
1393 special logic we apply to malloc/free pair removal. */
1394 && (!(call
= gimple_call_fndecl (stmt
))
1395 || ((DECL_BUILT_IN_CLASS (call
) != BUILT_IN_NORMAL
1396 || (DECL_FUNCTION_CODE (call
) != BUILT_IN_ALIGNED_ALLOC
1397 && DECL_FUNCTION_CODE (call
) != BUILT_IN_MALLOC
1398 && DECL_FUNCTION_CODE (call
) != BUILT_IN_CALLOC
1399 && !ALLOCA_FUNCTION_CODE_P
1400 (DECL_FUNCTION_CODE (call
))))
1401 && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call
))))
1403 something_changed
= true;
1404 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1406 fprintf (dump_file
, "Deleting LHS of call: ");
1407 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1408 fprintf (dump_file
, "\n");
1411 gimple_call_set_lhs (stmt
, NULL_TREE
);
1412 maybe_clean_or_replace_eh_stmt (stmt
, stmt
);
1414 release_ssa_name (name
);
1416 /* GOMP_SIMD_LANE (unless three argument) or ASAN_POISON
1417 without lhs is not needed. */
1418 if (gimple_call_internal_p (stmt
))
1419 switch (gimple_call_internal_fn (stmt
))
1421 case IFN_GOMP_SIMD_LANE
:
1422 if (gimple_call_num_args (stmt
) >= 3
1423 && !integer_nonzerop (gimple_call_arg (stmt
, 2)))
1426 case IFN_ASAN_POISON
:
1427 remove_dead_stmt (&gsi
, bb
, to_remove_edges
);
1433 else if (gimple_call_internal_p (stmt
))
1434 switch (gimple_call_internal_fn (stmt
))
1436 case IFN_ADD_OVERFLOW
:
1437 maybe_optimize_arith_overflow (&gsi
, PLUS_EXPR
);
1439 case IFN_SUB_OVERFLOW
:
1440 maybe_optimize_arith_overflow (&gsi
, MINUS_EXPR
);
1442 case IFN_MUL_OVERFLOW
:
1443 maybe_optimize_arith_overflow (&gsi
, MULT_EXPR
);
1449 else if (gimple_debug_bind_p (stmt
))
1451 /* We are only keeping the last debug-bind of a
1452 non-DEBUG_EXPR_DECL variable in a series of
1453 debug-bind stmts. */
1454 tree var
= gimple_debug_bind_get_var (stmt
);
1455 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
1456 && !bitmap_set_bit (debug_seen
, DECL_UID (var
)))
1457 remove_dead_stmt (&gsi
, bb
, to_remove_edges
);
1460 bitmap_clear (debug_seen
);
1463 /* Remove dead PHI nodes. */
1464 something_changed
|= remove_dead_phis (bb
);
1468 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1469 rendered some PHI nodes unreachable while they are still in use.
1470 Mark them for renaming. */
1471 if (!to_remove_edges
.is_empty ())
1473 basic_block prev_bb
;
1475 /* Remove edges. We've delayed this to not get bogus debug stmts
1476 during PHI node removal. */
1477 for (unsigned i
= 0; i
< to_remove_edges
.length (); ++i
)
1478 remove_edge (to_remove_edges
[i
]);
1481 find_unreachable_blocks ();
1483 /* Delete all unreachable basic blocks in reverse dominator order. */
1484 for (bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
1485 bb
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
); bb
= prev_bb
)
1487 prev_bb
= bb
->prev_bb
;
1489 if (!bitmap_bit_p (bb_contains_live_stmts
, bb
->index
)
1490 || !(bb
->flags
& BB_REACHABLE
))
1492 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
1494 if (virtual_operand_p (gimple_phi_result (gsi
.phi ())))
1497 imm_use_iterator iter
;
1499 FOR_EACH_IMM_USE_STMT (stmt
, iter
,
1500 gimple_phi_result (gsi
.phi ()))
1502 if (!(gimple_bb (stmt
)->flags
& BB_REACHABLE
))
1504 if (gimple_code (stmt
) == GIMPLE_PHI
1505 || gimple_plf (stmt
, STMT_NECESSARY
))
1512 mark_virtual_phi_result_for_renaming (gsi
.phi ());
1515 if (!(bb
->flags
& BB_REACHABLE
))
1517 /* Speed up the removal of blocks that don't
1518 dominate others. Walking backwards, this should
1519 be the common case. ??? Do we need to recompute
1520 dominators because of cfg_altered? */
1521 if (!first_dom_son (CDI_DOMINATORS
, bb
))
1522 delete_basic_block (bb
);
1525 h
= get_all_dominated_blocks (CDI_DOMINATORS
, bb
);
1530 prev_bb
= bb
->prev_bb
;
1531 /* Rearrangements to the CFG may have failed
1532 to update the dominators tree, so that
1533 formerly-dominated blocks are now
1534 otherwise reachable. */
1535 if (!!(bb
->flags
& BB_REACHABLE
))
1537 delete_basic_block (bb
);
1548 free (bb_postorder
);
1549 bb_postorder
= NULL
;
1551 return something_changed
;
1555 /* Print out removed statement statistics. */
1562 percg
= ((float) stats
.removed
/ (float) stats
.total
) * 100;
1563 fprintf (dump_file
, "Removed %d of %d statements (%d%%)\n",
1564 stats
.removed
, stats
.total
, (int) percg
);
1566 if (stats
.total_phis
== 0)
1569 percg
= ((float) stats
.removed_phis
/ (float) stats
.total_phis
) * 100;
1571 fprintf (dump_file
, "Removed %d of %d PHI nodes (%d%%)\n",
1572 stats
.removed_phis
, stats
.total_phis
, (int) percg
);
1575 /* Initialization for this pass. Set up the used data structures. */
1578 tree_dce_init (bool aggressive
)
1580 memset ((void *) &stats
, 0, sizeof (stats
));
1584 last_stmt_necessary
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
1585 bitmap_clear (last_stmt_necessary
);
1586 bb_contains_live_stmts
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
1587 bitmap_clear (bb_contains_live_stmts
);
1590 processed
= sbitmap_alloc (num_ssa_names
+ 1);
1591 bitmap_clear (processed
);
1593 worklist
.create (64);
1594 cfg_altered
= false;
1597 /* Cleanup after this pass. */
1600 tree_dce_done (bool aggressive
)
1605 sbitmap_free (visited_control_parents
);
1606 sbitmap_free (last_stmt_necessary
);
1607 sbitmap_free (bb_contains_live_stmts
);
1608 bb_contains_live_stmts
= NULL
;
1611 sbitmap_free (processed
);
1613 worklist
.release ();
1616 /* Sort PHI argument values for make_forwarders_with_degenerate_phis. */
1619 sort_phi_args (const void *a_
, const void *b_
)
1621 auto *a
= (const std::pair
<edge
, hashval_t
> *) a_
;
1622 auto *b
= (const std::pair
<edge
, hashval_t
> *) b_
;
1623 hashval_t ha
= a
->second
;
1624 hashval_t hb
= b
->second
;
1633 /* Look for a non-virtual PHIs and make a forwarder block when all PHIs
1634 have the same argument on a set of edges. This is to not consider
1635 control dependences of individual edges for same values but only for
1639 make_forwarders_with_degenerate_phis (function
*fn
)
1644 FOR_EACH_BB_FN (bb
, fn
)
1646 /* Only PHIs with three or more arguments have opportunities. */
1647 if (EDGE_COUNT (bb
->preds
) < 3)
1649 /* Do not touch loop headers. */
1650 if (bb
->loop_father
->header
== bb
)
1653 /* Take one PHI node as template to look for identical
1654 arguments. Build a vector of candidates forming sets
1655 of argument edges with equal values. Note optimality
1656 depends on the particular choice of the template PHI
1657 since equal arguments are unordered leaving other PHIs
1658 with more than one set of equal arguments within this
1659 argument range unsorted. We'd have to break ties by
1660 looking at other PHI nodes. */
1661 gphi_iterator gsi
= gsi_start_nonvirtual_phis (bb
);
1662 if (gsi_end_p (gsi
))
1664 gphi
*phi
= gsi
.phi ();
1665 auto_vec
<std::pair
<edge
, hashval_t
>, 8> args
;
1666 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
1668 edge e
= gimple_phi_arg_edge (phi
, i
);
1669 /* Skip abnormal edges since we cannot redirect them. */
1670 if (e
->flags
& EDGE_ABNORMAL
)
1672 /* Skip loop exit edges when we are in loop-closed SSA form
1673 since the forwarder we'd create does not have a PHI node. */
1674 if (loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1675 && loop_exit_edge_p (e
->src
->loop_father
, e
))
1677 args
.safe_push (std::make_pair (e
, iterative_hash_expr
1678 (gimple_phi_arg_def (phi
, i
), 0)));
1680 if (args
.length () < 2)
1682 args
.qsort (sort_phi_args
);
1684 /* From the candidates vector now verify true candidates for
1685 forwarders and create them. */
1686 gphi
*vphi
= get_virtual_phi (bb
);
1688 while (start
< args
.length () - 1)
1691 for (i
= start
+ 1; i
< args
.length (); ++i
)
1692 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi
, args
[start
].first
),
1693 PHI_ARG_DEF_FROM_EDGE (phi
, args
[i
].first
)))
1695 /* args[start]..args[i-1] are equal. */
1698 /* Check all PHI nodes for argument equality. */
1700 gphi_iterator gsi2
= gsi
;
1702 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
1704 gphi
*phi2
= gsi2
.phi ();
1705 if (virtual_operand_p (gimple_phi_result (phi2
)))
1708 = PHI_ARG_DEF_FROM_EDGE (phi2
, args
[start
].first
);
1709 for (unsigned j
= start
+ 1; j
< i
; ++j
)
1711 if (!operand_equal_p (start_arg
,
1712 PHI_ARG_DEF_FROM_EDGE
1713 (phi2
, args
[j
].first
)))
1715 /* Another PHI might have a shorter set of
1716 equivalent args. Go for that. */
1728 /* If we are asked to forward all edges the block
1729 has all degenerate PHIs. Do nothing in that case. */
1731 && i
== args
.length ()
1732 && args
.length () == gimple_phi_num_args (phi
))
1734 /* Instead of using make_forwarder_block we are
1735 rolling our own variant knowing that the forwarder
1736 does not need PHI nodes apart from eventually
1738 auto_vec
<tree
, 8> vphi_args
;
1741 vphi_args
.reserve_exact (i
- start
);
1742 for (unsigned j
= start
; j
< i
; ++j
)
1743 vphi_args
.quick_push
1744 (PHI_ARG_DEF_FROM_EDGE (vphi
, args
[j
].first
));
1746 free_dominance_info (fn
, CDI_DOMINATORS
);
1747 basic_block forwarder
= split_edge (args
[start
].first
);
1748 for (unsigned j
= start
+ 1; j
< i
; ++j
)
1750 edge e
= args
[j
].first
;
1751 redirect_edge_and_branch_force (e
, forwarder
);
1752 redirect_edge_var_map_clear (e
);
1756 tree def
= copy_ssa_name (vphi_args
[0]);
1757 gphi
*vphi_copy
= create_phi_node (def
, forwarder
);
1758 for (unsigned j
= start
; j
< i
; ++j
)
1759 add_phi_arg (vphi_copy
, vphi_args
[j
- start
],
1760 args
[j
].first
, UNKNOWN_LOCATION
);
1762 (vphi
, single_succ_edge (forwarder
)->dest_idx
, def
);
1764 todo
|= TODO_cleanup_cfg
;
1767 /* Continue searching for more opportunities. */
1774 /* Main routine to eliminate dead code.
1776 AGGRESSIVE controls the aggressiveness of the algorithm.
1777 In conservative mode, we ignore control dependence and simply declare
1778 all but the most trivially dead branches necessary. This mode is fast.
1779 In aggressive mode, control dependences are taken into account, which
1780 results in more dead code elimination, but at the cost of some time.
1782 FIXME: Aggressive mode before PRE doesn't work currently because
1783 the dominance info is not invalidated after DCE1. This is
1784 not an issue right now because we only run aggressive DCE
1785 as the last tree SSA pass, but keep this in mind when you
1786 start experimenting with pass ordering. */
1789 perform_tree_ssa_dce (bool aggressive
)
1791 bool something_changed
= 0;
1794 /* Preheaders are needed for SCEV to work.
1795 Simple lateches and recorded exits improve chances that loop will
1796 proved to be finite in testcases such as in loop-15.c and loop-24.c */
1797 bool in_loop_pipeline
= scev_initialized_p ();
1798 if (aggressive
&& ! in_loop_pipeline
)
1801 loop_optimizer_init (LOOPS_NORMAL
1802 | LOOPS_HAVE_RECORDED_EXITS
);
1806 todo
|= make_forwarders_with_degenerate_phis (cfun
);
1808 calculate_dominance_info (CDI_DOMINATORS
);
1810 tree_dce_init (aggressive
);
1814 /* Compute control dependence. */
1815 calculate_dominance_info (CDI_POST_DOMINATORS
);
1816 cd
= new control_dependences ();
1818 visited_control_parents
=
1819 sbitmap_alloc (last_basic_block_for_fn (cfun
));
1820 bitmap_clear (visited_control_parents
);
1822 mark_dfs_back_edges ();
1825 find_obviously_necessary_stmts (aggressive
);
1827 if (aggressive
&& ! in_loop_pipeline
)
1829 loop_optimizer_finalize ();
1837 visited
= BITMAP_ALLOC (NULL
);
1838 propagate_necessity (aggressive
);
1839 BITMAP_FREE (visited
);
1841 something_changed
|= eliminate_unnecessary_stmts ();
1842 something_changed
|= cfg_altered
;
1844 /* We do not update postdominators, so free them unconditionally. */
1845 free_dominance_info (CDI_POST_DOMINATORS
);
1847 /* If we removed paths in the CFG, then we need to update
1848 dominators as well. I haven't investigated the possibility
1849 of incrementally updating dominators. */
1851 free_dominance_info (CDI_DOMINATORS
);
1853 statistics_counter_event (cfun
, "Statements deleted", stats
.removed
);
1854 statistics_counter_event (cfun
, "PHI nodes deleted", stats
.removed_phis
);
1856 /* Debugging dumps. */
1857 if (dump_file
&& (dump_flags
& (TDF_STATS
|TDF_DETAILS
)))
1860 tree_dce_done (aggressive
);
1862 if (something_changed
)
1864 free_numbers_of_iterations_estimates (cfun
);
1865 if (in_loop_pipeline
)
1867 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
1872 /* Pass entry points. */
1876 return perform_tree_ssa_dce (/*aggressive=*/false);
1880 tree_ssa_cd_dce (void)
1882 return perform_tree_ssa_dce (/*aggressive=*/optimize
>= 2);
1887 const pass_data pass_data_dce
=
1889 GIMPLE_PASS
, /* type */
1891 OPTGROUP_NONE
, /* optinfo_flags */
1892 TV_TREE_DCE
, /* tv_id */
1893 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1894 0, /* properties_provided */
1895 0, /* properties_destroyed */
1896 0, /* todo_flags_start */
1897 0, /* todo_flags_finish */
1900 class pass_dce
: public gimple_opt_pass
1903 pass_dce (gcc::context
*ctxt
)
1904 : gimple_opt_pass (pass_data_dce
, ctxt
)
1907 /* opt_pass methods: */
1908 opt_pass
* clone () { return new pass_dce (m_ctxt
); }
1909 virtual bool gate (function
*) { return flag_tree_dce
!= 0; }
1910 virtual unsigned int execute (function
*) { return tree_ssa_dce (); }
1912 }; // class pass_dce
1917 make_pass_dce (gcc::context
*ctxt
)
1919 return new pass_dce (ctxt
);
1924 const pass_data pass_data_cd_dce
=
1926 GIMPLE_PASS
, /* type */
1928 OPTGROUP_NONE
, /* optinfo_flags */
1929 TV_TREE_CD_DCE
, /* tv_id */
1930 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1931 0, /* properties_provided */
1932 0, /* properties_destroyed */
1933 0, /* todo_flags_start */
1934 0, /* todo_flags_finish */
1937 class pass_cd_dce
: public gimple_opt_pass
1940 pass_cd_dce (gcc::context
*ctxt
)
1941 : gimple_opt_pass (pass_data_cd_dce
, ctxt
), update_address_taken_p (false)
1944 /* opt_pass methods: */
1945 opt_pass
* clone () { return new pass_cd_dce (m_ctxt
); }
1946 void set_pass_param (unsigned n
, bool param
)
1948 gcc_assert (n
== 0);
1949 update_address_taken_p
= param
;
1951 virtual bool gate (function
*) { return flag_tree_dce
!= 0; }
1952 virtual unsigned int execute (function
*)
1954 return (tree_ssa_cd_dce ()
1955 | (update_address_taken_p
? TODO_update_address_taken
: 0));
1959 bool update_address_taken_p
;
1960 }; // class pass_cd_dce
1965 make_pass_cd_dce (gcc::context
*ctxt
)
1967 return new pass_cd_dce (ctxt
);
1971 /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
1972 is consumed by this function. The function has linear complexity in
1973 the number of dead stmts with a constant factor like the average SSA
1974 use operands number. */
1977 simple_dce_from_worklist (bitmap worklist
)
1979 while (! bitmap_empty_p (worklist
))
1982 unsigned i
= bitmap_first_set_bit (worklist
);
1983 bitmap_clear_bit (worklist
, i
);
1985 tree def
= ssa_name (i
);
1986 /* Removed by somebody else or still in use. */
1987 if (! def
|| ! has_zero_uses (def
))
1990 gimple
*t
= SSA_NAME_DEF_STMT (def
);
1991 if (gimple_has_side_effects (t
))
1994 /* Don't remove statements that are needed for non-call
1996 if (stmt_unremovable_because_of_non_call_eh_p (cfun
, t
))
1999 /* Add uses to the worklist. */
2001 use_operand_p use_p
;
2002 FOR_EACH_PHI_OR_STMT_USE (use_p
, t
, iter
, SSA_OP_USE
)
2004 tree use
= USE_FROM_PTR (use_p
);
2005 if (TREE_CODE (use
) == SSA_NAME
2006 && ! SSA_NAME_IS_DEFAULT_DEF (use
))
2007 bitmap_set_bit (worklist
, SSA_NAME_VERSION (use
));
2011 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2013 fprintf (dump_file
, "Removing dead stmt:");
2014 print_gimple_stmt (dump_file
, t
, 0);
2016 gimple_stmt_iterator gsi
= gsi_for_stmt (t
);
2017 if (gimple_code (t
) == GIMPLE_PHI
)
2018 remove_phi_node (&gsi
, true);
2021 gsi_remove (&gsi
, true);