/* Dead code elimination pass for the GNU compiler.
- Copyright (C) 2002-2015 Free Software Foundation, Inc.
+ Copyright (C) 2002-2021 Free Software Foundation, Inc.
Contributed by Ben Elliston <bje@redhat.com>
and Andrew MacLeod <amacleod@redhat.com>
Adapted to use control dependence by Steven Bosscher, SUSE Labs.
#include "system.h"
#include "coretypes.h"
#include "backend.h"
-#include "cfghooks.h"
+#include "rtl.h"
#include "tree.h"
#include "gimple.h"
-#include "rtl.h"
+#include "cfghooks.h"
+#include "tree-pass.h"
#include "ssa.h"
-#include "alias.h"
+#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
-#include "gimple-pretty-print.h"
#include "cfganal.h"
-#include "internal-fn.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
-#include "flags.h"
-#include "insn-config.h"
-#include "expmed.h"
-#include "dojump.h"
-#include "explow.h"
-#include "emit-rtl.h"
-#include "varasm.h"
-#include "stmt.h"
-#include "expr.h"
#include "tree-dfa.h"
-#include "tree-pass.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
-#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"
to be recomputed. */
static bool cfg_altered;
+/* When non-NULL, holds a map from basic block index to its postorder
+   number.  */
+static int *bb_postorder;
+
+
+/* True if we should treat any stmt with a vdef as necessary. */
+
+static inline bool
+keep_all_vdefs_p ()
+{
+ return optimize_debug;
+}
/* If STMT is not already marked necessary, mark it, and add it to the
worklist if ADD_TO_WORKLIST is true. */
gimple_set_plf (stmt, STMT_NECESSARY, true);
if (add_to_worklist)
worklist.safe_push (stmt);
- if (bb_contains_live_stmts && !is_gimple_debug (stmt))
+ if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "marking necessary through ");
- print_generic_expr (dump_file, op, 0);
+ print_generic_expr (dump_file, op);
fprintf (dump_file, " stmt ");
- print_gimple_stmt (dump_file, stmt, 0, 0);
+ print_gimple_stmt (dump_file, stmt, 0);
}
gimple_set_plf (stmt, STMT_NECESSARY, true);
{
/* With non-call exceptions, we have to assume that all statements could
throw. If a statement could throw, it can be deemed necessary. */
- if (cfun->can_throw_non_call_exceptions
- && !cfun->can_delete_dead_exceptions
- && stmt_could_throw_p (stmt))
+ if (stmt_unremovable_because_of_non_call_eh_p (cfun, stmt))
{
mark_stmt_necessary (stmt, true);
return;
{
tree callee = gimple_call_fndecl (stmt);
if (callee != NULL_TREE
- && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
+ && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (callee))
{
case BUILT_IN_MALLOC:
case BUILT_IN_ALIGNED_ALLOC:
case BUILT_IN_CALLOC:
- case BUILT_IN_ALLOCA:
- case BUILT_IN_ALLOCA_WITH_ALIGN:
+ CASE_BUILT_IN_ALLOCA:
+ case BUILT_IN_STRDUP:
+ case BUILT_IN_STRNDUP:
+ case BUILT_IN_GOMP_ALLOC:
return;
default:;
}
+
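+      /* Leaving a replaceable operator new unmarked here likewise lets a
+	 matched pair such as
+	     ptr_1 = operator new (16);
+	     operator delete (ptr_1);
+	 be elided when ptr_1 is not otherwise used (guarded by
+	 -fallocation-dce).  */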
+ if (callee != NULL_TREE
+ && flag_allocation_dce
+ && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
+ return;
+
/* Most, but not all function calls are required. Function calls that
produce no result and have no side effects (i.e. const pure
functions) are unnecessary. */
mark_stmt_necessary (stmt, true);
return;
}
+  /* IFN_GOACC_LOOP calls are necessary in that they are used to
+     represent parameters (i.e. step, bound) of a lowered OpenACC
+     partitioned loop.  But this kind of partitioned loop might not
+     survive aggressive loop removal because it has a loop exit and
+     is assumed to be finite.  Therefore, we need to explicitly mark
+     these calls.  (An example is libgomp.oacc-c-c++-common/pr84955.c.)  */
+ if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
+ {
+ mark_stmt_necessary (stmt, true);
+ return;
+ }
if (!gimple_call_lhs (stmt))
return;
break;
easily locate the debug temp bind stmt for a use thereof,
we could refrain from marking all debug temps here, and
mark them only if they're used. */
- if (!gimple_debug_bind_p (stmt)
+ if (gimple_debug_nonbind_marker_p (stmt)
+ || !gimple_debug_bind_p (stmt)
|| gimple_debug_bind_has_value_p (stmt)
|| TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
mark_stmt_necessary (stmt, false);
return;
}
+ if (gimple_vdef (stmt) && keep_all_vdefs_p ())
+ {
+ mark_stmt_necessary (stmt, true);
+ return;
+ }
+
return;
}
EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
0, edge_number, bi)
{
- basic_block cd_bb = cd->get_edge (edge_number)->src;
+ basic_block cd_bb = cd->get_edge_src (edge_number);
if (ignore_self && cd_bb == bb)
{
/* Prevent empty, possibly infinite loops from being removed. */
if (aggressive)
{
- struct loop *loop;
- scev_initialize ();
+ class loop *loop;
if (mark_irreducible_loops ())
FOR_EACH_BB_FN (bb, cfun)
{
if (!finite_loop_p (loop))
{
if (dump_file)
- fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
+ fprintf (dump_file, "cannot prove finiteness of loop %i\n", loop->num);
mark_control_dependent_edges_necessary (loop->latch, false);
}
- scev_finalize ();
}
}
gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
/* All stmts we visit are necessary. */
- mark_operand_necessary (vdef);
+ if (! gimple_clobber_p (def_stmt))
+ mark_operand_necessary (vdef);
/* If the stmt lhs kills ref, then we can stop walking. */
if (gimple_has_lhs (def_stmt)
??? We only need to care about the RHS throwing. For aggregate
assignments or similar calls and non-call exceptions the LHS
might throw as well. */
- && !stmt_can_throw_internal (def_stmt))
+ && !stmt_can_throw_internal (cfun, def_stmt))
{
tree base, lhs = gimple_get_lhs (def_stmt);
- HOST_WIDE_INT size, offset, max_size;
+ poly_int64 size, offset, max_size;
+ bool reverse;
ao_ref_base (ref);
- base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+ base
+ = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
/* We can get MEM[symbol: sZ, index: D.8862_1] here,
so base == refd->base does not always hold. */
if (base == ref->base)
{
/* For a must-alias check we need to be able to constrain
the accesses properly. */
- if (size != -1 && size == max_size
- && ref->max_size != -1)
- {
- if (offset <= ref->offset
- && offset + size >= ref->offset + ref->max_size)
- return true;
- }
+ if (known_eq (size, max_size)
+ && known_subrange_p (ref->offset, ref->max_size, offset, size))
+ return true;
/* Or they need to be exactly the same. */
else if (ref->ref
/* Make sure there is no induction variable involved
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
+ /* Should have been caught before calling this function. */
+ gcc_checking_assert (!keep_all_vdefs_p ());
+
unsigned int chain;
ao_ref refd;
gcc_assert (!chain_ovfl);
/* We want to skip statements that do not constitute stores but have
a virtual definition. */
- if (is_gimple_call (def_stmt))
+ if (gcall *call = dyn_cast <gcall *> (def_stmt))
{
- tree callee = gimple_call_fndecl (def_stmt);
+ tree callee = gimple_call_fndecl (call);
if (callee != NULL_TREE
- && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
+ && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (callee))
{
case BUILT_IN_MALLOC:
case BUILT_IN_ALIGNED_ALLOC:
case BUILT_IN_CALLOC:
- case BUILT_IN_ALLOCA:
- case BUILT_IN_ALLOCA_WITH_ALIGN:
+ CASE_BUILT_IN_ALLOCA:
case BUILT_IN_FREE:
+ case BUILT_IN_GOMP_ALLOC:
+ case BUILT_IN_GOMP_FREE:
return false;
default:;
}
+
+ if (callee != NULL_TREE
+ && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
+ || DECL_IS_OPERATOR_DELETE_P (callee))
+ && gimple_call_from_new_or_delete (call))
+ return false;
}
- mark_operand_necessary (vdef);
+ if (! gimple_clobber_p (def_stmt))
+ mark_operand_necessary (vdef);
return false;
}
static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
+ /* Should have been caught before calling this function. */
+ gcc_checking_assert (!keep_all_vdefs_p ());
walk_aliased_vdefs (NULL, gimple_vuse (stmt),
mark_all_reaching_defs_necessary_1, NULL, &visited);
}
return true;
}
+/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
+   and delete operators.  */
+
+static bool
+valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
+{
+ tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
+ tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
+ return valid_new_delete_pair_p (new_asm, delete_asm);
+}
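+
+/* For instance, with the Itanium C++ ABI the pair _Znwm ("operator new
+   (unsigned long)") and _ZdlPv ("operator delete (void*)") is valid,
+   while mixing scalar and array forms (say _Znam with _ZdlPv) is not.  */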
+
/* Propagate necessity using the operands of necessary statements.
Process the uses on each statement in the worklist, and add all
feeding statements which contribute to the calculation of this
/* If this is a call to free which is directly fed by an
allocation function do not mark that necessary through
processing the argument. */
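+	  /* For instance, in
+		 ptr_2 = malloc (16);
+		 free (ptr_2);
+	     neither call makes the other necessary, so if ptr_2 has no
+	     other uses the whole pair can be eliminated.  */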
- if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
+ bool is_delete_operator
+ = (is_gimple_call (stmt)
+ && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
+ && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
+ if (is_delete_operator
+ || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
+ || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
{
tree ptr = gimple_call_arg (stmt, 0);
- gimple *def_stmt;
+ gcall *def_stmt;
tree def_callee;
/* If the pointer we free is defined by an allocation
function do not add the call to the worklist. */
if (TREE_CODE (ptr) == SSA_NAME
- && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
+ && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
&& (def_callee = gimple_call_fndecl (def_stmt))
- && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
- && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
- || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
- || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
+ && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
+ && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
+ || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
+ || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC
+ || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_GOMP_ALLOC))
+ || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
+ && gimple_call_from_new_or_delete (def_stmt))))
{
- gimple *bounds_def_stmt;
- tree bounds;
-
- /* For instrumented calls we should also check used
- bounds are returned by the same allocation call. */
- if (!gimple_call_with_bounds_p (stmt)
- || ((bounds = gimple_call_arg (stmt, 1))
- && TREE_CODE (bounds) == SSA_NAME
- && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
- && chkp_gimple_call_builtin_p (bounds_def_stmt,
- BUILT_IN_CHKP_BNDRET)
- && gimple_call_arg (bounds_def_stmt, 0) == ptr))
- continue;
+ if (is_delete_operator
+ && !valid_new_delete_pair_p (def_stmt, stmt))
+ mark_operand_necessary (gimple_call_arg (stmt, 0));
+
+	      /* Delete operators can have alignment and (or) size as
+		 their next arguments.  When such an argument is an
+		 SSA_NAME, it must be marked as necessary.  Similarly
+		 for GOMP_free.  */
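+	      /* E.g. for a sized delete
+		     operator delete (ptr_3, _4);
+		 the size operand _4 must not lose its defining
+		 statement.  */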
+ if (gimple_call_num_args (stmt) >= 2)
+ for (unsigned i = 1; i < gimple_call_num_args (stmt);
+ i++)
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if (TREE_CODE (arg) == SSA_NAME)
+ mark_operand_necessary (arg);
+ }
+
+ continue;
}
}
if (!use)
continue;
+      /* No need to search for vdefs if we intrinsically keep them all.  */
+ if (keep_all_vdefs_p ())
+ continue;
+
/* If we dropped to simple mode make all immediately
reachable definitions necessary. */
if (chain_ovfl)
in 1). By keeping a global visited bitmap for references
we walk for 2) we avoid quadratic behavior for those. */
- if (is_gimple_call (stmt))
+ if (gcall *call = dyn_cast <gcall *> (stmt))
{
- tree callee = gimple_call_fndecl (stmt);
+ tree callee = gimple_call_fndecl (call);
unsigned i;
/* Calls to functions that are merely acting as barriers
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
- || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
- || (DECL_FUNCTION_CODE (callee)
- == BUILT_IN_ALLOCA_WITH_ALIGN)
+ || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
continue;
+ if (callee != NULL_TREE
+ && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
+ || DECL_IS_OPERATOR_DELETE_P (callee))
+ && gimple_call_from_new_or_delete (call))
+ continue;
+
/* Calls implicitly load from memory, their arguments
in addition may explicitly perform memory loads. */
- mark_all_reaching_defs_necessary (stmt);
- for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ mark_all_reaching_defs_necessary (call);
+ for (i = 0; i < gimple_call_num_args (call); ++i)
{
- tree arg = gimple_call_arg (stmt, i);
+ tree arg = gimple_call_arg (call, i);
if (TREE_CODE (arg) == SSA_NAME
|| is_gimple_min_invariant (arg))
continue;
if (TREE_CODE (arg) == WITH_SIZE_EXPR)
arg = TREE_OPERAND (arg, 0);
if (!ref_may_be_aliased (arg))
- mark_aliased_reaching_defs_necessary (stmt, arg);
+ mark_aliased_reaching_defs_necessary (call, arg);
}
}
else if (gimple_assign_single_p (stmt))
return something_changed;
}
-/* Forward edge E to respective POST_DOM_BB and update PHIs. */
-
-static edge
-forward_edge_to_pdom (edge e, basic_block post_dom_bb)
-{
- gphi_iterator gsi;
- edge e2 = NULL;
- edge_iterator ei;
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
- e->dest->index, post_dom_bb->index);
-
- e2 = redirect_edge_and_branch (e, post_dom_bb);
- cfg_altered = true;
-
- /* If edge was already around, no updating is necessary. */
- if (e2 != e)
- return e2;
-
- if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
- {
- /* We are sure that for every live PHI we are seeing control dependent BB.
- This means that we can pick any edge to duplicate PHI args from. */
- FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
- if (e2 != e)
- break;
- for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
- {
- gphi *phi = gsi.phi ();
- tree op;
- source_location locus;
-
- /* PHIs for virtuals have no control dependency relation on them.
- We are lost here and must force renaming of the symbol. */
- if (virtual_operand_p (gimple_phi_result (phi)))
- {
- mark_virtual_phi_result_for_renaming (phi);
- remove_phi_node (&gsi, true);
- continue;
- }
-
- /* Dead PHI do not imply control dependency. */
- if (!gimple_plf (phi, STMT_NECESSARY))
- {
- gsi_next (&gsi);
- continue;
- }
-
- op = gimple_phi_arg_def (phi, e2->dest_idx);
- locus = gimple_phi_arg_location (phi, e2->dest_idx);
- add_phi_arg (phi, op, e, locus);
- /* The resulting PHI if not dead can only be degenerate. */
- gcc_assert (degenerate_phi_p (phi));
- gsi_next (&gsi);
- }
- }
- return e;
-}
/* Remove dead statement pointed to by iterator I. Receives the basic block BB
containing I so that we don't have to look it up. */
static void
-remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
+remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
+ vec<edge> &to_remove_edges)
{
gimple *stmt = gsi_stmt (*i);
stats.removed++;
/* If we have determined that a conditional branch statement contributes
- nothing to the program, then we not only remove it, but we also change
- the flow graph so that the current block will simply fall-thru to its
- immediate post-dominator. The blocks we are circumventing will be
- removed by cleanup_tree_cfg if this change in the flow graph makes them
- unreachable. */
+   nothing to the program, then we not only remove it, but we also need to
+   update the CFG.  We can choose any of the edges out of BB as long as we
+   are sure not to close an infinite loop.  This is done by always choosing
+   the edge closer to exit in inverted_post_order_compute order.  */
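+
+  /* For instance, for a dead
+	 if (x_1 > 0) goto <bb 3>; else goto <bb 4>;
+     we keep one outgoing edge, turn it into a fallthru, and queue the
+     remaining edges in TO_REMOVE_EDGES.  */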
if (is_ctrl_stmt (stmt))
{
- basic_block post_dom_bb;
- edge e, e2;
edge_iterator ei;
-
- post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
-
- e = find_edge (bb, post_dom_bb);
-
- /* If edge is already there, try to use it. This avoids need to update
- PHI nodes. Also watch for cases where post dominator does not exists
- or is exit block. These can happen for infinite loops as we create
- fake edges in the dominator tree. */
- if (e)
- ;
- else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
- e = EDGE_SUCC (bb, 0);
- else
- e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
+ edge e = NULL, e2;
+
+ /* See if there is only one non-abnormal edge. */
+ if (single_succ_p (bb))
+ e = single_succ_edge (bb);
+      /* Otherwise choose one that is closer to a BB with a live statement
+	 in it.  To be able to choose one, we compute an inverted post order
+	 starting from all BBs with live statements.  */
+ if (!e)
+ {
+ if (!bb_postorder)
+ {
+ auto_vec<int, 20> postorder;
+ inverted_post_order_compute (&postorder,
+ &bb_contains_live_stmts);
+ bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
+ for (unsigned int i = 0; i < postorder.length (); ++i)
+ bb_postorder[postorder[i]] = i;
+ }
+ FOR_EACH_EDGE (e2, ei, bb->succs)
+ if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
+ || bb_postorder [e->dest->index]
+ < bb_postorder [e2->dest->index])
+ e = e2;
+ }
gcc_assert (e);
- e->probability = REG_BR_PROB_BASE;
- e->count = bb->count;
+ e->probability = profile_probability::always ();
/* The edge is no longer associated with a conditional, so it does
- not have TRUE/FALSE flags. */
- e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
+ not have TRUE/FALSE flags.
+ We are also safe to drop EH/ABNORMAL flags and turn them into
+ normal control flow, because we know that all the destinations (including
+ those odd edges) are equivalent for program execution. */
+ e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
/* The lone outgoing edge from BB will be a fallthru edge. */
e->flags |= EDGE_FALLTHRU;
/* Remove the remaining outgoing edges. */
- for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
+ FOR_EACH_EDGE (e2, ei, bb->succs)
if (e != e2)
{
- cfg_altered = true;
/* If we made a BB unconditionally exit a loop or removed
an entry into an irreducible region, then this transform
alters the set of BBs in the loop. Schedule a fixup. */
if (loop_exit_edge_p (bb->loop_father, e)
|| (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
loops_state_set (LOOPS_NEED_FIXUP);
- remove_edge (e2);
+ to_remove_edges.safe_push (e2);
}
- else
- ei_next (&ei);
}
/* If this is a store into a variable that is being optimized away,
add a debug bind stmt if possible. */
- if (MAY_HAVE_DEBUG_STMTS
+ if (MAY_HAVE_DEBUG_BIND_STMTS
&& gimple_assign_single_p (stmt)
&& is_gimple_val (gimple_assign_rhs1 (stmt)))
{
tree lhs = gimple_assign_lhs (stmt);
- if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
+ if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
&& !DECL_IGNORED_P (lhs)
&& is_gimple_reg_type (TREE_TYPE (lhs))
&& !is_global_var (lhs)
gimple *stmt;
tree call;
vec<basic_block> h;
+ auto_vec<edge> to_remove_edges;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\nEliminating unnecessary statements:\n");
bb = h.pop ();
/* Remove dead statements. */
+ auto_bitmap debug_seen;
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
{
stmt = gsi_stmt (gsi);
defining statement of its argument is not necessary
(and thus is getting removed). */
if (gimple_plf (stmt, STMT_NECESSARY)
- && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
+ && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
+ || (is_gimple_call (stmt)
+ && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
+ && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
{
tree ptr = gimple_call_arg (stmt, 0);
if (TREE_CODE (ptr) == SSA_NAME)
&& !gimple_plf (def_stmt, STMT_NECESSARY))
gimple_set_plf (stmt, STMT_NECESSARY, false);
}
- /* We did not propagate necessity for free calls fed
- by allocation function to allow unnecessary
- alloc-free sequence elimination. For instrumented
- calls it also means we did not mark bounds producer
- as necessary and it is time to do it in case free
- call is not removed. */
- if (gimple_call_with_bounds_p (stmt))
- {
- gimple *bounds_def_stmt;
- tree bounds = gimple_call_arg (stmt, 1);
- gcc_assert (TREE_CODE (bounds) == SSA_NAME);
- bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
- if (bounds_def_stmt
- && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
- gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
- gimple_plf (stmt, STMT_NECESSARY));
- }
}
/* If GSI is not necessary then remove it. */
}
}
if (!dead)
- continue;
+ {
+ bitmap_clear (debug_seen);
+ continue;
+ }
}
if (!is_gimple_debug (stmt))
something_changed = true;
- remove_dead_stmt (&gsi, bb);
+ remove_dead_stmt (&gsi, bb, to_remove_edges);
+ continue;
}
else if (is_gimple_call (stmt))
{
did not mark as necessary, it will confuse the
special logic we apply to malloc/free pair removal. */
&& (!(call = gimple_call_fndecl (stmt))
- || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
- || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
- && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
- && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
- && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
- && (DECL_FUNCTION_CODE (call)
- != BUILT_IN_ALLOCA_WITH_ALIGN)))
- /* Avoid doing so for bndret calls for the same reason. */
- && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
+ || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
+ || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
+ && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
+ && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
+ && !ALLOCA_FUNCTION_CODE_P
+ (DECL_FUNCTION_CODE (call))))
+ && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
{
something_changed = true;
if (dump_file && (dump_flags & TDF_DETAILS))
update_stmt (stmt);
release_ssa_name (name);
- /* GOMP_SIMD_LANE without lhs is not needed. */
- if (gimple_call_internal_p (stmt)
- && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
- remove_dead_stmt (&gsi, bb);
+	      /* A GOMP_SIMD_LANE (unless in its three-argument form)
+		 or ASAN_POISON without lhs is not needed.  */
+ if (gimple_call_internal_p (stmt))
+ switch (gimple_call_internal_fn (stmt))
+ {
+ case IFN_GOMP_SIMD_LANE:
+ if (gimple_call_num_args (stmt) >= 3
+ && !integer_nonzerop (gimple_call_arg (stmt, 2)))
+ break;
+ /* FALLTHRU */
+ case IFN_ASAN_POISON:
+ remove_dead_stmt (&gsi, bb, to_remove_edges);
+ break;
+ default:
+ break;
+ }
}
else if (gimple_call_internal_p (stmt))
switch (gimple_call_internal_fn (stmt))
break;
}
}
+ else if (gimple_debug_bind_p (stmt))
+ {
+ /* We are only keeping the last debug-bind of a
+ non-DEBUG_EXPR_DECL variable in a series of
+ debug-bind stmts. */
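+	      /* E.g. of two adjacent binds of the same user variable
+		     # DEBUG x => a_1
+		     # DEBUG x => b_2
+		 only the later one is observable, so the earlier can go.  */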
+ tree var = gimple_debug_bind_get_var (stmt);
+ if (TREE_CODE (var) != DEBUG_EXPR_DECL
+ && !bitmap_set_bit (debug_seen, DECL_UID (var)))
+ remove_dead_stmt (&gsi, bb, to_remove_edges);
+ continue;
+ }
+ bitmap_clear (debug_seen);
}
+
+ /* Remove dead PHI nodes. */
+ something_changed |= remove_dead_phis (bb);
}
h.release ();
/* Since we don't track liveness of virtual PHI nodes, it is possible that we
rendered some PHI nodes unreachable while they are still in use.
Mark them for renaming. */
- if (cfg_altered)
+ if (!to_remove_edges.is_empty ())
{
basic_block prev_bb;
+      /* Remove edges.  We've delayed this so as not to generate bogus
+	 debug stmts during PHI node removal.  */
+ for (unsigned i = 0; i < to_remove_edges.length (); ++i)
+ remove_edge (to_remove_edges[i]);
+ cfg_altered = true;
+
find_unreachable_blocks ();
/* Delete all unreachable basic blocks in reverse dominator order. */
dominate others. Walking backwards, this should
be the common case. ??? Do we need to recompute
dominators because of cfg_altered? */
- if (!MAY_HAVE_DEBUG_STMTS
- || !first_dom_son (CDI_DOMINATORS, bb))
+ if (!first_dom_son (CDI_DOMINATORS, bb))
delete_basic_block (bb);
else
{
}
}
}
- FOR_EACH_BB_FN (bb, cfun)
- {
- /* Remove dead PHI nodes. */
- something_changed |= remove_dead_phis (bb);
- }
+
+ if (bb_postorder)
+ free (bb_postorder);
+ bb_postorder = NULL;
return something_changed;
}
/* Preheaders are needed for SCEV to work.
   Simple latches and recorded exits improve the chances that a loop
   will be proved to be finite in testcases such as loop-15.c and
   loop-24.c.  */
- if (aggressive)
- loop_optimizer_init (LOOPS_NORMAL
- | LOOPS_HAVE_RECORDED_EXITS);
+ bool in_loop_pipeline = scev_initialized_p ();
+ if (aggressive && ! in_loop_pipeline)
+ {
+ scev_initialize ();
+ loop_optimizer_init (LOOPS_NORMAL
+ | LOOPS_HAVE_RECORDED_EXITS);
+ }
tree_dce_init (aggressive);
{
/* Compute control dependence. */
calculate_dominance_info (CDI_POST_DOMINATORS);
- cd = new control_dependences (create_edge_list ());
+ cd = new control_dependences ();
visited_control_parents =
sbitmap_alloc (last_basic_block_for_fn (cfun));
find_obviously_necessary_stmts (aggressive);
- if (aggressive)
- loop_optimizer_finalize ();
+ if (aggressive && ! in_loop_pipeline)
+ {
+ loop_optimizer_finalize ();
+ scev_finalize ();
+ }
longest_chain = 0;
total_chain = 0;
if (something_changed)
{
free_numbers_of_iterations_estimates (cfun);
- if (scev_initialized_p ())
+ if (in_loop_pipeline)
scev_reset ();
return TODO_update_ssa | TODO_cleanup_cfg;
}
{
return new pass_cd_dce (ctxt);
}
+
+
+/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
+   is consumed by this function.  The function has linear complexity in
+   the number of dead stmts, with a constant factor proportional to the
+   average number of SSA use operands per stmt.  */
+
+void
+simple_dce_from_worklist (bitmap worklist)
+{
+ while (! bitmap_empty_p (worklist))
+ {
+ /* Pop item. */
+ unsigned i = bitmap_first_set_bit (worklist);
+ bitmap_clear_bit (worklist, i);
+
+ tree def = ssa_name (i);
+ /* Removed by somebody else or still in use. */
+ if (! def || ! has_zero_uses (def))
+ continue;
+
+ gimple *t = SSA_NAME_DEF_STMT (def);
+ if (gimple_has_side_effects (t))
+ continue;
+
+ /* Add uses to the worklist. */
+ ssa_op_iter iter;
+ use_operand_p use_p;
+ FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
+ {
+ tree use = USE_FROM_PTR (use_p);
+ if (TREE_CODE (use) == SSA_NAME
+ && ! SSA_NAME_IS_DEFAULT_DEF (use))
+ bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
+ }
+
+ /* Remove stmt. */
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Removing dead stmt:");
+ print_gimple_stmt (dump_file, t, 0);
+ }
+ gimple_stmt_iterator gsi = gsi_for_stmt (t);
+ if (gimple_code (t) == GIMPLE_PHI)
+ remove_phi_node (&gsi, true);
+ else
+ {
+ gsi_remove (&gsi, true);
+ release_defs (t);
+ }
+ }
+}
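+
+/* A minimal usage sketch (hypothetical caller): after all uses of a
+   definition DEF have been replaced, a pass can queue it and let this
+   function clean up whatever becomes dead:
+
+       auto_bitmap worklist;
+       bitmap_set_bit (worklist, SSA_NAME_VERSION (def));
+       simple_dce_from_worklist (worklist);  */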