/* Tail merging for gimple.
- Copyright (C) 2011-2015 Free Software Foundation, Inc.
+ Copyright (C) 2011-2021 Free Software Foundation, Inc.
Contributed by Tom de Vries (tom@codesourcery.com)
This file is part of GCC.
#include "system.h"
#include "coretypes.h"
#include "backend.h"
-#include "cfghooks.h"
#include "tree.h"
#include "gimple.h"
-#include "hard-reg-set.h"
+#include "cfghooks.h"
+#include "tree-pass.h"
#include "ssa.h"
-#include "alias.h"
#include "fold-const.h"
-#include "stor-layout.h"
#include "trans-mem.h"
-#include "tm_p.h"
#include "cfganal.h"
#include "cfgcleanup.h"
-#include "flags.h"
-#include "internal-fn.h"
-#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
-#include "params.h"
-#include "gimple-pretty-print.h"
#include "tree-ssa-sccvn.h"
-#include "tree-dump.h"
#include "cfgloop.h"
-#include "tree-pass.h"
-#include "trans-mem.h"
+#include "tree-eh.h"
+#include "tree-cfgcleanup.h"
+
+const int ignore_edge_flags = EDGE_DFS_BACK | EDGE_EXECUTABLE;
/* Describes a group of bbs with the same successors. The successor bbs are
cached in succs, and the successor edge flags are cached in succ_flags.
Additionally, the hash value for the struct is cached in hashval, and
in_worklist indicates whether it's currently part of worklist. */
-struct same_succ_def : pointer_hash <same_succ_def>
+struct same_succ : pointer_hash <same_succ>
{
/* The bbs that have the same successor bbs. */
bitmap bbs;
hashval_t hashval;
/* hash_table support. */
- static inline hashval_t hash (const same_succ_def *);
- static int equal (const same_succ_def *, const same_succ_def *);
- static void remove (same_succ_def *);
+ static inline hashval_t hash (const same_succ *);
+ static int equal (const same_succ *, const same_succ *);
+ static void remove (same_succ *);
};
-typedef struct same_succ_def *same_succ;
-typedef const struct same_succ_def *const_same_succ;
/* hash routine for hash_table support, returns hashval of E. */
inline hashval_t
-same_succ_def::hash (const same_succ_def *e)
+same_succ::hash (const same_succ *e)
{
return e->hashval;
}
/* A group of bbs where 1 bb from bbs can replace the other bbs. */
-struct bb_cluster_def
+struct bb_cluster
{
/* The bbs in the cluster. */
bitmap bbs;
/* The bb to replace the cluster with. */
basic_block rep_bb;
};
-typedef struct bb_cluster_def *bb_cluster;
-typedef const struct bb_cluster_def *const_bb_cluster;
/* Per bb-info. */
/* The number of non-debug statements in the bb. */
int size;
/* The same_succ that this bb is a member of. */
- same_succ bb_same_succ;
+ same_succ *bb_same_succ;
/* The cluster that this bb is a member of. */
- bb_cluster cluster;
+ bb_cluster *cluster;
/* The vop state at the exit of a bb. This is shortlived data, used to
communicate data between update_block_by and update_vuses. */
tree vop_at_exit;
#define BB_VOP_AT_EXIT(bb) (((struct aux_bb_info *)bb->aux)->vop_at_exit)
#define BB_DEP_BB(bb) (((struct aux_bb_info *)bb->aux)->dep_bb)
+/* Valueization helper querying the VN lattice: return the value number
+   recorded for NAME when one is available, otherwise return NAME itself.  */
+
+static tree
+tail_merge_valueize (tree name)
+{
+ if (TREE_CODE (name) == SSA_NAME
+ && has_VN_INFO (name))
+ {
+ tree tem = VN_INFO (name)->valnum;
+ /* VN_TOP marks a not-yet-computed lattice value; don't substitute it.  */
+ if (tem != VN_TOP)
+ return tem;
+ }
+ return name;
+}
+
/* Returns true if the only effect a statement STMT has, is to define locally
used SSA_NAMEs. */
if (gimple_vdef (stmt) != NULL_TREE
|| gimple_has_side_effects (stmt)
|| gimple_could_trap_p_1 (stmt, false, false)
- || gimple_vuse (stmt) != NULL_TREE)
+ || gimple_vuse (stmt) != NULL_TREE
+ /* Copied from tree-ssa-ifcombine.c:bb_no_side_effects_p():
+ const calls don't match any of the above, yet they could
+ still have some side-effects - they could contain
+ gimple_could_trap_p statements, like floating point
+ exceptions or integer division by zero. See PR70586.
+ FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
+ should handle this. */
+ || is_gimple_call (stmt))
return false;
def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
if (val1 == val2)
return true;
- if (vn_valueize (val1) != vn_valueize (val2))
+ if (tail_merge_valueize (val1) != tail_merge_valueize (val2))
return false;
return ((TREE_CODE (val1) == SSA_NAME || CONSTANT_CLASS_P (val1))
/* Prints E to FILE. */
static void
-same_succ_print (FILE *file, const same_succ e)
+same_succ_print (FILE *file, const same_succ *e)
{
unsigned int i;
bitmap_print (file, e->bbs, "bbs:", "\n");
/* Prints same_succ VE to VFILE. */
inline int
-ssa_same_succ_print_traverse (same_succ *pe, FILE *file)
+ssa_same_succ_print_traverse (same_succ **pe, FILE *file)
{
- const same_succ e = *pe;
+ const same_succ *e = *pe;
same_succ_print (file, e);
return 1;
}
/* Calculates hash value for same_succ VE. */
static hashval_t
-same_succ_hash (const_same_succ e)
+same_succ_hash (const same_succ *e)
{
inchash::hash hstate (bitmap_hash (e->succs));
int flags;
for (i = 0; i < gimple_call_num_args (stmt); i++)
{
arg = gimple_call_arg (stmt, i);
- arg = vn_valueize (arg);
+ arg = tail_merge_valueize (arg);
inchash::add_expr (arg, hstate);
}
}
hstate.add_int (size);
BB_SIZE (bb) = size;
+ hstate.add_int (bb->loop_father->num);
+
for (i = 0; i < e->succ_flags.length (); ++i)
{
flags = e->succ_flags[i];
the other edge flags. */
static bool
-inverse_flags (const_same_succ e1, const_same_succ e2)
+inverse_flags (const same_succ *e1, const same_succ *e2)
{
int f1a, f1b, f2a, f2b;
int mask = ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
/* Compares SAME_SUCCs E1 and E2. */
int
-same_succ_def::equal (const same_succ_def *e1, const same_succ_def *e2)
+same_succ::equal (const same_succ *e1, const same_succ *e2)
{
unsigned int i, first1, first2;
gimple_stmt_iterator gsi1, gsi2;
gimple *s1, *s2;
basic_block bb1, bb2;
+ if (e1 == e2)
+ return 1;
+
if (e1->hashval != e2->hashval)
return 0;
if (BB_SIZE (bb1) != BB_SIZE (bb2))
return 0;
+ if (bb1->loop_father != bb2->loop_father)
+ return 0;
+
gsi1 = gsi_start_nondebug_bb (bb1);
gsi2 = gsi_start_nondebug_bb (bb2);
gsi_advance_fw_nondebug_nonlocal (&gsi1);
/* Alloc and init a new SAME_SUCC. */
-static same_succ
+static same_succ *
same_succ_alloc (void)
{
- same_succ same = XNEW (struct same_succ_def);
+ same_succ *same = XNEW (struct same_succ);
same->bbs = BITMAP_ALLOC (NULL);
same->succs = BITMAP_ALLOC (NULL);
/* Delete same_succ E. */
void
-same_succ_def::remove (same_succ e)
+same_succ::remove (same_succ *e)
{
BITMAP_FREE (e->bbs);
BITMAP_FREE (e->succs);
/* Reset same_succ SAME. */
static void
-same_succ_reset (same_succ same)
+same_succ_reset (same_succ *same)
{
bitmap_clear (same->bbs);
bitmap_clear (same->succs);
same->succ_flags.truncate (0);
}
-static hash_table<same_succ_def> *same_succ_htab;
+static hash_table<same_succ> *same_succ_htab;
/* Array that is used to store the edge flags for a successor. */
/* Vector of bbs to process. */
-static vec<same_succ> worklist;
+static vec<same_succ *> worklist;
/* Prints worklist to FILE. */
/* Adds SAME to worklist. */
static void
-add_to_worklist (same_succ same)
+add_to_worklist (same_succ *same)
{
if (same->in_worklist)
return;
/* Add BB to same_succ_htab. */
static void
-find_same_succ_bb (basic_block bb, same_succ *same_p)
+find_same_succ_bb (basic_block bb, same_succ **same_p)
{
unsigned int j;
bitmap_iterator bj;
- same_succ same = *same_p;
- same_succ *slot;
+ same_succ *same = *same_p;
+ same_succ **slot;
edge_iterator ei;
edge e;
- if (bb == NULL
- /* Be conservative with loop structure. It's not evident that this test
- is sufficient. Before tail-merge, we've just called
- loop_optimizer_finalize, and LOOPS_MAY_HAVE_MULTIPLE_LATCHES is now
- set, so there's no guarantee that the loop->latch value is still valid.
- But we assume that, since we've forced LOOPS_HAVE_SIMPLE_LATCHES at the
- start of pre, we've kept that property intact throughout pre, and are
- keeping it throughout tail-merge using this test. */
- || bb->loop_father->latch == bb)
+ if (bb == NULL)
return;
bitmap_set_bit (same->bbs, bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
{
int index = e->dest->index;
bitmap_set_bit (same->succs, index);
- same_succ_edge_flags[index] = e->flags;
+ same_succ_edge_flags[index] = (e->flags & ~ignore_edge_flags);
}
EXECUTE_IF_SET_IN_BITMAP (same->succs, 0, j, bj)
same->succ_flags.safe_push (same_succ_edge_flags[j]);
static void
find_same_succ (void)
{
- same_succ same = same_succ_alloc ();
+ same_succ *same = same_succ_alloc ();
basic_block bb;
FOR_EACH_BB_FN (bb, cfun)
same = same_succ_alloc ();
}
- same_succ_def::remove (same);
+ same_succ::remove (same);
}
/* Initializes worklist administration. */
init_worklist (void)
{
alloc_aux_for_blocks (sizeof (struct aux_bb_info));
- same_succ_htab = new hash_table<same_succ_def> (n_basic_blocks_for_fn (cfun));
+ same_succ_htab = new hash_table<same_succ> (n_basic_blocks_for_fn (cfun));
same_succ_edge_flags = XCNEWVEC (int, last_basic_block_for_fn (cfun));
deleted_bbs = BITMAP_ALLOC (NULL);
deleted_bb_preds = BITMAP_ALLOC (NULL);
static void
same_succ_flush_bb (basic_block bb)
{
- same_succ same = BB_SAME_SUCC (bb);
+ same_succ *same = BB_SAME_SUCC (bb);
+ if (! same)
+ return;
+
BB_SAME_SUCC (bb) = NULL;
if (bitmap_single_bit_set_p (same->bbs))
same_succ_htab->remove_elt_with_hash (same, same->hashval);
unsigned int i;
bitmap_iterator bi;
basic_block bb;
- same_succ same;
+ same_succ *same;
bitmap_and_compl_into (deleted_bb_preds, deleted_bbs);
bitmap_clear (deleted_bbs);
if (same == NULL)
same = same_succ_alloc ();
}
- same_succ_def::remove (same);
+ same_succ::remove (same);
bitmap_clear (deleted_bb_preds);
}
/* Prints cluster C to FILE. */
static void
-print_cluster (FILE *file, bb_cluster c)
+print_cluster (FILE *file, bb_cluster *c)
{
if (c == NULL)
return;
/* Prints cluster C to stderr. */
-extern void debug_cluster (bb_cluster);
+extern void debug_cluster (bb_cluster *);
DEBUG_FUNCTION void
-debug_cluster (bb_cluster c)
+debug_cluster (bb_cluster *c)
{
print_cluster (stderr, c);
}
/* Update C->rep_bb, given that BB is added to the cluster. */
static void
-update_rep_bb (bb_cluster c, basic_block bb)
+update_rep_bb (bb_cluster *c, basic_block bb)
{
/* Initial. */
if (c->rep_bb == NULL)
/* Add BB to cluster C. Sets BB in C->bbs, and preds of BB in C->preds. */
static void
-add_bb_to_cluster (bb_cluster c, basic_block bb)
+add_bb_to_cluster (bb_cluster *c, basic_block bb)
{
edge e;
edge_iterator ei;
/* Allocate and init new cluster. */
-static bb_cluster
+static bb_cluster *
new_cluster (void)
{
- bb_cluster c;
- c = XCNEW (struct bb_cluster_def);
+ bb_cluster *c;
+ c = XCNEW (bb_cluster);
c->bbs = BITMAP_ALLOC (NULL);
c->preds = BITMAP_ALLOC (NULL);
c->rep_bb = NULL;
/* Delete clusters. */
static void
-delete_cluster (bb_cluster c)
+delete_cluster (bb_cluster *c)
{
if (c == NULL)
return;
/* Array that contains all clusters. */
-static vec<bb_cluster> all_clusters;
+static vec<bb_cluster *> all_clusters;
/* Allocate all cluster vectors. */
/* Merge cluster C2 into C1. */
static void
-merge_clusters (bb_cluster c1, bb_cluster c2)
+merge_clusters (bb_cluster *c1, bb_cluster *c2)
{
bitmap_ior_into (c1->bbs, c2->bbs);
bitmap_ior_into (c1->preds, c2->preds);
set_cluster (basic_block bb1, basic_block bb2)
{
basic_block merge_bb, other_bb;
- bb_cluster merge, old, c;
+ bb_cluster *merge, *old, *c;
if (BB_CLUSTER (bb1) == NULL && BB_CLUSTER (bb2) == NULL)
{
|| t2 == NULL_TREE)
return false;
- if (operand_equal_p (t1, t2, 0))
+ if (operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS))
return true;
return gvn_uses_equal (t1, t2);
gimple_bb (s2) are members of SAME_SUCC. */
static bool
-gimple_equal_p (same_succ same_succ, gimple *s1, gimple *s2)
+gimple_equal_p (same_succ *same_succ, gimple *s1, gimple *s2)
{
unsigned int i;
tree lhs1, lhs2;
if (lhs1 == NULL_TREE || lhs2 == NULL_TREE)
return false;
if (TREE_CODE (lhs1) == SSA_NAME && TREE_CODE (lhs2) == SSA_NAME)
- return vn_valueize (lhs1) == vn_valueize (lhs2);
+ return tail_merge_valueize (lhs1) == tail_merge_valueize (lhs2);
return operand_equal_p (lhs1, lhs2, 0);
case GIMPLE_ASSIGN:
if (!gimple_operand_equal_value_p (t1, t2))
return false;
- code1 = gimple_expr_code (s1);
- code2 = gimple_expr_code (s2);
+ code1 = gimple_cond_code (s1);
+ code2 = gimple_cond_code (s2);
inv_cond = (bitmap_bit_p (same_succ->inverse, bb1->index)
!= bitmap_bit_p (same_succ->inverse, bb2->index));
if (inv_cond)
}
}
+/* Return true if equal (in the sense of gimple_equal_p) statements STMT1 and
+   STMT2 are allowed to be merged.  */
+
+static bool
+merge_stmts_p (gimple *stmt1, gimple *stmt2)
+{
+ /* What could be better than this here is to blacklist the bb
+ containing the stmt, when encountering the stmt f.i. in
+ same_succ_hash.  NOTE: only STMT1 is checked; the caller guarantees
+ STMT2 is gimple_equal_p-equal to STMT1 (see function comment).  */
+ if (is_tm_ending (stmt1))
+ return false;
+
+ /* Verify EH landing pads: statements with different landing pads must
+ not be merged, as that would change exception-handling behavior.  */
+ if (lookup_stmt_eh_lp_fn (cfun, stmt1) != lookup_stmt_eh_lp_fn (cfun, stmt2))
+ return false;
+
+ if (is_gimple_call (stmt1)
+ && gimple_call_internal_p (stmt1))
+ switch (gimple_call_internal_fn (stmt1))
+ {
+ case IFN_UBSAN_NULL:
+ case IFN_UBSAN_BOUNDS:
+ case IFN_UBSAN_VPTR:
+ case IFN_UBSAN_CHECK_ADD:
+ case IFN_UBSAN_CHECK_SUB:
+ case IFN_UBSAN_CHECK_MUL:
+ case IFN_UBSAN_OBJECT_SIZE:
+ case IFN_UBSAN_PTR:
+ case IFN_ASAN_CHECK:
+ /* For these internal functions, gimple_location is an implicit
+ parameter, which will be used explicitly after expansion.
+ Merging these statements may cause confusing line numbers in
+ sanitizer messages. */
+ return gimple_location (stmt1) == gimple_location (stmt2);
+ default:
+ break;
+ }
+
+ /* No blocking condition found; the statements may be merged.  */
+ return true;
+}
+
/* Determines whether BB1 and BB2 (members of same_succ) are duplicates. If so,
clusters them. */
static void
-find_duplicate (same_succ same_succ, basic_block bb1, basic_block bb2)
+find_duplicate (same_succ *same_succ, basic_block bb1, basic_block bb2)
{
gimple_stmt_iterator gsi1 = gsi_last_nondebug_bb (bb1);
gimple_stmt_iterator gsi2 = gsi_last_nondebug_bb (bb2);
gimple *stmt1 = gsi_stmt (gsi1);
gimple *stmt2 = gsi_stmt (gsi2);
- /* What could be better than this here is to blacklist the bb
- containing the stmt, when encountering the stmt f.i. in
- same_succ_hash. */
- if (is_tm_ending (stmt1)
- || is_tm_ending (stmt2))
- return;
+ if (gimple_code (stmt1) == GIMPLE_LABEL
+ && gimple_code (stmt2) == GIMPLE_LABEL)
+ break;
if (!gimple_equal_p (same_succ, stmt1, stmt2))
return;
+ if (!merge_stmts_p (stmt1, stmt2))
+ return;
+
gsi_prev_nondebug (&gsi1);
gsi_prev_nondebug (&gsi2);
gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);
}
+ while (!gsi_end_p (gsi1) && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
+ {
+ tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi1)));
+ if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
+ return;
+ gsi_prev (&gsi1);
+ }
+ while (!gsi_end_p (gsi2) && gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL)
+ {
+ tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)));
+ if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
+ return;
+ gsi_prev (&gsi2);
+ }
if (!(gsi_end_p (gsi1) && gsi_end_p (gsi2)))
return;
phi alternatives for BB1 and BB2 are equal. */
static bool
-same_phi_alternatives (same_succ same_succ, basic_block bb1, basic_block bb2)
+same_phi_alternatives (same_succ *same_succ, basic_block bb1, basic_block bb2)
{
unsigned int s;
bitmap_iterator bs;
basic_block cd, dep_bb = BB_DEP_BB (to);
edge_iterator ei;
edge e;
- bitmap from_preds = BITMAP_ALLOC (NULL);
if (dep_bb == NULL)
return true;
+ bitmap from_preds = BITMAP_ALLOC (NULL);
FOR_EACH_EDGE (e, ei, from->preds)
bitmap_set_bit (from_preds, e->src->index);
cd = nearest_common_dominator_for_set (CDI_DOMINATORS, from_preds);
/* Within SAME_SUCC->bbs, find clusters of bbs which can be merged. */
static void
-find_clusters_1 (same_succ same_succ)
+find_clusters_1 (same_succ *same_succ)
{
basic_block bb1, bb2;
unsigned int i, j;
bitmap_iterator bi, bj;
int nr_comparisons;
- int max_comparisons = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_COMPARISONS);
+ int max_comparisons = param_max_tail_merge_comparisons;
EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, 0, i, bi)
{
/* TODO: handle blocks with phi-nodes. We'll have to find corresponding
phi-nodes in bb1 and bb2, with the same alternatives for the same
preds. */
- if (bb_has_non_vop_phi (bb1))
+ if (bb_has_non_vop_phi (bb1) || bb_has_eh_pred (bb1)
+ || bb_has_abnormal_pred (bb1))
continue;
nr_comparisons = 0;
{
bb2 = BASIC_BLOCK_FOR_FN (cfun, j);
- if (bb_has_non_vop_phi (bb2))
+ if (bb_has_non_vop_phi (bb2) || bb_has_eh_pred (bb2)
+ || bb_has_abnormal_pred (bb2))
continue;
if (BB_CLUSTER (bb1) != NULL && BB_CLUSTER (bb1) == BB_CLUSTER (bb2))
continue;
- /* Limit quadratic behaviour. */
+ /* Limit quadratic behavior. */
nr_comparisons++;
if (nr_comparisons > max_comparisons)
break;
static void
find_clusters (void)
{
- same_succ same;
+ same_succ *same;
while (!worklist.is_empty ())
{
replace_block_by (basic_block bb1, basic_block bb2)
{
edge pred_edge;
- edge e1, e2;
- edge_iterator ei;
unsigned int i;
gphi *bb2_phi;
pred_edge, UNKNOWN_LOCATION);
}
- bb2->frequency += bb1->frequency;
- if (bb2->frequency > BB_FREQ_MAX)
- bb2->frequency = BB_FREQ_MAX;
+ /* Merge the outgoing edge counts from bb1 onto bb2. */
+ edge e1, e2;
+ edge_iterator ei;
+
+ if (bb2->count.initialized_p ())
+ FOR_EACH_EDGE (e1, ei, bb1->succs)
+ {
+ e2 = find_edge (bb2, e1->dest);
+ gcc_assert (e2);
+
+ /* If the probabilities are the same, we are done.
+ If the counts are nonzero we can distribute accordingly.  In the
+ remaining cases just average the values and hope for the best. */
+ e2->probability = e1->probability.combine_with_count
+ (bb1->count, e2->probability, bb2->count);
+ }
bb2->count += bb1->count;
- /* Merge the outgoing edge counts from bb1 onto bb2. */
- gcov_type out_sum = 0;
- FOR_EACH_EDGE (e1, ei, bb1->succs)
- {
- e2 = find_edge (bb2, e1->dest);
- gcc_assert (e2);
- e2->count += e1->count;
- out_sum += e2->count;
- }
- /* Recompute the edge probabilities from the new merged edge count.
- Use the sum of the new merged edge counts computed above instead
- of bb2's merged count, in case there are profile count insanities
- making the bb count inconsistent with the edge weights. */
- FOR_EACH_EDGE (e2, ei, bb2->succs)
+ /* Move over any user labels from bb1 after the bb2 labels. */
+ gimple_stmt_iterator gsi1 = gsi_start_bb (bb1);
+ if (!gsi_end_p (gsi1) && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
{
- e2->probability = GCOV_COMPUTE_SCALE (e2->count, out_sum);
+ gimple_stmt_iterator gsi2 = gsi_after_labels (bb2);
+ while (!gsi_end_p (gsi1)
+ && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
+ {
+ tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi1)));
+ gcc_assert (!DECL_NONLOCAL (label) && !FORCED_LABEL (label));
+ if (DECL_ARTIFICIAL (label))
+ gsi_next (&gsi1);
+ else
+ gsi_move_before (&gsi1, &gsi2);
+ }
}
+ /* Clear range info from all stmts in BB2 -- this transformation
+ could make them out of date. */
+ reset_flow_sensitive_info_in_bb (bb2);
+
/* Do updates that use bb1, before deleting bb1. */
release_last_vdef (bb1);
same_succ_flush_bb (bb1);
apply_clusters (void)
{
basic_block bb1, bb2;
- bb_cluster c;
+ bb_cluster *c;
unsigned int i, j;
bitmap_iterator bj;
int nr_bbs_removed = 0;
int nr_bbs_removed;
bool loop_entered = false;
int iteration_nr = 0;
- int max_iterations = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_ITERATIONS);
+ int max_iterations = param_max_tail_merge_iterations;
if (!flag_tree_tail_merge
|| max_iterations == 0)
timevar_push (TV_TREE_TAIL_MERGE);
+ /* We enter from PRE which has critical edges split. Elimination
+ does not process trivially dead code so cleanup the CFG if we
+ are told so. And re-split critical edges then. */
+ if (todo & TODO_cleanup_cfg)
+ {
+ cleanup_tree_cfg ();
+ todo &= ~TODO_cleanup_cfg;
+ split_edges_for_insertion ();
+ }
+
if (!dom_info_available_p (CDI_DOMINATORS))
{
/* PRE can leave us with unreachable blocks, remove them now. */
if (nr_bbs_removed_total > 0)
{
- if (MAY_HAVE_DEBUG_STMTS)
+ if (MAY_HAVE_DEBUG_BIND_STMTS)
{
calculate_dominance_info (CDI_DOMINATORS);
update_debug_stmts ();