/* Tail merging for gimple.
- Copyright (C) 2011-2015 Free Software Foundation, Inc.
+ Copyright (C) 2011-2021 Free Software Foundation, Inc.
Contributed by Tom de Vries (tom@codesourcery.com)
This file is part of GCC.
# BLOCK 7 freq:10000
# PRED: 3 [100.0%] (fallthru,exec) 5 [100.0%] (fallthru,exec)
- 6 [100.0%] (fallthru,exec)
+ 6 [100.0%] (fallthru,exec)
# PT = nonlocal null
# ctxD.2601_1 = PHI <0B(3), 0B(5), ctxD.2601_5(D)(6)>
# .MEMD.3923_11 = PHI <.MEMD.3923_15(3), .MEMD.3923_17(5),
- .MEMD.3923_18(6)>
+ .MEMD.3923_18(6)>
# VUSE <.MEMD.3923_11>
return ctxD.2601_1;
# SUCC: EXIT [100.0%]
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
#include "tree.h"
-#include "stor-layout.h"
+#include "gimple.h"
+#include "cfghooks.h"
+#include "tree-pass.h"
+#include "ssa.h"
+#include "fold-const.h"
#include "trans-mem.h"
-#include "inchash.h"
-#include "tm_p.h"
-#include "predict.h"
-#include "vec.h"
-#include "hashtab.h"
-#include "hash-set.h"
-#include "machmode.h"
-#include "hard-reg-set.h"
-#include "input.h"
-#include "function.h"
-#include "dominance.h"
-#include "cfg.h"
#include "cfganal.h"
#include "cfgcleanup.h"
-#include "basic-block.h"
-#include "flags.h"
-#include "hash-table.h"
-#include "tree-ssa-alias.h"
-#include "internal-fn.h"
-#include "tree-eh.h"
-#include "gimple-expr.h"
-#include "is-a.h"
-#include "gimple.h"
#include "gimple-iterator.h"
-#include "gimple-ssa.h"
#include "tree-cfg.h"
-#include "tree-phinodes.h"
-#include "ssa-iterators.h"
#include "tree-into-ssa.h"
-#include "params.h"
-#include "gimple-pretty-print.h"
#include "tree-ssa-sccvn.h"
-#include "tree-dump.h"
#include "cfgloop.h"
-#include "tree-pass.h"
-#include "trans-mem.h"
+#include "tree-eh.h"
+#include "tree-cfgcleanup.h"
+
+const int ignore_edge_flags = EDGE_DFS_BACK | EDGE_EXECUTABLE;
/* Describes a group of bbs with the same successors. The successor bbs are
cached in succs, and the successor edge flags are cached in succ_flags.
Additionally, the hash value for the struct is cached in hashval, and
in_worklist indicates whether it's currently part of worklist. */
-struct same_succ_def
+struct same_succ : pointer_hash <same_succ>
{
/* The bbs that have the same successor bbs. */
bitmap bbs;
hashval_t hashval;
/* hash_table support. */
- typedef same_succ_def value_type;
- typedef same_succ_def compare_type;
- static inline hashval_t hash (const value_type *);
- static int equal (const value_type *, const compare_type *);
- static void remove (value_type *);
+ static inline hashval_t hash (const same_succ *);
+ static int equal (const same_succ *, const same_succ *);
+ static void remove (same_succ *);
};
-typedef struct same_succ_def *same_succ;
-typedef const struct same_succ_def *const_same_succ;
/* hash routine for hash_table support, returns hashval of E. */
inline hashval_t
-same_succ_def::hash (const value_type *e)
+same_succ::hash (const same_succ *e)
{
return e->hashval;
}
/* A group of bbs where 1 bb from bbs can replace the other bbs. */
-struct bb_cluster_def
+struct bb_cluster
{
/* The bbs in the cluster. */
bitmap bbs;
/* The bb to replace the cluster with. */
basic_block rep_bb;
};
-typedef struct bb_cluster_def *bb_cluster;
-typedef const struct bb_cluster_def *const_bb_cluster;
/* Per bb-info. */
/* The number of non-debug statements in the bb. */
int size;
/* The same_succ that this bb is a member of. */
- same_succ bb_same_succ;
+ same_succ *bb_same_succ;
/* The cluster that this bb is a member of. */
- bb_cluster cluster;
+ bb_cluster *cluster;
/* The vop state at the exit of a bb. This is shortlived data, used to
communicate data between update_block_by and update_vuses. */
tree vop_at_exit;
#define BB_VOP_AT_EXIT(bb) (((struct aux_bb_info *)bb->aux)->vop_at_exit)
#define BB_DEP_BB(bb) (((struct aux_bb_info *)bb->aux)->dep_bb)
+/* Valueization helper querying the VN lattice: if NAME is an SSA name
+   for which value-numbering information is available, return its value
+   number, unless that value is VN_TOP (meaning "not yet computed").
+   In all other cases return NAME unchanged.  */
+
+static tree
+tail_merge_valueize (tree name)
+{
+  if (TREE_CODE (name) == SSA_NAME
+      && has_VN_INFO (name))
+    {
+      tree tem = VN_INFO (name)->valnum;
+      if (tem != VN_TOP)
+	return tem;
+    }
+  return name;
+}
+
/* Returns true if the only effect a statement STMT has, is to define locally
used SSA_NAMEs. */
static bool
-stmt_local_def (gimple stmt)
+stmt_local_def (gimple *stmt)
{
basic_block bb, def_bb;
imm_use_iterator iter;
if (gimple_vdef (stmt) != NULL_TREE
|| gimple_has_side_effects (stmt)
|| gimple_could_trap_p_1 (stmt, false, false)
- || gimple_vuse (stmt) != NULL_TREE)
+ || gimple_vuse (stmt) != NULL_TREE
+ /* Copied from tree-ssa-ifcombine.c:bb_no_side_effects_p():
+ const calls don't match any of the above, yet they could
+ still have some side-effects - they could contain
+ gimple_could_trap_p statements, like floating point
+ exceptions or integer division by zero. See PR70586.
+ FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
+ should handle this. */
+ || is_gimple_call (stmt))
return false;
def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
static void
gsi_advance_fw_nondebug_nonlocal (gimple_stmt_iterator *gsi)
{
- gimple stmt;
+ gimple *stmt;
while (true)
{
stmt = gsi_stmt (*gsi);
if (!stmt_local_def (stmt))
return;
- gsi_next_nondebug (gsi);
+ gsi_next_nondebug (gsi);
}
}
if (val1 == val2)
return true;
- if (vn_valueize (val1) != vn_valueize (val2))
+ if (tail_merge_valueize (val1) != tail_merge_valueize (val2))
return false;
return ((TREE_CODE (val1) == SSA_NAME || CONSTANT_CLASS_P (val1))
/* Prints E to FILE. */
static void
-same_succ_print (FILE *file, const same_succ e)
+same_succ_print (FILE *file, const same_succ *e)
{
unsigned int i;
bitmap_print (file, e->bbs, "bbs:", "\n");
/* Prints same_succ VE to VFILE. */
inline int
-ssa_same_succ_print_traverse (same_succ *pe, FILE *file)
+ssa_same_succ_print_traverse (same_succ **pe, FILE *file)
{
- const same_succ e = *pe;
+ const same_succ *e = *pe;
same_succ_print (file, e);
return 1;
}
/* Update BB_DEP_BB, given the dependencies in STMT. */
static void
-stmt_update_dep_bb (gimple stmt)
+stmt_update_dep_bb (gimple *stmt)
{
ssa_op_iter iter;
use_operand_p use;
/* Calculates hash value for same_succ VE. */
static hashval_t
-same_succ_hash (const_same_succ e)
+same_succ_hash (const same_succ *e)
{
inchash::hash hstate (bitmap_hash (e->succs));
int flags;
unsigned int first = bitmap_first_set_bit (e->bbs);
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, first);
int size = 0;
- gimple stmt;
+ gimple *stmt;
tree arg;
unsigned int s;
bitmap_iterator bs;
if (!is_gimple_call (stmt))
continue;
if (gimple_call_internal_p (stmt))
- hstate.add_int (gimple_call_internal_fn (stmt));
+ hstate.add_int (gimple_call_internal_fn (stmt));
else
{
inchash::add_expr (gimple_call_fn (stmt), hstate);
for (i = 0; i < gimple_call_num_args (stmt); i++)
{
arg = gimple_call_arg (stmt, i);
- arg = vn_valueize (arg);
+ arg = tail_merge_valueize (arg);
inchash::add_expr (arg, hstate);
}
}
hstate.add_int (size);
BB_SIZE (bb) = size;
+ hstate.add_int (bb->loop_father->num);
+
for (i = 0; i < e->succ_flags.length (); ++i)
{
flags = e->succ_flags[i];
the other edge flags. */
static bool
-inverse_flags (const_same_succ e1, const_same_succ e2)
+inverse_flags (const same_succ *e1, const same_succ *e2)
{
int f1a, f1b, f2a, f2b;
int mask = ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
/* Compares SAME_SUCCs E1 and E2. */
int
-same_succ_def::equal (const value_type *e1, const compare_type *e2)
+same_succ::equal (const same_succ *e1, const same_succ *e2)
{
unsigned int i, first1, first2;
gimple_stmt_iterator gsi1, gsi2;
- gimple s1, s2;
+ gimple *s1, *s2;
basic_block bb1, bb2;
+ if (e1 == e2)
+ return 1;
+
if (e1->hashval != e2->hashval)
return 0;
if (!inverse_flags (e1, e2))
{
for (i = 0; i < e1->succ_flags.length (); ++i)
- if (e1->succ_flags[i] != e1->succ_flags[i])
+ if (e1->succ_flags[i] != e2->succ_flags[i])
return 0;
}
if (BB_SIZE (bb1) != BB_SIZE (bb2))
return 0;
+ if (bb1->loop_father != bb2->loop_father)
+ return 0;
+
gsi1 = gsi_start_nondebug_bb (bb1);
gsi2 = gsi_start_nondebug_bb (bb2);
gsi_advance_fw_nondebug_nonlocal (&gsi1);
/* Alloc and init a new SAME_SUCC. */
-static same_succ
+static same_succ *
same_succ_alloc (void)
{
- same_succ same = XNEW (struct same_succ_def);
+ same_succ *same = XNEW (struct same_succ);
same->bbs = BITMAP_ALLOC (NULL);
same->succs = BITMAP_ALLOC (NULL);
/* Delete same_succ E. */
void
-same_succ_def::remove (same_succ e)
+same_succ::remove (same_succ *e)
{
BITMAP_FREE (e->bbs);
BITMAP_FREE (e->succs);
/* Reset same_succ SAME. */
static void
-same_succ_reset (same_succ same)
+same_succ_reset (same_succ *same)
{
bitmap_clear (same->bbs);
bitmap_clear (same->succs);
same->succ_flags.truncate (0);
}
-static hash_table<same_succ_def> *same_succ_htab;
+static hash_table<same_succ> *same_succ_htab;
/* Array that is used to store the edge flags for a successor. */
/* Vector of bbs to process. */
-static vec<same_succ> worklist;
+static vec<same_succ *> worklist;
/* Prints worklist to FILE. */
/* Adds SAME to worklist. */
static void
-add_to_worklist (same_succ same)
+add_to_worklist (same_succ *same)
{
if (same->in_worklist)
return;
/* Add BB to same_succ_htab. */
static void
-find_same_succ_bb (basic_block bb, same_succ *same_p)
+find_same_succ_bb (basic_block bb, same_succ **same_p)
{
unsigned int j;
bitmap_iterator bj;
- same_succ same = *same_p;
- same_succ *slot;
+ same_succ *same = *same_p;
+ same_succ **slot;
edge_iterator ei;
edge e;
- if (bb == NULL
- /* Be conservative with loop structure. It's not evident that this test
- is sufficient. Before tail-merge, we've just called
- loop_optimizer_finalize, and LOOPS_MAY_HAVE_MULTIPLE_LATCHES is now
- set, so there's no guarantee that the loop->latch value is still valid.
- But we assume that, since we've forced LOOPS_HAVE_SIMPLE_LATCHES at the
- start of pre, we've kept that property intact throughout pre, and are
- keeping it throughout tail-merge using this test. */
- || bb->loop_father->latch == bb)
+ if (bb == NULL)
return;
bitmap_set_bit (same->bbs, bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
{
int index = e->dest->index;
bitmap_set_bit (same->succs, index);
- same_succ_edge_flags[index] = e->flags;
+ same_succ_edge_flags[index] = (e->flags & ~ignore_edge_flags);
}
EXECUTE_IF_SET_IN_BITMAP (same->succs, 0, j, bj)
same->succ_flags.safe_push (same_succ_edge_flags[j]);
static void
find_same_succ (void)
{
- same_succ same = same_succ_alloc ();
+ same_succ *same = same_succ_alloc ();
basic_block bb;
FOR_EACH_BB_FN (bb, cfun)
same = same_succ_alloc ();
}
- same_succ_def::remove (same);
+ same_succ::remove (same);
}
/* Initializes worklist administration. */
init_worklist (void)
{
alloc_aux_for_blocks (sizeof (struct aux_bb_info));
- same_succ_htab = new hash_table<same_succ_def> (n_basic_blocks_for_fn (cfun));
+ same_succ_htab = new hash_table<same_succ> (n_basic_blocks_for_fn (cfun));
same_succ_edge_flags = XCNEWVEC (int, last_basic_block_for_fn (cfun));
deleted_bbs = BITMAP_ALLOC (NULL);
deleted_bb_preds = BITMAP_ALLOC (NULL);
static void
same_succ_flush_bb (basic_block bb)
{
- same_succ same = BB_SAME_SUCC (bb);
+ same_succ *same = BB_SAME_SUCC (bb);
+ if (! same)
+ return;
+
BB_SAME_SUCC (bb) = NULL;
if (bitmap_single_bit_set_p (same->bbs))
same_succ_htab->remove_elt_with_hash (same, same->hashval);
for (gimple_stmt_iterator i = gsi_last_bb (bb); !gsi_end_p (i);
gsi_prev_nondebug (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
if (gimple_vdef (stmt) == NULL_TREE)
continue;
mark_virtual_phi_result_for_renaming (phi);
return;
}
-
}
/* For deleted_bb_preds, find bbs with same successors. */
unsigned int i;
bitmap_iterator bi;
basic_block bb;
- same_succ same;
+ same_succ *same;
bitmap_and_compl_into (deleted_bb_preds, deleted_bbs);
bitmap_clear (deleted_bbs);
if (same == NULL)
same = same_succ_alloc ();
}
- same_succ_def::remove (same);
+ same_succ::remove (same);
bitmap_clear (deleted_bb_preds);
}
/* Prints cluster C to FILE. */
static void
-print_cluster (FILE *file, bb_cluster c)
+print_cluster (FILE *file, bb_cluster *c)
{
if (c == NULL)
return;
/* Prints cluster C to stderr. */
-extern void debug_cluster (bb_cluster);
+extern void debug_cluster (bb_cluster *);
DEBUG_FUNCTION void
-debug_cluster (bb_cluster c)
+debug_cluster (bb_cluster *c)
{
print_cluster (stderr, c);
}
/* Update C->rep_bb, given that BB is added to the cluster. */
static void
-update_rep_bb (bb_cluster c, basic_block bb)
+update_rep_bb (bb_cluster *c, basic_block bb)
{
/* Initial. */
if (c->rep_bb == NULL)
/* Add BB to cluster C. Sets BB in C->bbs, and preds of BB in C->preds. */
static void
-add_bb_to_cluster (bb_cluster c, basic_block bb)
+add_bb_to_cluster (bb_cluster *c, basic_block bb)
{
edge e;
edge_iterator ei;
/* Allocate and init new cluster. */
-static bb_cluster
+static bb_cluster *
new_cluster (void)
{
- bb_cluster c;
- c = XCNEW (struct bb_cluster_def);
+ bb_cluster *c;
+ c = XCNEW (bb_cluster);
c->bbs = BITMAP_ALLOC (NULL);
c->preds = BITMAP_ALLOC (NULL);
c->rep_bb = NULL;
/* Delete clusters. */
static void
-delete_cluster (bb_cluster c)
+delete_cluster (bb_cluster *c)
{
if (c == NULL)
return;
/* Array that contains all clusters. */
-static vec<bb_cluster> all_clusters;
+static vec<bb_cluster *> all_clusters;
/* Allocate all cluster vectors. */
/* Merge cluster C2 into C1. */
static void
-merge_clusters (bb_cluster c1, bb_cluster c2)
+merge_clusters (bb_cluster *c1, bb_cluster *c2)
{
bitmap_ior_into (c1->bbs, c2->bbs);
bitmap_ior_into (c1->preds, c2->preds);
set_cluster (basic_block bb1, basic_block bb2)
{
basic_block merge_bb, other_bb;
- bb_cluster merge, old, c;
+ bb_cluster *merge, *old, *c;
if (BB_CLUSTER (bb1) == NULL && BB_CLUSTER (bb2) == NULL)
{
|| t2 == NULL_TREE)
return false;
- if (operand_equal_p (t1, t2, 0))
+ if (operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS))
return true;
return gvn_uses_equal (t1, t2);
gimple_bb (s2) are members of SAME_SUCC. */
static bool
-gimple_equal_p (same_succ same_succ, gimple s1, gimple s2)
+gimple_equal_p (same_succ *same_succ, gimple *s1, gimple *s2)
{
unsigned int i;
tree lhs1, lhs2;
{
case GIMPLE_CALL:
if (!gimple_call_same_target_p (s1, s2))
- return false;
+ return false;
t1 = gimple_call_chain (s1);
t2 = gimple_call_chain (s2);
if (lhs1 == NULL_TREE || lhs2 == NULL_TREE)
return false;
if (TREE_CODE (lhs1) == SSA_NAME && TREE_CODE (lhs2) == SSA_NAME)
- return vn_valueize (lhs1) == vn_valueize (lhs2);
+ return tail_merge_valueize (lhs1) == tail_merge_valueize (lhs2);
return operand_equal_p (lhs1, lhs2, 0);
case GIMPLE_ASSIGN:
if (!gimple_operand_equal_value_p (t1, t2))
return false;
- code1 = gimple_expr_code (s1);
- code2 = gimple_expr_code (s2);
+ code1 = gimple_cond_code (s1);
+ code2 = gimple_cond_code (s2);
inv_cond = (bitmap_bit_p (same_succ->inverse, bb1->index)
!= bitmap_bit_p (same_succ->inverse, bb2->index));
if (inv_cond)
gsi_advance_bw_nondebug_nonlocal (gimple_stmt_iterator *gsi, tree *vuse,
bool *vuse_escaped)
{
- gimple stmt;
+ gimple *stmt;
tree lvuse;
while (true)
}
}
+/* Return true if equal (in the sense of gimple_equal_p) statements STMT1 and
+   STMT2 are allowed to be merged.  */
+
+static bool
+merge_stmts_p (gimple *stmt1, gimple *stmt2)
+{
+  /* A better fix than checking here would be to blacklist the bb
+     containing the stmt already when encountering the stmt, for
+     instance in same_succ_hash.  */
+  if (is_tm_ending (stmt1))
+    return false;
+
+  /* Verify EH landing pads.  */
+  if (lookup_stmt_eh_lp_fn (cfun, stmt1) != lookup_stmt_eh_lp_fn (cfun, stmt2))
+    return false;
+
+  if (is_gimple_call (stmt1)
+      && gimple_call_internal_p (stmt1))
+    switch (gimple_call_internal_fn (stmt1))
+      {
+      case IFN_UBSAN_NULL:
+      case IFN_UBSAN_BOUNDS:
+      case IFN_UBSAN_VPTR:
+      case IFN_UBSAN_CHECK_ADD:
+      case IFN_UBSAN_CHECK_SUB:
+      case IFN_UBSAN_CHECK_MUL:
+      case IFN_UBSAN_OBJECT_SIZE:
+      case IFN_UBSAN_PTR:
+      case IFN_ASAN_CHECK:
+	/* For these internal functions, gimple_location is an implicit
+	   parameter, which will be used explicitly after expansion.
+	   Merging these statements may cause confusing line numbers in
+	   sanitizer messages.  */
+	return gimple_location (stmt1) == gimple_location (stmt2);
+      default:
+	break;
+      }
+
+  return true;
+}
+
/* Determines whether BB1 and BB2 (members of same_succ) are duplicates. If so,
clusters them. */
static void
-find_duplicate (same_succ same_succ, basic_block bb1, basic_block bb2)
+find_duplicate (same_succ *same_succ, basic_block bb1, basic_block bb2)
{
gimple_stmt_iterator gsi1 = gsi_last_nondebug_bb (bb1);
gimple_stmt_iterator gsi2 = gsi_last_nondebug_bb (bb2);
while (!gsi_end_p (gsi1) && !gsi_end_p (gsi2))
{
- gimple stmt1 = gsi_stmt (gsi1);
- gimple stmt2 = gsi_stmt (gsi2);
-
- /* What could be better than to this this here is to blacklist the bb
- containing the stmt, when encountering the stmt f.i. in
- same_succ_hash. */
- if (is_tm_ending (stmt1)
- || is_tm_ending (stmt2))
- return;
+ gimple *stmt1 = gsi_stmt (gsi1);
+ gimple *stmt2 = gsi_stmt (gsi2);
+
+ if (gimple_code (stmt1) == GIMPLE_LABEL
+ && gimple_code (stmt2) == GIMPLE_LABEL)
+ break;
if (!gimple_equal_p (same_succ, stmt1, stmt2))
return;
+ if (!merge_stmts_p (stmt1, stmt2))
+ return;
+
gsi_prev_nondebug (&gsi1);
gsi_prev_nondebug (&gsi2);
gsi_advance_bw_nondebug_nonlocal (&gsi1, &vuse1, &vuse_escaped);
gsi_advance_bw_nondebug_nonlocal (&gsi2, &vuse2, &vuse_escaped);
}
+ while (!gsi_end_p (gsi1) && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
+ {
+ tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi1)));
+ if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
+ return;
+ gsi_prev (&gsi1);
+ }
+ while (!gsi_end_p (gsi2) && gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL)
+ {
+ tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)));
+ if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
+ return;
+ gsi_prev (&gsi2);
+ }
if (!(gsi_end_p (gsi1) && gsi_end_p (gsi2)))
return;
continue;
if (operand_equal_for_phi_arg_p (val1, val2))
- continue;
+ continue;
if (gvn_uses_equal (val1, val2))
continue;
phi alternatives for BB1 and BB2 are equal. */
static bool
-same_phi_alternatives (same_succ same_succ, basic_block bb1, basic_block bb2)
+same_phi_alternatives (same_succ *same_succ, basic_block bb1, basic_block bb2)
{
unsigned int s;
bitmap_iterator bs;
bb_has_non_vop_phi (basic_block bb)
{
gimple_seq phis = phi_nodes (bb);
- gimple phi;
+ gimple *phi;
if (phis == NULL)
return false;
basic_block cd, dep_bb = BB_DEP_BB (to);
edge_iterator ei;
edge e;
- bitmap from_preds = BITMAP_ALLOC (NULL);
if (dep_bb == NULL)
return true;
+ bitmap from_preds = BITMAP_ALLOC (NULL);
FOR_EACH_EDGE (e, ei, from->preds)
bitmap_set_bit (from_preds, e->src->index);
cd = nearest_common_dominator_for_set (CDI_DOMINATORS, from_preds);
/* Within SAME_SUCC->bbs, find clusters of bbs which can be merged. */
static void
-find_clusters_1 (same_succ same_succ)
+find_clusters_1 (same_succ *same_succ)
{
basic_block bb1, bb2;
unsigned int i, j;
bitmap_iterator bi, bj;
int nr_comparisons;
- int max_comparisons = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_COMPARISONS);
+ int max_comparisons = param_max_tail_merge_comparisons;
EXECUTE_IF_SET_IN_BITMAP (same_succ->bbs, 0, i, bi)
{
/* TODO: handle blocks with phi-nodes. We'll have to find corresponding
phi-nodes in bb1 and bb2, with the same alternatives for the same
preds. */
- if (bb_has_non_vop_phi (bb1))
+ if (bb_has_non_vop_phi (bb1) || bb_has_eh_pred (bb1)
+ || bb_has_abnormal_pred (bb1))
continue;
nr_comparisons = 0;
{
bb2 = BASIC_BLOCK_FOR_FN (cfun, j);
- if (bb_has_non_vop_phi (bb2))
+ if (bb_has_non_vop_phi (bb2) || bb_has_eh_pred (bb2)
+ || bb_has_abnormal_pred (bb2))
continue;
if (BB_CLUSTER (bb1) != NULL && BB_CLUSTER (bb1) == BB_CLUSTER (bb2))
continue;
- /* Limit quadratic behaviour. */
+ /* Limit quadratic behavior. */
nr_comparisons++;
if (nr_comparisons > max_comparisons)
break;
continue;
find_duplicate (same_succ, bb1, bb2);
- }
+ }
}
}
static void
find_clusters (void)
{
- same_succ same;
+ same_succ *same;
while (!worklist.is_empty ())
{
replace_block_by (basic_block bb1, basic_block bb2)
{
edge pred_edge;
- edge e1, e2;
- edge_iterator ei;
unsigned int i;
gphi *bb2_phi;
pred_edge, UNKNOWN_LOCATION);
}
- bb2->frequency += bb1->frequency;
- if (bb2->frequency > BB_FREQ_MAX)
- bb2->frequency = BB_FREQ_MAX;
+ /* Merge the outgoing edge counts from bb1 onto bb2. */
+ edge e1, e2;
+ edge_iterator ei;
+
+ if (bb2->count.initialized_p ())
+ FOR_EACH_EDGE (e1, ei, bb1->succs)
+ {
+ e2 = find_edge (bb2, e1->dest);
+ gcc_assert (e2);
+
+	/* If probabilities are same, we are done.
+	   If counts are nonzero we can distribute accordingly.  In remaining
+	   cases just average the values and hope for the best.  */
+ e2->probability = e1->probability.combine_with_count
+ (bb1->count, e2->probability, bb2->count);
+ }
bb2->count += bb1->count;
- /* Merge the outgoing edge counts from bb1 onto bb2. */
- gcov_type out_sum = 0;
- FOR_EACH_EDGE (e1, ei, bb1->succs)
+ /* Move over any user labels from bb1 after the bb2 labels. */
+ gimple_stmt_iterator gsi1 = gsi_start_bb (bb1);
+ if (!gsi_end_p (gsi1) && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
{
- e2 = find_edge (bb2, e1->dest);
- gcc_assert (e2);
- e2->count += e1->count;
- out_sum += e2->count;
- }
- /* Recompute the edge probabilities from the new merged edge count.
- Use the sum of the new merged edge counts computed above instead
- of bb2's merged count, in case there are profile count insanities
- making the bb count inconsistent with the edge weights. */
- FOR_EACH_EDGE (e2, ei, bb2->succs)
- {
- e2->probability = GCOV_COMPUTE_SCALE (e2->count, out_sum);
+ gimple_stmt_iterator gsi2 = gsi_after_labels (bb2);
+ while (!gsi_end_p (gsi1)
+ && gimple_code (gsi_stmt (gsi1)) == GIMPLE_LABEL)
+ {
+ tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi1)));
+ gcc_assert (!DECL_NONLOCAL (label) && !FORCED_LABEL (label));
+ if (DECL_ARTIFICIAL (label))
+ gsi_next (&gsi1);
+ else
+ gsi_move_before (&gsi1, &gsi2);
+ }
}
+ /* Clear range info from all stmts in BB2 -- this transformation
+ could make them out of date. */
+ reset_flow_sensitive_info_in_bb (bb2);
+
/* Do updates that use bb1, before deleting bb1. */
release_last_vdef (bb1);
same_succ_flush_bb (bb1);
apply_clusters (void)
{
basic_block bb1, bb2;
- bb_cluster c;
+ bb_cluster *c;
unsigned int i, j;
bitmap_iterator bj;
int nr_bbs_removed = 0;
defs. */
static void
-update_debug_stmt (gimple stmt)
+update_debug_stmt (gimple *stmt)
{
use_operand_p use_p;
ssa_op_iter oi;
FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, oi, SSA_OP_USE)
{
tree name = USE_FROM_PTR (use_p);
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
basic_block bbdef = gimple_bb (def_stmt);
if (bbdef == NULL || bbuse == bbdef
|| dominated_by_p (CDI_DOMINATORS, bbuse, bbdef))
EXECUTE_IF_SET_IN_BITMAP (update_bbs, 0, i, bi)
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
bb = BASIC_BLOCK_FOR_FN (cfun, i);
int nr_bbs_removed;
bool loop_entered = false;
int iteration_nr = 0;
- int max_iterations = PARAM_VALUE (PARAM_MAX_TAIL_MERGE_ITERATIONS);
+ int max_iterations = param_max_tail_merge_iterations;
if (!flag_tree_tail_merge
|| max_iterations == 0)
timevar_push (TV_TREE_TAIL_MERGE);
+  /* We enter from PRE which has critical edges split.  Elimination
+     does not process trivially dead code, so clean up the CFG if we
+     are asked to, and then re-split critical edges.  */
+ if (todo & TODO_cleanup_cfg)
+ {
+ cleanup_tree_cfg ();
+ todo &= ~TODO_cleanup_cfg;
+ split_edges_for_insertion ();
+ }
+
if (!dom_info_available_p (CDI_DOMINATORS))
{
/* PRE can leave us with unreachable blocks, remove them now. */
if (nr_bbs_removed_total > 0)
{
- if (MAY_HAVE_DEBUG_STMTS)
+ if (MAY_HAVE_DEBUG_BIND_STMTS)
{
calculate_dominance_info (CDI_DOMINATORS);
update_debug_stmts ();