combine_blocks (loop);
}
- /* Perform local CSE, this esp. helps the vectorizer analysis if loads
- and stores are involved. CSE only the loop body, not the entry
- PHIs, those are to be kept in sync with the non-if-converted copy.
- ??? We'll still keep dead stores though. */
- exit_bbs = BITMAP_ALLOC (NULL);
- bitmap_set_bit (exit_bbs, single_exit (loop)->dest->index);
- bitmap_set_bit (exit_bbs, loop->latch->index);
-
std::pair <tree, tree> *name_pair;
unsigned ssa_names_idx;
FOR_EACH_VEC_ELT (redundant_ssa_names, ssa_names_idx, name_pair)
replace_uses_by (name_pair->first, name_pair->second);
redundant_ssa_names.release ();
- todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
+ /* Perform local CSE; this especially helps the vectorizer analysis
+ when loads and stores are involved.  CSE only the loop body, not
+ the entry PHIs, which are to be kept in sync with the
+ non-if-converted copy.
+ ??? We'll still keep dead stores though. */
+ exit_bbs = BITMAP_ALLOC (NULL);
+ for (edge exit : get_loop_exit_edges (loop))
+ bitmap_set_bit (exit_bbs, exit->dest->index);
+ todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs,
+ false, true, true);
/* Delete dead predicate computations. */
ifcvt_local_dce (loop);
static unsigned
do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
- bool iterate, bool eliminate, vn_lookup_kind kind);
+ bool iterate, bool eliminate, bool skip_entry_phis,
+ vn_lookup_kind kind);
void
run_rpo_vn (vn_lookup_kind kind)
{
- do_rpo_vn_1 (cfun, NULL, NULL, true, false, kind);
+ do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
/* ??? Prune requirement of these. */
constant_to_value_id = new hash_table<vn_constant_hasher> (23);
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
If ITERATE is true then treat backedges optimistically as not
executed and iterate. If ELIMINATE is true then perform
- elimination, otherwise leave that to the caller. */
+ elimination, otherwise leave that to the caller. If SKIP_ENTRY_PHIS
+ is true then force PHI nodes in ENTRY->dest to VARYING. */
static unsigned
do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
- bool iterate, bool eliminate, vn_lookup_kind kind)
+ bool iterate, bool eliminate, bool skip_entry_phis,
+ vn_lookup_kind kind)
{
unsigned todo = 0;
default_vn_walk_kind = kind;
if (e != entry
&& !(e->flags & EDGE_DFS_BACK))
break;
- bool skip_entry_phis = e != NULL;
- if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
+ if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Region does not contain all edges into "
"the entry block, skipping its PHIs.\n");
+ skip_entry_phis |= e != NULL;
int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
for (int i = 0; i < n; ++i)
If ITERATE is true then treat backedges optimistically as not
executed and iterate. If ELIMINATE is true then perform
elimination, otherwise leave that to the caller.
+ If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
KIND specifies the amount of work done for handling memory operations. */
unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
- bool iterate, bool eliminate, vn_lookup_kind kind)
+ bool iterate, bool eliminate, bool skip_entry_phis,
+ vn_lookup_kind kind)
{
auto_timevar tv (TV_TREE_RPO_VN);
- unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate, kind);
+ unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
+ skip_entry_phis, kind);
free_rpo_vn ();
return todo;
}
if (iterate_p)
loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
- todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, VN_WALKREWRITE);
+ todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
free_rpo_vn ();
if (iterate_p)