vuse = vop;
if (vuse != lvop
&& walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
- NULL, NULL, limit, lvop) == NULL)
+ NULL, NULL, NULL, limit, lvop) == NULL)
bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
}
}
{
gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
- if (gimple_code (stmt) == GIMPLE_PHI)
+ if (gphi *phi = dyn_cast <gphi *> (stmt))
{
- dom_vuse = get_continuation_for_phi (stmt, &r, true,
+ dom_vuse = get_continuation_for_phi (phi, &r, true,
fbi->aa_walk_budget,
&visited, false, NULL, NULL);
continue;
--- /dev/null
+/* { dg-do run } */
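+/* func_1 (50, 0, 0) must return 0: the store to BS_VAR_5[1] at the end of
+   the first iteration of the while loop has to be observed by the load of
+   BS_VAR_5[1] in the second iteration.  */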
+
+__attribute__((noipa))
+int
+func_1 (int g_258, int func_1_BS_COND_11, int g_64)
+{
+ int BS_VAR_1 = 10;
+ unsigned char BS_VAR_5[2] = { 19, 28 };
+ int LOCAL_CHECKSUM = 0;
+ if (func_1_BS_COND_11)
+ goto BS_LABEL_0;
+ BS_VAR_1 = 0;
+ while (g_64 <= 5)
+ {
+BS_LABEL_0:
+ for (;;)
+ {
+ LOCAL_CHECKSUM = BS_VAR_5[1];
+ if (g_258 != 0) break;
+ goto out;
+ }
+ BS_VAR_5[BS_VAR_1 < 5] = 0;
+ g_258 = 0;
+ }
+out:
+ return LOCAL_CHECKSUM;
+}
+
+int
+main ()
+{
+ if (func_1 (50, 0, 0) != 0)
+ __builtin_abort ();
+ return 0;
+}
ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
bitmap *visited, bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, translate_flags *),
+ bool (*is_backedge)(edge, void *),
translate_flags disambiguate_only,
void *data)
{
}
/* Recurse for PHI nodes. */
- if (gimple_code (def_stmt) == GIMPLE_PHI)
+ if (gphi *phi = dyn_cast <gphi *> (def_stmt))
{
/* An already visited PHI node ends the walk successfully. */
- if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
+ if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (phi))))
return !abort_on_visited;
- vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
+ vuse = get_continuation_for_phi (phi, ref, tbaa_p, limit,
visited, abort_on_visited,
- translate, data, disambiguate_only);
+ translate, data, is_backedge,
+ disambiguate_only);
if (!vuse)
return false;
continue;
Returns NULL_TREE if no suitable virtual operand can be found. */
tree
-get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
+get_continuation_for_phi (gphi *phi, ao_ref *ref, bool tbaa_p,
unsigned int &limit, bitmap *visited,
bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *,
translate_flags *),
void *data,
+ bool (*is_backedge)(edge, void *),
translate_flags disambiguate_only)
{
unsigned nargs = gimple_phi_num_args (phi);
else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
limit, visited,
abort_on_visited,
- translate,
+ translate, is_backedge,
/* Do not valueize when walking over
backedges. */
- dominated_by_p
- (CDI_DOMINATORS,
- gimple_bb (SSA_NAME_DEF_STMT (arg1)),
- phi_bb)
- ? TR_DISAMBIGUATE
- : disambiguate_only, data))
+ (is_backedge
+ && !is_backedge
+ (gimple_phi_arg_edge (phi, i), data))
+ ? disambiguate_only : TR_DISAMBIGUATE,
+ data))
return NULL_TREE;
}
void *(*walker)(ao_ref *, tree, void *),
void *(*translate)(ao_ref *, tree, void *,
translate_flags *),
+ bool (*is_backedge)(edge, void *),
tree (*valueize)(tree),
unsigned &limit, void *data)
{
def_stmt = SSA_NAME_DEF_STMT (vuse);
if (gimple_nop_p (def_stmt))
break;
- else if (gimple_code (def_stmt) == GIMPLE_PHI)
- vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
- &visited, translated, translate, data);
+ else if (gphi *phi = dyn_cast <gphi *> (def_stmt))
+ vuse = get_continuation_for_phi (phi, ref, tbaa_p, limit,
+ &visited, translated, translate, data,
+ is_backedge);
else
{
if ((int)limit <= 0)
enum translate_flags
{ TR_TRANSLATE, TR_VALUEIZE_AND_DISAMBIGUATE, TR_DISAMBIGUATE };
-extern tree get_continuation_for_phi (gimple *, ao_ref *, bool,
+extern tree get_continuation_for_phi (gphi *, ao_ref *, bool,
unsigned int &, bitmap *, bool,
void *(*)(ao_ref *, tree, void *,
translate_flags *),
- void *, translate_flags
+ void *,
+ bool (*)(edge, void *) = nullptr,
+ translate_flags
= TR_VALUEIZE_AND_DISAMBIGUATE);
extern void *walk_non_aliased_vuses (ao_ref *, tree, bool,
void *(*)(ao_ref *, tree, void *),
void *(*)(ao_ref *, tree, void *,
translate_flags *),
+ bool (*)(edge, void *),
tree (*)(tree), unsigned &, void *);
extern int walk_aliased_vdefs (ao_ref *, tree,
bool (*)(ao_ref *, tree, void *),
tree type, tree vuse, edge e, bool *same_valid)
{
basic_block phiblock = e->dest;
- gimple *phi = SSA_NAME_DEF_STMT (vuse);
+ gimple *def = SSA_NAME_DEF_STMT (vuse);
ao_ref ref;
if (same_valid)
/* If value-numbering provided a memory state for this
that dominates PHIBLOCK we can just use that. */
- if (gimple_nop_p (phi)
- || (gimple_bb (phi) != phiblock
- && dominated_by_p (CDI_DOMINATORS, phiblock, gimple_bb (phi))))
+ if (gimple_nop_p (def)
+ || (gimple_bb (def) != phiblock
+ && dominated_by_p (CDI_DOMINATORS, phiblock, gimple_bb (def))))
return vuse;
/* We have pruned expressions that are killed in PHIBLOCK via
live at the start of the block. If there is no virtual PHI to translate
through return the VUSE live at entry. Otherwise the VUSE to translate
is the def of the virtual PHI node. */
- phi = get_virtual_phi (phiblock);
+ gphi *phi = get_virtual_phi (phiblock);
if (!phi)
return BB_LIVE_VOP_ON_EXIT
(get_immediate_dominator (CDI_DOMINATORS, phiblock));
/* Global RPO state for access from hooks. */
static class eliminate_dom_walker *rpo_avail;
basic_block vn_context_bb;
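+/* Mapping from basic block index to its RPO number for the RPO VN walk in
+   progress, or NULL when no such walk is in progress (for example during
+   PRE elimination).  */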
+int *vn_bb_to_rpo;
/* Valueization hook for simplify_replace_tree. Valueize NAME if it is
return (void *)-1;
}
+/* Return true if E is a backedge with respect to our CFG walk order. */
+
+static bool
+vn_is_backedge (edge e, void *)
+{
+ /* During PRE elimination we no longer have access to the RPO order,
+    so conservatively treat every edge as a backedge then.  */
+ return (!vn_bb_to_rpo
+ || vn_bb_to_rpo[e->dest->index] <= vn_bb_to_rpo[e->src->index]);
+}
+
/* Return a reference op vector from OP that can be used for
vn_reference_lookup_pieces. The caller is responsible for releasing
the vector. */
*vnresult
= ((vn_reference_t)
walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
- vn_reference_lookup_3, vuse_valueize,
- limit, &data));
+ vn_reference_lookup_3, vn_is_backedge,
+ vuse_valueize, limit, &data));
if (ops_for_ref != shared_lookup_references)
ops_for_ref.release ();
gcc_checking_assert (vr1.operands == shared_lookup_references);
wvnresult
= ((vn_reference_t)
walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
- vn_reference_lookup_3, vuse_valueize, limit,
- &data));
+ vn_reference_lookup_3, vn_is_backedge,
+ vuse_valueize, limit, &data));
gcc_checking_assert (vr1.operands == shared_lookup_references);
if (wvnresult)
{
int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
for (int i = 0; i < n; ++i)
bb_to_rpo[rpo[i]] = i;
+ vn_bb_to_rpo = bb_to_rpo;
unwind_state *rpo_state = XNEWVEC (unwind_state, n);
vn_valueize = NULL;
rpo_avail = NULL;
+ vn_bb_to_rpo = NULL;
XDELETEVEC (bb_to_rpo);
XDELETEVEC (rpo);
&& (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
&& walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
- limit, vuse1) != NULL))
+ NULL, limit, vuse1) != NULL))
{
if (insert)
{