+2019-08-15 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91445
+ Backport from mainline
+ 2019-07-05 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91091
+ * tree-ssa-alias.h (get_continuation_for_phi): Add tbaa_p parameter.
+ (walk_non_aliased_vuses): Likewise.
+ * tree-ssa-alias.c (maybe_skip_until): Pass down tbaa_p.
+ (get_continuation_for_phi): New tbaa_p parameter and pass
+ it down.
+ (walk_non_aliased_vuses): Likewise.
+ * tree-ssa-pre.c (translate_vuse_through_block): Likewise.
+ * tree-ssa-scopedtables.c (avail_exprs_stack::lookup_avail_expr):
+ Likewise.
+ * tree-ssa-sccvn.c (struct vn_walk_cb_data): Add tbaa_p flag.
+ (vn_reference_lookup_3): Handle and pass down tbaa_p flag.
+ (vn_reference_lookup_pieces): Adjust.
+ (vn_reference_lookup): Remove alias-set altering, instead pass
+ down false as tbaa_p.
+
+ 2019-07-04 Richard Biener <rguenther@suse.de>
+
+ * tree-ssa-sccvn.h (vn_reference_lookup): Add last_vuse_ptr
+ argument.
+ * tree-ssa-sccvn.c (last_vuse_ptr, vn_walk_kind): Move
+ globals into...
+ (struct vn_walk_cb_data): New callback data struct.
+ (vn_reference_lookup_2): Adjust.
+ (vn_reference_lookup_3): Likewise.
+ (vn_reference_lookup_pieces): Likewise.
+ (vn_reference_lookup): Likewise, get last_vuse_ptr argument.
+ (visit_reference_op_load): Adjust.
+
2019-08-14 Martin Sebor <msebor@redhat.com>
Backport from mainline
+2019-08-15 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91445
+ * gcc.dg/torture/pr91445.c: New testcase.
+
+ Backport from mainline
+ 2019-07-05 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91091
+ * gcc.dg/tree-ssa/pr91091-2.c: New testcase.
+
2019-08-14 Martin Sebor <msebor@redhat.com>
Backport from mainline
--- /dev/null
+/* { dg-do run } */
+
+struct S { _Bool x; };
+
+void
+foo (struct S *s)
+{
+ __builtin_memset (s, 0x11, sizeof (struct S));
+ s->x = 1;
+}
+
+int
+main ()
+{
+ struct S s;
+ foo (&s);
+ char c;
+ __builtin_memcpy (&c, &s.x, 1);
+ if (c != 1)
+ __builtin_abort ();
+ return 0;
+}
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+struct s { int x; };
+struct t { int x; };
+
+void swap(struct s* p, struct t* q)
+{
+ p->x = q->x;
+ q->x = p->x;
+}
+
+/* The second statement is redundant. */
+/* { dg-final { scan-tree-dump-times "x = " 1 "fre1" } } */
+/* { dg-final { scan-tree-dump-times " = \[^;\]*x;" 1 "fre1" } } */
static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
- ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
- bool abort_on_visited,
+ ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
+ bitmap *visited, bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool *),
void *data)
{
/* An already visited PHI node ends the walk successfully. */
if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
return !abort_on_visited;
- vuse = get_continuation_for_phi (def_stmt, ref, limit,
+ vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
visited, abort_on_visited,
translate, data);
if (!vuse)
if ((int)limit <= 0)
return false;
--limit;
- if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
+ if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
{
bool disambiguate_only = true;
if (translate
Returns NULL_TREE if no suitable virtual operand can be found. */
tree
-get_continuation_for_phi (gimple *phi, ao_ref *ref,
+get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
unsigned int &limit, bitmap *visited,
bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool *),
arg1 = PHI_ARG_DEF (phi, i);
if (arg1 == arg0)
;
- else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
+ else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
+ limit, visited,
abort_on_visited,
/* Do not translate when walking over
backedges. */
TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
void *
-walk_non_aliased_vuses (ao_ref *ref, tree vuse,
+walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
void *(*walker)(ao_ref *, tree, void *),
void *(*translate)(ao_ref *, tree, void *, bool *),
tree (*valueize)(tree),
if (gimple_nop_p (def_stmt))
break;
else if (gimple_code (def_stmt) == GIMPLE_PHI)
- vuse = get_continuation_for_phi (def_stmt, ref, limit,
+ vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
&visited, translated, translate, data);
else
{
break;
}
--limit;
- if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
+ if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
{
if (!translate)
break;
extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
extern bool stmt_kills_ref_p (gimple *, tree);
extern bool stmt_kills_ref_p (gimple *, ao_ref *);
-extern tree get_continuation_for_phi (gimple *, ao_ref *,
+extern tree get_continuation_for_phi (gimple *, ao_ref *, bool,
unsigned int &, bitmap *, bool,
void *(*)(ao_ref *, tree, void *, bool *),
void *);
-extern void *walk_non_aliased_vuses (ao_ref *, tree,
+extern void *walk_non_aliased_vuses (ao_ref *, tree, bool,
void *(*)(ao_ref *, tree, void *),
void *(*)(ao_ref *, tree, void *, bool *),
tree (*)(tree), unsigned &, void *);
bitmap visited = NULL;
/* Try to find a vuse that dominates this phi node by skipping
non-clobbering statements. */
- vuse = get_continuation_for_phi (phi, &ref, cnt, &visited, false,
- NULL, NULL);
+ vuse = get_continuation_for_phi (phi, &ref, true,
+ cnt, &visited, false, NULL, NULL);
if (visited)
BITMAP_FREE (visited);
}
/* There's no BB_EXECUTABLE but we can use BB_VISITED. */
#define BB_EXECUTABLE BB_VISITED
-static tree *last_vuse_ptr;
-static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers. */
return NULL_TREE;
}
+struct vn_walk_cb_data
+{
+ vn_walk_cb_data (vn_reference_t vr_, tree *last_vuse_ptr_,
+ vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
+ : vr (vr_), last_vuse_ptr (last_vuse_ptr_), vn_walk_kind (vn_walk_kind_),
+ tbaa_p (tbaa_p_)
+ {}
+
+ vn_reference_t vr;
+ tree *last_vuse_ptr;
+ vn_lookup_kind vn_walk_kind;
+ bool tbaa_p;
+};
+
/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
with the current VUSE and performs the expression lookup. */
static void *
-vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
{
- vn_reference_t vr = (vn_reference_t)vr_;
+ vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
+ vn_reference_t vr = data->vr;
vn_reference_s **slot;
hashval_t hash;
- if (last_vuse_ptr)
- *last_vuse_ptr = vuse;
+ if (data->last_vuse_ptr)
+ *data->last_vuse_ptr = vuse;
/* Fixup vuse and hash. */
if (vr->vuse)
*DISAMBIGUATE_ONLY is set to true. */
static void *
-vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
bool *disambiguate_only)
{
- vn_reference_t vr = (vn_reference_t)vr_;
+ vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
+ vn_reference_t vr = data->vr;
gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
tree base = ao_ref_base (ref);
HOST_WIDE_INT offseti, maxsizei;
get_alias_set (lhs),
TREE_TYPE (lhs), lhs_ops);
if (lhs_ref_ok
- && !refs_may_alias_p_1 (ref, &lhs_ref, true))
+ && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
{
*disambiguate_only = true;
return NULL;
we find a VN result with exactly the same value as the
possible clobber. In this case we can ignore the clobber
and return the found value. */
- if (vn_walk_kind == VN_WALKREWRITE
+ if (data->vn_walk_kind == VN_WALKREWRITE
&& is_gimple_reg_type (TREE_TYPE (lhs))
&& types_compatible_p (TREE_TYPE (lhs), vr->type)
&& ref->ref)
{
- tree *saved_last_vuse_ptr = last_vuse_ptr;
+ tree *saved_last_vuse_ptr = data->last_vuse_ptr;
/* Do not update last_vuse_ptr in vn_reference_lookup_2. */
- last_vuse_ptr = NULL;
+ data->last_vuse_ptr = NULL;
tree saved_vuse = vr->vuse;
hashval_t saved_hashcode = vr->hashcode;
- void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), vr);
+ void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
/* Need to restore vr->vuse and vr->hashcode. */
vr->vuse = saved_vuse;
vr->hashcode = saved_hashcode;
- last_vuse_ptr = saved_last_vuse_ptr;
+ data->last_vuse_ptr = saved_last_vuse_ptr;
if (res && res != (void *)-1)
{
vn_reference_t vnresult = (vn_reference_t) res;
}
}
- if (*disambiguate_only)
+ /* If we are looking for redundant stores do not create new hashtable
+ entries from aliasing defs with made up alias-sets. */
+ if (*disambiguate_only || !data->tbaa_p)
return (void *)-1;
/* If we cannot constrain the size of the reference we cannot
/* 5) For aggregate copies translate the reference through them if
the copy kills ref. */
- else if (vn_walk_kind == VN_WALKREWRITE
+ else if (data->vn_walk_kind == VN_WALKREWRITE
&& gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt))
|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
*ref = r;
/* Do not update last seen VUSE after translating. */
- last_vuse_ptr = NULL;
+ data->last_vuse_ptr = NULL;
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
/* 6) For memcpy copies translate the reference through them if
the copy kills ref. */
- else if (vn_walk_kind == VN_WALKREWRITE
+ else if (data->vn_walk_kind == VN_WALKREWRITE
&& is_gimple_reg_type (vr->type)
/* ??? Handle BCOPY as well. */
&& (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
*ref = r;
/* Do not update last seen VUSE after translating. */
- last_vuse_ptr = NULL;
+ data->last_vuse_ptr = NULL;
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
{
ao_ref r;
unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
- vn_walk_kind = kind;
+ vn_walk_cb_data data (&vr1, NULL, kind, true);
if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
*vnresult =
- (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
+ (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
vn_reference_lookup_2,
vn_reference_lookup_3,
- vuse_valueize, limit, &vr1);
+ vuse_valueize, limit, &data);
gcc_checking_assert (vr1.operands == shared_lookup_references);
}
not exist in the hash table or if the result field of the structure
was NULL.. VNRESULT will be filled in with the vn_reference_t
stored in the hashtable if one exists. When TBAA_P is false assume
- we are looking up a store and treat it as having alias-set zero. */
+ we are looking up a store and treat it as having alias-set zero.
+ *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup
+ succeeded. */
tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
- vn_reference_t *vnresult, bool tbaa_p)
+ vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
{
vec<vn_reference_op_s> operands;
struct vn_reference_s vr1;
vr1.operands = operands
= valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
vr1.type = TREE_TYPE (op);
- vr1.set = tbaa_p ? get_alias_set (op) : 0;
+ vr1.set = get_alias_set (op);
vr1.hashcode = vn_reference_compute_hash (&vr1);
if ((cst = fully_constant_vn_reference_p (&vr1)))
return cst;
|| !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
vr1.operands))
ao_ref_init (&r, op);
- if (! tbaa_p)
- r.ref_alias_set = r.base_alias_set = 0;
- vn_walk_kind = kind;
+ vn_walk_cb_data data (&vr1, last_vuse_ptr, kind, tbaa_p);
wvnresult =
- (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
+ (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
vn_reference_lookup_2,
vn_reference_lookup_3,
- vuse_valueize, limit, &vr1);
+ vuse_valueize, limit, &data);
gcc_checking_assert (vr1.operands == shared_lookup_references);
if (wvnresult)
{
tree result;
last_vuse = gimple_vuse (stmt);
- last_vuse_ptr = &last_vuse;
result = vn_reference_lookup (op, gimple_vuse (stmt),
- default_vn_walk_kind, NULL, true);
- last_vuse_ptr = NULL;
+ default_vn_walk_kind, NULL, true, &last_vuse);
/* We handle type-punning through unions by value-numbering based
on offset and size of the access. Be prepared to handle a
tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
vec<vn_reference_op_s> ,
vn_reference_t *, vn_lookup_kind);
-tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *, bool);
+tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *, bool,
+ tree * = NULL);
void vn_reference_lookup_call (gcall *, vn_reference_t *, vn_reference_t);
vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
vec<vn_reference_op_s> ,
&& TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
&& (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
- && walk_non_aliased_vuses (&ref, vuse2, vuse_eq, NULL, NULL,
+ && walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
limit, vuse1) != NULL))
{
if (insert)