/* Alias analysis for trees.
- Copyright (C) 2004-2017 Free Software Foundation, Inc.
+ Copyright (C) 2004-2019 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
return true;
}
- /* Non-aliased variables can not be pointed to. */
+ /* Non-aliased variables cannot be pointed to. */
if (!may_be_aliased (decl))
return false;
void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
- HOST_WIDE_INT t, size_hwi, extra_offset = 0;
+ poly_int64 t, size_hwi, extra_offset = 0;
ref->ref = NULL_TREE;
if (TREE_CODE (ptr) == SSA_NAME)
{
ptr = gimple_assign_rhs1 (stmt);
else if (is_gimple_assign (stmt)
&& gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
- && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
+ && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
{
ptr = gimple_assign_rhs1 (stmt);
- extra_offset = BITS_PER_UNIT
- * int_cst_value (gimple_assign_rhs2 (stmt));
+ extra_offset *= BITS_PER_UNIT;
}
}
}
else
{
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
ref->base = build2 (MEM_REF, char_type_node,
ptr, null_pointer_node);
ref->offset = 0;
}
ref->offset += extra_offset;
if (size
- && tree_fits_shwi_p (size)
- && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
+ && poly_int_tree_p (size, &size_hwi)
+ && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
else
ref->max_size = ref->size = -1;
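[Note] The new size guard above is easy to misread. A minimal standalone sketch of its intent, using plain integers instead of GCC's poly_int64 (names with a trailing underscore are illustrative, not GCC API): poly_int_tree_p extracts a possibly variable byte size, and coeffs_in_range_p rejects any coefficient that would overflow when scaled to bits.

  #include <cstdint>
  #include <climits>

  static const int64_t BITS_PER_UNIT_ = 8;   /* illustrative stand-in */

  /* Mirrors the poly_int_tree_p + coeffs_in_range_p guard: accept a byte
     size only when scaling it to bits cannot overflow; otherwise the
     caller falls back to the "unknown" size -1.  */
  static bool
  size_in_bits_ (int64_t size_bytes, int64_t *size_bits)
  {
    if (size_bytes < 0 || size_bytes > INT64_MAX / BITS_PER_UNIT_)
      return false;
    *size_bits = size_bytes * BITS_PER_UNIT_;
    return true;
  }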
aliasing_component_refs_p (tree ref1,
alias_set_type ref1_alias_set,
alias_set_type base1_alias_set,
- HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+ poly_int64 offset1, poly_int64 max_size1,
tree ref2,
alias_set_type ref2_alias_set,
alias_set_type base2_alias_set,
- HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
+ poly_int64 offset2, poly_int64 max_size2,
bool ref2_is_decl)
{
/* If one reference is a component reference through pointers, try to find a
return true;
else if (same_p == 1)
{
- HOST_WIDE_INT offadj, sztmp, msztmp;
+ poly_int64 offadj, sztmp, msztmp;
bool reverse;
get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
offset2 -= offadj;
get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
offset1 -= offadj;
- return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+ return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
}
/* If we didn't find a common base, try the other way around. */
refp = &ref1;
return true;
else if (same_p == 1)
{
- HOST_WIDE_INT offadj, sztmp, msztmp;
+ poly_int64 offadj, sztmp, msztmp;
bool reverse;
get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
offset1 -= offadj;
get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
offset2 -= offadj;
- return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+ return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
}
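[Note] ranges_overlap_p gives an exact yes/no for constant ranges; with poly_int64 operands the question can be undecidable at compile time, so the oracle must switch to ranges_maybe_overlap_p, whose "maybe" answers true. A standalone model of that contract (unknown extents encoded as -1; not GCC code):

  #include <cstdint>

  /* True if VAL may lie in [POS, POS + SIZE); SIZE == -1 means the
     extent is unknown, so anything at or past POS may be inside.  */
  static bool
  maybe_in_range_ (int64_t val, int64_t pos, int64_t size)
  {
    if (val < pos)
      return false;
    if (size == -1)
      return true;
    return val < pos + size;
  }

  /* Two ranges may overlap if either start may lie inside the other,
     mirroring the structure of GCC's ranges_maybe_overlap_p.  */
  static bool
  ranges_maybe_overlap_ (int64_t pos1, int64_t size1,
                         int64_t pos2, int64_t size2)
  {
    return maybe_in_range_ (pos2, pos1, size1)
           || maybe_in_range_ (pos1, pos2, size2);
  }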
/* If we have two type access paths B1.path1 and B2.path2 they may
static bool
decl_refs_may_alias_p (tree ref1, tree base1,
- HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+ poly_int64 offset1, poly_int64 max_size1,
tree ref2, tree base2,
- HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
+ poly_int64 offset2, poly_int64 max_size2)
{
gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
/* If both references are based on the same variable, they cannot alias if
the accesses do not overlap. */
- if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
+ if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
return false;
/* For components with variable position, the above test isn't sufficient,
static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
- HOST_WIDE_INT offset1,
- HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
+ poly_int64 offset1, poly_int64 max_size1,
alias_set_type ref1_alias_set,
alias_set_type base1_alias_set,
tree ref2 ATTRIBUTE_UNUSED, tree base2,
- HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
+ poly_int64 offset2, poly_int64 max_size2,
alias_set_type ref2_alias_set,
alias_set_type base2_alias_set, bool tbaa_p)
{
&& DECL_P (base2));
ptr1 = TREE_OPERAND (base1, 0);
- offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
/* If only one reference is based on a variable, they cannot alias if
the pointer access is beyond the extent of the variable access.
is bigger than the size of the decl, we can't possibly access the
decl via that pointer. */
if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
- && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
- && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
+ && poly_int_tree_p (DECL_SIZE (base2))
+ && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (ptrtype1)))
/* ??? This in turn may run afoul when a decl of type T which is
a member of union type U is accessed through a pointer to
type U and sizeof T is smaller than sizeof U. */
&& TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
&& TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
- && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
+ && known_lt (wi::to_poly_widest (DECL_SIZE (base2)),
+ wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ptrtype1)))))
return false;
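[Note] tree_int_cst_lt only compares constants; with scalable sizes (e.g. SVE vectors) DECL_SIZE and TYPE_SIZE become polynomials c0 + c1*X in an unknown runtime factor X >= 0, and the deduction may only fire when the inequality holds for every X. A standalone model of known_lt over two coefficients (not GCC code):

  /* Models wi::to_poly_widest values with two coefficients: c0 + c1*X.  */
  struct poly_ { long c0, c1; };

  /* a < b for all X >= 0 iff a.c0 < b.c0 and a.c1 <= b.c1.  */
  static bool
  known_lt_ (poly_ a, poly_ b)
  {
    return a.c0 < b.c0 && a.c1 <= b.c1;
  }

  /* E.g. known_lt_ ({16, 0}, {0, 16}) is false: at X == 0 the right-hand
     size is 0 bytes, so "decl smaller than pointed-to type" is unproven
     and the disambiguation above must not trigger.  */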
if (!ref2)
dbase2 = ref2;
while (handled_component_p (dbase2))
dbase2 = TREE_OPERAND (dbase2, 0);
- HOST_WIDE_INT doffset1 = offset1;
- offset_int doffset2 = offset2;
+ poly_int64 doffset1 = offset1;
+ poly_offset_int doffset2 = offset2;
if (TREE_CODE (dbase2) == MEM_REF
|| TREE_CODE (dbase2) == TARGET_MEM_REF)
doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
- HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+ poly_int64 offset1, poly_int64 max_size1,
alias_set_type ref1_alias_set,
alias_set_type base1_alias_set,
tree ref2 ATTRIBUTE_UNUSED, tree base2,
- HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
+ poly_int64 offset2, poly_int64 max_size2,
alias_set_type ref2_alias_set,
alias_set_type base2_alias_set, bool tbaa_p)
{
&& operand_equal_p (TMR_INDEX2 (base1),
TMR_INDEX2 (base2), 0))))))
{
- offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
- offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
offset2 + moff2, max_size2);
}
/* But avoid treating arrays as "objects"; instead assume they
can overlap by an exact multiple of their element size. */
&& TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
- return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+ return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
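[Note] MEM_REF offsets are byte-based while ao_ref offsets and sizes are bit-based, hence the << LOG2_BITS_PER_UNIT before the overlap test. A standalone sketch combining both steps (reuses ranges_maybe_overlap_ from the note above; not GCC code):

  static const int LOG2_BITS_PER_UNIT_ = 3;   /* 8-bit units */

  static bool
  mem_refs_may_overlap_ (int64_t off1_bits, int64_t max_size1,
                         int64_t moff1_bytes,
                         int64_t off2_bits, int64_t max_size2,
                         int64_t moff2_bytes)
  {
    /* Scale the MEM_REF byte offsets to bits, then test the ranges.  */
    int64_t p1 = off1_bits + (moff1_bytes << LOG2_BITS_PER_UNIT_);
    int64_t p2 = off2_bits + (moff2_bytes << LOG2_BITS_PER_UNIT_);
    return ranges_maybe_overlap_ (p1, max_size1, p2, max_size2);
  }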
/* Do type-based disambiguation. */
if (base1_alias_set != base2_alias_set
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
tree base1, base2;
- HOST_WIDE_INT offset1 = 0, offset2 = 0;
- HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
+ poly_int64 offset1 = 0, offset2 = 0;
+ poly_int64 max_size1 = -1, max_size2 = -1;
bool var1_p, var2_p, ind1_p, ind2_p;
gcc_checking_assert ((!ref1->ref
ao_ref_alias_set (ref2)))
return false;
+ /* If the reference is based on a pointer that points to memory
+ that may not be written to, then the other reference cannot possibly
+ clobber it. */
+ if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
+ && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
+ || (ind1_p
+ && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
+ && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
+ return false;
+
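[Note] Source-level intuition for the new SSA_NAME_POINTS_TO_READONLY_MEMORY early-out (illustration only): a pointer known to target read-only storage cannot be the clobbered side of any pair.

  const char *p = "immutable";   /* p points into .rodata, so
                                    SSA_NAME_POINTS_TO_READONLY_MEMORY
                                    can be set on its SSA name */

  void
  f (char *q)
  {
    *q = 1;   /* a store through q can never clobber *p */
  }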
/* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
if (var1_p && ind2_p)
return indirect_ref_may_alias_decl_p (ref2->ref, base2,
}
static bool
-refs_may_alias_p (tree ref1, ao_ref *ref2)
+refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
ao_ref r1;
ao_ref_init (&r1, ref1);
- return refs_may_alias_p_1 (&r1, ref2, true);
+ return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}
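[Note] Threading tbaa_p through these wrappers lets a caller request a purely offset- and points-to-based answer. A hypothetical caller sketch (GCC-internal style, assuming tree-ssa-alias.h; not part of this patch):

  ao_ref r1, r2;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  /* Pass tbaa_p = false when type-based disambiguation must not be
     trusted for this query, e.g. under -fno-strict-aliasing semantics.  */
  bool may_alias = refs_may_alias_p_1 (&r1, &r2, /*tbaa_p=*/false);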
bool
-refs_may_alias_p (tree ref1, tree ref2)
+refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
ao_ref r1, r2;
bool res;
ao_ref_init (&r1, ref1);
ao_ref_init (&r2, ref2);
- res = refs_may_alias_p_1 (&r1, &r2, true);
+ res = refs_may_alias_p_1 (&r1, &r2, tbaa_p);
if (res)
++alias_stats.refs_may_alias_p_may_alias;
else
otherwise return false. */
static bool
-ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
+ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
tree base, callee;
unsigned i;
{
ao_ref r;
ao_ref_init (&r, op);
- if (refs_may_alias_p_1 (&r, ref, true))
+ if (refs_may_alias_p_1 (&r, ref, tbaa_p))
return true;
}
}
}
static bool
-ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
+ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
{
bool res;
- res = ref_maybe_used_by_call_p_1 (call, ref);
+ res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
if (res)
++alias_stats.ref_maybe_used_by_call_p_may_alias;
else
true, otherwise return false. */
bool
-ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
if (is_gimple_assign (stmt))
{
|| gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
return false;
- return refs_may_alias_p (rhs, ref);
+ return refs_may_alias_p (rhs, ref, tbaa_p);
}
else if (is_gimple_call (stmt))
- return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
+ return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
tree retval = gimple_return_retval (return_stmt);
if (retval
&& TREE_CODE (retval) != SSA_NAME
&& !is_gimple_min_invariant (retval)
- && refs_may_alias_p (retval, ref))
+ && refs_may_alias_p (retval, ref, tbaa_p))
return true;
/* If ref escapes the function then the return acts as a use. */
tree base = ao_ref_base (ref);
}
bool
-ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
{
ao_ref r;
ao_ref_init (&r, ref);
- return ref_maybe_used_by_stmt_p (stmt, &r);
+ return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
}
/* If the call in statement CALL may clobber the memory reference REF
|| !is_global_var (base)))
return false;
+ /* If the reference is based on a pointer that points to memory
+ that may not be written to, then the call cannot possibly clobber it. */
+ if ((TREE_CODE (base) == MEM_REF
+ || TREE_CODE (base) == TARGET_MEM_REF)
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
+ && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
+ return false;
+
callee = gimple_call_fndecl (call);
/* Explicitly handle those builtin functions that do not act as
otherwise return false. */
bool
-stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
+stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
if (is_gimple_call (stmt))
{
{
ao_ref r;
ao_ref_init (&r, lhs);
- if (refs_may_alias_p_1 (ref, &r, true))
+ if (refs_may_alias_p_1 (ref, &r, tbaa_p))
return true;
}
{
ao_ref r;
ao_ref_init (&r, lhs);
- return refs_may_alias_p_1 (ref, &r, true);
+ return refs_may_alias_p_1 (ref, &r, tbaa_p);
}
}
else if (gimple_code (stmt) == GIMPLE_ASM)
}
bool
-stmt_may_clobber_ref_p (gimple *stmt, tree ref)
+stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
{
ao_ref r;
ao_ref_init (&r, ref);
- return stmt_may_clobber_ref_p_1 (stmt, &r);
+ return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
}
/* Return true if store1 and store2 described by corresponding tuples
/* Be conservative with non-call exceptions when the address might
be NULL. */
- if (flag_non_call_exceptions && pi->pt.null)
+ if (cfun->can_throw_non_call_exceptions && pi->pt.null)
return false;
/* Check that ptr points relative to obj. */
??? We only need to care about the RHS throwing. For aggregate
assignments or similar calls and non-call exceptions the LHS
might throw as well. */
- && !stmt_can_throw_internal (stmt))
+ && !stmt_can_throw_internal (cfun, stmt))
{
tree lhs = gimple_get_lhs (stmt);
/* If LHS is literally a base of the access we are done. */
handling constant offset and size. */
/* For a must-alias check we need to be able to constrain
the access properly. */
- if (ref->max_size == -1)
+ if (!ref->max_size_known_p ())
return false;
- HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
+ poly_int64 size, offset, max_size, ref_offset = ref->offset;
bool reverse;
- tree base
- = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
+ tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
+ &reverse);
/* We can get MEM[symbol: sZ, index: D.8862_1] here,
so base == ref->base does not always hold. */
if (base != ref->base)
if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
TREE_OPERAND (ref->base, 1)))
{
- offset_int off1 = mem_ref_offset (base);
+ poly_offset_int off1 = mem_ref_offset (base);
off1 <<= LOG2_BITS_PER_UNIT;
off1 += offset;
- offset_int off2 = mem_ref_offset (ref->base);
+ poly_offset_int off2 = mem_ref_offset (ref->base);
off2 <<= LOG2_BITS_PER_UNIT;
off2 += ref_offset;
- if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
- {
- offset = off1.to_shwi ();
- ref_offset = off2.to_shwi ();
- }
- else
+ if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
size = -1;
}
}
}
/* For a must-alias check we need to be able to constrain
the access properly. */
- if (size != -1 && size == max_size)
- {
- if (offset <= ref_offset
- && offset + size >= ref_offset + ref->max_size)
- return true;
- }
+ if (known_eq (size, max_size)
+ && known_subrange_p (ref_offset, ref->max_size, offset, size))
+ return true;
}
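[Note] The open-coded offset/size comparison collapses into known_subrange_p, which must answer true only when containment is provable. A standalone model (unknown sizes as -1; not GCC code):

  #include <cstdint>

  /* True iff [POS1, POS1 + SIZE1) provably lies within
     [POS2, POS2 + SIZE2); unknown extents can never be proven.  */
  static bool
  known_subrange_ (int64_t pos1, int64_t size1,
                   int64_t pos2, int64_t size2)
  {
    if (size1 == -1 || size2 == -1)
      return false;
    return pos2 <= pos1 && pos1 + size1 <= pos2 + size2;
  }

  /* The store kills REF only when its extent is exact
     (known_eq (size, max_size)) and REF's maximal extent is contained:
     known_subrange_ (ref_offset, ref_max_size, offset, size).  */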
if (is_gimple_call (stmt))
{
/* For a must-alias check we need to be able to constrain
the access properly. */
- if (ref->max_size == -1)
+ if (!ref->max_size_known_p ())
return false;
tree dest = gimple_call_arg (stmt, 0);
tree len = gimple_call_arg (stmt, 2);
- if (!tree_fits_shwi_p (len))
+ if (!poly_int_tree_p (len))
return false;
tree rbase = ref->base;
- offset_int roffset = ref->offset;
+ poly_offset_int roffset = ref->offset;
ao_ref dref;
ao_ref_init_from_ptr_and_size (&dref, dest, len);
tree base = ao_ref_base (&dref);
- offset_int offset = dref.offset;
- if (!base || dref.size == -1)
+ poly_offset_int offset = dref.offset;
+ if (!base || !known_size_p (dref.size))
return false;
if (TREE_CODE (base) == MEM_REF)
{
rbase = TREE_OPERAND (rbase, 0);
}
if (base == rbase
- && offset <= roffset
- && (roffset + ref->max_size
- <= offset + (wi::to_offset (len) << LOG2_BITS_PER_UNIT)))
+ && known_subrange_p (roffset, ref->max_size, offset,
+ wi::to_poly_offset (len)
+ << LOG2_BITS_PER_UNIT))
return true;
break;
}
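[Note] What the memcpy arm above decides, at the source level (illustration only):

  struct S { int a[4]; } s, src;

  void
  g (void)
  {
    s.a[1] = 42;                       /* REF: 4 bytes at bit offset 32 */
    __builtin_memcpy (&s, &src, 16);   /* covers bits [0, 128): the
                                          earlier store is killed */
  }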
if (arg1 == arg0)
;
else if (! maybe_skip_until (phi, arg0, ref, arg1, cnt, visited,
- abort_on_visited, translate, data))
+ abort_on_visited,
+ /* Do not translate when walking over
+ backedges. */
+ dominated_by_p
+ (CDI_DOMINATORS,
+ gimple_bb (SSA_NAME_DEF_STMT (arg1)),
+ phi_bb)
+ ? NULL : translate, data))
return NULL_TREE;
}
break;
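[Note] Why translation is now suppressed on backedges (illustration only): for a loop PHI the latch argument is defined in a block dominated by the PHI's block, i.e. it is reached over the backedge, and translating the reference there could mix in values from a later iteration.

  <bb phi_bb>:
  # vuse_2 = PHI <vuse_1(preheader), vuse_3(latch)>
  ...
  <bb latch>:                   /* dominated by phi_bb */
  # vuse_3 = VDEF <vuse_2>      /* walking here crosses the backedge,
                                   so translate is passed as NULL */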
if (valueize)
- vuse = valueize (vuse);
+ {
+ vuse = valueize (vuse);
+ if (!vuse)
+ {
+ res = NULL;
+ break;
+ }
+ }
def_stmt = SSA_NAME_DEF_STMT (vuse);
if (gimple_nop_p (def_stmt))
break;
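[Note] The valueize callback may now return NULL_TREE to stop the walk. A hypothetical callback sketch (the availability predicate is illustrative, not GCC API):

  static tree
  my_valueize_ (tree name)
  {
    /* If the SSA name has no usable value in the current context
       (hypothetical predicate), abort the VUSE walk cleanly instead of
       handing walk_non_aliased_vuses an invalid name.  */
    if (!value_available_p_ (name))
      return NULL_TREE;
    return name;
  }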