like math-related flags or undefined behavior; only return true for
values that are provably bitwise identical in all circumstances.
+ If OEP_ASSUME_WRAPV is set, then require the values to be bitwise identical
+ under two's complement arithmetic (ignoring any possible Undefined Behaviour)
+ rather than just numerically equivalent.  The compared expressions must
+ however perform the same operations but may do intermediate computations in
+ differing signedness.  Because this comparison ignores any possible UB it
+ cannot be used blindly without ensuring that the context in which you use it
+ itself guarantees that there will be no UB.  Conditional expressions are
+ excluded from this relaxation.
+
+ When OEP_ASSUME_WRAPV is used operand_compare::hash_operand may return
+ differing hashes even for cases where operand_compare::operand_equal_p
+ compares equal.
+
Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
any operand with side effect. This is unnecesarily conservative in the
case we know that arg0 and arg1 are in disjoint code paths (such as in
return operand_equal_p (TREE_TYPE (arg0), arg0, TREE_TYPE (arg1), arg1, flags);
}
-/* The same as operand_equal_p however the type of ARG0 and ARG1 are assumed to be
- the TYPE0 and TYPE1 respectively. */
+/* The same as operand_equal_p however the type of ARG0 and ARG1 are assumed to
+ be the TYPE0 and TYPE1 respectively. TYPE0 and TYPE1 represent the type the
+ expression is being compared under for equality. This means that they can
+ differ from the actual TREE_TYPE (..) value of ARG0 and ARG1. */
bool
operand_compare::operand_equal_p (tree type0, const_tree arg0,
return tree_int_cst_equal (arg0, arg1);
}
+ if ((flags & OEP_ASSUME_WRAPV)
+ && (CONVERT_EXPR_P (arg0) || CONVERT_EXPR_P (arg1)))
+ {
+ const_tree t_arg0 = arg0;
+ const_tree t_arg1 = arg1;
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+ /* Only recurse if the conversion was one that was valid to strip. */
+ if (t_arg0 != arg0 || t_arg1 != arg1)
+ return operand_equal_p (type0, arg0, type1, arg1, flags);
+ }
+
if (!(flags & OEP_ADDRESS_OF))
{
+ /* Check whether this is an operation where the two's complement
+ bitwise representation of the result differs between signed and
+ unsigned arithmetic. */
+ bool enforce_signedness = true;
+ if (flags & OEP_ASSUME_WRAPV)
+ {
+ switch (TREE_CODE (arg0))
+ {
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ case BIT_AND_EXPR:
+ case BIT_NOT_EXPR:
+ case ABS_EXPR:
+ CASE_CONVERT:
+ case SSA_NAME:
+ case INTEGER_CST:
+ case VAR_DECL:
+ case PARM_DECL:
+ case RESULT_DECL:
+ enforce_signedness = false;
+ break;
+
+ default:
+ break;
+ }
+ }
+
/* If both types don't have the same signedness, then we can't consider
them equal. We must check this before the STRIP_NOPS calls
because they may change the signedness of the arguments. As pointers
strictly don't have a signedness, require either two pointers or
two non-pointers as well. */
- if (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
- || POINTER_TYPE_P (type0) != POINTER_TYPE_P (type1))
+ if (POINTER_TYPE_P (type0) != POINTER_TYPE_P (type1)
+ || (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
+ && enforce_signedness))
return false;
/* If both types don't have the same precision, then it is not safe
/* Address of CONSTRUCTOR is defined in GENERIC to mean the value
of the CONSTRUCTOR referenced indirectly. */
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
{
if (TYPE_MAIN_VARIANT (type0) != TYPE_MAIN_VARIANT (type1))
return false;
}
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
return OP_SAME (0);
case IMAGPART_EXPR:
/* Require the same offset. */
if (!operand_equal_p (TYPE_SIZE (type0),
TYPE_SIZE (type1),
- flags & ~OEP_ADDRESS_OF))
+ flags & ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV)))
return false;
/* Fallthru. */
if (TYPE_ALIGN (type0) != TYPE_ALIGN (type1))
return false;
}
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
return (OP_SAME (0) && OP_SAME (1)
/* TARGET_MEM_REF require equal extra operands. */
&& (TREE_CODE (arg0) != TARGET_MEM_REF
case ARRAY_RANGE_REF:
if (!OP_SAME (0))
return false;
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
/* Compare the array index by value if it is constant first as we
may have different types but same value here. */
return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
/* Most of time we only need to compare FIELD_DECLs for equality.
However when determining address look into actual offsets.
These may match for unions and unshared record types. */
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
if (!OP_SAME (1))
{
if (compare_address
case BIT_FIELD_REF:
if (!OP_SAME (0))
return false;
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
return OP_SAME (1) && OP_SAME (2);
default:
case COND_EXPR:
if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
return false;
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
return OP_SAME (0);
case BIT_INSERT_EXPR:
if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
OBJ_TYPE_REF_EXPR (arg1), flags))
return false;
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
!= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
return false;
{
unsigned HOST_WIDE_INT idx;
tree field, value;
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
{
case INDIRECT_REF:
case MEM_REF:
case TARGET_MEM_REF:
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
sflags = flags;
break;
case ARRAY_REF:
case ARRAY_RANGE_REF:
case BIT_FIELD_REF:
- sflags &= ~OEP_ADDRESS_OF;
+ sflags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
break;
case COND_EXPR:
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
break;
case WIDEN_MULT_PLUS_EXPR:
case OBJ_TYPE_REF:
/* Virtual table reference. */
inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
- flags &= ~OEP_ADDRESS_OF;
+ flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
if (!virtual_method_call_p (t))
{
if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
{
- if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
+ if (arg0 != arg1 && !(flags & (OEP_DECL_NAME | OEP_ASSUME_WRAPV)))
{
inchash::hash hstate0 (0), hstate1 (0);
hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
x);
}
+namespace test_operand_equality {
+
+/* Verify that operand_equal_p with OEP_ASSUME_WRAPV considers two
+   expressions equal when their results are bitwise identical under two's
+   complement arithmetic, even when intermediate computations are performed
+   in differing signedness, and that without the flag such pairs still
+   compare unequal.  */
+
+static void
+test ()
+{
+  tree stype = integer_type_node;
+  tree utype = unsigned_type_node;
+  tree x = create_tmp_var_raw (stype, "x");
+  tree y = create_tmp_var_raw (stype, "y");
+  tree z = create_tmp_var_raw (stype, "z");
+  tree four = build_int_cst (stype, 4);
+  tree lhs1 = fold_build2 (PLUS_EXPR, stype, x, y);
+  tree rhs1 = fold_convert (stype,
+			    fold_build2 (PLUS_EXPR, utype,
+					 fold_convert (utype, x),
+					 fold_convert (utype, y)));
+
+  /* (int)((unsigned)x + (unsigned)y) == x + y.  */
+  ASSERT_TRUE (operand_equal_p (lhs1, rhs1, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs1, rhs1, 0));
+
+  /* (int)(unsigned)x == x.  */
+  tree lhs2 = build1 (NOP_EXPR, stype,
+		      build1 (NOP_EXPR, utype, x));
+  tree rhs2 = x;
+  ASSERT_TRUE (operand_equal_p (lhs2, rhs2, OEP_ASSUME_WRAPV));
+  ASSERT_TRUE (operand_equal_p (lhs2, rhs2, 0));
+
+  /* (unsigned)x + (unsigned)y == x + y.  */
+  tree lhs3 = lhs1;
+  tree rhs3 = fold_build2 (PLUS_EXPR, utype,
+			   fold_convert (utype, x),
+			   fold_convert (utype, y));
+  ASSERT_TRUE (operand_equal_p (lhs3, rhs3, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs3, rhs3, 0));
+
+  /* (unsigned)x / (unsigned)y != x / y: division is sign-dependent, so
+     even OEP_ASSUME_WRAPV must not treat these as equal.  */
+  tree lhs4 = fold_build2 (TRUNC_DIV_EXPR, stype, x, y);
+  tree rhs4 = fold_build2 (TRUNC_DIV_EXPR, utype,
+			   fold_convert (utype, x),
+			   fold_convert (utype, y));
+  ASSERT_FALSE (operand_equal_p (lhs4, rhs4, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs4, rhs4, 0));
+
+  /* (long long)x / 4 != (long long)(x / 4), via VIEW_CONVERT_EXPR.  */
+  tree lstype = long_long_integer_type_node;
+  tree lfour = build_int_cst (lstype, 4);
+  tree lhs5 = fold_build2 (TRUNC_DIV_EXPR, lstype,
+			   fold_build1 (VIEW_CONVERT_EXPR, lstype, x), lfour);
+  tree rhs5 = fold_build1 (VIEW_CONVERT_EXPR, lstype,
+			   fold_build2 (TRUNC_DIV_EXPR, stype, x, four));
+  ASSERT_FALSE (operand_equal_p (lhs5, rhs5, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs5, rhs5, 0));
+
+  /* (unsigned)x / 4 != x / 4: sign-dependent division again.  */
+  tree lhs6 = fold_build2 (TRUNC_DIV_EXPR, stype, x, four);
+  tree rhs6 = fold_build2 (TRUNC_DIV_EXPR, utype,
+			   fold_convert (utype, x),
+			   fold_convert (utype, four));
+  ASSERT_FALSE (operand_equal_p (lhs6, rhs6, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs6, rhs6, 0));
+
+  /* x / (int)((unsigned)x + (unsigned)y) == x / (x + y): the relaxation
+     applies to the divisor's additive subexpression, not the division.  */
+  tree lhs7 = fold_build2 (TRUNC_DIV_EXPR, stype, x, lhs1);
+  tree rhs7 = fold_build2 (TRUNC_DIV_EXPR, stype, x, rhs1);
+  ASSERT_TRUE (operand_equal_p (lhs7, rhs7, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs7, rhs7, 0));
+
+  /* (unsigned)x + 4 == x + 4.  */
+  tree lhs8 = fold_build2 (PLUS_EXPR, stype, x, four);
+  tree rhs8 = fold_build2 (PLUS_EXPR, utype,
+			   fold_convert (utype, x),
+			   fold_convert (utype, four));
+  ASSERT_TRUE (operand_equal_p (lhs8, rhs8, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs8, rhs8, 0));
+
+  /* (unsigned)x + 4 == 4 + x: commutative operands may be swapped.  */
+  tree lhs9 = fold_build2 (PLUS_EXPR, stype, four, x);
+  tree rhs9 = fold_build2 (PLUS_EXPR, utype,
+			   fold_convert (utype, x),
+			   fold_convert (utype, four));
+  ASSERT_TRUE (operand_equal_p (lhs9, rhs9, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs9, rhs9, 0));
+
+  /* (int)(((unsigned)x + 4) * (unsigned)y) + z == ((4 + x) * y) + z.  */
+  tree lhs10 = fold_build2 (PLUS_EXPR, stype,
+			    fold_build2 (MULT_EXPR, stype,
+					 fold_build2 (PLUS_EXPR, stype, four, x),
+					 y),
+			    z);
+  tree rhs10 = fold_build2 (MULT_EXPR, utype,
+			    fold_build2 (PLUS_EXPR, utype,
+					 fold_convert (utype, x),
+					 fold_convert (utype, four)),
+			    fold_convert (utype, y));
+  rhs10 = fold_build2 (PLUS_EXPR, stype, fold_convert (stype, rhs10), z);
+  ASSERT_TRUE (operand_equal_p (lhs10, rhs10, OEP_ASSUME_WRAPV));
+  ASSERT_FALSE (operand_equal_p (lhs10, rhs10, 0));
+}
+
+} // namespace test_operand_equality
+
namespace test_fold_vec_perm_cst {
/* Build a VECTOR_CST corresponding to VMODE, and has
test_vector_folding ();
test_vec_duplicate_folding ();
test_fold_vec_perm_cst::test ();
+ test_operand_equality::test ();
}
} // namespace selftest