/* Fold a constant sub-tree into a single node for C-compiler
- Copyright (C) 1987-2018 Free Software Foundation, Inc.
+ Copyright (C) 1987-2020 Free Software Foundation, Inc.
This file is part of GCC.
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
-#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
-static int twoval_comparison_p (tree, tree *, tree *, int *);
+static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
tree, tree, tree);
-static int simple_operand_p (const_tree);
+static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
CASE_CFN_LLROUND:
CASE_CFN_LROUND:
CASE_CFN_ROUND:
+ CASE_CFN_ROUNDEVEN:
+ CASE_CFN_ROUNDEVEN_FN:
CASE_CFN_SIN:
CASE_CFN_SINH:
CASE_CFN_TAN:
case EXACT_DIV_EXPR:
if (TYPE_UNSIGNED (type))
break;
- if (negate_expr_p (TREE_OPERAND (t, 0)))
+ /* In general we can't negate A in A / B, because if A is INT_MIN and
+ B is not 1 we change the sign of the result. */
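+ /* E.g. with 32-bit wrapping arithmetic, -(INT_MIN / 2) is 0x40000000,
+ while (-INT_MIN) / 2 wraps back to INT_MIN / 2, i.e. -0x40000000,
+ flipping the sign of the result. */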
+ if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
+ && negate_expr_p (TREE_OPERAND (t, 0)))
return true;
/* In general we can't negate B in A / B, because if A is INT_MIN and
B is 1, we may turn this into INT_MIN / -1 which is undefined
and actually traps on some architectures. */
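+ /* E.g. on x86 the integer divide instruction raises a divide-error
+ exception for INT_MIN / -1 rather than wrapping. */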
- if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
+ if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
|| TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
&& ! integer_onep (TREE_OPERAND (t, 1))))
case EXACT_DIV_EXPR:
if (TYPE_UNSIGNED (type))
break;
- if (negate_expr_p (TREE_OPERAND (t, 0)))
+ /* In general we can't negate A in A / B, because if A is INT_MIN and
+ B is not 1 we change the sign of the result. */
+ if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
+ && negate_expr_p (TREE_OPERAND (t, 0)))
return fold_build2_loc (loc, TREE_CODE (t), type,
negate_expr (TREE_OPERAND (t, 0)),
TREE_OPERAND (t, 1));
/* In general we can't negate B in A / B, because if A is INT_MIN and
B is 1, we may turn this into INT_MIN / -1 which is undefined
and actually traps on some architectures. */
- if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
+ if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
|| TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
&& ! integer_onep (TREE_OPERAND (t, 1))))
return fold_convert_loc (loc, type, tem);
}
-/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
+/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
negated in a simpler way. Also allow for T to be NULL_TREE, in which case
return NULL_TREE. */
&& TYPE_MODE (type1) == TYPE_MODE (type2);
}
-/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs. */
+/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
+ a new constant in RES. Return FALSE if we don't know how to
+ evaluate CODE at compile-time. */
-static tree
-int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
- int overflowable)
+bool
+wide_int_binop (wide_int &res,
+ enum tree_code code, const wide_int &arg1, const wide_int &arg2,
+ signop sign, wi::overflow_type *overflow)
{
- wide_int res;
- tree t;
- tree type = TREE_TYPE (parg1);
- signop sign = TYPE_SIGN (type);
- bool overflow = false;
-
- wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
- wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
-
+ wide_int tmp;
+ *overflow = wi::OVF_NONE;
switch (code)
{
case BIT_IOR_EXPR:
case LSHIFT_EXPR:
if (wi::neg_p (arg2))
{
- arg2 = -arg2;
+ tmp = -arg2;
if (code == RSHIFT_EXPR)
code = LSHIFT_EXPR;
else
code = RSHIFT_EXPR;
}
+ else
+ tmp = arg2;
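+ /* TMP now holds a nonnegative shift count; e.g. a request to shift
+ by -3 has become a shift by 3 in the opposite direction. */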
if (code == RSHIFT_EXPR)
/* It's unclear from the C standard whether shifts can overflow.
The following code ignores overflow; perhaps a C standard
interpretation ruling is needed. */
- res = wi::rshift (arg1, arg2, sign);
+ res = wi::rshift (arg1, tmp, sign);
else
- res = wi::lshift (arg1, arg2);
+ res = wi::lshift (arg1, tmp);
break;
case RROTATE_EXPR:
case LROTATE_EXPR:
if (wi::neg_p (arg2))
{
- arg2 = -arg2;
+ tmp = -arg2;
if (code == RROTATE_EXPR)
code = LROTATE_EXPR;
else
code = RROTATE_EXPR;
}
+ else
+ tmp = arg2;
if (code == RROTATE_EXPR)
- res = wi::rrotate (arg1, arg2);
+ res = wi::rrotate (arg1, tmp);
else
- res = wi::lrotate (arg1, arg2);
+ res = wi::lrotate (arg1, tmp);
break;
case PLUS_EXPR:
- res = wi::add (arg1, arg2, sign, &overflow);
+ res = wi::add (arg1, arg2, sign, overflow);
break;
case MINUS_EXPR:
- res = wi::sub (arg1, arg2, sign, &overflow);
+ res = wi::sub (arg1, arg2, sign, overflow);
break;
case MULT_EXPR:
- res = wi::mul (arg1, arg2, sign, &overflow);
+ res = wi::mul (arg1, arg2, sign, overflow);
break;
case MULT_HIGHPART_EXPR:
case TRUNC_DIV_EXPR:
case EXACT_DIV_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::div_trunc (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::div_trunc (arg1, arg2, sign, overflow);
break;
case FLOOR_DIV_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::div_floor (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::div_floor (arg1, arg2, sign, overflow);
break;
case CEIL_DIV_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::div_ceil (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::div_ceil (arg1, arg2, sign, overflow);
break;
case ROUND_DIV_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::div_round (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::div_round (arg1, arg2, sign, overflow);
break;
case TRUNC_MOD_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::mod_trunc (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::mod_trunc (arg1, arg2, sign, overflow);
break;
case FLOOR_MOD_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::mod_floor (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::mod_floor (arg1, arg2, sign, overflow);
break;
case CEIL_MOD_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::mod_ceil (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::mod_ceil (arg1, arg2, sign, overflow);
break;
case ROUND_MOD_EXPR:
if (arg2 == 0)
- return NULL_TREE;
- res = wi::mod_round (arg1, arg2, sign, &overflow);
+ return false;
+ res = wi::mod_round (arg1, arg2, sign, overflow);
break;
case MIN_EXPR:
break;
default:
- return NULL_TREE;
+ return false;
}
-
- t = force_fit_type (type, res, overflowable,
- (((sign == SIGNED || overflowable == -1)
- && overflow)
- | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));
-
- return t;
+ return true;
}
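+
+/* As a usage sketch (wi::shwi and friends are from wide-int.h), a
+ caller can fold 6 * 7 in 32-bit signed arithmetic with:
+
+ wide_int r;
+ wi::overflow_type ovf;
+ if (wide_int_binop (r, MULT_EXPR, wi::shwi (6, 32),
+ wi::shwi (7, 32), SIGNED, &ovf))
+ gcc_checking_assert (r == 42 && ovf == wi::OVF_NONE); */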
-/* Combine two integer constants PARG1 and PARG2 under operation CODE
- to produce a new constant. Return NULL_TREE if we don't know how
+/* Combine two poly_ints ARG1 and ARG2 under operation CODE to
+ produce a new constant in RES. Return FALSE if we don't know how
to evaluate CODE at compile-time. */
-static tree
-int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
- int overflowable)
+static bool
+poly_int_binop (poly_wide_int &res, enum tree_code code,
+ const_tree arg1, const_tree arg2,
+ signop sign, wi::overflow_type *overflow)
{
- if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
- return int_const_binop_2 (code, arg1, arg2, overflowable);
-
gcc_assert (NUM_POLY_INT_COEFFS != 1);
-
- if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
+ gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
+ switch (code)
{
- poly_wide_int res;
- bool overflow;
- tree type = TREE_TYPE (arg1);
- signop sign = TYPE_SIGN (type);
- switch (code)
- {
- case PLUS_EXPR:
- res = wi::add (wi::to_poly_wide (arg1),
- wi::to_poly_wide (arg2), sign, &overflow);
- break;
+ case PLUS_EXPR:
+ res = wi::add (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, overflow);
+ break;
- case MINUS_EXPR:
- res = wi::sub (wi::to_poly_wide (arg1),
- wi::to_poly_wide (arg2), sign, &overflow);
- break;
+ case MINUS_EXPR:
+ res = wi::sub (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, overflow);
+ break;
- case MULT_EXPR:
- if (TREE_CODE (arg2) == INTEGER_CST)
- res = wi::mul (wi::to_poly_wide (arg1),
- wi::to_wide (arg2), sign, &overflow);
- else if (TREE_CODE (arg1) == INTEGER_CST)
- res = wi::mul (wi::to_poly_wide (arg2),
- wi::to_wide (arg1), sign, &overflow);
- else
- return NULL_TREE;
- break;
+ case MULT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg1),
+ wi::to_wide (arg2), sign, overflow);
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg2),
+ wi::to_wide (arg1), sign, overflow);
+ else
+ return false;
+ break;
- case LSHIFT_EXPR:
- if (TREE_CODE (arg2) == INTEGER_CST)
- res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
- else
- return NULL_TREE;
- break;
+ case LSHIFT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
+ else
+ return false;
+ break;
- case BIT_IOR_EXPR:
- if (TREE_CODE (arg2) != INTEGER_CST
- || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
- &res))
- return NULL_TREE;
- break;
+ case BIT_IOR_EXPR:
+ if (TREE_CODE (arg2) != INTEGER_CST
+ || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
+ &res))
+ return false;
+ break;
- default:
- return NULL_TREE;
- }
- return force_fit_type (type, res, overflowable,
- (((sign == SIGNED || overflowable == -1)
- && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
+ default:
+ return false;
}
-
- return NULL_TREE;
+ return true;
}
+/* Combine two integer constants ARG1 and ARG2 under operation CODE to
+ produce a new constant. Return NULL_TREE if we don't know how to
+ evaluate CODE at compile-time. */
+
tree
-int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
+int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
+ int overflowable)
{
- return int_const_binop_1 (code, arg1, arg2, 1);
+ poly_wide_int poly_res;
+ tree type = TREE_TYPE (arg1);
+ signop sign = TYPE_SIGN (type);
+ wi::overflow_type overflow = wi::OVF_NONE;
+
+ if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ {
+ wide_int warg1 = wi::to_wide (arg1), res;
+ wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
+ if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
+ return NULL_TREE;
+ poly_res = res;
+ }
+ else if (!poly_int_tree_p (arg1)
+ || !poly_int_tree_p (arg2)
+ || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
+ return NULL_TREE;
+ return force_fit_type (type, poly_res, overflowable,
+ (((sign == SIGNED || overflowable == -1)
+ && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
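+
+/* E.g. given INTEGER_CSTs 2 and 3, int_const_binop (PLUS_EXPR, a, b, 1)
+ yields the INTEGER_CST 5; OVERFLOWABLE is forwarded to force_fit_type
+ to decide when the result gets TREE_OVERFLOW set. */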
/* Return true if binary operation OP distributes over addition in operand
if (TREE_CODE (arg1) == VECTOR_CST
&& TREE_CODE (arg2) == VECTOR_CST
- && (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))
- == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
+ && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
+ TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
{
tree type = TREE_TYPE (arg1);
bool step_ok_p;
return NULL_TREE;
case POINTER_DIFF_EXPR:
- if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
{
- offset_int res = wi::sub (wi::to_offset (arg1),
- wi::to_offset (arg2));
+ poly_offset_int res = (wi::to_poly_offset (arg1)
+ - wi::to_poly_offset (arg2));
return force_fit_type (type, res, 1,
TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
}
case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
+ case VEC_PACK_FLOAT_EXPR:
{
- unsigned int out_nelts, in_nelts, i;
+ unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
if (TREE_CODE (arg1) != VECTOR_CST
|| TREE_CODE (arg2) != VECTOR_CST)
return NULL_TREE;
- in_nelts = VECTOR_CST_NELTS (arg1);
+ if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
+ return NULL_TREE;
+
out_nelts = in_nelts * 2;
- gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
- && out_nelts == TYPE_VECTOR_SUBPARTS (type));
+ gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
+ && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
tree_vector_builder elts (type, out_nelts, 1);
for (i = 0; i < out_nelts; i++)
? VECTOR_CST_ELT (arg1, i)
: VECTOR_CST_ELT (arg2, i - in_nelts));
elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
- ? NOP_EXPR : FIX_TRUNC_EXPR,
+ ? NOP_EXPR
+ : code == VEC_PACK_FLOAT_EXPR
+ ? FLOAT_EXPR : FIX_TRUNC_EXPR,
TREE_TYPE (type), elt);
if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
case VEC_WIDEN_MULT_EVEN_EXPR:
case VEC_WIDEN_MULT_ODD_EXPR:
{
- unsigned int out_nelts, in_nelts, out, ofs, scale;
+ unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
return NULL_TREE;
- in_nelts = VECTOR_CST_NELTS (arg1);
+ if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
+ return NULL_TREE;
out_nelts = in_nelts / 2;
- gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
- && out_nelts == TYPE_VECTOR_SUBPARTS (type));
+ gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
+ && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
if (code == VEC_WIDEN_MULT_LO_EXPR)
scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
&& HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
&& REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
&& code != NEGATE_EXPR
- && code != ABS_EXPR)
+ && code != ABS_EXPR
+ && code != ABSU_EXPR)
return NULL_TREE;
switch (code)
}
case ABS_EXPR:
+ case ABSU_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
return fold_abs_const (arg0, type);
break;
case VEC_UNPACK_HI_EXPR:
case VEC_UNPACK_FLOAT_LO_EXPR:
case VEC_UNPACK_FLOAT_HI_EXPR:
+ case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
+ case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
{
- unsigned int out_nelts, in_nelts, i;
+ unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
enum tree_code subcode;
if (TREE_CODE (arg0) != VECTOR_CST)
return NULL_TREE;
- in_nelts = VECTOR_CST_NELTS (arg0);
+ if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
+ return NULL_TREE;
out_nelts = in_nelts / 2;
- gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
+ gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
unsigned int offset = 0;
if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
- || code == VEC_UNPACK_FLOAT_LO_EXPR))
+ || code == VEC_UNPACK_FLOAT_LO_EXPR
+ || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
offset = out_nelts;
if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
subcode = NOP_EXPR;
- else
+ else if (code == VEC_UNPACK_FLOAT_LO_EXPR
+ || code == VEC_UNPACK_FLOAT_HI_EXPR)
subcode = FLOAT_EXPR;
+ else
+ subcode = FIX_TRUNC_EXPR;
tree_vector_builder elts (type, out_nelts, 1);
for (i = 0; i < out_nelts; i++)
/* And some specific cases even faster than that. */
if (code == PLUS_EXPR)
{
- if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
+ if (integer_zerop (arg0)
+ && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
return arg1;
- if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
+ if (integer_zerop (arg1)
+ && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
return arg0;
}
else if (code == MINUS_EXPR)
{
- if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
+ if (integer_zerop (arg1)
+ && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
return arg0;
}
else if (code == MULT_EXPR)
{
- if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
+ if (integer_onep (arg0)
+ && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
return arg1;
}
/* Handle general case of two integer constants. For sizetype
constant calculations we always want to know about overflow,
even in the unsigned case. */
- tree res = int_const_binop_1 (code, arg0, arg1, -1);
+ tree res = int_const_binop (code, arg0, arg1, -1);
if (res != NULL_TREE)
return res;
}
else if (TREE_CODE (type) == VECTOR_TYPE)
{
if (TREE_CODE (arg1) == VECTOR_CST
- && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
+ && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
{
tree elttype = TREE_TYPE (type);
tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
case OFFSET_TYPE:
- return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
+ return (INTEGRAL_TYPE_P (orig)
+ || (POINTER_TYPE_P (orig)
+ && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
|| TREE_CODE (orig) == OFFSET_TYPE);
case REAL_TYPE:
case FIXED_POINT_TYPE:
- case VECTOR_TYPE:
case VOID_TYPE:
return TREE_CODE (type) == TREE_CODE (orig);
+ case VECTOR_TYPE:
+ return (VECTOR_TYPE_P (orig)
+ && known_eq (TYPE_VECTOR_SUBPARTS (type),
+ TYPE_VECTOR_SUBPARTS (orig))
+ && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
+
default:
return false;
}
case TARGET_EXPR:
case COND_EXPR:
case BIND_EXPR:
+ case VIEW_CONVERT_EXPR:
break;
default:
}
}
+/* Return true if COND1 tests the opposite condition of COND2. */
+
+bool
+inverse_conditions_p (const_tree cond1, const_tree cond2)
+{
+ return (COMPARISON_CLASS_P (cond1)
+ && COMPARISON_CLASS_P (cond2)
+ && (invert_tree_comparison
+ (TREE_CODE (cond1),
+ HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
+ && operand_equal_p (TREE_OPERAND (cond1, 0),
+ TREE_OPERAND (cond2, 0), 0)
+ && operand_equal_p (TREE_OPERAND (cond1, 1),
+ TREE_OPERAND (cond2, 1), 0));
+}
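+
+/* E.g. A < B and A >= B are inverse conditions for integral operands;
+ when NaNs are honored the inverse of LT_EXPR is UNGE_EXPR, so an
+ LT/GE pair on floats does not qualify. */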
+
/* Return a tree for the comparison which is the combination of
doing the AND or OR (depending on CODE) of the two operations LCODE
and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
+ If OEP_BITWISE is set, then require the values to be bitwise identical
+ rather than simply numerically equal. Do not take advantage of things
+ like math-related flags or undefined behavior; only return true for
+ values that are provably bitwise identical in all circumstances.
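+ E.g. under OEP_BITWISE -0.0 does not match 0.0 even when signed
+ zeros are not honored.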
+
Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
any operand with side effects. This is unnecessarily conservative in the
case we know that arg0 and arg1 are in disjoint code paths (such as in
?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
addresses with TREE_CONSTANT flag set so we know that &var == &var
even if var is volatile. */
-int
-operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
+bool
+operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
+ unsigned int flags)
{
- /* When checking, verify at the outermost operand_equal_p call that
- if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
- hash value. */
- if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
- {
- if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
- {
- if (arg0 != arg1)
- {
- inchash::hash hstate0 (0), hstate1 (0);
- inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
- inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
- hashval_t h0 = hstate0.end ();
- hashval_t h1 = hstate1.end ();
- gcc_assert (h0 == h1);
- }
- return 1;
- }
- else
- return 0;
- }
+ bool r;
+ if (verify_hash_value (arg0, arg1, flags, &r))
+ return r;
+
+ STRIP_ANY_LOCATION_WRAPPER (arg0);
+ STRIP_ANY_LOCATION_WRAPPER (arg1);
/* If either is ERROR_MARK, they aren't equal. */
if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
|| TREE_TYPE (arg0) == error_mark_node
|| TREE_TYPE (arg1) == error_mark_node)
- return 0;
+ return false;
- /* Similar, if either does not have a type (like a released SSA name),
+ /* Similarly, if either does not have a type (like a template id),
they aren't equal. */
if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
- return 0;
+ return false;
+
+ /* Bitwise identity makes no sense if the values have different layouts. */
+ if ((flags & OEP_BITWISE)
+ && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
+ return false;
/* We cannot consider pointers to different address space equal. */
if (POINTER_TYPE_P (TREE_TYPE (arg0))
&& POINTER_TYPE_P (TREE_TYPE (arg1))
&& (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
!= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
- return 0;
+ return false;
/* Check equality of integer constants before bailing out due to
precision differences. */
if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
|| POINTER_TYPE_P (TREE_TYPE (arg0))
!= POINTER_TYPE_P (TREE_TYPE (arg1)))
- return 0;
+ return false;
/* If both types don't have the same precision, then it is not safe
to strip NOPs. */
if (element_precision (TREE_TYPE (arg0))
!= element_precision (TREE_TYPE (arg1)))
- return 0;
+ return false;
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
&& TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
&& integer_zerop (TREE_OPERAND (arg0, 1)))
- return 1;
+ return true;
else if (TREE_CODE (arg1) == MEM_REF
&& DECL_P (arg0)
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
&& TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
&& integer_zerop (TREE_OPERAND (arg1, 1)))
- return 1;
- return 0;
+ return true;
+ return false;
}
else
- return 0;
+ return false;
}
/* When not checking addresses, this is needed for conversions and for
|| TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
|| (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
&& !(flags & OEP_ADDRESS_OF)))
- return 0;
+ return false;
/* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
We don't care about side effects in that case because the SAVE_EXPR
&& (TREE_CODE (arg0) == SAVE_EXPR
|| (flags & OEP_MATCH_SIDE_EFFECTS)
|| (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
- return 1;
+ return true;
/* Next handle constant cases, those for which we can return true even
if ONLY_CONST is set. */
case REAL_CST:
if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
- return 1;
-
+ return true;
- if (!HONOR_SIGNED_ZEROS (arg0))
+ if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
{
/* If we do not distinguish between signed and unsigned zero,
consider them equal. */
if (real_zerop (arg0) && real_zerop (arg1))
- return 1;
+ return true;
}
- return 0;
+ return false;
case VECTOR_CST:
{
if (VECTOR_CST_LOG2_NPATTERNS (arg0)
!= VECTOR_CST_LOG2_NPATTERNS (arg1))
- return 0;
+ return false;
if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
!= VECTOR_CST_NELTS_PER_PATTERN (arg1))
- return 0;
+ return false;
unsigned int count = vector_cst_encoded_nelts (arg0);
for (unsigned int i = 0; i < count; ++i)
if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
VECTOR_CST_ENCODED_ELT (arg1, i), flags))
- return 0;
- return 1;
+ return false;
+ return true;
}
case COMPLEX_CST:
break;
}
- if (flags & OEP_ONLY_CONST)
- return 0;
+ /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
+ two instances of undefined behavior will give identical results. */
+ if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
+ return false;
/* Define macros to test an operand from arg0 and arg1 for equality and a
variant that allows null and views null as being different from any
case FIX_TRUNC_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg0))
!= TYPE_UNSIGNED (TREE_TYPE (arg1)))
- return 0;
+ return false;
break;
default:
break;
case tcc_comparison:
case tcc_binary:
if (OP_SAME (0) && OP_SAME (1))
- return 1;
+ return true;
/* For commutative ops, allow the other order. */
return (commutative_tree_code (TREE_CODE (arg0))
if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
&& (TREE_SIDE_EFFECTS (arg0)
|| TREE_SIDE_EFFECTS (arg1)))
- return 0;
+ return false;
switch (TREE_CODE (arg0))
{
case INDIRECT_REF:
- if (!(flags & OEP_ADDRESS_OF)
- && (TYPE_ALIGN (TREE_TYPE (arg0))
- != TYPE_ALIGN (TREE_TYPE (arg1))))
- return 0;
+ if (!(flags & OEP_ADDRESS_OF))
+ {
+ if (TYPE_ALIGN (TREE_TYPE (arg0))
+ != TYPE_ALIGN (TREE_TYPE (arg1)))
+ return false;
+ /* Verify that the access types are compatible. */
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
+ != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
+ return false;
+ }
flags &= ~OEP_ADDRESS_OF;
return OP_SAME (0);
if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
TYPE_SIZE (TREE_TYPE (arg1)),
flags & ~OEP_ADDRESS_OF))
- return 0;
+ return false;
/* Fallthru. */
case REALPART_EXPR:
|| !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
TYPE_SIZE (TREE_TYPE (arg1)),
flags)))
- return 0;
+ return false;
/* Verify that access happens in similar types. */
if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
- return 0;
+ return false;
/* Verify that accesses are TBAA compatible. */
if (!alias_ptr_types_compatible_p
(TREE_TYPE (TREE_OPERAND (arg0, 1)),
!= MR_DEPENDENCE_CLIQUE (arg1))
|| (MR_DEPENDENCE_BASE (arg0)
!= MR_DEPENDENCE_BASE (arg1)))
- return 0;
+ return false;
/* Verify that alignment is compatible. */
if (TYPE_ALIGN (TREE_TYPE (arg0))
!= TYPE_ALIGN (TREE_TYPE (arg1)))
- return 0;
+ return false;
}
flags &= ~OEP_ADDRESS_OF;
return (OP_SAME (0) && OP_SAME (1)
case ARRAY_REF:
case ARRAY_RANGE_REF:
if (!OP_SAME (0))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
/* Compare the array index by value if it is constant first as we
may have different types but same value here. */
may be NULL when we're called to compare MEM_EXPRs. */
if (!OP_SAME_WITH_NULL (0)
|| !OP_SAME (1))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
return OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
if (!OP_SAME (0))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
return OP_SAME (1) && OP_SAME (2);
+ /* Virtual table call. */
+ case OBJ_TYPE_REF:
+ {
+ if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
+ OBJ_TYPE_REF_EXPR (arg1), flags))
+ return false;
+ if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
+ != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
+ return false;
+ if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
+ OBJ_TYPE_REF_OBJECT (arg1), flags))
+ return false;
+ if (!types_same_for_odr (obj_type_ref_class (arg0),
+ obj_type_ref_class (arg1)))
+ return false;
+ return true;
+ }
+
default:
- return 0;
+ return false;
}
case tcc_expression:
case TRUTH_ORIF_EXPR:
return OP_SAME (0) && OP_SAME (1);
- case FMA_EXPR:
case WIDEN_MULT_PLUS_EXPR:
case WIDEN_MULT_MINUS_EXPR:
if (!OP_SAME (2))
- return 0;
+ return false;
/* The multiplication operands are commutative. */
/* FALLTHRU */
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
if (OP_SAME (0) && OP_SAME (1))
- return 1;
+ return true;
/* Otherwise take into account this is a commutative operation. */
return (operand_equal_p (TREE_OPERAND (arg0, 0),
case COND_EXPR:
if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
return OP_SAME (0);
case POSTINCREMENT_EXPR:
if (flags & OEP_LEXICOGRAPHIC)
return OP_SAME (0) && OP_SAME (1);
- return 0;
+ return false;
case CLEANUP_POINT_EXPR:
case EXPR_STMT:
+ case SAVE_EXPR:
if (flags & OEP_LEXICOGRAPHIC)
return OP_SAME (0);
- return 0;
+ return false;
default:
- return 0;
+ return false;
}
case tcc_vl_exp:
!= (CALL_EXPR_FN (arg1) == NULL_TREE))
/* If the CALL_EXPRs are not both internal or both normal function
calls, then they are not equal. */
- return 0;
+ return false;
else if (CALL_EXPR_FN (arg0) == NULL_TREE)
{
/* If the CALL_EXPRs call different internal functions, then they
are not equal. */
if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
- return 0;
+ return false;
}
else
{
equal. */
if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
flags))
- return 0;
+ return false;
}
/* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
else
cef &= ECF_CONST;
if (!cef && !(flags & OEP_LEXICOGRAPHIC))
- return 0;
+ return false;
}
/* Now see if all the arguments are the same. */
a0 = next_const_call_expr_arg (&iter0),
a1 = next_const_call_expr_arg (&iter1))
if (! operand_equal_p (a0, a1, flags))
- return 0;
+ return false;
/* If we get here and both argument lists are exhausted
then the CALL_EXPRs are equal. */
return ! (a0 || a1);
}
default:
- return 0;
+ return false;
}
case tcc_declaration:
/* Consider __builtin_sqrt equal to sqrt. */
return (TREE_CODE (arg0) == FUNCTION_DECL
- && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
+ && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
&& DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
- && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
+ && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
+ == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
case tcc_exceptional:
if (TREE_CODE (arg0) == CONSTRUCTOR)
{
+ if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
+ return false;
+
/* In GIMPLE constructors are used only to build vectors from
elements. Individual elements in the constructor must be
indexed in increasing order and form an initial sequence.
constants). */
if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
|| !VECTOR_TYPE_P (TREE_TYPE (arg1)))
- return 0;
+ return false;
/* Be sure that vectors constructed have the same representation.
So far we have only tested that the element precisions and modes
match; vectors may be BLKmode, so also check that the numbers of
parts match. */
- if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
- != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
- return 0;
+ if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
+ TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
+ return false;
vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
unsigned int len = vec_safe_length (v0);
if (len != vec_safe_length (v1))
- return 0;
+ return false;
for (unsigned int i = 0; i < len; i++)
{
positives for GENERIC. */
|| (c0->index
&& (TREE_CODE (c0->index) != INTEGER_CST
- || !compare_tree_int (c0->index, i)))
+ || compare_tree_int (c0->index, i)))
|| (c1->index
&& (TREE_CODE (c1->index) != INTEGER_CST
- || !compare_tree_int (c1->index, i))))
- return 0;
+ || compare_tree_int (c1->index, i))))
+ return false;
}
- return 1;
+ return true;
}
else if (TREE_CODE (arg0) == STATEMENT_LIST
&& (flags & OEP_LEXICOGRAPHIC))
{
/* The lists don't have the same number of statements. */
if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
- return 0;
+ return false;
if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
- return 1;
+ return true;
if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
- OEP_LEXICOGRAPHIC))
- return 0;
+ flags & (OEP_LEXICOGRAPHIC
+ | OEP_NO_HASH_CHECK)))
+ return false;
+ }
+ }
+ return false;
+
+ case tcc_statement:
+ switch (TREE_CODE (arg0))
+ {
+ case RETURN_EXPR:
+ if (flags & OEP_LEXICOGRAPHIC)
+ return OP_SAME_WITH_NULL (0);
+ return false;
+ case DEBUG_BEGIN_STMT:
+ if (flags & OEP_LEXICOGRAPHIC)
+ return true;
+ return false;
+ default:
+ return false;
+ }
+
+ default:
+ return false;
+ }
+
+#undef OP_SAME
+#undef OP_SAME_WITH_NULL
+}
+
+/* Generate a hash value for an expression. This can be used iteratively
+ by passing a previous result as the HSTATE argument. */
+
+void
+operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
+ unsigned int flags)
+{
+ int i;
+ enum tree_code code;
+ enum tree_code_class tclass;
+
+ if (t == NULL_TREE || t == error_mark_node)
+ {
+ hstate.merge_hash (0);
+ return;
+ }
+
+ STRIP_ANY_LOCATION_WRAPPER (t);
+
+ if (!(flags & OEP_ADDRESS_OF))
+ STRIP_NOPS (t);
+
+ code = TREE_CODE (t);
+
+ switch (code)
+ {
+ /* Alas, constants aren't shared, so we can't rely on pointer
+ identity. */
+ case VOID_CST:
+ hstate.merge_hash (0);
+ return;
+ case INTEGER_CST:
+ gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
+ for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
+ hstate.add_hwi (TREE_INT_CST_ELT (t, i));
+ return;
+ case REAL_CST:
+ {
+ unsigned int val2;
+ if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
+ val2 = rvc_zero;
+ else
+ val2 = real_hash (TREE_REAL_CST_PTR (t));
+ hstate.merge_hash (val2);
+ return;
+ }
+ case FIXED_CST:
+ {
+ unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
+ hstate.merge_hash (val2);
+ return;
+ }
+ case STRING_CST:
+ hstate.add ((const void *) TREE_STRING_POINTER (t),
+ TREE_STRING_LENGTH (t));
+ return;
+ case COMPLEX_CST:
+ hash_operand (TREE_REALPART (t), hstate, flags);
+ hash_operand (TREE_IMAGPART (t), hstate, flags);
+ return;
+ case VECTOR_CST:
+ {
+ hstate.add_int (VECTOR_CST_NPATTERNS (t));
+ hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
+ unsigned int count = vector_cst_encoded_nelts (t);
+ for (unsigned int i = 0; i < count; ++i)
+ hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
+ return;
+ }
+ case SSA_NAME:
+ /* SSA names compare by identity, so hashing the version number
+ suffices. */
+ hstate.add_hwi (SSA_NAME_VERSION (t));
+ return;
+ case PLACEHOLDER_EXPR:
+ /* The node itself doesn't matter. */
+ return;
+ case BLOCK:
+ case OMP_CLAUSE:
+ /* Ignore. */
+ return;
+ case TREE_LIST:
+ /* A list of expressions, for a CALL_EXPR or as the elements of a
+ VECTOR_CST. */
+ for (; t; t = TREE_CHAIN (t))
+ hash_operand (TREE_VALUE (t), hstate, flags);
+ return;
+ case CONSTRUCTOR:
+ {
+ unsigned HOST_WIDE_INT idx;
+ tree field, value;
+ flags &= ~OEP_ADDRESS_OF;
+ hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
+ {
+ /* In GIMPLE the indexes can be either NULL or matching i. */
+ if (field == NULL_TREE)
+ field = bitsize_int (idx);
+ hash_operand (field, hstate, flags);
+ hash_operand (value, hstate, flags);
+ }
+ return;
+ }
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i;
+ for (i = tsi_start (CONST_CAST_TREE (t));
+ !tsi_end_p (i); tsi_next (&i))
+ hash_operand (tsi_stmt (i), hstate, flags);
+ return;
+ }
+ case TREE_VEC:
+ for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
+ hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
+ return;
+ case IDENTIFIER_NODE:
+ hstate.add_object (IDENTIFIER_HASH_VALUE (t));
+ return;
+ case FUNCTION_DECL:
+ /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
+ Otherwise nodes that compare equal according to operand_equal_p might
+ get different hash codes. However, don't do this for machine specific
+ or front end builtins, since the function code is overloaded in those
+ cases. */
+ if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
+ && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
+ {
+ t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
+ code = TREE_CODE (t);
+ }
+ /* FALL THROUGH */
+ default:
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
+ return;
+ }
+ tclass = TREE_CODE_CLASS (code);
+
+ if (tclass == tcc_declaration)
+ {
+ /* DECLs have a unique ID. */
+ hstate.add_hwi (DECL_UID (t));
+ }
+ else if (tclass == tcc_comparison && !commutative_tree_code (code))
+ {
+ /* For comparisons that can be swapped, use the lower
+ tree code. */
+ enum tree_code ccode = swap_tree_comparison (code);
+ if (code < ccode)
+ ccode = code;
+ hstate.add_object (ccode);
+ hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
+ hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
+ }
+ else if (CONVERT_EXPR_CODE_P (code))
+ {
+ /* NOP_EXPR and CONVERT_EXPR are considered equal by
+ operand_equal_p. */
+ enum tree_code ccode = NOP_EXPR;
+ hstate.add_object (ccode);
+
+ /* Don't hash the type, that can lead to having nodes which
+ compare equal according to operand_equal_p, but which
+ have different hash codes. Make sure to include signedness
+ in the hash computation. */
+ hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
+ hash_operand (TREE_OPERAND (t, 0), hstate, flags);
+ }
+ /* For OEP_ADDRESS_OF, hash MEM_REF[&decl, 0] the same as decl. */
+ else if (code == MEM_REF
+ && (flags & OEP_ADDRESS_OF) != 0
+ && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
+ && integer_zerop (TREE_OPERAND (t, 1)))
+ hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
+ hstate, flags);
+ /* Don't ICE on FE specific trees, or their arguments etc.
+ during operand_equal_p hash verification. */
+ else if (!IS_EXPR_CODE_CLASS (tclass))
+ gcc_assert (flags & OEP_HASH_CHECK);
+ else
+ {
+ unsigned int sflags = flags;
+
+ hstate.add_object (code);
+
+ switch (code)
+ {
+ case ADDR_EXPR:
+ gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
+ flags |= OEP_ADDRESS_OF;
+ sflags = flags;
+ break;
+
+ case INDIRECT_REF:
+ case MEM_REF:
+ case TARGET_MEM_REF:
+ flags &= ~OEP_ADDRESS_OF;
+ sflags = flags;
+ break;
+
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ sflags &= ~OEP_ADDRESS_OF;
+ break;
+
+ case COND_EXPR:
+ flags &= ~OEP_ADDRESS_OF;
+ break;
+
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ {
+ /* The multiplication operands are commutative. */
+ inchash::hash one, two;
+ hash_operand (TREE_OPERAND (t, 0), one, flags);
+ hash_operand (TREE_OPERAND (t, 1), two, flags);
+ hstate.add_commutative (one, two);
+ hash_operand (TREE_OPERAND (t, 2), two, flags);
+ return;
+ }
+
+ case CALL_EXPR:
+ if (CALL_EXPR_FN (t) == NULL_TREE)
+ hstate.add_int (CALL_EXPR_IFN (t));
+ break;
+
+ case TARGET_EXPR:
+ /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
+ Usually different TARGET_EXPRs should just use
+ different temporaries in their slots. */
+ hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
+ return;
+
+ /* Virtual table call. */
+ case OBJ_TYPE_REF:
+ inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
+ inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
+ inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
+ return;
+ default:
+ break;
+ }
+
+ /* Don't hash the type, that can lead to having nodes which
+ compare equal according to operand_equal_p, but which
+ have different hash codes. */
+ if (code == NON_LVALUE_EXPR)
+ {
+ /* Make sure to include signedness in the hash computation. */
+ hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
+ hash_operand (TREE_OPERAND (t, 0), hstate, flags);
+ }
+
+ else if (commutative_tree_code (code))
+ {
+ /* It's a commutative expression. We want to hash it the same
+ however it appears. We do this by first hashing both operands
+ and then rehashing based on the order of their independent
+ hashes. */
+ inchash::hash one, two;
+ hash_operand (TREE_OPERAND (t, 0), one, flags);
+ hash_operand (TREE_OPERAND (t, 1), two, flags);
+ hstate.add_commutative (one, two);
+ }
+ else
+ for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
+ hash_operand (TREE_OPERAND (t, i), hstate,
+ i == 0 ? flags : sflags);
+ }
+ return;
+ }
+}
+
+bool
+operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
+ unsigned int flags, bool *ret)
+{
+ /* When checking, verify at the outermost operand_equal_p call that
+ if operand_equal_p returns true then ARG0 and ARG1 have the same
+ hash value. */
+ if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
+ {
+ if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
+ {
+ if (arg0 != arg1)
+ {
+ inchash::hash hstate0 (0), hstate1 (0);
+ hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
+ hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
+ hashval_t h0 = hstate0.end ();
+ hashval_t h1 = hstate1.end ();
+ gcc_assert (h0 == h1);
}
+ *ret = true;
}
- return 0;
-
- case tcc_statement:
- switch (TREE_CODE (arg0))
- {
- case RETURN_EXPR:
- if (flags & OEP_LEXICOGRAPHIC)
- return OP_SAME_WITH_NULL (0);
- return 0;
- default:
- return 0;
- }
+ else
+ *ret = false;
- default:
- return 0;
+ return true;
}
-#undef OP_SAME
-#undef OP_SAME_WITH_NULL
+ return false;
+}
+
+
+static operand_compare default_compare_instance;
+
+/* Convenience wrapper around the operand_compare class because usually we do
+ not need to play with the valueizer. */
+
+bool
+operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
+{
+ return default_compare_instance.operand_equal_p (arg0, arg1, flags);
+}
+
+namespace inchash
+{
+
+/* Generate a hash value for an expression. This can be used iteratively
+ by passing a previous result as the HSTATE argument.
+
+ This function is intended to produce the same hash for expressions which
+ would compare equal using operand_equal_p. */
+void
+add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
+{
+ default_compare_instance.hash_operand (t, hstate, flags);
+}
+
}
\f
/* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
two different values, which will be stored in *CVAL1 and *CVAL2; if
they are nonzero it means that some operands have already been found.
No variables may be used anywhere else in the expression except in the
- comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
- the expression and save_expr needs to be called with CVAL1 and CVAL2.
+ comparisons.
If this is true, return true. Otherwise, return false. */
-static int
-twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
+static bool
+twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
{
enum tree_code code = TREE_CODE (arg);
enum tree_code_class tclass = TREE_CODE_CLASS (code);
|| code == COMPOUND_EXPR))
tclass = tcc_binary;
- else if (tclass == tcc_expression && code == SAVE_EXPR
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
- {
- /* If we've already found a CVAL1 or CVAL2, this expression is
- two complex to handle. */
- if (*cval1 || *cval2)
- return 0;
-
- tclass = tcc_unary;
- *save_p = 1;
- }
-
switch (tclass)
{
case tcc_unary:
- return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
+ return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
case tcc_binary:
- return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
- && twoval_comparison_p (TREE_OPERAND (arg, 1),
- cval1, cval2, save_p));
+ return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
+ && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
case tcc_constant:
- return 1;
+ return true;
case tcc_expression:
if (code == COND_EXPR)
- return (twoval_comparison_p (TREE_OPERAND (arg, 0),
- cval1, cval2, save_p)
- && twoval_comparison_p (TREE_OPERAND (arg, 1),
- cval1, cval2, save_p)
- && twoval_comparison_p (TREE_OPERAND (arg, 2),
- cval1, cval2, save_p));
- return 0;
+ return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
+ && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
+ && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
+ return false;
case tcc_comparison:
/* First see if we can handle the first operand, then the second. For
if (operand_equal_p (TREE_OPERAND (arg, 0),
TREE_OPERAND (arg, 1), 0))
- return 0;
+ return false;
if (*cval1 == 0)
*cval1 = TREE_OPERAND (arg, 0);
else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
;
else
- return 0;
+ return false;
if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
;
else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
;
else
- return 0;
+ return false;
- return 1;
+ return true;
default:
- return 0;
+ return false;
}
}
\f
|| !known_size_p (plbitsize)
|| !plbitsize.is_constant (&lbitsize)
|| !plbitpos.is_constant (&lbitpos)
- || lbitsize == GET_MODE_BITSIZE (lmode)
+ || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
|| offset != 0
|| TREE_CODE (linner) == PLACEHOLDER_EXPR
|| lvolatilep)
There are problems with FP fields since the type_for_size call
below can fail for, e.g., XFmode. */
if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
- return 0;
+ return NULL_TREE;
/* We are interested in the bare arrangement of bits, so strip everything
that doesn't affect the machine mode. However, record the type of the
exp = TREE_OPERAND (exp, 0);
STRIP_NOPS (exp); STRIP_NOPS (and_mask);
if (TREE_CODE (and_mask) != INTEGER_CST)
- return 0;
+ return NULL_TREE;
}
poly_int64 poly_bitsize, poly_bitpos;
|| (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
&& compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
*pbitpos + *pbitsize) < 0))
- return 0;
+ return NULL_TREE;
+
+ unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
+ if (unsigned_type == NULL_TREE)
+ return NULL_TREE;
*exp_ = exp;
*punsignedp = TYPE_UNSIGNED (outer_type);
/* Compute the mask to access the bitfield. */
- unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
precision = TYPE_PRECISION (unsigned_type);
mask = build_int_cst_type (unsigned_type, -1);
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
bit positions and MASK is SIGNED. */
-static int
+static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
tree type = TREE_TYPE (mask);
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
to be evaluated unconditionally. */
-static int
+static bool
simple_operand_p (const_tree exp)
{
/* Strip any conversions that don't change the machine mode. */
{
enum tree_code code;
- if (TREE_SIDE_EFFECTS (exp)
- || tree_could_trap_p (exp))
+ if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
return false;
while (CONVERT_EXPR_P (exp))
/* First make sure that arithmetic in this type is valid, then make sure
that it wraps around. */
if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
- etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
- TYPE_UNSIGNED (etype));
+ etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
- if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
+ if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
{
tree utype, minv, maxv;
else
return NULL_TREE;
}
+ else if (POINTER_TYPE_P (etype))
+ etype = unsigned_type_for (etype);
return etype;
}
/* Disable this optimization for function pointer expressions
on targets that require function pointer canonicalization. */
if (targetm.have_canonicalize_funcptr_for_compare ()
- && TREE_CODE (etype) == POINTER_TYPE
- && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
+ && POINTER_TYPE_P (etype)
+ && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
return NULL_TREE;
if (! in_p)
if (etype == NULL_TREE)
return NULL_TREE;
- if (POINTER_TYPE_P (etype))
- etype = unsigned_type_for (etype);
-
high = fold_convert_loc (loc, etype, high);
low = fold_convert_loc (loc, etype, low);
exp = fold_convert_loc (loc, etype, exp);
tem = high0, high0 = high1, high1 = tem;
}
+ /* If the second range is != high1 where high1 is the type maximum of
+ the type, try first merging with < high1 range. */
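+ /* E.g. for unsigned char, a second range of "!= 255" is the same as
+ "<= 254", which may then merge with the first range. */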
+ if (low1
+ && high1
+ && TREE_CODE (low1) == INTEGER_CST
+ && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
+ || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
+ && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
+ GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
+ && operand_equal_p (low1, high1, 0))
+ {
+ if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
+ && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
+ !in1_p, NULL_TREE, range_predecessor (low1)))
+ return true;
+ /* Similarly for the second range != low1 where low1 is the type minimum
+ of the type, try first merging with > low1 range. */
+ if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
+ && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
+ !in1_p, range_successor (low1), NULL_TREE))
+ return true;
+ }
+
/* Now flag two cases, whether the ranges are disjoint or whether the
second range is totally subsumed in the first. Note that the tests
below are simplified by the ones above. */
switch (TREE_CODE (TREE_TYPE (low0)))
{
case ENUMERAL_TYPE:
- if (TYPE_PRECISION (TREE_TYPE (low0))
- != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
+ if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
+ GET_MODE_BITSIZE
+ (TYPE_MODE (TREE_TYPE (low0)))))
break;
/* FALLTHROUGH */
case INTEGER_TYPE:
switch (TREE_CODE (TREE_TYPE (high1)))
{
case ENUMERAL_TYPE:
- if (TYPE_PRECISION (TREE_TYPE (high1))
- != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
+ if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
+ GET_MODE_BITSIZE
+ (TYPE_MODE (TREE_TYPE (high1)))))
break;
/* FALLTHROUGH */
case INTEGER_TYPE:
/* On machines where branches are expensive, if this is a
short-circuited branch and the underlying object on both sides
is the same, make a non-short-circuit operation. */
- else if (LOGICAL_OP_NON_SHORT_CIRCUIT
- && !flag_sanitize_coverage
- && lhs != 0 && rhs != 0
- && (code == TRUTH_ANDIF_EXPR
- || code == TRUTH_ORIF_EXPR)
- && operand_equal_p (lhs, rhs, 0))
+ bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
+ if (param_logical_op_non_short_circuit != -1)
+ logical_op_non_short_circuit
+ = param_logical_op_non_short_circuit;
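+ /* The --param logical-op-non-short-circuit={0,1} option, when given,
+ overrides the target's LOGICAL_OP_NON_SHORT_CIRCUIT default. */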
+ if (logical_op_non_short_circuit
+ && !flag_sanitize_coverage
+ && lhs != 0 && rhs != 0
+ && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
+ && operand_equal_p (lhs, rhs, 0))
{
/* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
}
/* If the right sides are not constant, do the same for it. Also,
- disallow this optimization if a size or signedness mismatch occurs
- between the left and right sides. */
+ disallow this optimization if a size, signedness or storage order
+ mismatch occurs between the left and right sides. */
if (l_const == 0)
{
if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
|| ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
+ || ll_reversep != lr_reversep
/* Make sure the two fields on the right
correspond to the left without being swapped. */
|| ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
apply the distributive law to commute the multiply and addition
if the multiplication of the constants doesn't overflow
and overflow is defined. With undefined overflow
- op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
- if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
+ op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
+ But fold_plusminus_mult_expr would factor back any power-of-two
+ value so do not distribute in the first place in this case. */
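+ /* E.g. distributing (X + 1) * 8 to X * 8 + 8 would just be undone by
+ fold_plusminus_mult_expr, oscillating between the two forms. */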
+ if (code == MULT_EXPR
+ && TYPE_OVERFLOW_WRAPS (ctype)
+ && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
return fold_build2 (tcode, ctype,
fold_build2 (code, ctype,
fold_convert (ctype, op0),
if (tcode == code)
{
bool overflow_p = false;
- bool overflow_mul_p;
+ wi::overflow_type overflow_mul;
signop sign = TYPE_SIGN (ctype);
unsigned prec = TYPE_PRECISION (ctype);
wide_int mul = wi::mul (wi::to_wide (op1, prec),
wi::to_wide (c, prec),
- sign, &overflow_mul_p);
+ sign, &overflow_mul);
overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
- if (overflow_mul_p
+ if (overflow_mul
&& ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
overflow_p = true;
if (!overflow_p)
tree rhs = NULL_TREE;
enum tree_code cond_code = COND_EXPR;
+ /* Do not move possibly trapping operations into the conditional as this
+ pessimizes code and causes gimplification issues when applied late. */
+ if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
+ ANY_INTEGRAL_TYPE_P (type)
+ && TYPE_OVERFLOW_TRAPS (type), op1))
+ return NULL_TREE;
+
if (TREE_CODE (cond) == COND_EXPR
|| TREE_CODE (cond) == VEC_COND_EXPR)
{
return false;
/* Don't allow the fold with -fsignaling-nans. */
- if (HONOR_SNANS (element_mode (type)))
+ if (HONOR_SNANS (type))
return false;
/* Allow the fold if zeros aren't signed, or their sign isn't important. */
- if (!HONOR_SIGNED_ZEROS (element_mode (type)))
+ if (!HONOR_SIGNED_ZEROS (type))
return true;
+ /* There is no case that is safe for all rounding modes. */
+ if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
+ return false;
+
/* In a vector or complex, we would need to check the sign of all zeros. */
- if (TREE_CODE (addend) != REAL_CST)
+ if (TREE_CODE (addend) == VECTOR_CST)
+ addend = uniform_vector_p (addend);
+ if (!addend || TREE_CODE (addend) != REAL_CST)
return false;
/* Treat x + -0 as x - 0 and x - -0 as x + 0. */
/* The mode has signed zeros, and we have to honor their sign.
In this situation, there is only one case we can return true for.
- X - 0 is the same as X unless rounding towards -infinity is
- supported. */
- return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
+ X - 0 is the same as X with default rounding. */
+ return negate;
}
/* Subroutine of match.pd that optimizes comparisons of a division by
{
tree prod, tmp, type = TREE_TYPE (c1);
signop sign = TYPE_SIGN (type);
- bool overflow;
+ wi::overflow_type overflow;
/* We have to do this the hard way to detect unsigned overflow.
prod = int_const_binop (MULT_EXPR, c1, c2); */
/* No identical multiplicands; see if we can find a common
power-of-two factor in non-power-of-two multiplies. This
can help in multi-dimensional array access. */
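+ /* E.g. with constants 12 and 4 this rewrites A * 12 + B * 4
+ as (A * 3 + B) * 4. */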
- else if (tree_fits_shwi_p (arg01)
- && tree_fits_shwi_p (arg11))
+ else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
{
- HOST_WIDE_INT int01, int11, tmp;
+ HOST_WIDE_INT int01 = tree_to_shwi (arg01);
+ HOST_WIDE_INT int11 = tree_to_shwi (arg11);
+ HOST_WIDE_INT tmp;
bool swap = false;
tree maybe_same;
- int01 = tree_to_shwi (arg01);
- int11 = tree_to_shwi (arg11);
/* Move min of absolute values to int11. */
if (absu_hwi (int01) < absu_hwi (int11))
else
maybe_same = arg11;
- if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
+ const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
+ if (factor > 1
+ && pow2p_hwi (factor)
+ && (int01 & (factor - 1)) == 0
/* The remainder should not be a constant, otherwise we
end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
increase the number of multiplications needed. */
if (!same)
return NULL_TREE;
- if (! INTEGRAL_TYPE_P (type)
+ if (! ANY_INTEGRAL_TYPE_P (type)
|| TYPE_OVERFLOW_WRAPS (type)
/* We are neither factoring zero nor minus one. */
|| TREE_CODE (same) == INTEGER_CST)
/* Same may be zero and thus the operation 'code' may overflow. Likewise
same may be minus one and thus the multiplication may overflow. Perform
- the operations in an unsigned type. */
+ the sum operation in an unsigned type. */
tree utype = unsigned_type_for (type);
tree tem = fold_build2_loc (loc, code, utype,
fold_convert_loc (loc, utype, alt0),
return fold_build2_loc (loc, MULT_EXPR, type,
fold_convert (type, tem), same);
- return fold_convert_loc (loc, type,
- fold_build2_loc (loc, MULT_EXPR, utype, tem,
- fold_convert_loc (loc, utype, same)));
+ /* Do not resort to unsigned multiplication because
+ we lose the no-overflow property of the expression. */
+ return NULL_TREE;
}
/* Subroutine of native_encode_expr. Encode the INTEGER_CST
return rsize + isize;
}
-
-/* Subroutine of native_encode_expr. Encode the VECTOR_CST
- specified by EXPR into the buffer PTR of length LEN bytes.
- Return the number of bytes placed in the buffer, or zero
- upon failure. */
+/* Like native_encode_vector, but only encode the first COUNT elements.
+ The other arguments are as for native_encode_vector. */
static int
-native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
-{
- unsigned i, count;
- int size, offset;
- tree itype, elem;
+native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
+ int off, unsigned HOST_WIDE_INT count)
+{
+ tree itype = TREE_TYPE (TREE_TYPE (expr));
+ if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
+ && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
+ {
+ /* This is the only case in which elements can be smaller than a byte.
+ Element 0 is always in the lsb of the containing byte. */
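+ /* E.g. a 16-element boolean vector with 1-bit elements occupies two
+ bytes; element 0 is bit 0 of byte 0 and element 8 is bit 0 of
+ byte 1. */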
+ unsigned int elt_bits = TYPE_PRECISION (itype);
+ int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
+ if ((off == -1 && total_bytes > len) || off >= total_bytes)
+ return 0;
+
+ if (off == -1)
+ off = 0;
+
+ /* Zero the buffer and then set bits later where necessary. */
+ int extract_bytes = MIN (len, total_bytes - off);
+ if (ptr)
+ memset (ptr, 0, extract_bytes);
+
+ unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
+ unsigned int first_elt = off * elts_per_byte;
+ unsigned int extract_elts = extract_bytes * elts_per_byte;
+ for (unsigned int i = 0; i < extract_elts; ++i)
+ {
+ tree elt = VECTOR_CST_ELT (expr, first_elt + i);
+ if (TREE_CODE (elt) != INTEGER_CST)
+ return 0;
+
+ if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
+ {
+ unsigned int bit = i * elt_bits;
+ ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
+ }
+ }
+ return extract_bytes;
+ }
- offset = 0;
- count = VECTOR_CST_NELTS (expr);
- itype = TREE_TYPE (TREE_TYPE (expr));
- size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
- for (i = 0; i < count; i++)
+ int offset = 0;
+ int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
+ for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
{
if (off >= size)
{
off -= size;
continue;
}
- elem = VECTOR_CST_ELT (expr, i);
+ tree elem = VECTOR_CST_ELT (expr, i);
int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
len - offset, off);
if ((off == -1 && res != size) || res == 0)
return 0;
offset += res;
if (offset >= len)
- return offset;
+ return (off == -1 && i < count - 1) ? 0 : offset;
if (off != -1)
off = 0;
}
return offset;
}
+/* Subroutine of native_encode_expr. Encode the VECTOR_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
+{
+ unsigned HOST_WIDE_INT count;
+ if (!VECTOR_CST_NELTS (expr).is_constant (&count))
+ return 0;
+ return native_encode_vector_part (expr, ptr, len, off, count);
+}
+
/* Subroutine of native_encode_expr. Encode the STRING_CST
specified by EXPR into the buffer PTR of length LEN bytes.
return 0;
if (off == -1)
off = 0;
+ len = MIN (total_bytes - off, len);
if (ptr == NULL)
/* Dry run. */;
- else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
+ else
{
int written = 0;
if (off < TREE_STRING_LENGTH (expr))
written = MIN (len, TREE_STRING_LENGTH (expr) - off);
memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
- memset (ptr + written, 0,
- MIN (total_bytes - written, len - written));
+ memset (ptr + written, 0, len - written);
}
- else
- memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
- return MIN (total_bytes - off, len);
+ return len;
}
}
}
+/* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
+ NON_LVALUE_EXPRs and nops. */
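+/* For example, encoding (struct { char a; int b; }) { 1, 2 } fills an
+ 8-byte buffer with { 1, 0, 0, 0, 2, 0, 0, 0 }, assuming a little-endian
+ target with 4-byte int; the padding bytes are zeroed. */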
+
+int
+native_encode_initializer (tree init, unsigned char *ptr, int len,
+ int off)
+{
+ /* We don't support starting at a negative offset, and -1 is special. */
+ if (off < -1 || init == NULL_TREE)
+ return 0;
+
+ STRIP_NOPS (init);
+ switch (TREE_CODE (init))
+ {
+ case VIEW_CONVERT_EXPR:
+ case NON_LVALUE_EXPR:
+ return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
+ default:
+ return native_encode_expr (init, ptr, len, off);
+ case CONSTRUCTOR:
+ tree type = TREE_TYPE (init);
+ HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
+ if (total_bytes < 0)
+ return 0;
+ if ((off == -1 && total_bytes > len) || off >= total_bytes)
+ return 0;
+ int o = off == -1 ? 0 : off;
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ HOST_WIDE_INT min_index;
+ unsigned HOST_WIDE_INT cnt;
+ HOST_WIDE_INT curpos = 0, fieldsize;
+ constructor_elt *ce;
+
+ if (TYPE_DOMAIN (type) == NULL_TREE
+ || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
+ return 0;
+
+ fieldsize = int_size_in_bytes (TREE_TYPE (type));
+ if (fieldsize <= 0)
+ return 0;
+
+ min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
+ if (ptr != NULL)
+ memset (ptr, '\0', MIN (total_bytes - off, len));
+
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
+ {
+ tree val = ce->value;
+ tree index = ce->index;
+ HOST_WIDE_INT pos = curpos, count = 0;
+ bool full = false;
+ if (index && TREE_CODE (index) == RANGE_EXPR)
+ {
+ if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
+ || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
+ return 0;
+ pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
+ * fieldsize;
+ count = (tree_to_shwi (TREE_OPERAND (index, 1))
+ - tree_to_shwi (TREE_OPERAND (index, 0)));
+ }
+ else if (index)
+ {
+ if (!tree_fits_shwi_p (index))
+ return 0;
+ pos = (tree_to_shwi (index) - min_index) * fieldsize;
+ }
+
+ curpos = pos;
+ if (val)
+ do
+ {
+ if (off == -1
+ || (curpos >= off
+ && (curpos + fieldsize
+ <= (HOST_WIDE_INT) off + len)))
+ {
+ if (full)
+ {
+ if (ptr)
+ memcpy (ptr + (curpos - o), ptr + (pos - o),
+ fieldsize);
+ }
+ else if (!native_encode_initializer (val,
+ ptr
+ ? ptr + curpos - o
+ : NULL,
+ fieldsize,
+ off == -1 ? -1
+ : 0))
+ return 0;
+ else
+ {
+ full = true;
+ pos = curpos;
+ }
+ }
+ else if (curpos + fieldsize > off
+ && curpos < (HOST_WIDE_INT) off + len)
+ {
+ /* Partial overlap. */
+ unsigned char *p = NULL;
+ int no = 0;
+ int l;
+ if (curpos >= off)
+ {
+ if (ptr)
+ p = ptr + curpos - off;
+ l = MIN ((HOST_WIDE_INT) off + len - curpos,
+ fieldsize);
+ }
+ else
+ {
+ p = ptr;
+ no = off - curpos;
+ l = len;
+ }
+ if (!native_encode_initializer (val, p, l, no))
+ return 0;
+ }
+ curpos += fieldsize;
+ }
+ while (count-- != 0);
+ }
+ return MIN (total_bytes - off, len);
+ }
+ else if (TREE_CODE (type) == RECORD_TYPE
+ || TREE_CODE (type) == UNION_TYPE)
+ {
+ unsigned HOST_WIDE_INT cnt;
+ constructor_elt *ce;
+
+ if (ptr != NULL)
+ memset (ptr, '\0', MIN (total_bytes - off, len));
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
+ {
+ tree field = ce->index;
+ tree val = ce->value;
+ HOST_WIDE_INT pos, fieldsize;
+
+ if (field == NULL_TREE)
+ return 0;
+
+ pos = int_byte_position (field);
+ if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
+ continue;
+
+ if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
+ && TYPE_DOMAIN (TREE_TYPE (field))
+ && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
+ return 0;
+ if (DECL_SIZE_UNIT (field) == NULL_TREE
+ || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
+ return 0;
+ fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
+ if (fieldsize == 0)
+ continue;
+
+ if (off != -1 && pos + fieldsize <= off)
+ continue;
+
+ if (DECL_BIT_FIELD (field))
+ return 0;
+
+ if (val == NULL_TREE)
+ continue;
+
+ if (off == -1
+ || (pos >= off
+ && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
+ {
+ if (!native_encode_initializer (val, ptr ? ptr + pos - o
+ : NULL,
+ fieldsize,
+ off == -1 ? -1 : 0))
+ return 0;
+ }
+ else
+ {
+ /* Partial overlap. */
+ unsigned char *p = NULL;
+ int no = 0;
+ int l;
+ if (pos >= off)
+ {
+ if (ptr)
+ p = ptr + pos - off;
+ l = MIN ((HOST_WIDE_INT) off + len - pos,
+ fieldsize);
+ }
+ else
+ {
+ p = ptr;
+ no = off - pos;
+ l = len;
+ }
+ if (!native_encode_initializer (val, p, l, no))
+ return 0;
+ }
+ }
+ return MIN (total_bytes - off, len);
+ }
+ return 0;
+ }
+}
+
/* Subroutine of native_interpret_expr. Interpret the contents of
the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
return build_complex (type, rpart, ipart);
}
+/* Read a vector of type TYPE from the target memory image given by BYTES,
+ which contains LEN bytes. The vector is known to be encodable using
+ NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
+
+ Return the vector on success, otherwise return null. */
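+/* For example, an alternating {0, -1, 0, -1, ...} mask can be read back
+ with NPATTERNS == 2 and NELTS_PER_PATTERN == 1, so only the first two
+ encoded elements are consumed from BYTES. */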
+
+static tree
+native_interpret_vector_part (tree type, const unsigned char *bytes,
+ unsigned int len, unsigned int npatterns,
+ unsigned int nelts_per_pattern)
+{
+ tree elt_type = TREE_TYPE (type);
+ if (VECTOR_BOOLEAN_TYPE_P (type)
+ && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
+ {
+ /* This is the only case in which elements can be smaller than a byte.
+ Element 0 is always in the lsb of the containing byte. */
+ unsigned int elt_bits = TYPE_PRECISION (elt_type);
+ if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
+ return NULL_TREE;
+
+ tree_vector_builder builder (type, npatterns, nelts_per_pattern);
+ for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
+ {
+ unsigned int bit_index = i * elt_bits;
+ unsigned int byte_index = bit_index / BITS_PER_UNIT;
+ unsigned int lsb = bit_index % BITS_PER_UNIT;
+ builder.quick_push (bytes[byte_index] & (1 << lsb)
+ ? build_all_ones_cst (elt_type)
+ : build_zero_cst (elt_type));
+ }
+ return builder.build ();
+ }
+
+ unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
+ if (elt_bytes * npatterns * nelts_per_pattern > len)
+ return NULL_TREE;
+
+ tree_vector_builder builder (type, npatterns, nelts_per_pattern);
+ for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
+ {
+ tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
+ if (!elt)
+ return NULL_TREE;
+ builder.quick_push (elt);
+ bytes += elt_bytes;
+ }
+ return builder.build ();
+}
/* Subroutine of native_interpret_expr. Interpret the contents of
the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
If the buffer cannot be interpreted, return NULL_TREE. */
static tree
-native_interpret_vector (tree type, const unsigned char *ptr, int len)
+native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
{
- tree etype, elem;
- int i, size, count;
+ tree etype;
+ unsigned int size;
+ unsigned HOST_WIDE_INT count;
etype = TREE_TYPE (type);
size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
- count = TYPE_VECTOR_SUBPARTS (type);
- if (size * count > len)
+ if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
+ || size * count > len)
return NULL_TREE;
- tree_vector_builder elements (type, count, 1);
- for (i = 0; i < count; ++i)
- {
- elem = native_interpret_expr (etype, ptr+(i*size), size);
- if (!elem)
- return NULL_TREE;
- elements.quick_push (elem);
- }
- return elements.build ();
+ return native_interpret_vector_part (type, ptr, len, count, 1);
}
}
}
-/* Returns true if we can interpret the contents of a native encoding
- as TYPE. */
+/* Returns true if we can interpret the contents of a native encoding
+ as TYPE. */
+
+bool
+can_native_interpret_type_p (tree type)
+{
+ switch (TREE_CODE (type))
+ {
+ case INTEGER_TYPE:
+ case ENUMERAL_TYPE:
+ case BOOLEAN_TYPE:
+ case POINTER_TYPE:
+ case REFERENCE_TYPE:
+ case FIXED_POINT_TYPE:
+ case REAL_TYPE:
+ case COMPLEX_TYPE:
+ case VECTOR_TYPE:
+ return true;
+ default:
+ return false;
+ }
+}
+
+/* Routines for manipulation of native_encode_expr encoded data if the encoded
+ or extracted constant positions and/or sizes aren't byte aligned. */
+
+/* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
+ bits between adjacent elements. AMNT should be within
+ [0, BITS_PER_UNIT).
+ Example, AMNT = 2:
+ 00011111|11100000 << 2 = 01111111|10000000
+ PTR[1] | PTR[0] PTR[1] | PTR[0]. */
+
+void
+shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
+ unsigned int amnt)
+{
+ if (amnt == 0)
+ return;
+
+ unsigned char carry_over = 0U;
+ unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
+ unsigned char clear_mask = (~0U) << amnt;
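+ /* For AMNT == 2 and 8-bit units, carry_mask is 0xc0 (the two high bits
+ that move into the next byte) and clear_mask is 0xfc (it clears the
+ vacated low bits before the previous carry is ORed in). */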
+
+ for (unsigned int i = 0; i < sz; i++)
+ {
+ unsigned prev_carry_over = carry_over;
+ carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
+
+ ptr[i] <<= amnt;
+ if (i != 0)
+ {
+ ptr[i] &= clear_mask;
+ ptr[i] |= prev_carry_over;
+ }
+ }
+}
+
+/* Like shift_bytes_in_array_left but for big-endian.
+ Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
+ bits between adjacent elements. AMNT should be within
+ [0, BITS_PER_UNIT).
+ Example, AMNT = 2:
+ 00011111|11100000 >> 2 = 00000111|11111000
+ PTR[0] | PTR[1] PTR[0] | PTR[1]. */
+
+void
+shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
+ unsigned int amnt)
+{
+ if (amnt == 0)
+ return;
+
+ unsigned char carry_over = 0U;
+ unsigned char carry_mask = ~(~0U << amnt);
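+ /* For AMNT == 2 and 8-bit units, carry_mask is 0x03: the two low bits
+ of each byte, which move into the high bits of the following byte. */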
+
+ for (unsigned int i = 0; i < sz; i++)
+ {
+ unsigned prev_carry_over = carry_over;
+ carry_over = ptr[i] & carry_mask;
+
+ carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
+ ptr[i] >>= amnt;
+ ptr[i] |= prev_carry_over;
+ }
+}
+
+/* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
+ directly on the VECTOR_CST encoding, in a way that works for variable-
+ length vectors. Return the resulting VECTOR_CST on success or null
+ on failure. */
-static bool
-can_native_interpret_type_p (tree type)
+static tree
+fold_view_convert_vector_encoding (tree type, tree expr)
{
- switch (TREE_CODE (type))
- {
- case INTEGER_TYPE:
- case ENUMERAL_TYPE:
- case BOOLEAN_TYPE:
- case POINTER_TYPE:
- case REFERENCE_TYPE:
- case FIXED_POINT_TYPE:
- case REAL_TYPE:
- case COMPLEX_TYPE:
- case VECTOR_TYPE:
- return true;
- default:
- return false;
- }
-}
+ tree expr_type = TREE_TYPE (expr);
+ poly_uint64 type_bits, expr_bits;
+ if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
+ || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
+ return NULL_TREE;
+
+ poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
+ poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
+ unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
+ unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
+ /* We can only preserve the semantics of a stepped pattern if the new
+ vector element is an integer of the same size. */
+ if (VECTOR_CST_STEPPED_P (expr)
+ && (!INTEGRAL_TYPE_P (TREE_TYPE (type)) || type_elt_bits != expr_elt_bits))
+ return NULL_TREE;
+
+ /* The number of bits needed to encode one element from every pattern
+ of the original vector. */
+ unsigned int expr_sequence_bits
+ = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
+
+ /* The number of bits needed to encode one element from every pattern
+ of the result. */
+ unsigned int type_sequence_bits
+ = least_common_multiple (expr_sequence_bits, type_elt_bits);
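+ /* For example, with 2 patterns of 8-bit elements (EXPR_SEQUENCE_BITS
+ == 16) and 32-bit result elements, TYPE_SEQUENCE_BITS is
+ lcm (16, 32) == 32, i.e. one result element per sequence. */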
+
+ /* Don't try to read more bytes than are available, which can happen
+ for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
+ The general VIEW_CONVERT handling can cope with that case, so there's
+ no point complicating things here. */
+ unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
+ unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
+ BITS_PER_UNIT);
+ unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
+ if (known_gt (buffer_bits, expr_bits))
+ return NULL_TREE;
+
+ /* Get enough bytes of EXPR to form the new encoding. */
+ auto_vec<unsigned char, 128> buffer (buffer_bytes);
+ buffer.quick_grow (buffer_bytes);
+ if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
+ buffer_bits / expr_elt_bits)
+ != (int) buffer_bytes)
+ return NULL_TREE;
+
+ /* Reencode the bytes as TYPE. */
+ unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
+ return native_interpret_vector_part (type, &buffer[0], buffer.length (),
+ type_npatterns, nelts_per_pattern);
+}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
TYPE at compile-time. If we're unable to perform the conversion
if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
return NULL_TREE;
+ if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
+ if (tree res = fold_view_convert_vector_encoding (type, expr))
+ return res;
+
len = native_encode_expr (expr, buffer, sizeof (buffer));
if (len == 0)
return NULL_TREE;
}
else if (TREE_CODE (t) == MEM_REF
&& integer_zerop (TREE_OPERAND (t, 1)))
- return TREE_OPERAND (t, 0);
+ {
+ t = TREE_OPERAND (t, 0);
+
+ if (TREE_TYPE (t) != ptrtype)
+ t = fold_convert_loc (loc, ptrtype, t);
+ }
else if (TREE_CODE (t) == MEM_REF
&& TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
return fold_binary (POINTER_PLUS_EXPR, ptrtype,
if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
return tem;
- if (LOGICAL_OP_NON_SHORT_CIRCUIT
+ bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
+ if (param_logical_op_non_short_circuit != -1)
+ logical_op_non_short_circuit
+ = param_logical_op_non_short_circuit;
+ if (logical_op_non_short_circuit
&& !flag_sanitize_coverage
&& (code == TRUTH_AND_EXPR
|| code == TRUTH_ANDIF_EXPR
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
space. This is used to avoid issuing overflow warnings for
- expressions like &p->x which can not wrap. */
+ expressions like &p->x which cannot wrap. */
static bool
pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
else
wi_offset = wi::to_poly_wide (offset);
- bool overflow;
+ wi::overflow_type overflow;
poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
precision);
poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
{
/* We can fold this expression to a constant if the non-constant
offset parts are equal. */
- if (offset0 == offset1
- || (offset0 && offset1
- && operand_equal_p (offset0, offset1, 0)))
+ if ((offset0 == offset1
+ || (offset0 && offset1
+ && operand_equal_p (offset0, offset1, 0)))
+ && (equality_code
+ || (indirect_base0
+ && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
+ || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
{
if (!equality_code
&& maybe_ne (bitpos0, bitpos1)
{
case EQ_EXPR:
if (known_eq (bitpos0, bitpos1))
- return boolean_true_node;
+ return constant_boolean_node (true, type);
if (known_ne (bitpos0, bitpos1))
- return boolean_false_node;
+ return constant_boolean_node (false, type);
break;
case NE_EXPR:
if (known_ne (bitpos0, bitpos1))
- return boolean_true_node;
+ return constant_boolean_node (true, type);
if (known_eq (bitpos0, bitpos1))
- return boolean_false_node;
+ return constant_boolean_node (false, type);
break;
case LT_EXPR:
if (known_lt (bitpos0, bitpos1))
- return boolean_true_node;
+ return constant_boolean_node (true, type);
if (known_ge (bitpos0, bitpos1))
- return boolean_false_node;
+ return constant_boolean_node (false, type);
break;
case LE_EXPR:
if (known_le (bitpos0, bitpos1))
- return boolean_true_node;
+ return constant_boolean_node (true, type);
if (known_gt (bitpos0, bitpos1))
- return boolean_false_node;
+ return constant_boolean_node (false, type);
break;
case GE_EXPR:
if (known_ge (bitpos0, bitpos1))
- return boolean_true_node;
+ return constant_boolean_node (true, type);
if (known_lt (bitpos0, bitpos1))
- return boolean_false_node;
+ return constant_boolean_node (false, type);
break;
case GT_EXPR:
if (known_gt (bitpos0, bitpos1))
- return boolean_true_node;
+ return constant_boolean_node (true, type);
if (known_le (bitpos0, bitpos1))
- return boolean_false_node;
+ return constant_boolean_node (false, type);
break;
default:;
}
because pointer arithmetic is restricted to remain within an
object and overflow on pointer differences is undefined as of
6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
- else if (known_eq (bitpos0, bitpos1))
+ else if (known_eq (bitpos0, bitpos1)
+ && (equality_code
+ || (indirect_base0
+ && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
+ || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
{
/* By converting to signed sizetype we cover middle-end pointer
arithmetic which operates on unsigned pointer types of size
if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
{
tree cval1 = 0, cval2 = 0;
- int save_p = 0;
- if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
+ if (twoval_comparison_p (arg0, &cval1, &cval2)
/* Don't handle degenerate cases here; they should already
have been handled anyway. */
&& cval1 != 0 && cval2 != 0
return omit_one_operand_loc (loc, type, integer_one_node, arg0);
}
- if (save_p)
- {
- tem = save_expr (build2 (code, type, cval1, cval2));
- protected_set_expr_location (tem, loc);
- return tem;
- }
return fold_build2_loc (loc, code, type, cval1, cval2);
}
}
static bool
vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
{
- unsigned int i;
+ unsigned HOST_WIDE_INT i, nunits;
- if (TREE_CODE (arg) == VECTOR_CST)
+ if (TREE_CODE (arg) == VECTOR_CST
+ && VECTOR_CST_NELTS (arg).is_constant (&nunits))
{
- for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
+ for (i = 0; i < nunits; ++i)
elts[i] = VECTOR_CST_ELT (arg, i);
}
else if (TREE_CODE (arg) == CONSTRUCTOR)
selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
NULL_TREE otherwise. */
-static tree
+tree
fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
{
unsigned int i;
if (!sel.length ().is_constant (&nelts))
return NULL_TREE;
- gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
+ gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
+ && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
+ && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
|| TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
return NULL_TREE;
tree fndecl = get_callee_fndecl (t);
if (!fndecl) return false;
if (flag_delete_null_pointer_checks && !flag_check_new
- && DECL_IS_OPERATOR_NEW (fndecl)
+ && DECL_IS_OPERATOR_NEW_P (fndecl)
&& !TREE_NOTHROW (fndecl))
return true;
if (flag_delete_null_pointer_checks
expr_not_equal_to (tree t, const wide_int &w)
{
wide_int min, max, nz;
- value_range_type rtype;
+ value_range_kind rtype;
switch (TREE_CODE (t))
{
case INTEGER_CST:
if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
|| code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (type) != VECTOR_TYPE
+ && !VECTOR_TYPE_P (TREE_TYPE (arg0))
&& ((truth_value_p (TREE_CODE (arg0))
&& (truth_value_p (TREE_CODE (arg1))
|| (TREE_CODE (arg1) == BIT_AND_EXPR
if (!base)
return NULL_TREE;
return fold_build2 (MEM_REF, type,
- build_fold_addr_expr (base),
+ build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
int_const_binop (PLUS_EXPR, arg1,
size_int (coffset)));
}
/* With undefined overflow prefer doing association in a type
which wraps on overflow, if that is one of the operand types. */
- if (POINTER_TYPE_P (type)
- || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
+ if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ && !TYPE_OVERFLOW_WRAPS (type))
{
if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
/* With undefined overflow we can only associate constants with one
variable, and constants whose association doesn't overflow. */
- if (POINTER_TYPE_P (atype)
- || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
+ if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
+ && !TYPE_OVERFLOW_WRAPS (atype))
{
if ((var0 && var1) || (minus_var0 && minus_var1))
{
}
}
- /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
- ((A & N) + B) & M -> (A + B) & M
- Similarly if (N & M) == 0,
- ((A | N) + B) & M -> (A + B) & M
- and for - instead of + (or unary - instead of +)
- and/or ^ instead of |.
- If B is constant and (B & M) == 0, fold into A & M. */
- if (TREE_CODE (arg1) == INTEGER_CST)
- {
- wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
- if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
- && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
- && (TREE_CODE (arg0) == PLUS_EXPR
- || TREE_CODE (arg0) == MINUS_EXPR
- || TREE_CODE (arg0) == NEGATE_EXPR)
- && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
- || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
- {
- tree pmop[2];
- int which = 0;
- wide_int cst0;
-
- /* Now we know that arg0 is (C + D) or (C - D) or
- -C and arg1 (M) is == (1LL << cst) - 1.
- Store C into PMOP[0] and D into PMOP[1]. */
- pmop[0] = TREE_OPERAND (arg0, 0);
- pmop[1] = NULL;
- if (TREE_CODE (arg0) != NEGATE_EXPR)
- {
- pmop[1] = TREE_OPERAND (arg0, 1);
- which = 1;
- }
-
- if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
- which = -1;
-
- for (; which >= 0; which--)
- switch (TREE_CODE (pmop[which]))
- {
- case BIT_AND_EXPR:
- case BIT_IOR_EXPR:
- case BIT_XOR_EXPR:
- if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
- != INTEGER_CST)
- break;
- cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
- if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
- {
- if (cst0 != cst1)
- break;
- }
- else if (cst0 != 0)
- break;
- /* If C or D is of the form (A & N) where
- (N & M) == M, or of the form (A | N) or
- (A ^ N) where (N & M) == 0, replace it with A. */
- pmop[which] = TREE_OPERAND (pmop[which], 0);
- break;
- case INTEGER_CST:
- /* If C or D is a N where (N & M) == 0, it can be
- omitted (assumed 0). */
- if ((TREE_CODE (arg0) == PLUS_EXPR
- || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
- && (cst1 & wi::to_wide (pmop[which])) == 0)
- pmop[which] = NULL;
- break;
- default:
- break;
- }
-
- /* Only build anything new if we optimized one or both arguments
- above. */
- if (pmop[0] != TREE_OPERAND (arg0, 0)
- || (TREE_CODE (arg0) != NEGATE_EXPR
- && pmop[1] != TREE_OPERAND (arg0, 1)))
- {
- tree utype = TREE_TYPE (arg0);
- if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
- {
- /* Perform the operations in a type that has defined
- overflow behavior. */
- utype = unsigned_type_for (TREE_TYPE (arg0));
- if (pmop[0] != NULL)
- pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
- if (pmop[1] != NULL)
- pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
- }
-
- if (TREE_CODE (arg0) == NEGATE_EXPR)
- tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
- else if (TREE_CODE (arg0) == PLUS_EXPR)
- {
- if (pmop[0] != NULL && pmop[1] != NULL)
- tem = fold_build2_loc (loc, PLUS_EXPR, utype,
- pmop[0], pmop[1]);
- else if (pmop[0] != NULL)
- tem = pmop[0];
- else if (pmop[1] != NULL)
- tem = pmop[1];
- else
- return build_int_cst (type, 0);
- }
- else if (pmop[0] == NULL)
- tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
- else
- tem = fold_build2_loc (loc, MINUS_EXPR, utype,
- pmop[0], pmop[1]);
- /* TEM is now the new binary +, - or unary - replacement. */
- tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
- fold_convert_loc (loc, utype, arg1));
- return fold_convert_loc (loc, type, tem);
- }
- }
- }
-
/* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
&& TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
/* Convert -A / -B to A / B when the type is signed and overflow is
undefined. */
- if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
+ if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
&& TREE_CODE (op0) == NEGATE_EXPR
&& negate_expr_p (op1))
{
- if (INTEGRAL_TYPE_P (type))
+ if (ANY_INTEGRAL_TYPE_P (type))
fold_overflow_warning (("assuming signed overflow does not occur "
"when distributing negation across "
"division"),
TREE_OPERAND (arg0, 0)),
negate_expr (op1));
}
- if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
+ if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
&& TREE_CODE (arg1) == NEGATE_EXPR
&& negate_expr_p (op0))
{
- if (INTEGRAL_TYPE_P (type))
+ if (ANY_INTEGRAL_TYPE_P (type))
fold_overflow_warning (("assuming signed overflow does not occur "
"when distributing negation across "
"division"),
}
}
- /* If this is an NE or EQ comparison of zero against the result of a
- signed MOD operation whose second operand is a power of 2, make
- the MOD operation unsigned since it is simpler and equivalent. */
- if (integer_zerop (arg1)
- && !TYPE_UNSIGNED (TREE_TYPE (arg0))
- && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
- || TREE_CODE (arg0) == CEIL_MOD_EXPR
- || TREE_CODE (arg0) == FLOOR_MOD_EXPR
- || TREE_CODE (arg0) == ROUND_MOD_EXPR)
- && integer_pow2p (TREE_OPERAND (arg0, 1)))
- {
- tree newtype = unsigned_type_for (TREE_TYPE (arg0));
- tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
- fold_convert_loc (loc, newtype,
- TREE_OPERAND (arg0, 0)),
- fold_convert_loc (loc, newtype,
- TREE_OPERAND (arg0, 1)));
-
- return fold_build2_loc (loc, code, type, newmod,
- fold_convert_loc (loc, newtype, arg1));
- }
-
/* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
C1 is a valid shift constant, and C2 is a power of two, i.e.
a single bit. */
strlen(ptr) != 0 => *ptr != 0
Other cases should reduce to one of these two (or a constant)
due to the return value of strlen being unsigned. */
- if (TREE_CODE (arg0) == CALL_EXPR
- && integer_zerop (arg1))
+ if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
{
tree fndecl = get_callee_fndecl (arg0);
if (fndecl
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
+ && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
&& call_expr_nargs (arg0) == 1
- && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
+ && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
+ == POINTER_TYPE))
{
- tree iref = build_fold_indirect_ref_loc (loc,
- CALL_EXPR_ARG (arg0, 0));
+ tree ptrtype
+ = build_pointer_type (build_qualified_type (char_type_node,
+ TYPE_QUAL_CONST));
+ tree ptr = fold_convert_loc (loc, ptrtype,
+ CALL_EXPR_ARG (arg0, 0));
+ tree iref = build_fold_indirect_ref_loc (loc, ptr);
return fold_build2_loc (loc, code, type, iref,
- build_int_cst (TREE_TYPE (iref), 0));
+ build_int_cst (TREE_TYPE (iref), 0));
}
}
return omit_one_operand_loc (loc, type, res, arg0);
}
- /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && TREE_CODE (arg1) == BIT_AND_EXPR)
- {
- tree arg00 = TREE_OPERAND (arg0, 0);
- tree arg01 = TREE_OPERAND (arg0, 1);
- tree arg10 = TREE_OPERAND (arg1, 0);
- tree arg11 = TREE_OPERAND (arg1, 1);
- tree itype = TREE_TYPE (arg0);
-
- if (operand_equal_p (arg01, arg11, 0))
- {
- tem = fold_convert_loc (loc, itype, arg10);
- tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
- tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
- return fold_build2_loc (loc, code, type, tem,
- build_zero_cst (itype));
- }
- if (operand_equal_p (arg01, arg10, 0))
- {
- tem = fold_convert_loc (loc, itype, arg11);
- tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
- tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
- return fold_build2_loc (loc, code, type, tem,
- build_zero_cst (itype));
- }
- if (operand_equal_p (arg00, arg11, 0))
- {
- tem = fold_convert_loc (loc, itype, arg10);
- tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
- tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
- return fold_build2_loc (loc, code, type, tem,
- build_zero_cst (itype));
- }
- if (operand_equal_p (arg00, arg10, 0))
- {
- tem = fold_convert_loc (loc, itype, arg11);
- tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
- tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
- return fold_build2_loc (loc, code, type, tem,
- build_zero_cst (itype));
- }
- }
-
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& TREE_CODE (arg1) == BIT_XOR_EXPR)
{
} /* switch (code) */
}
+/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
+ ((A & N) + B) & M -> (A + B) & M
+ Similarly if (N & M) == 0,
+ ((A | N) + B) & M -> (A + B) & M
+ and for - instead of + (or unary - instead of +)
+ and/or ^ instead of |.
+ If B is constant and (B & M) == 0, fold into A & M.
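+
+ For example, with M == 0xff, ((A & 0x1ff) + B) & 0xff can be folded to
+ (A + B) & 0xff, since 0x1ff & 0xff == 0xff.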
+
+ This function is a helper for match.pd patterns.  It returns the type
+ in which the simplified operation should be performed, or NULL_TREE if
+ no optimization is possible.
+
+ ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
+ then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
+ Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
+ +/-. */
+tree
+fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
+ tree arg00, enum tree_code code00, tree arg000, tree arg001,
+ tree arg01, enum tree_code code01, tree arg010, tree arg011,
+ tree *pmop)
+{
+ gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
+ gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
+ wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
+ if (~cst1 == 0
+ || (cst1 & (cst1 + 1)) != 0
+ || !INTEGRAL_TYPE_P (type)
+ || (!TYPE_OVERFLOW_WRAPS (type)
+ && TREE_CODE (type) != INTEGER_TYPE)
+ || (wi::max_value (type) & cst1) != cst1)
+ return NULL_TREE;
+
+ enum tree_code codes[2] = { code00, code01 };
+ tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
+ int which = 0;
+ wide_int cst0;
+
+ /* Now we know that arg0 is (C + D) or (C - D) or -C and
+ arg1 (M) is == (1LL << cst) - 1.
+ Store C into PMOP[0] and D into PMOP[1]. */
+ pmop[0] = arg00;
+ pmop[1] = arg01;
+ which = code != NEGATE_EXPR;
+
+ for (; which >= 0; which--)
+ switch (codes[which])
+ {
+ case BIT_AND_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
+ cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
+ if (codes[which] == BIT_AND_EXPR)
+ {
+ if (cst0 != cst1)
+ break;
+ }
+ else if (cst0 != 0)
+ break;
+ /* If C or D is of the form (A & N) where
+ (N & M) == M, or of the form (A | N) or
+ (A ^ N) where (N & M) == 0, replace it with A. */
+ pmop[which] = arg0xx[2 * which];
+ break;
+ case ERROR_MARK:
+ if (TREE_CODE (pmop[which]) != INTEGER_CST)
+ break;
+ /* If C or D is a N where (N & M) == 0, it can be
+ omitted (replaced with 0). */
+ if ((code == PLUS_EXPR
+ || (code == MINUS_EXPR && which == 0))
+ && (cst1 & wi::to_wide (pmop[which])) == 0)
+ pmop[which] = build_int_cst (type, 0);
+ /* Similarly, with C - N where (-N & M) == 0. */
+ if (code == MINUS_EXPR
+ && which == 1
+ && (cst1 & -wi::to_wide (pmop[which])) == 0)
+ pmop[which] = build_int_cst (type, 0);
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ /* Only build anything new if we optimized one or both arguments above. */
+ if (pmop[0] == arg00 && pmop[1] == arg01)
+ return NULL_TREE;
+
+ if (TYPE_OVERFLOW_WRAPS (type))
+ return type;
+ else
+ return unsigned_type_for (type);
+}
+
/* Used by contains_label_[p1]. */
struct contains_label_data
}
else if (TREE_CODE (arg0) == VECTOR_CST)
{
+ unsigned HOST_WIDE_INT nelts;
if ((TREE_CODE (arg1) == VECTOR_CST
|| TREE_CODE (arg1) == CONSTRUCTOR)
&& (TREE_CODE (arg2) == VECTOR_CST
- || TREE_CODE (arg2) == CONSTRUCTOR))
+ || TREE_CODE (arg2) == CONSTRUCTOR)
+ && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
{
- unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
- gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
vec_perm_builder sel (nelts, nelts, 1);
- for (i = 0; i < nelts; i++)
+ for (unsigned int i = 0; i < nelts; i++)
{
tree val = VECTOR_CST_ELT (arg0, i);
if (integer_all_onesp (val))
else /* Currently unreachable. */
return NULL_TREE;
}
- tree t = fold_vec_perm (type, arg1, arg2,
- vec_perm_indices (sel, 2, nelts));
+ vec_perm_indices indices (sel, 2, nelts);
+ tree t = fold_vec_perm (type, arg1, arg2, indices);
if (t != NULL_TREE)
return t;
}
&& integer_pow2p (arg1)
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
&& operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
- arg1, OEP_ONLY_CONST))
+ arg1, OEP_ONLY_CONST)
+ /* operand_equal_p compares just value, not precision, so e.g.
+ arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
+ second operand 32-bit -128, which is not a power of two (or vice
+ versa). */
+ && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)));
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0,
+ 0)));
/* Disable the transformations below for vectors, since
fold_binary_op_with_conditional_arg may undo them immediately,
case BIT_FIELD_REF:
if (TREE_CODE (arg0) == VECTOR_CST
&& (type == TREE_TYPE (TREE_TYPE (arg0))
- || (TREE_CODE (type) == VECTOR_TYPE
- && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
+ || (VECTOR_TYPE_P (type)
+ && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
+ && tree_fits_uhwi_p (op1)
+ && tree_fits_uhwi_p (op2))
{
tree eltype = TREE_TYPE (TREE_TYPE (arg0));
unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
if (n != 0
&& (idx % width) == 0
&& (n % width) == 0
- && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
+ && known_le ((idx + n) / width,
+ TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
{
idx = idx / width;
n = n / width;
if (TREE_CODE (arg0) == VECTOR_CST)
{
if (n == 1)
- return VECTOR_CST_ELT (arg0, idx);
+ {
+ tem = VECTOR_CST_ELT (arg0, idx);
+ if (VECTOR_TYPE_P (type))
+ tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
+ return tem;
+ }
tree_vector_builder vals (type, n, 1);
for (unsigned i = 0; i < n; ++i)
return NULL_TREE;
- case FMA_EXPR:
- /* For integers we can decompose the FMA if possible. */
- if (TREE_CODE (arg0) == INTEGER_CST
- && TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2_loc (loc, PLUS_EXPR, type,
- const_binop (MULT_EXPR, arg0, arg1), arg2);
- if (integer_zerop (arg2))
- return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
-
- return fold_fma (loc, type, arg0, arg1, arg2);
-
case VEC_PERM_EXPR:
- if (TREE_CODE (arg2) == VECTOR_CST)
+ /* Perform constant folding of VEC_PERM_EXPR. */
+ if (TREE_CODE (arg2) == VECTOR_CST
+ && TREE_CODE (op0) == VECTOR_CST
+ && TREE_CODE (op1) == VECTOR_CST)
{
/* Build a vector of integers from the tree mask. */
vec_perm_builder builder;
return NULL_TREE;
/* Create a vec_perm_indices for the integer vector. */
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
+ poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
bool single_arg = (op0 == op1);
vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
-
- /* Check for cases that fold to OP0 or OP1 in their original
- element order. */
- if (sel.series_p (0, 1, 0, 1))
- return op0;
- if (sel.series_p (0, 1, nelts, 1))
- return op1;
-
- if (!single_arg)
- {
- if (sel.all_from_input_p (0))
- op1 = op0;
- else if (sel.all_from_input_p (1))
- {
- op0 = op1;
- sel.rotate_inputs (1);
- }
- }
-
- if ((TREE_CODE (op0) == VECTOR_CST
- || TREE_CODE (op0) == CONSTRUCTOR)
- && (TREE_CODE (op1) == VECTOR_CST
- || TREE_CODE (op1) == CONSTRUCTOR))
- {
- tree t = fold_vec_perm (type, op0, op1, sel);
- if (t != NULL_TREE)
- return t;
- }
-
- bool changed = (op0 == op1 && !single_arg);
-
- /* Generate a canonical form of the selector. */
- if (arg2 == op2 && sel.encoding () != builder)
- {
- /* Some targets are deficient and fail to expand a single
- argument permutation while still allowing an equivalent
- 2-argument version. */
- if (sel.ninputs () == 2
- || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
- op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
- else
- {
- vec_perm_indices sel2 (builder, 2, nelts);
- if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
- op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
- else
- /* Not directly supported with either encoding,
- so use the preferred form. */
- op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
- }
- changed = true;
- }
-
- if (changed)
- return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
+ return fold_vec_perm (type, op0, op1, sel);
}
return NULL_TREE;
if (bitpos % elsize == 0)
{
unsigned k = bitpos / elsize;
+ unsigned HOST_WIDE_INT nelts;
if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
return arg0;
- else
+ else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
{
- unsigned int nelts = VECTOR_CST_NELTS (arg0);
tree_vector_builder elts (type, nelts, 1);
elts.quick_grow (nelts);
- for (unsigned int i = 0; i < nelts; ++i)
+ for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
return elts.build ();
}
}
/* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
- of an array (or vector). */
+ of an array (or vector). *CTOR_IDX if non-NULL is updated with the
+ constructor element index of the value returned. If the element is
+ not found, NULL_TREE is returned and *CTOR_IDX is updated to
+ the index of the element after the ACCESS_INDEX position (which
+ may be outside of the CTOR array). */
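+/* For example, given { [1] = a, [3] = b } and ACCESS_INDEX 3, the value b
+ is returned and *CTOR_IDX is set to 1. */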
tree
-get_array_ctor_element_at_index (tree ctor, offset_int access_index)
+get_array_ctor_element_at_index (tree ctor, offset_int access_index,
+ unsigned *ctor_idx)
{
tree index_type = NULL_TREE;
+ signop index_sgn = UNSIGNED;
offset_int low_bound = 0;
if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
/* Static constructors for variably sized objects make no sense. */
gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
- low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
+ /* ??? When it is obvious that the range is signed, treat it so. */
+ if (TYPE_UNSIGNED (index_type)
+ && TYPE_MAX_VALUE (domain_type)
+ && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
+ TYPE_MIN_VALUE (domain_type)))
+ {
+ index_sgn = SIGNED;
+ low_bound
+ = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
+ SIGNED);
+ }
+ else
+ {
+ index_sgn = TYPE_SIGN (index_type);
+ low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
+ }
}
}
if (index_type)
access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
- TYPE_SIGN (index_type));
+ index_sgn);
- offset_int index = low_bound - 1;
+ offset_int index = low_bound;
if (index_type)
- index = wi::ext (index, TYPE_PRECISION (index_type),
- TYPE_SIGN (index_type));
+ index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
- offset_int max_index;
- unsigned HOST_WIDE_INT cnt;
+ offset_int max_index = index;
+ unsigned cnt;
tree cfield, cval;
+ bool first_p = true;
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
{
if (cfield)
{
if (TREE_CODE (cfield) == INTEGER_CST)
- max_index = index = wi::to_offset (cfield);
+ max_index = index
+ = offset_int::from (wi::to_wide (cfield), index_sgn);
else
{
gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
- index = wi::to_offset (TREE_OPERAND (cfield, 0));
- max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
+ index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
+ index_sgn);
+ max_index
+ = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
+ index_sgn);
+ gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
}
}
- else
+ else if (!first_p)
{
- index += 1;
+ index = max_index + 1;
if (index_type)
- index = wi::ext (index, TYPE_PRECISION (index_type),
- TYPE_SIGN (index_type));
+ index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
+ gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
max_index = index;
}
+ else
+ first_p = false;
- /* Do we have match? */
- if (wi::cmpu (access_index, index) >= 0
- && wi::cmpu (access_index, max_index) <= 0)
- return cval;
- }
+ /* Do we have match? */
+ if (wi::cmp (access_index, index, index_sgn) >= 0)
+ {
+ if (wi::cmp (access_index, max_index, index_sgn) <= 0)
+ {
+ if (ctor_idx)
+ *ctor_idx = cnt;
+ return cval;
+ }
+ }
+ else if (in_gimple_form)
+ /* We're past the element we search for. Note that during parsing
+ the elements might not be sorted.
+ ??? We should use a binary search and a flag on the
+ CONSTRUCTOR as to whether elements are sorted in declaration
+ order. */
+ break;
+ }
+ if (ctor_idx)
+ *ctor_idx = cnt;
return NULL_TREE;
}
{
const tree_node **slot;
enum tree_code code;
- union tree_node buf;
+ union tree_node *buf;
int i, len;
recursive_label:
&& HAS_DECL_ASSEMBLER_NAME_P (expr))
{
/* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
- memcpy ((char *) &buf, expr, tree_size (expr));
- SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
- buf.decl_with_vis.symtab_node = NULL;
- expr = (tree) &buf;
+ size_t sz = tree_size (expr);
+ buf = XALLOCAVAR (union tree_node, sz);
+ memcpy ((char *) buf, expr, sz);
+ SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
+ buf->decl_with_vis.symtab_node = NULL;
+ buf->base.nowarning_flag = 0;
+ expr = (tree) buf;
}
else if (TREE_CODE_CLASS (code) == tcc_type
&& (TYPE_POINTER_TO (expr)
{
/* Allow these fields to be modified. */
tree tmp;
- memcpy ((char *) &buf, expr, tree_size (expr));
- expr = tmp = (tree) &buf;
+ size_t sz = tree_size (expr);
+ buf = XALLOCAVAR (union tree_node, sz);
+ memcpy ((char *) buf, expr, sz);
+ expr = tmp = (tree) buf;
TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
TYPE_POINTER_TO (tmp) = NULL;
TYPE_REFERENCE_TO (tmp) = NULL;
TYPE_CACHED_VALUES (tmp) = NULL;
}
}
+ else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
+ {
+ /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
+ and change builtins.c etc. instead - see PR89543. */
+ size_t sz = tree_size (expr);
+ buf = XALLOCAVAR (union tree_node, sz);
+ memcpy ((char *) buf, expr, sz);
+ buf->base.nowarning_flag = 0;
+ expr = (tree) buf;
+ }
md5_process_bytes (expr, tree_size (expr), ctx);
if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
if (!integer_pow2p (bottom))
return 0;
- /* FALLTHRU */
+ return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
+ || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
case MULT_EXPR:
+ if (TREE_CODE (bottom) == INTEGER_CST)
+ {
+ op1 = TREE_OPERAND (top, 0);
+ op2 = TREE_OPERAND (top, 1);
+ if (TREE_CODE (op1) == INTEGER_CST)
+ std::swap (op1, op2);
+ if (TREE_CODE (op2) == INTEGER_CST)
+ {
+ if (multiple_of_p (type, op2, bottom))
+ return 1;
+ /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
+ if (multiple_of_p (type, bottom, op2))
+ {
+ widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
+ wi::to_widest (op2));
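+ /* E.g. for TOP (x * 2 + 2) * 4 and BOTTOM 8, W is 8 / 4 == 2
+ and it remains to check that x * 2 + 2 is a multiple of 2. */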
+ if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
+ {
+ op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
+ return multiple_of_p (type, op1, op2);
+ }
+ }
+ return multiple_of_p (type, op1, bottom);
+ }
+ }
return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
|| multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
would not, passes that need this information could be revised
to provide it through dataflow propagation. */
return (!name_registered_for_update_p (t)
- && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
+ && depth < param_max_ssa_name_query_depth
&& gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
strict_overflow_p, depth));
CASE_CFN_POPCOUNT:
CASE_CFN_CLZ:
CASE_CFN_CLRSB:
+ case CFN_BUILT_IN_BSWAP16:
case CFN_BUILT_IN_BSWAP32:
case CFN_BUILT_IN_BSWAP64:
+ case CFN_BUILT_IN_BSWAP128:
/* Always true. */
return true;
CASE_CFN_RINT_FN:
CASE_CFN_ROUND:
CASE_CFN_ROUND_FN:
+ CASE_CFN_ROUNDEVEN:
+ CASE_CFN_ROUNDEVEN_FN:
CASE_CFN_SCALB:
CASE_CFN_SCALBLN:
CASE_CFN_SCALBN:
CASE_CFN_RINT_FN:
CASE_CFN_ROUND:
CASE_CFN_ROUND_FN:
+ CASE_CFN_ROUNDEVEN:
+ CASE_CFN_ROUNDEVEN_FN:
CASE_CFN_TRUNC:
CASE_CFN_TRUNC_FN:
return true;
would not, passes that need this information could be revised
to provide it through dataflow propagation. */
return (!name_registered_for_update_p (t)
- && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
+ && depth < param_max_ssa_name_query_depth
&& gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
depth));
if (t == error_mark_node)
return false;
+ STRIP_ANY_LOCATION_WRAPPER (t);
+
tree_code code = TREE_CODE (t);
switch (TREE_CODE_CLASS (code))
{
location_t loc = EXPR_LOCATION (exp);
if (TREE_CODE (exp) == INDIRECT_REF)
- string = string_constant (exp1, &index);
+ string = string_constant (exp1, &index, NULL, NULL);
else
{
tree low_bound = array_ref_low_bound (exp);
return NULL;
}
+/* Folds a read from vector element at IDX of vector ARG. */
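+/* For example, reading element 2 of the constant vector {1, 2, 3, 4}
+ yields 3; reading an element past the last CONSTRUCTOR entry yields
+ zero. */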
+
+tree
+fold_read_from_vector (tree arg, poly_uint64 idx)
+{
+ unsigned HOST_WIDE_INT i;
+ if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
+ && known_ge (idx, 0u)
+ && idx.is_constant (&i))
+ {
+ if (TREE_CODE (arg) == VECTOR_CST)
+ return VECTOR_CST_ELT (arg, i);
+ else if (TREE_CODE (arg) == CONSTRUCTOR)
+ {
+ if (i >= CONSTRUCTOR_NELTS (arg))
+ return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
+ return CONSTRUCTOR_ELT (arg, i)->value;
+ }
+ }
+ return NULL_TREE;
+}
+
/* Return the tree for neg (ARG0) when ARG0 is known to be either
an integer constant, real, or fixed-point constant.
default:
if (poly_int_tree_p (arg0))
{
- bool overflow;
+ wi::overflow_type overflow;
poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
t = force_fit_type (type, res, 1,
(overflow && ! TYPE_UNSIGNED (type))
{
/* If the value is unsigned or non-negative, then the absolute value
is the same as the ordinary value. */
- if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
- t = arg0;
+ wide_int val = wi::to_wide (arg0);
+ wi::overflow_type overflow = wi::OVF_NONE;
+ if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
+ ;
/* If the value is negative, then the absolute value is
its negation. */
else
- {
- bool overflow;
- wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
- t = force_fit_type (type, val, -1,
- overflow | TREE_OVERFLOW (arg0));
- }
+ val = wi::neg (val, &overflow);
+
+ /* Force to the destination type, set TREE_OVERFLOW for signed
+ TYPE only. */
+ t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
}
- break;
+ break;
case REAL_CST:
if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
{
/* Have vector comparison with scalar boolean result. */
gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
- && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
- for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
+ && known_eq (VECTOR_CST_NELTS (op0),
+ VECTOR_CST_NELTS (op1)));
+ unsigned HOST_WIDE_INT nunits;
+ if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
+ return NULL_TREE;
+ for (unsigned i = 0; i < nunits; i++)
{
tree elem0 = VECTOR_CST_ELT (op0, i);
tree elem1 = VECTOR_CST_ELT (op1, i);
- tree tmp = fold_relational_const (code, type, elem0, elem1);
+ tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
if (tmp == NULL_TREE)
return NULL_TREE;
if (integer_zerop (tmp))
- return constant_boolean_node (false, type);
+ return constant_boolean_node (code == NE_EXPR, type);
}
- return constant_boolean_node (true, type);
+ return constant_boolean_node (code == EQ_EXPR, type);
}
tree_vector_builder elts;
if (!elts.new_binary_operation (type, op0, op1, false))
{
tree op = TREE_OPERAND (sub, 0);
tree optype = TREE_TYPE (op);
+
/* *&CONST_DECL -> to the value of the const decl. */
if (TREE_CODE (op) == CONST_DECL)
return DECL_INITIAL (op);
&& type == TREE_TYPE (optype))
return fold_build1_loc (loc, REALPART_EXPR, type, op);
/* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
- else if (TREE_CODE (optype) == VECTOR_TYPE
+ else if (VECTOR_TYPE_P (optype)
&& type == TREE_TYPE (optype))
{
tree part_width = TYPE_SIZE (type);
tree index = bitsize_int (0);
- return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
+ return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
+ index);
}
}
op00type = TREE_TYPE (op00);
/* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
- if (TREE_CODE (op00type) == VECTOR_TYPE
- && type == TREE_TYPE (op00type))
+ if (VECTOR_TYPE_P (op00type)
+ && type == TREE_TYPE (op00type)
+ /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
+ but we want to treat offsets with MSB set as negative.
+ For the code below negative offsets are invalid and
+ TYPE_SIZE of the element is something unsigned, so
+ check whether op01 fits into poly_int64, which implies
+ it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
+ then just use poly_uint64 because we want to treat the
+ value as unsigned. */
+ && tree_fits_poly_int64_p (op01))
{
tree part_width = TYPE_SIZE (type);
poly_uint64 max_offset
&& type == TREE_TYPE (op00type))
{
tree type_domain = TYPE_DOMAIN (op00type);
- tree min = size_zero_node;
+ tree min_val = size_zero_node;
if (type_domain && TYPE_MIN_VALUE (type_domain))
- min = TYPE_MIN_VALUE (type_domain);
- offset_int off = wi::to_offset (op01);
- offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
- offset_int remainder;
- off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
- if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
+ min_val = TYPE_MIN_VALUE (type_domain);
+ poly_uint64 type_size, index;
+ if (poly_int_tree_p (min_val)
+ && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
+ && multiple_p (const_op01, type_size, &index))
{
- off = off + wi::to_offset (min);
+ poly_offset_int off = index + wi::to_poly_offset (min_val);
op01 = wide_int_to_tree (sizetype, off);
return build4_loc (loc, ARRAY_REF, type, op00, op01,
NULL_TREE, NULL_TREE);
ptr, size_int (off));
}
-/* Return a char pointer for a C string if it is a string constant
- or sum of string constant and integer constant. We only support
- string constants properly terminated with '\0' character.
- If STRLEN is a valid pointer, length (including terminating character)
- of returned string is stored to the argument. */
+/* Return a pointer P to a NUL-terminated string representing the sequence
+ of constant characters referred to by SRC (or a subsequence of such
+ characters within it if SRC is a reference to a string plus some
+ constant offset). If STRLEN is non-null, store the number of bytes
+ in the string constant including the terminating NUL char. *STRLEN is
+ typically strlen(P) + 1 in the absence of embedded NUL characters. */
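+/* For example, if SRC refers to the literal "abc\0def" plus constant
+ offset 4, the returned pointer is "def" and *STRLEN is set to 4
+ (three characters plus the terminating NUL). */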
const char *
-c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
+c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
{
tree offset_node;
+ tree mem_size;
if (strlen)
*strlen = 0;
- src = string_constant (src, &offset_node);
+ src = string_constant (src, &offset_node, &mem_size, NULL);
if (src == 0)
return NULL;
offset = tree_to_uhwi (offset_node);
}
+ if (!tree_fits_uhwi_p (mem_size))
+ return NULL;
+
+ /* STRING_LENGTH is the size of the string literal, including any
+ embedded NULs. STRING_SIZE is the size of the array the string
+ literal is stored in. */
unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
+ unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
+
+ /* Ideally this would turn into a gcc_checking_assert over time. */
+ if (string_length > string_size)
+ string_length = string_size;
+
const char *string = TREE_STRING_POINTER (src);
- /* Support only properly null-terminated strings. */
if (string_length == 0
- || string[string_length - 1] != '\0'
- || offset >= string_length)
+ || offset >= string_size)
return NULL;
if (strlen)
- *strlen = string_length - offset;
- return string + offset;
+ {
+ /* Compute and store the length of the substring at OFFSET.
+ All offsets past the initial length refer to empty strings. */
+ if (offset < string_length)
+ *strlen = string_length - offset;
+ else
+ *strlen = 1;
+ }
+ else
+ {
+ tree eltype = TREE_TYPE (TREE_TYPE (src));
+ /* Support only properly NUL-terminated single byte strings. */
+ if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
+ return NULL;
+ if (string[string_length - 1] != '\0')
+ return NULL;
+ }
+
+ return offset < string_length ? string + offset : "";
+}
+
+/* Given a tree T, compute which bits in T may be nonzero. */
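+/* For example, for (X & 12) | 1 this returns 13; for expressions it
+ cannot analyze, the result is conservatively all-ones. */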
+
+wide_int
+tree_nonzero_bits (const_tree t)
+{
+ switch (TREE_CODE (t))
+ {
+ case INTEGER_CST:
+ return wi::to_wide (t);
+ case SSA_NAME:
+ return get_nonzero_bits (t);
+ case NON_LVALUE_EXPR:
+ case SAVE_EXPR:
+ return tree_nonzero_bits (TREE_OPERAND (t, 0));
+ case BIT_AND_EXPR:
+ return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
+ tree_nonzero_bits (TREE_OPERAND (t, 1)));
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
+ tree_nonzero_bits (TREE_OPERAND (t, 1)));
+ case COND_EXPR:
+ return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
+ tree_nonzero_bits (TREE_OPERAND (t, 2)));
+ CASE_CONVERT:
+ return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
+ TYPE_PRECISION (TREE_TYPE (t)),
+ TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
+ case PLUS_EXPR:
+ if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
+ {
+ wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
+ wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
+ if (wi::bit_and (nzbits1, nzbits2) == 0)
+ return wi::bit_or (nzbits1, nzbits2);
+ }
+ break;
+ case LSHIFT_EXPR:
+ if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
+ {
+ tree type = TREE_TYPE (t);
+ wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
+ wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
+ TYPE_PRECISION (type));
+ return wi::neg_p (arg1)
+ ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
+ : wi::lshift (nzbits, arg1);
+ }
+ break;
+ case RSHIFT_EXPR:
+ if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
+ {
+ tree type = TREE_TYPE (t);
+ wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
+ wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
+ TYPE_PRECISION (type));
+ return wi::neg_p (arg1)
+ ? wi::lshift (nzbits, -arg1)
+ : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
+ }
+ break;
+ default:
+ break;
+ }
+
+ return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
#if CHECKING_P
tree type = build_vector_type (inner_type, 4);
tree zero = build_zero_cst (type);
tree one = build_one_cst (type);
+ tree index = build_index_vector (type, 0, 1);
/* Verify equality tests that return a scalar boolean result. */
tree res_type = boolean_type_node;
ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
+ ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
+ ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
+ index, one)));
+ ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
+ index, index)));
+ ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
+ index, index)));
}
/* Verify folding of VEC_DUPLICATE_EXPRs. */
scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
/* This will be 1 if VEC_MODE isn't a vector mode. */
- unsigned int nunits = GET_MODE_NUNITS (vec_mode);
+ poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
tree type = build_vector_type (ssizetype, nunits);
tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));