{ constant_boolean_node (cmp == LT_EXPR, type); })))))
/* Arguments on which one can call get_nonzero_bits to get the bits
- possibly set. */
-(match with_possible_nonzero_bits
+ possibly set. with_possible_nonzero_bits_1 is an internal version,
+ use with_possible_nonzero_bits. */
+(match with_possible_nonzero_bits_1
INTEGER_CST@0)
-(match with_possible_nonzero_bits
+(match with_possible_nonzero_bits_1
POLY_INT_CST@0)
-(match with_possible_nonzero_bits
+(match with_possible_nonzero_bits_1
SSA_NAME@0
(if (INTEGRAL_TYPE_P (TREE_TYPE (@0)) || POINTER_TYPE_P (TREE_TYPE (@0)))))
/* Slightly extended version, do not make it recursive to keep it cheap. */
-(match (with_possible_nonzero_bits2 @0)
- with_possible_nonzero_bits@0)
-(match (with_possible_nonzero_bits2 @0)
- (bit_and:c with_possible_nonzero_bits@0 @2))
-
-/* Same for bits that are known to be set, but we do not have
- an equivalent to get_nonzero_bits yet. */
-(match (with_certain_nonzero_bits2 @0)
+(match with_possible_nonzero_bits
+ with_possible_nonzero_bits_1@0)
+#if GENERIC
+(match with_possible_nonzero_bits
+ (bit_and:c with_possible_nonzero_bits_1@0 @1))
+#endif
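As a standalone illustration (plain C, not part of the patch; the mask value is arbitrary), the "possible nonzero bits" idea behind this predicate: masking with a constant bounds which bits the result can have set, which is also why the GENERIC-only arm accepts one level of bit_and.

/* Illustration only: (x & 0xf0) can never have a bit outside 0xf0 set,
   so 0xf0 is a valid bound on its possible nonzero bits.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t x = 0; x < 1024; x++)
    assert (((x & 0xf0) & ~(uint32_t) 0xf0) == 0);
  return 0;
}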
+
+/* Arguments on which one can call get_known_nonzero_bits to get the
+ bits known to be set. with_known_nonzero_bits_1 is an internal version,
+ use with_known_nonzero_bits. */
+(match with_known_nonzero_bits_1
INTEGER_CST@0)
-(match (with_certain_nonzero_bits2 @0)
- (bit_ior @1 INTEGER_CST@0))
+(match with_known_nonzero_bits_1
+ SSA_NAME@0
+ (if (INTEGRAL_TYPE_P (TREE_TYPE (@0)))))
+/* Slightly extended version, do not make it recursive to keep it cheap. */
+(match with_known_nonzero_bits
+ with_known_nonzero_bits_1@0)
+#if GENERIC
+(match with_known_nonzero_bits
+ (bit_ior:c with_known_nonzero_bits_1@0 @1))
+#endif
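The "known nonzero bits" side can be sketched the same way (plain C, not part of the patch; constants arbitrary): or-ing in a constant guarantees those bits are set, matching the GENERIC-only bit_ior arm above.

/* Illustration only: (y | 0x05) always has bits 0 and 2 set, so 0x05 is
   a valid set of known nonzero bits for it.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t y = 0; y < 1024; y++)
    assert (((y | 0x05) & 0x05) == 0x05);
  return 0;
}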
/* X == C (or X & Z == Y | C) is impossible if ~nonzero(X) & C != 0. */
(for cmp (eq ne)
(simplify
- (cmp:c (with_possible_nonzero_bits2 @0) (with_certain_nonzero_bits2 @1))
- (if (wi::bit_and_not (wi::to_wide (@1), get_nonzero_bits (@0)) != 0)
+ (cmp:c with_possible_nonzero_bits@0 with_known_nonzero_bits@1)
+ (if (wi::bit_and_not (get_known_nonzero_bits (@1),
+ get_nonzero_bits (@0)) != 0)
{ constant_boolean_node (cmp == NE_EXPR, type); })))
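A hypothetical testcase sketch (function name and constants made up, not taken from the patch) of a comparison the rule above can now fold: the right-hand operand always has bit 6 set, the left-hand operand never can, so the equality is impossible.

/* (x & 0x38) only sets bits inside 0x38, (y | 0x40) always has bit 6 set,
   and 0x40 & ~0x38 != 0, so this should fold to 0 (and != to 1).  */
int
never_equal (int x, int y)
{
  return (x & 0x38) == (y | 0x40);
}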
/* ((X inner_op C0) outer_op C1)
set_range_info (name, r);
}
-/* Return a widest_int with potentially non-zero bits in SSA_NAME
+/* Return a wide_int with potentially non-zero bits in SSA_NAME
NAME, the constant for INTEGER_CST, or -1 if unknown. */
-wide_int
-get_nonzero_bits (const_tree name)
+static wide_int
+get_nonzero_bits_1 (const_tree name)
{
if (TREE_CODE (name) == INTEGER_CST)
return wi::to_wide (name);
/* Use element_precision instead of TYPE_PRECISION so complex and
vector types get a non-zero precision. */
unsigned int precision = element_precision (TREE_TYPE (name));
+ if (TREE_CODE (name) != SSA_NAME)
+ return wi::shwi (-1, precision);
+
if (POINTER_TYPE_P (TREE_TYPE (name)))
{
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
return tmp.get_nonzero_bits ();
}
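A small aside on the defaults (plain C model, not the GCC API): an all-ones mask, the -1 produced by the early return added above, is the conservative "nothing known" answer for possible bits, since every value trivially stays within it; 0 plays the same role on the known-bits side further down.

/* Illustration only: with no information, "all bits may be set" is a
   correct, if useless, bound on the possible nonzero bits.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint32_t unknown = UINT32_MAX;	/* models the -1 default */
  for (uint32_t x = 0; x < 1024; x++)
    assert ((x & ~unknown) == 0);
  return 0;
}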
+/* Return a wide_int with potentially non-zero bits in SSA_NAME
+ NAME, the constant for INTEGER_CST, or -1 if unknown.
+   In addition to what get_nonzero_bits_1 handles, this handles one
+   level of BIT_AND_EXPR, either directly on the tree or on NAME's
+   defining statement. */
+
+wide_int
+get_nonzero_bits (const_tree name)
+{
+ if (TREE_CODE (name) == BIT_AND_EXPR)
+ return (get_nonzero_bits_1 (TREE_OPERAND (name, 0))
+ & get_nonzero_bits_1 (TREE_OPERAND (name, 1)));
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ gimple *g = SSA_NAME_DEF_STMT (name);
+ if (g
+ && is_gimple_assign (g)
+ && gimple_assign_rhs_code (g) == BIT_AND_EXPR)
+ return (get_nonzero_bits_1 (name)
+ & get_nonzero_bits_1 (gimple_assign_rhs1 (g))
+ & get_nonzero_bits_1 (gimple_assign_rhs2 (g)));
+ }
+ return get_nonzero_bits_1 (name);
+}
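A standalone model (plain C, not the GCC API; names made up) of the combination the wrapper performs: under AND, bounds on possible bits intersect, so the range-info bits and both BIT_AND_EXPR operands can simply be ANDed together.

/* Illustration only: if P_a bounds the possible bits of a and P_b bounds
   the possible bits of b, then P_a & P_b bounds those of a & b.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint32_t P_a = 0xff, P_b = 0x3c;
  for (uint32_t a = 0; a < 256; a++)
    for (uint32_t b = 0; b < 256; b++)
      if ((a & ~P_a) == 0 && (b & ~P_b) == 0)
        assert (((a & b) & ~(P_a & P_b)) == 0);
  return 0;
}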
+
+/* Return a wide_int with known non-zero bits in SSA_NAME
+ NAME (bits whose values aren't known are also clear), the constant
+ for INTEGER_CST, or 0 if unknown. */
+
+static wide_int
+get_known_nonzero_bits_1 (const_tree name)
+{
+ if (TREE_CODE (name) == INTEGER_CST)
+ return wi::to_wide (name);
+
+ /* Use element_precision instead of TYPE_PRECISION so complex and
+ vector types get a non-zero precision. */
+ unsigned int precision = element_precision (TREE_TYPE (name));
+ if (TREE_CODE (name) != SSA_NAME || POINTER_TYPE_P (TREE_TYPE (name)))
+ return wi::shwi (0, precision);
+
+ if (!range_info_p (name) || !irange::supports_p (TREE_TYPE (name)))
+ return wi::shwi (0, precision);
+
+ int_range_max tmp;
+ range_info_get_range (name, tmp);
+ if (tmp.undefined_p ())
+ return wi::shwi (0, precision);
+ irange_bitmask bm = tmp.get_bitmask ();
+ return bm.value () & ~bm.mask ();
+}
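A hedged sketch of the value/mask decoding above (plain C, numbers made up, not the irange_bitmask API): mask bits mark unknown positions and value holds the known bit values, so value & ~mask keeps exactly the bits known to be 1.

/* Illustration only: bits 0-3 unknown, bit 4 known 1, higher bits known 0.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint64_t mask = 0x0f;	/* unknown bit positions */
  const uint64_t value = 0x10;	/* known bit values */
  assert ((value & ~mask) == 0x10);
  return 0;
}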
+
+/* Return a wide_int with known non-zero bits in SSA_NAME
+   NAME, the constant for INTEGER_CST, or 0 if unknown.
+   In addition to what get_known_nonzero_bits_1 handles, this handles one
+   level of BIT_IOR_EXPR, either directly on the tree or on NAME's
+   defining statement. */
+
+wide_int
+get_known_nonzero_bits (const_tree name)
+{
+ if (TREE_CODE (name) == BIT_IOR_EXPR)
+ return (get_known_nonzero_bits_1 (TREE_OPERAND (name, 0))
+ | get_known_nonzero_bits_1 (TREE_OPERAND (name, 1)));
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ gimple *g = SSA_NAME_DEF_STMT (name);
+ if (g
+ && is_gimple_assign (g)
+ && gimple_assign_rhs_code (g) == BIT_IOR_EXPR)
+ return (get_known_nonzero_bits_1 (name)
+ | get_known_nonzero_bits_1 (gimple_assign_rhs1 (g))
+ | get_known_nonzero_bits_1 (gimple_assign_rhs2 (g)));
+ }
+ return get_known_nonzero_bits_1 (name);
+}
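And the mirror image for the IOR wrapper (plain C check, not part of the patch; constants arbitrary): bits known to be set in either operand are known to be set in the result, so the three masks are simply ORed together.

/* Illustration only: if K_a and K_b are known-set bits of a and b,
   then K_a | K_b are known-set bits of a | b.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint32_t K_a = 0x21, K_b = 0x0c;
  for (uint32_t a = 0; a < 256; a++)
    for (uint32_t b = 0; b < 256; b++)
      if ((a & K_a) == K_a && (b & K_b) == K_b)
        assert (((a | b) & (K_a | K_b)) == (K_a | K_b));
  return 0;
}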
+
/* Return TRUE if OP, an SSA_NAME, has a range of values [0..1], false
otherwise.