This patch extends get_nonzero_bits to handle POLY_INT_CSTs.
The easiest (but also most useful) case is that the number
of trailing zeros in the runtime value is at least the minimum
number of trailing zeros in the individual components.
In principle, we could do this for coeffs 1 and above only,
and then OR in coeff 0. This would give ~0x11 for [14, 32], say.
But that's future work.
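
For illustration only (a standalone C++ sketch, not part of the
patch), the two masks for [14, 32], i.e. 14 + 32 * N, work out
as follows:

  #include <cstdint>
  #include <cstdio>

  int main ()
  {
    uint64_t c0 = 14, c1 = 32;  /* runtime value is c0 + c1 * N */

    /* What the patch returns: minus the known alignment, i.e. minus
       the largest power of 2 that divides every component.  */
    uint64_t align = uint64_t (1) << __builtin_ctzll (c0 | c1);
    printf ("%#llx\n", (unsigned long long) -align);         /* ...fffe */

    /* The possible refinement: alignment of coeffs 1 and above,
       with coeff 0 ORed in; this also clears bit 4.  */
    uint64_t align1 = uint64_t (1) << __builtin_ctzll (c1);
    printf ("%#llx\n", (unsigned long long) (-align1 | c0)); /* ~0x11 */
    return 0;
  }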
gcc/
* tree-ssanames.cc (get_nonzero_bits): Handle POLY_INT_CSTs.
* match.pd (with_possible_nonzero_bits): Likewise.
gcc/testsuite/
* gcc.target/aarch64/sve/cnt_fold_4.c: New test.
possibly set. */
(match with_possible_nonzero_bits
INTEGER_CST@0)
+(match with_possible_nonzero_bits
+ POLY_INT_CST@0)
(match with_possible_nonzero_bits
SSA_NAME@0
(if (INTEGRAL_TYPE_P (TREE_TYPE (@0)) || POINTER_TYPE_P (TREE_TYPE (@0)))))
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sve/cnt_fold_4.c
+/* { dg-do compile } */
+/* { dg-options "-O2" } */
+/* { dg-final { check-function-bodies "**" "" } } */
+
+#include <arm_sve.h>
+
+/*
+** f1:
+** cnth x0
+** ret
+*/
+uint64_t
+f1 ()
+{
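+  /* svcntw () is a multiple of 4, so (x >> 2) << 3 == x * 2 == svcnth ().  */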
+ uint64_t x = svcntw ();
+ x >>= 2;
+ return x << 3;
+}
+
+/*
+** f2:
+** [^\n]+
+** [^\n]+
+** ...
+** ret
+*/
+uint64_t
+f2 ()
+{
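+  /* svcntd () is only known to be a multiple of 2, so the >> 2 here
+     cannot be folded away.  */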
+ uint64_t x = svcntd ();
+ x >>= 2;
+ return x << 3;
+}
+
+/*
+** f3:
+** cntb x0, all, mul #4
+** ret
+*/
+uint64_t
+f3 ()
+{
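+  /* svcntd () is a multiple of 2, so (x >> 1) << 6 == x * 32
+     == svcntb () * 4.  */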
+ uint64_t x = svcntd ();
+ x >>= 1;
+ return x << 6;
+}
+
+/*
+** f4:
+** [^\n]+
+** [^\n]+
+** ...
+** ret
+*/
+uint64_t
+f4 ()
+{
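+  /* As in f2, only one trailing zero bit is known, so the >> 2
+     cannot be folded away.  */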
+ uint64_t x = svcntd ();
+ x >>= 2;
+ return x << 2;
+}
if (TREE_CODE (name) == INTEGER_CST)
return wi::to_wide (name);
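+  /* A POLY_INT_CST is a multiple of its known alignment, so negating
+     that alignment gives a mask of the bits that might be nonzero.  */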
+ if (POLY_INT_CST_P (name))
+ return -known_alignment (wi::to_poly_wide (name));
+
/* Use element_precision instead of TYPE_PRECISION so complex and
vector types get a non-zero precision. */
unsigned int precision = element_precision (TREE_TYPE (name));