if (shiftrt)
{
+ /* Punt if we're shifting by more than the loaded bitfield (after
+ adjustment), or if there's a shift after a change of signedness.
+ When comparing this field with a constant, we'll check that the
+ constant is a proper sign- or zero-extension (depending on signedness)
+ of a value that would fit in the selected portion of the bitfield. A
+ shift after a change of signedness would make the extension
+ non-uniform, and we can't deal with that (yet ???). See
+ gcc.dg/field-merge-22.c for a test that would go wrong. */
+ if (*pbitsize <= shiftrt
+ || (convert_before_shift
+ && outer_type && unsignedp != TYPE_UNSIGNED (outer_type)))
+ return NULL_TREE;
if (!*preversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
*pbitpos += shiftrt;
*pbitsize -= shiftrt;
and bit position. */
if (l_const.get_precision ())
{
+ /* Before clipping upper bits of the right-hand operand of the compare,
+ check that they're sign or zero extensions, depending on how the
+ left-hand operand would be extended. */
+ bool l_non_ext_bits = false;
+ if (ll_bitsize < lr_bitsize)
+ {
+ wide_int zext = wi::zext (l_const, ll_bitsize);
+ if ((ll_unsignedp ? zext : wi::sext (l_const, ll_bitsize)) == l_const)
+ l_const = zext;
+ else
+ l_non_ext_bits = true;
+ }
/* We're doing bitwise equality tests, so don't bother with sign
extensions. */
l_const = wide_int::from (l_const, lnprec, UNSIGNED);
if (ll_and_mask.get_precision ())
l_const &= wide_int::from (ll_and_mask, lnprec, UNSIGNED);
l_const <<= xll_bitpos;
- if ((l_const & ~ll_mask) != 0)
+ if (l_non_ext_bits || (l_const & ~ll_mask) != 0)
{
warning_at (lloc, OPT_Wtautological_compare,
"comparison is always %d", wanted_code == NE_EXPR);
again. */
gcc_checking_assert (r_const.get_precision ());
+ /* Before clipping upper bits of the right-hand operand of the compare,
+ check that they're sign or zero extensions, depending on how the
+ left-hand operand would be extended. */
+ bool r_non_ext_bits = false;
+ if (rl_bitsize < rr_bitsize)
+ {
+ wide_int zext = wi::zext (r_const, rl_bitsize);
+ if ((rl_unsignedp ? zext : wi::sext (r_const, rl_bitsize)) == r_const)
+ r_const = zext;
+ else
+ r_non_ext_bits = true;
+ }
r_const = wide_int::from (r_const, lnprec, UNSIGNED);
if (rl_and_mask.get_precision ())
r_const &= wide_int::from (rl_and_mask, lnprec, UNSIGNED);
r_const <<= xrl_bitpos;
- if ((r_const & ~rl_mask) != 0)
+ if (r_non_ext_bits || (r_const & ~rl_mask) != 0)
{
warning_at (rloc, OPT_Wtautological_compare,
"comparison is always %d", wanted_code == NE_EXPR);
--- /dev/null
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+/* PR tree-optimization/118456 */
+/* Check that shifted fields compared with constants compare correctly even
+ if the constant contains sign-extension bits not present in the bit
+ range. */
+
+/* s has static storage, so it starts zero-initialized; main stores the
+   values the checks below depend on into a and b.  */
+struct S { unsigned long long o; unsigned short a, b; } s;
+
+__attribute__((noipa)) int
+foo (void)
+{
+ /* With main's stores (s.a = 17 << 3; s.b = -27 << 2 truncated to short),
+    both compares hold, so this must return 1.  The >> 2 on the signed
+    char relies on arithmetic right shift of negatives, as GCC provides
+    (implementation-defined per the C standard).  */
+ return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -27;
+}
+
+__attribute__((noipa)) int
+bar (void)
+{
+ /* Like foo, but the signed field is compared with -91 instead of -27;
+    for the values main stores, the second compare is false, so this must
+    return 0.  */
+ return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -91;
+}
+
+__attribute__((noipa)) int
+bars (void)
+{
+ /* 37 equals -27 modulo 64: it matches the low 6 bits of the shifted
+    signed field but not its sign extension.  A buggy field merge that
+    clipped the constant without checking the extension bits would wrongly
+    report a match; this must return 0.  */
+ return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == 37;
+}
+
+__attribute__((noipa)) int
+baz (void)
+{
+ /* 49 is 17 + 32: same low 5 bits as the shifted unsigned field, which
+    can only hold 0..31, so the first compare can never be true and this
+    must return 0.  */
+ return ((unsigned char) s.a) >> 3 == 49 && ((signed char) s.b) >> 2 == -27;
+}
+
+__attribute__((noipa)) int
+bazs (void)
+{
+ /* (unsigned char) -15 is 241, which shares its low 5 bits (17) with the
+    shifted field but has extra high bits; since the field zero-extends,
+    the compare must be false and this must return 0.  */
+ return ((unsigned char) s.a) >> 3 == (unsigned char) -15 && ((signed char) s.b) >> 2 == -27;
+}
+
+int
+main ()
+{
+ /* Store values such that exactly foo's pair of compares holds; every
+    other function differs only in a constant that must not match once the
+    extension bits are checked.  */
+ s.a = 17 << 3;
+ s.b = (unsigned short)(-27u << 2);
+ if (foo () != 1
+ || bar () != 0
+ || bars () != 0
+ || baz () != 0
+ || bazs () != 0)
+ __builtin_abort ();
+ return 0;
+}