https://gcc.gnu.org/g:84acdb5125a64e6c031855f18c33d2046a7af87e
commit 84acdb5125a64e6c031855f18c33d2046a7af87e Author: Alexandre Oliva <ol...@adacore.com> Date: Mon Jan 13 14:53:25 2025 -0300 [ifcombine] check and extend constants to compare with bitfields Add logic to check and extend constants compared with bitfields, so that fields are only compared with constants they could actually equal. This involves making sure the signedness doesn't change between loads and conversions before shifts: we'd need to carry a lot more data to deal with all the possibilities. (IIRC we had logic that implicitly dealt with that when compare constants were represented as typed trees, but after the rewrite to represent them as wide_ints, the logic seemed superfluous. It wasn't.) While at that, I've robustified decode_field_reference to use local variables throughout, to modify the out parms only when we're about to return non-NULL, and to drop the unused case of NULL pand_mask, that had a latent failure to detect signbit masking. for gcc/ChangeLog PR tree-optimization/118456 * gimple-fold.cc (decode_field_reference): Punt if shifting after changing signedness. Robustify to set out parms only when returning non-NULL. (fold_truth_andor_for_ifcombine): Check extension bits in constants before clipping. Add complementary assert on r_const's not being set when l_const isn't. for gcc/testsuite/ChangeLog PR tree-optimization/118456 * gcc.dg/field-merge-21.c: New. * gcc.dg/field-merge-22.c: New. 
Diff: --- gcc/gimple-fold.cc | 44 +++++++++++++++++++++++++++-- gcc/testsuite/gcc.dg/field-merge-21.c | 53 +++++++++++++++++++++++++++++++++++ gcc/testsuite/gcc.dg/field-merge-22.c | 31 ++++++++++++++++++++ 3 files changed, 126 insertions(+), 2 deletions(-) diff --git a/gcc/gimple-fold.cc b/gcc/gimple-fold.cc index 93ed8b3abb05..c9cd842c9088 100644 --- a/gcc/gimple-fold.cc +++ b/gcc/gimple-fold.cc @@ -7712,6 +7712,18 @@ decode_field_reference (tree *pexp, HOST_WIDE_INT *pbitsize, if (shiftrt) { + /* Punt if we're shifting by more than the loaded bitfield (after + adjustment), or if there's a shift after a change of signedness, punt. + When comparing this field with a constant, we'll check that the + constant is a proper sign- or zero-extension (depending on signedness) + of a value that would fit in the selected portion of the bitfield. A + shift after a change of signedness would make the extension + non-uniform, and we can't deal with that (yet ???). See + gcc.dg/field-merge-22.c for a test that would go wrong. */ + if (*pbitsize <= shiftrt + || (convert_before_shift + && outer_type && unsignedp != TYPE_UNSIGNED (outer_type))) + return NULL_TREE; if (!*preversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN) *pbitpos += shiftrt; *pbitsize -= shiftrt; @@ -8512,13 +8524,25 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type, and bit position. */ if (l_const.get_precision ()) { + /* Before clipping upper bits of the right-hand operand of the compare, + check that they're sign or zero extensions, depending on how the + left-hand operand would be extended. */ + bool l_non_ext_bits = false; + if (ll_bitsize < lr_bitsize) + { + wide_int zext = wi::zext (l_const, ll_bitsize); + if ((ll_unsignedp ? zext : wi::sext (l_const, ll_bitsize)) == l_const) + l_const = zext; + else + l_non_ext_bits = true; + } /* We're doing bitwise equality tests, so don't bother with sign extensions. 
*/ l_const = wide_int::from (l_const, lnprec, UNSIGNED); if (ll_and_mask.get_precision ()) l_const &= wide_int::from (ll_and_mask, lnprec, UNSIGNED); l_const <<= xll_bitpos; - if ((l_const & ~ll_mask) != 0) + if (l_non_ext_bits || (l_const & ~ll_mask) != 0) { warning_at (lloc, OPT_Wtautological_compare, "comparison is always %d", wanted_code == NE_EXPR); @@ -8530,11 +8554,23 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type, again. */ gcc_checking_assert (r_const.get_precision ()); + /* Before clipping upper bits of the right-hand operand of the compare, + check that they're sign or zero extensions, depending on how the + left-hand operand would be extended. */ + bool r_non_ext_bits = false; + if (rl_bitsize < rr_bitsize) + { + wide_int zext = wi::zext (r_const, rl_bitsize); + if ((rl_unsignedp ? zext : wi::sext (r_const, rl_bitsize)) == r_const) + r_const = zext; + else + r_non_ext_bits = true; + } r_const = wide_int::from (r_const, lnprec, UNSIGNED); if (rl_and_mask.get_precision ()) r_const &= wide_int::from (rl_and_mask, lnprec, UNSIGNED); r_const <<= xrl_bitpos; - if ((r_const & ~rl_mask) != 0) + if (r_non_ext_bits || (r_const & ~rl_mask) != 0) { warning_at (rloc, OPT_Wtautological_compare, "comparison is always %d", wanted_code == NE_EXPR); @@ -8586,6 +8622,10 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type, storage order mismatch occurs between the left and right sides. */ else { + /* When we set l_const, we also set r_const, so we need not test it + again. 
*/ + gcc_checking_assert (!r_const.get_precision ()); + if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp || ll_reversep != lr_reversep diff --git a/gcc/testsuite/gcc.dg/field-merge-21.c b/gcc/testsuite/gcc.dg/field-merge-21.c new file mode 100644 index 000000000000..042b2123eb63 --- /dev/null +++ b/gcc/testsuite/gcc.dg/field-merge-21.c @@ -0,0 +1,53 @@ +/* { dg-do run } */ +/* { dg-options "-O2" } */ + +/* PR tree-optimization/118456 */ +/* Check that shifted fields compared with a constants compare correctly even + if the constant contains sign-extension bits not present in the bit + range. */ + +struct S { unsigned long long o; unsigned short a, b; } s; + +__attribute__((noipa)) int +foo (void) +{ + return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -27; +} + +__attribute__((noipa)) int +bar (void) +{ + return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -91; +} + +__attribute__((noipa)) int +bars (void) +{ + return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == 37; +} + +__attribute__((noipa)) int +baz (void) +{ + return ((unsigned char) s.a) >> 3 == 49 && ((signed char) s.b) >> 2 == -27; +} + +__attribute__((noipa)) int +bazs (void) +{ + return ((unsigned char) s.a) >> 3 == (unsigned char) -15 && ((signed char) s.b) >> 2 == -27; +} + +int +main () +{ + s.a = 17 << 3; + s.b = (unsigned short)(-27u << 2); + if (foo () != 1 + || bar () != 0 + || bars () != 0 + || baz () != 0 + || bazs () != 0) + __builtin_abort (); + return 0; +} diff --git a/gcc/testsuite/gcc.dg/field-merge-22.c b/gcc/testsuite/gcc.dg/field-merge-22.c new file mode 100644 index 000000000000..45b29c0bccaf --- /dev/null +++ b/gcc/testsuite/gcc.dg/field-merge-22.c @@ -0,0 +1,31 @@ +/* { dg-do run } */ +/* { dg-options "-O2" } */ + +/* PR tree-optimization/118456 */ +/* Check that compares with constants take into account sign/zero extension of + both the bitfield and of the 
shifting type. */ + +#define shift (__CHAR_BIT__ - 4) + +struct S { + signed char a : shift + 2; + signed char b : shift + 2; + short ignore[0]; +} s; + +__attribute__((noipa)) int +foo (void) +{ + return ((unsigned char) s.a) >> shift == 15 + && ((unsigned char) s.b) >> shift == 0; +} + +int +main () +{ + s.a = -1; + s.b = 1; + if (foo () != 1) + __builtin_abort (); + return 0; +}