On October 16, 2017 8:46:12 PM GMT+02:00, Jakub Jelinek <ja...@redhat.com> wrote:
>Hi!
>
>The tree-cfg.c verification requires that BIT_FIELD_REF bitpos fits
>into uhwi, but fold-const.c happily creates BIT_FIELD_REFs with
>negative bitpos (on invalid source).
>
>The following patch just punts on the optimization instead of creating
>a BIT_FIELD_REF with negative bitpos, and also makes some formatting
>fixes.
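>
>For reference, the transformation being guarded here is the one that
>rewrites a bit-field comparison, e.g. (names made up for illustration)
>
>  struct S { unsigned int a : 4, b : 4; } x, y;
>
>  int
>  f (void)
>  {
>    return x.a == y.a;
>  }
>
>into a masked comparison of the words containing the fields, built via
>make_bit_field_ref.  With the out of bounds accesses in the new
>testcase the computed bit position goes negative, so we just return 0
>and leave the expression alone.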
>
>Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

OK. 
Richard. 

>2017-10-16  Jakub Jelinek  <ja...@redhat.com>
>
>       PR tree-optimization/82549
>       * fold-const.c (optimize_bit_field_compare, fold_truth_andor_1):
>       Formatting fixes.  Instead of calling make_bit_field_ref with negative
>       bitpos, return 0.
>
>       * gcc.c-torture/compile/pr82549.c: New test.
>
>--- gcc/fold-const.c.jj        2017-10-13 09:17:44.000000000 +0200
>+++ gcc/fold-const.c   2017-10-14 13:46:35.939329846 +0200
>@@ -4013,21 +4013,20 @@ optimize_bit_field_compare (location_t l
>                     size_int (nbitsize - lbitsize - lbitpos));
> 
>   if (! const_p)
>-    /* If not comparing with constant, just rework the comparison
>-       and return.  */
>-    return fold_build2_loc (loc, code, compare_type,
>-                      fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
>-                                   make_bit_field_ref (loc, linner, lhs,
>-                                                       unsigned_type,
>-                                                       nbitsize, nbitpos,
>-                                                       1, lreversep),
>-                                   mask),
>-                      fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
>-                                   make_bit_field_ref (loc, rinner, rhs,
>-                                                       unsigned_type,
>-                                                       nbitsize, nbitpos,
>-                                                       1, rreversep),
>-                                   mask));
>+    {
>+      if (nbitpos < 0)
>+        return 0;
>+
>+      /* If not comparing with constant, just rework the comparison
>+         and return.  */
>+      tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
>+                                    nbitsize, nbitpos, 1, lreversep);
>+      t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
>+      tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
>+                                    nbitsize, nbitpos, 1, rreversep);
>+      t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
>+      return fold_build2_loc (loc, code, compare_type, t1, t2);
>+    }
> 
>   /* Otherwise, we are handling the constant case.  See if the constant is too
>      big for the field.  Warn and return a tree for 0 (false) if so.  We do
>@@ -4058,6 +4057,9 @@ optimize_bit_field_compare (location_t l
>       }
>     }
> 
>+  if (nbitpos < 0)
>+    return 0;
>+
>   /* Single-bit compares should always be against zero.  */
>   if (lbitsize == 1 && ! integer_zerop (rhs))
>     {
>@@ -5874,7 +5876,10 @@ fold_truth_andor_1 (location_t loc, enum
>        results.  */
>       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
>       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
>-      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
>+      if (lnbitsize == rnbitsize
>+          && xll_bitpos == xlr_bitpos
>+          && lnbitpos >= 0
>+          && rnbitpos >= 0)
>       {
>         lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
>                                   lntype, lnbitsize, lnbitpos,
>@@ -5898,10 +5903,14 @@ fold_truth_andor_1 (location_t loc, enum
>        Note that we still must mask the lhs/rhs expressions.  Furthermore,
>        the mask must be shifted to account for the shift done by
>        make_bit_field_ref.  */
>-      if ((ll_bitsize + ll_bitpos == rl_bitpos
>-         && lr_bitsize + lr_bitpos == rr_bitpos)
>-        || (ll_bitpos == rl_bitpos + rl_bitsize
>-            && lr_bitpos == rr_bitpos + rr_bitsize))
>+      if (((ll_bitsize + ll_bitpos == rl_bitpos
>+            && lr_bitsize + lr_bitpos == rr_bitpos)
>+           || (ll_bitpos == rl_bitpos + rl_bitsize
>+               && lr_bitpos == rr_bitpos + rr_bitsize))
>+          && ll_bitpos >= 0
>+          && rl_bitpos >= 0
>+          && lr_bitpos >= 0
>+          && rr_bitpos >= 0)
>       {
>         tree type;
> 
>@@ -5970,6 +5979,9 @@ fold_truth_andor_1 (location_t loc, enum
>       }
>     }
> 
>+  if (lnbitpos < 0)
>+    return 0;
>+
>   /* Construct the expression we will return.  First get the component
>      reference we will make.  Unless the mask is all ones the width of
>      that field, perform the mask operation.  Then compare with the
>--- gcc/testsuite/gcc.c-torture/compile/pr82549.c.jj   2017-10-14 13:49:27.831214544 +0200
>+++ gcc/testsuite/gcc.c-torture/compile/pr82549.c      2017-10-14 13:49:23.561267090 +0200
>@@ -0,0 +1,9 @@
>+/* PR tree-optimization/82549 */
>+
>+int a, b[1];
>+
>+int
>+main ()
>+{
>+  return !a || b[-2] || b[-2];
>+}
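>
>(If you want to run just the new test on its own, something like
>make check-gcc RUNTESTFLAGS="compile.exp=pr82549.c" should do it.)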
>
>       Jakub
