This moves more patterns that show up during bootstrap.

Bootstrap and regtest running on x86_64-unknown-linux-gnu.

Richard.

2015-07-09  Richard Biener  <rguent...@suse.de>

        * fold-const.c (distribute_bit_expr): Remove.
        (fold_plusminus_mult_expr): Likewise.
        (fold_binary_loc): Move factoring out common multiples
        from (A * B) +- (C * D), simplifying (A & C1) + (B & C2)
        to (A & C1) | (B & C2), distributing (A & B) | (A & C)
        to A & (B | C) and simplifying A << C1 << C2 to ...
        * match.pd: ... patterns here.
        
Index: gcc/fold-const.c
===================================================================
*** gcc/fold-const.c    (revision 225609)
--- gcc/fold-const.c    (working copy)
*************** static enum tree_code compcode_to_compar
*** 117,123 ****
  static int operand_equal_for_comparison_p (tree, tree, tree);
  static int twoval_comparison_p (tree, tree *, tree *, int *);
  static tree eval_subst (location_t, tree, tree, tree, tree, tree);
- static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
  static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
  static tree optimize_bit_field_compare (location_t, enum tree_code,
--- 117,122 ----
*************** invert_truthvalue_loc (location_t loc, t
*** 3549,3610 ****
                          type, arg);
  }
  
- /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
-    operands are another bit-wise operation with a common input.  If so,
-    distribute the bit operations to save an operation and possibly two if
-    constants are involved.  For example, convert
-       (A | B) & (A | C) into A | (B & C)
-    Further simplification will occur if B and C are constants.
- 
-    If this optimization cannot be done, 0 will be returned.  */
- 
- static tree
- distribute_bit_expr (location_t loc, enum tree_code code, tree type,
-                    tree arg0, tree arg1)
- {
-   tree common;
-   tree left, right;
- 
-   if (TREE_CODE (arg0) != TREE_CODE (arg1)
-       || TREE_CODE (arg0) == code
-       || (TREE_CODE (arg0) != BIT_AND_EXPR
-         && TREE_CODE (arg0) != BIT_IOR_EXPR))
-     return 0;
- 
-   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
-     {
-       common = TREE_OPERAND (arg0, 0);
-       left = TREE_OPERAND (arg0, 1);
-       right = TREE_OPERAND (arg1, 1);
-     }
-   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
-     {
-       common = TREE_OPERAND (arg0, 0);
-       left = TREE_OPERAND (arg0, 1);
-       right = TREE_OPERAND (arg1, 0);
-     }
-   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
-     {
-       common = TREE_OPERAND (arg0, 1);
-       left = TREE_OPERAND (arg0, 0);
-       right = TREE_OPERAND (arg1, 1);
-     }
-   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
-     {
-       common = TREE_OPERAND (arg0, 1);
-       left = TREE_OPERAND (arg0, 0);
-       right = TREE_OPERAND (arg1, 0);
-     }
-   else
-     return 0;
- 
-   common = fold_convert_loc (loc, type, common);
-   left = fold_convert_loc (loc, type, left);
-   right = fold_convert_loc (loc, type, right);
-   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
-                     fold_build2_loc (loc, code, type, left, right));
- }
- 
  /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
     with code CODE.  This optimization is unsafe.  */
  static tree
--- 3548,3553 ----
*************** fold_to_nonsharp_ineq_using_bound (locat
*** 6937,7064 ****
    return fold_build2_loc (loc, GE_EXPR, type, a, y);
  }
  
- /* Fold a sum or difference of at least one multiplication.
-    Returns the folded tree or NULL if no simplification could be made.  */
- 
- static tree
- fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
-                         tree arg0, tree arg1)
- {
-   tree arg00, arg01, arg10, arg11;
-   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
- 
-   /* (A * C) +- (B * C) -> (A+-B) * C.
-      (A * C) +- A -> A * (C+-1).
-      We are most concerned about the case where C is a constant,
-      but other combinations show up during loop reduction.  Since
-      it is not difficult, try all four possibilities.  */
- 
-   if (TREE_CODE (arg0) == MULT_EXPR)
-     {
-       arg00 = TREE_OPERAND (arg0, 0);
-       arg01 = TREE_OPERAND (arg0, 1);
-     }
-   else if (TREE_CODE (arg0) == INTEGER_CST)
-     {
-       arg00 = build_one_cst (type);
-       arg01 = arg0;
-     }
-   else
-     {
-       /* We cannot generate constant 1 for fract.  */
-       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
-       return NULL_TREE;
-       arg00 = arg0;
-       arg01 = build_one_cst (type);
-     }
-   if (TREE_CODE (arg1) == MULT_EXPR)
-     {
-       arg10 = TREE_OPERAND (arg1, 0);
-       arg11 = TREE_OPERAND (arg1, 1);
-     }
-   else if (TREE_CODE (arg1) == INTEGER_CST)
-     {
-       arg10 = build_one_cst (type);
-       /* As we canonicalize A - 2 to A + -2 get rid of that sign for
-        the purpose of this canonicalization.  */
-       if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
-         && negate_expr_p (arg1)
-         && code == PLUS_EXPR)
-       {
-         arg11 = negate_expr (arg1);
-         code = MINUS_EXPR;
-       }
-       else
-       arg11 = arg1;
-     }
-   else
-     {
-       /* We cannot generate constant 1 for fract.  */
-       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
-       return NULL_TREE;
-       arg10 = arg1;
-       arg11 = build_one_cst (type);
-     }
-   same = NULL_TREE;
- 
-   if (operand_equal_p (arg01, arg11, 0))
-     same = arg01, alt0 = arg00, alt1 = arg10;
-   else if (operand_equal_p (arg00, arg10, 0))
-     same = arg00, alt0 = arg01, alt1 = arg11;
-   else if (operand_equal_p (arg00, arg11, 0))
-     same = arg00, alt0 = arg01, alt1 = arg10;
-   else if (operand_equal_p (arg01, arg10, 0))
-     same = arg01, alt0 = arg00, alt1 = arg11;
- 
-   /* No identical multiplicands; see if we can find a common
-      power-of-two factor in non-power-of-two multiplies.  This
-      can help in multi-dimensional array access.  */
-   else if (tree_fits_shwi_p (arg01)
-          && tree_fits_shwi_p (arg11))
-     {
-       HOST_WIDE_INT int01, int11, tmp;
-       bool swap = false;
-       tree maybe_same;
-       int01 = tree_to_shwi (arg01);
-       int11 = tree_to_shwi (arg11);
- 
-       /* Move min of absolute values to int11.  */
-       if (absu_hwi (int01) < absu_hwi (int11))
-         {
-         tmp = int01, int01 = int11, int11 = tmp;
-         alt0 = arg00, arg00 = arg10, arg10 = alt0;
-         maybe_same = arg01;
-         swap = true;
-       }
-       else
-       maybe_same = arg11;
- 
-       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
-         /* The remainder should not be a constant, otherwise we
-            end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
-            increased the number of multiplications necessary.  */
-         && TREE_CODE (arg10) != INTEGER_CST)
-         {
-         alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
-                             build_int_cst (TREE_TYPE (arg00),
-                                            int01 / int11));
-         alt1 = arg10;
-         same = maybe_same;
-         if (swap)
-           maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
-       }
-     }
- 
-   if (same)
-     return fold_build2_loc (loc, MULT_EXPR, type,
-                       fold_build2_loc (loc, code, type,
-                                    fold_convert_loc (loc, type, alt0),
-                                    fold_convert_loc (loc, type, alt1)),
-                       fold_convert_loc (loc, type, same));
- 
-   return NULL_TREE;
- }
- 
  /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
     specified by EXPR into the buffer PTR of length LEN bytes.
     Return the number of bytes placed in the buffer, or zero
--- 6880,6885 ----
*************** fold_binary_loc (location_t loc,
*** 9556,9594 ****
            }
        }
  
-       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
-        one.  Make sure the type is not saturating and has the signedness of
-        the stripped operands, as fold_plusminus_mult_expr will re-associate.
-        ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
-       if ((TREE_CODE (arg0) == MULT_EXPR
-          || TREE_CODE (arg1) == MULT_EXPR)
-         && !TYPE_SATURATING (type)
-         && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
-         && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
-         && (!FLOAT_TYPE_P (type) || flag_associative_math))
-         {
-         tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
-         if (tem)
-           return tem;
-       }
- 
        if (! FLOAT_TYPE_P (type))
        {
-         /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
-            with a constant, and the two constants have no bits in common,
-            we should treat this as a BIT_IOR_EXPR since this may produce more
-            simplifications.  */
-         if (TREE_CODE (arg0) == BIT_AND_EXPR
-             && TREE_CODE (arg1) == BIT_AND_EXPR
-             && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
-             && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
-             && wi::bit_and (TREE_OPERAND (arg0, 1),
-                             TREE_OPERAND (arg1, 1)) == 0)
-           {
-             code = BIT_IOR_EXPR;
-             goto bit_ior;
-           }
- 
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
--- 9377,9384 ----
*************** fold_binary_loc (location_t loc,
*** 10120,10141 ****
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;
  
-       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
-        one.  Make sure the type is not saturating and has the signedness of
-        the stripped operands, as fold_plusminus_mult_expr will re-associate.
-        ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
-       if ((TREE_CODE (arg0) == MULT_EXPR
-          || TREE_CODE (arg1) == MULT_EXPR)
-         && !TYPE_SATURATING (type)
-         && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
-         && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
-         && (!FLOAT_TYPE_P (type) || flag_associative_math))
-         {
-         tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
-         if (tem)
-           return tem;
-       }
- 
        goto associate;
  
      case MULT_EXPR:
--- 9910,9915 ----
*************** fold_binary_loc (location_t loc,
*** 10422,10428 ****
        goto associate;
  
      case BIT_IOR_EXPR:
-     bit_ior:
        /* Canonicalize (X & C1) | C2.  */
        if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
--- 10196,10201 ----
*************** fold_binary_loc (location_t loc,
*** 10493,10502 ****
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }
  
-       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
-       if (t1 != NULL_TREE)
-       return t1;
- 
        /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
        goto bit_rotate;
--- 10266,10271 ----
*************** fold_binary_loc (location_t loc,
*** 10759,10767 ****
            }
        }
  
-       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
-       if (t1 != NULL_TREE)
-       return t1;
        /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
        if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
--- 10528,10533 ----
*************** fold_binary_loc (location_t loc,
*** 11110,11141 ****
  
        prec = element_precision (type);
  
-       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
-       if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
-         && tree_to_uhwi (arg1) < prec
-         && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
-         && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
-       {
-         unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
-                             + tree_to_uhwi (arg1));
- 
-         /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
-            being well defined.  */
-         if (low >= prec)
-           {
-             if (code == LROTATE_EXPR || code == RROTATE_EXPR)
-               low = low % prec;
-             else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
-               return omit_one_operand_loc (loc, type, build_zero_cst (type),
-                                        TREE_OPERAND (arg0, 0));
-             else
-               low = prec - 1;
-           }
- 
-         return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
-                                 build_int_cst (TREE_TYPE (arg1), low));
-       }
- 
        /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
           into x & ((unsigned)-1 >> c) for unsigned types.  */
        if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
--- 10876,10881 ----
Index: gcc/match.pd
===================================================================
*** gcc/match.pd        (revision 225610)
--- gcc/match.pd        (working copy)
*************** (define_operator_list CBRT BUILT_IN_CBRT
*** 419,435 ****
         && tree_nop_conversion_p (type, TREE_TYPE (@1)))
     (bit_not (rop (convert @0) (convert @1))))))
  
! /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
     with a constant, and the two constants have no bits in common,
     we should treat this as a BIT_IOR_EXPR since this may produce more
     simplifications.  */
! (simplify
!  (bit_xor (convert1? (bit_and@4 @0 INTEGER_CST@1))
!           (convert2? (bit_and@5 @2 INTEGER_CST@3)))
!  (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
!       && tree_nop_conversion_p (type, TREE_TYPE (@2))
!       && wi::bit_and (@1, @3) == 0)
!   (bit_ior (convert @4) (convert @5))))
  
  /* (X | Y) ^ X -> Y & ~ X*/
  (simplify
--- 419,436 ----
         && tree_nop_conversion_p (type, TREE_TYPE (@1)))
     (bit_not (rop (convert @0) (convert @1))))))
  
! /* If we are XORing or adding two BIT_AND_EXPR's, both of which are and'ing
     with a constant, and the two constants have no bits in common,
     we should treat this as a BIT_IOR_EXPR since this may produce more
     simplifications.  */
! (for op (bit_xor plus)
!  (simplify
!   (op (convert1? (bit_and@4 @0 INTEGER_CST@1))
!       (convert2? (bit_and@5 @2 INTEGER_CST@3)))
!   (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
!        && tree_nop_conversion_p (type, TREE_TYPE (@2))
!        && wi::bit_and (@1, @3) == 0)
!    (bit_ior (convert @4) (convert @5)))))
  
  /* (X | Y) ^ X -> Y & ~ X*/
  (simplify
*************** (define_operator_list CBRT BUILT_IN_CBRT
*** 455,460 ****
--- 456,474 ----
   (bit_xor:c (bit_and:c @0 @1) @1)
   (bit_and (bit_not @0) @1))
  
+ /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
+    operands are another bit-wise operation with a common input.  If so,
+    distribute the bit operations to save an operation and possibly two if
+    constants are involved.  For example, convert
+      (A | B) & (A | C) into A | (B & C)
+    Further simplification will occur if B and C are constants.  */
+ (for op (bit_and bit_ior)
+      rop (bit_ior bit_and)
+  (simplify
+   (op (convert? (rop:c @0 @1)) (convert? (rop @0 @2)))
+   (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+    (rop (convert @0) (op (convert @1) (convert @2))))))
+ 
  
  (simplify
   (abs (abs@1 @0))
*************** (define_operator_list CBRT BUILT_IN_CBRT
*** 821,826 ****
--- 835,880 ----
             && tree_int_cst_sign_bit (@1) == 0))
       (convert @1))))))
  
+ /* From fold_plusminus_mult_expr.  */
+ (if (! TYPE_SATURATING (type)
+      && (! FLOAT_TYPE_P (type) || flag_associative_math))
+  (for op (plus minus)
+   /* (A * B) +- (A * C) -> A * (B +- C) */
+   (simplify
+    (op (mult:cs @0 @1) (mult:s @0 @2))
+    (mult @0 (op @1 @2)))
+   /* (A * B) +- A -> A * (B +- 1) */
+   (simplify
+    (op (mult:cs @0 @1) @0)
+    (if (! ALL_FRACT_MODE_P (TYPE_MODE (type)))
+     (mult @0 (op @1 { build_one_cst (type); }))))
+   /* A +- (A * B) -> A * (1 +- B) */
+   (simplify
+    (op @0 (mult:cs @0 @1))
+    (if (! ALL_FRACT_MODE_P (TYPE_MODE (type)))
+     (mult @0 (op { build_one_cst (type); } @1))))
+   /* No identical multiplicands; see if we can find a common
+      power-of-two factor in non-power-of-two multiplies.  This
+      can help in multi-dimensional array access.
+      Both operands should be multiplies, otherwise we
+      end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
+      increased the number of multiplications necessary.  */
+   (simplify
+    (op (mult:s @0 INTEGER_CST@1) (mult:s @2 INTEGER_CST@3))
+    (if (tree_fits_shwi_p (@1)
+         && tree_fits_shwi_p (@3))
+     (with
+      { 
+        HOST_WIDE_INT factor1 = tree_to_shwi (@1);
+        HOST_WIDE_INT factor3 = tree_to_shwi (@3);
+      }
+      (if (factor1 % factor3 == 0)
+       (mult (op (mult @0 { build_int_cst (type, factor1 / factor3); }) @2)
+             { build_int_cst (type, factor3); }))
+      (if (factor3 % factor1 == 0)
+       (mult (op (mult @2 { build_int_cst (type, factor3 / factor1); }) @0)
+             { build_int_cst (type, factor1); })))))))
+ 
  
  /* Simplifications of MIN_EXPR and MAX_EXPR.  */
  
*************** (define_operator_list CBRT BUILT_IN_CBRT
*** 880,885 ****
--- 934,960 ----
                            build_int_cst (TREE_TYPE (@1),
                                           element_precision (type)), @1); }))
  
+ /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
+ (for op (lrotate rrotate rshift lshift)
+  (simplify
+   (op (op @0 INTEGER_CST@1) INTEGER_CST@2)
+   (with { unsigned int prec = element_precision (type); }
+    (if (wi::ge_p (@1, 0, TYPE_SIGN (TREE_TYPE (@1)))
+         && wi::lt_p (@1, prec, TYPE_SIGN (TREE_TYPE (@1)))
+         && wi::ge_p (@2, 0, TYPE_SIGN (TREE_TYPE (@2)))
+       && wi::lt_p (@2, prec, TYPE_SIGN (TREE_TYPE (@2))))
+     (with { unsigned int low = wi::add (@1, @2).to_uhwi (); }
+      /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
+         being well defined.  */
+      (if (low >= prec)
+       (if (op == LROTATE_EXPR || op == RROTATE_EXPR)
+        (op @0 { build_int_cst (TREE_TYPE (@1), low % prec); }))
+       (if (TYPE_UNSIGNED (type) || op == LSHIFT_EXPR)
+        { build_zero_cst (type); })
+       (op @0 { build_int_cst (TREE_TYPE (@1), prec - 1); }))
+      (op @0 { build_int_cst (TREE_TYPE (@1), low); }))))))
+ 
+ 
  /* ((1 << A) & 1) != 0 -> A == 0
     ((1 << A) & 1) == 0 -> A != 0 */
  (for cmp (ne eq)

Reply via email to