Richard Sandiford <richard.sandif...@arm.com> writes:
> This patch extends the tree-level folding of variable-length vectors
> so that it can also be used on rtxes.  The first step is to move
> the tree_vector_builder new_unary/binary_operator routines to the
> parent vector_builder class (which in turn means adding a new
> template parameter).  The second step is to make simplify-rtx.c
> use a direct rtx analogue of the VECTOR_CST handling in fold-const.c.
>
> Tested on aarch64-linux-gnu, aarch64_be-elf and x86_64-linux-gnu.
> OK to install?
>
> Richard

Here's a version updated for the earlier patch, so that we take
both HONOR_NANS and HONOR_SNANS into account.  Tested on
aarch64-linux-gnu so far.
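
To make the HONOR_NANS/HONOR_SNANS part concrete, here's a rough
standalone model of what the extra mode argument to flags_to_condition
buys us.  This is illustrative C++ only, not the patch code: the real
table also tracks signedness and distinguishes quiet from signalling
NaN traps, but the two knobs below (treat the unordered bit as a
don't-care and allow NaN-trapping codes) are the same.

// Simplified model of the new flags_to_condition behaviour: each
// candidate comparison is described by the orderings for which it is
// true (including "unordered") plus whether it can trap on NaNs.
// When the mode says NaNs cannot occur, the unordered bit becomes a
// don't-care and NaN-trapping candidates become acceptable.

#include <cstdio>

enum { F_LT = 1, F_EQ = 2, F_GT = 4, F_UNORDERED = 8, F_TRAP_NANS = 16 };
static const unsigned F_ORDER = F_LT | F_EQ | F_GT | F_UNORDERED;

struct candidate { const char *name; unsigned flags; };

// Illustrative entries: LT is true for ordered-and-less and may trap
// on NaN operands; UNLT is true for unordered-or-less and never traps.
static const candidate table[] = {
  { "LT", F_LT | F_TRAP_NANS },
  { "UNLT", F_LT | F_UNORDERED },
};

// Find a candidate matching the requested FLAGS.  HONOR_NANS_P says
// whether NaNs can occur in the mode being compared.
static const char *
match (unsigned flags, bool honor_nans_p)
{
  unsigned order_mask = F_ORDER;
  if (!honor_nans_p)
    {
      flags |= F_TRAP_NANS;	   // NaN traps cannot happen, so allow them.
      order_mask &= ~F_UNORDERED;  // Unordered results cannot happen either.
    }
  for (const candidate &c : table)
    if (((flags ^ c.flags) & order_mask) == 0
	&& (c.flags & ~flags & F_TRAP_NANS) == 0)
      return c.name;
  return "UNKNOWN";
}

int
main ()
{
  // Request: "true exactly when less than, and must not trap".
  // With NaNs possible neither LT (traps) nor UNLT (also true when
  // unordered) fits; once NaNs are impossible, LT is acceptable.
  printf ("with NaNs:    %s\n", match (F_LT, true));	// UNKNOWN
  printf ("without NaNs: %s\n", match (F_LT, false));	// LT
  return 0;
}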

Thanks,
Richard


2019-07-15  Richard Sandiford  <richard.sandif...@arm.com>

gcc/
        * rtl.h (bit_and_conditions, bit_ior_conditions): Declare.
        * jump.c (flags_to_condition): Add an optional mode argument
        and use it to take HONOR_NANS and HONOR_SNANS into account.
        (bit_ior_conditions, bit_and_conditions): New functions.
        * simplify-rtx.c (simplify_binary_operation_1): Try to fold an
        AND or IOR of two comparisons into a single comparison.
        (simplify_ternary_operation): Try to fold an if_then_else involving
        two conditions into an AND of two conditions.
        (test_merged_comparisons): New function.
        (simplify_rtx_c_tests): Call it.

Index: gcc/rtl.h
===================================================================
--- gcc/rtl.h   2019-07-12 09:14:06.000000000 +0100
+++ gcc/rtl.h   2019-07-15 16:24:30.685937855 +0100
@@ -3315,6 +3315,8 @@ extern enum rtx_code reverse_condition_m
 extern enum rtx_code swap_condition (enum rtx_code);
 extern enum rtx_code unsigned_condition (enum rtx_code);
 extern enum rtx_code signed_condition (enum rtx_code);
+extern rtx_code bit_and_conditions (rtx_code, rtx_code, machine_mode);
+extern rtx_code bit_ior_conditions (rtx_code, rtx_code, machine_mode);
 extern void mark_jump_label (rtx, rtx_insn *, int);
 
 /* Return true if integer comparison operator CODE interprets its operands
Index: gcc/jump.c
===================================================================
--- gcc/jump.c  2019-07-15 16:22:55.342699887 +0100
+++ gcc/jump.c  2019-07-15 16:24:30.685937855 +0100
@@ -138,13 +138,28 @@ #define CASE(CODE, ORDER, SIGNEDNESS, TR
 }
 
 /* Return the comparison code that implements FLAGS_* bitmask FLAGS.
+   If MODE is not VOIDmode, it gives the mode of the values being compared.
+
    Assert on failure if FORCE, otherwise return UNKNOWN.  */
 
 static rtx_code
-flags_to_condition (unsigned int flags, bool force)
+flags_to_condition (unsigned int flags, bool force,
+                   machine_mode mode = VOIDmode)
 {
+  unsigned int order_mask = FLAGS_ORDER;
+  if (mode != VOIDmode)
+    {
+      if (!HONOR_NANS (mode))
+       {
+         flags |= FLAGS_TRAP_NANS;
+         order_mask &= ~FLAGS_UNORDERED;
+       }
+      else if (!HONOR_SNANS (mode))
+       flags |= FLAGS_TRAP_SNANS;
+    }
+
 #define TEST(CODE, ORDER, SIGNEDNESS, TRAPS)                           \
-  if (((flags ^ (ORDER)) & FLAGS_ORDER) == 0                           \
+  if (((flags ^ (ORDER)) & order_mask) == 0                            \
       && (FLAGS_##SIGNEDNESS == 0                                      \
          || ((FLAGS_##SIGNEDNESS ^ flags) & FLAGS_SIGNEDNESS) == 0)    \
       && (FLAGS_##TRAPS & ~flags & FLAGS_TRAPS) == 0)                  \
@@ -722,6 +737,33 @@ comparison_dominates_p (enum rtx_code co
   return (((flags1 | flags2) & FLAGS_SIGNEDNESS) != FLAGS_SIGNEDNESS
          && (flags1 & ~flags2 & FLAGS_ORDER) == 0);
 }
+
+/* Return the comparison code that tests whether CODE1 | CODE2 is
+   true for mode MODE.  Return UNKNOWN if no such comparison exists.
+   The result can trap whenever either CODE1 or CODE2 traps.  */
+
+rtx_code
+bit_ior_conditions (rtx_code code1, rtx_code code2, machine_mode mode)
+{
+  unsigned int flags1 = condition_to_flags (code1);
+  unsigned int flags2 = condition_to_flags (code2);
+  unsigned int flags = flags1 | flags2;
+  return flags_to_condition (flags, false, mode);
+}
+
+/* Return the comparison code that tests whether CODE1 & CODE2 is
+   true for mode MODE.  Return UNKNOWN if no such comparison exists.
+   The result can trap whenever either CODE1 or CODE2 traps.  */
+
+rtx_code
+bit_and_conditions (rtx_code code1, rtx_code code2, machine_mode mode)
+{
+  unsigned int flags1 = condition_to_flags (code1);
+  unsigned int flags2 = condition_to_flags (code2);
+  unsigned int flags = ((flags1 & flags2 & FLAGS_ORDER)
+                       | ((flags1 | flags2) & ~FLAGS_ORDER));
+  return flags_to_condition (flags, false, mode);
+}
 
 /* Return 1 if INSN is an unconditional jump and nothing else.  */
 
Index: gcc/simplify-rtx.c
===================================================================
--- gcc/simplify-rtx.c  2019-07-12 09:14:06.000000000 +0100
+++ gcc/simplify-rtx.c  2019-07-15 16:24:30.689937823 +0100
@@ -2889,6 +2889,20 @@ simplify_binary_operation_1 (enum rtx_co
            }
        }
 
+      /* (ior (cmp1 x y) (cmp2 x y)) -> (cmp3 x y).  */
+      if (COMPARISON_P (op0)
+         && COMPARISON_P (op1)
+         && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
+         && rtx_equal_p (XEXP (op0, 1), XEXP (op1, 1)))
+       {
+         machine_mode cmp_mode = GET_MODE (XEXP (op0, 0));
+         rtx_code cond = bit_ior_conditions (GET_CODE (op0),
+                                             GET_CODE (op1), cmp_mode);
+         if (cond != UNKNOWN)
+           return simplify_gen_relational (cond, mode, cmp_mode,
+                                           XEXP (op0, 0), XEXP (op0, 1));
+       }
+
       tem = simplify_byte_swapping_operation (code, mode, op0, op1);
       if (tem)
        return tem;
@@ -3321,6 +3335,20 @@ simplify_binary_operation_1 (enum rtx_co
          && rtx_equal_p (op1, XEXP (XEXP (op0, 1), 0)))
        return simplify_gen_binary (AND, mode, op1, XEXP (op0, 0));
 
+      /* (and (cmp1 x y) (cmp2 x y)) -> (cmp3 x y).  */
+      if (COMPARISON_P (op0)
+         && COMPARISON_P (op1)
+         && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
+         && rtx_equal_p (XEXP (op0, 1), XEXP (op1, 1)))
+       {
+         machine_mode cmp_mode = GET_MODE (XEXP (op0, 0));
+         rtx_code cond = bit_and_conditions (GET_CODE (op0),
+                                             GET_CODE (op1), cmp_mode);
+         if (cond != UNKNOWN)
+           return simplify_gen_relational (cond, mode, cmp_mode,
+                                           XEXP (op0, 0), XEXP (op0, 1));
+       }
+
       tem = simplify_byte_swapping_operation (code, mode, op0, op1);
       if (tem)
        return tem;
@@ -5832,6 +5860,14 @@ simplify_ternary_operation (enum rtx_cod
            return simplified;
        }
 
+      /* (if_then_else (cmp1 X1 Y1) (cmp2 X2 Y2) (const_int 0))
+        -> (and (cmp1 X1 Y1) (cmp2 X2 Y2)).  */
+      if (COMPARISON_P (op0)
+         && COMPARISON_P (op1)
+         && op2 == const0_rtx
+         && GET_MODE (op0) == GET_MODE (op1))
+       return simplify_gen_binary (AND, mode, op0, op1);
+
       if (COMPARISON_P (op0) && ! side_effects_p (op0))
        {
          machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
@@ -6858,6 +6894,75 @@ make_test_reg (machine_mode mode)
   return gen_rtx_REG (mode, test_reg_num++);
 }
 
+/* Test ANDs and IORs of two comparisons.  */
+
+static void
+test_merged_comparisons (machine_mode mode)
+{
+  rtx reg1 = make_test_reg (mode);
+  rtx reg2 = make_test_reg (mode);
+
+  rtx eq = gen_rtx_EQ (mode, reg1, reg2);
+  rtx ne = gen_rtx_NE (mode, reg1, reg2);
+  rtx le = gen_rtx_LE (mode, reg1, reg2);
+  rtx leu = gen_rtx_LEU (mode, reg1, reg2);
+  rtx lt = gen_rtx_LT (mode, reg1, reg2);
+  rtx ltu = gen_rtx_LTU (mode, reg1, reg2);
+  rtx ge = gen_rtx_GE (mode, reg1, reg2);
+  rtx geu = gen_rtx_GEU (mode, reg1, reg2);
+  rtx gt = gen_rtx_GT (mode, reg1, reg2);
+  rtx gtu = gen_rtx_GTU (mode, reg1, reg2);
+
+  ASSERT_FALSE (simplify_binary_operation (AND, mode, le, leu));
+  ASSERT_FALSE (simplify_binary_operation (AND, mode, lt, ltu));
+  ASSERT_FALSE (simplify_binary_operation (AND, mode, gt, gtu));
+  ASSERT_FALSE (simplify_binary_operation (AND, mode, ge, geu));
+
+  ASSERT_RTX_EQ (eq, simplify_binary_operation (AND, mode, eq, leu));
+  ASSERT_RTX_EQ (eq, simplify_binary_operation (AND, mode, eq, geu));
+  ASSERT_RTX_EQ (eq, simplify_binary_operation (AND, mode, eq, le));
+  ASSERT_RTX_EQ (eq, simplify_binary_operation (AND, mode, eq, ge));
+
+  ASSERT_RTX_EQ (eq, simplify_binary_operation (AND, mode, geu, leu));
+  ASSERT_RTX_EQ (eq, simplify_binary_operation (AND, mode, ge, le));
+
+  ASSERT_RTX_EQ (ltu, simplify_binary_operation (AND, mode, leu, ltu));
+  ASSERT_RTX_EQ (gtu, simplify_binary_operation (AND, mode, geu, gtu));
+  ASSERT_RTX_EQ (lt, simplify_binary_operation (AND, mode, le, lt));
+  ASSERT_RTX_EQ (gt, simplify_binary_operation (AND, mode, ge, gt));
+
+  ASSERT_RTX_EQ (ltu, simplify_binary_operation (AND, mode, ne, leu));
+  ASSERT_RTX_EQ (gtu, simplify_binary_operation (AND, mode, ne, geu));
+  ASSERT_RTX_EQ (lt, simplify_binary_operation (AND, mode, ne, le));
+  ASSERT_RTX_EQ (gt, simplify_binary_operation (AND, mode, ne, ge));
+
+  ASSERT_FALSE (simplify_binary_operation (IOR, mode, le, leu));
+  ASSERT_FALSE (simplify_binary_operation (IOR, mode, lt, ltu));
+  ASSERT_FALSE (simplify_binary_operation (IOR, mode, gt, gtu));
+  ASSERT_FALSE (simplify_binary_operation (IOR, mode, ge, geu));
+
+  ASSERT_RTX_EQ (leu, simplify_binary_operation (IOR, mode, eq, leu));
+  ASSERT_RTX_EQ (geu, simplify_binary_operation (IOR, mode, eq, geu));
+  ASSERT_RTX_EQ (le, simplify_binary_operation (IOR, mode, eq, le));
+  ASSERT_RTX_EQ (ge, simplify_binary_operation (IOR, mode, eq, ge));
+
+  ASSERT_RTX_EQ (ne, simplify_binary_operation (IOR, mode, gtu, ltu));
+  ASSERT_RTX_EQ (ne, simplify_binary_operation (IOR, mode, gt, lt));
+
+  ASSERT_RTX_EQ (leu, simplify_binary_operation (IOR, mode, eq, ltu));
+  ASSERT_RTX_EQ (geu, simplify_binary_operation (IOR, mode, eq, gtu));
+  ASSERT_RTX_EQ (le, simplify_binary_operation (IOR, mode, eq, lt));
+  ASSERT_RTX_EQ (ge, simplify_binary_operation (IOR, mode, eq, gt));
+
+  ASSERT_RTX_EQ (ne, simplify_binary_operation (IOR, mode, ne, ltu));
+  ASSERT_RTX_EQ (ne, simplify_binary_operation (IOR, mode, ne, gtu));
+  ASSERT_RTX_EQ (ne, simplify_binary_operation (IOR, mode, ne, lt));
+  ASSERT_RTX_EQ (ne, simplify_binary_operation (IOR, mode, ne, gt));
+
+  ASSERT_RTX_EQ (eq, simplify_ternary_operation (IF_THEN_ELSE, mode, mode,
+                                                geu, leu, const0_rtx));
+}
+
 /* Test vector simplifications involving VEC_DUPLICATE in which the
    operands and result have vector mode MODE.  SCALAR_REG is a pseudo
    register that holds one element of MODE.  */
@@ -7149,6 +7254,7 @@ simplify_const_poly_int_tests<N>::run ()
 void
 simplify_rtx_c_tests ()
 {
+  test_merged_comparisons (HImode);
   test_vector_ops ();
   simplify_const_poly_int_tests<NUM_POLY_INT_COEFFS>::run ();
 }
