Hi Jakub,

On 10/08/16 07:55, Jakub Jelinek wrote:
On Wed, Aug 10, 2016 at 07:51:08AM +1000, kugan wrote:
On 10/08/16 07:46, Jakub Jelinek wrote:
On Wed, Aug 10, 2016 at 07:42:25AM +1000, kugan wrote:
There was no new regression while testing. I also moved the testcase from
gcc.dg/torture/pr72835.c to gcc.dg/tree-ssa/pr72835.c. Is this OK for trunk?

This looks strange.  The tree-ssa-reassoc.c code has been trying to never
reuse SSA_NAMEs if they would hold a different value.
So there should be no resetting of flow sensitive info needed.

We are not reusing but, if you see the example change in reassoc:

-  _5 = -_4;
-  _6 = _2 * _5;
+  _5 = _4;
+  _6 = _5 * _2;

_5 and _6 will now have different value ranges because they compute
different values. Therefore I think we should reset (?).

No.  We should not have reused _5 and _6 for the different values.
It is not harmful just for the value ranges, but also for debug info.

I see it now. The problem is that, in the case of multiplication by a negate, we only pass changed=true to rewrite_expr_tree when the (-1) is still present in the ops list. If the (-1) has already been combined away, as in the testcase, it is no longer present and we end up passing changed=false to rewrite_expr_tree.

We should instead set changed based on what happens in try_special_add_to_ops. The attached patch does this. Bootstrap and regression testing are ongoing. Is this OK for trunk if there is no regression?

Thanks,
Kugan


gcc/testsuite/ChangeLog:

2016-08-10  Kugan Vivekanandarajah  <kug...@linaro.org>

        PR tree-optimization/72835
        * gcc.dg/tree-ssa/pr72835.c: New test.

gcc/ChangeLog:

2016-08-10  Kugan Vivekanandarajah  <kug...@linaro.org>

        PR tree-optimization/72835
        * tree-ssa-reassoc.c (try_special_add_to_ops): Return true if we
        changed the operands.
        (linearize_expr_tree): Return true if try_special_add_to_ops set
        changed to true.
        (reassociate_bb): Pass changed returned by linearize_expr_tree
        to rewrite_expr_tree.

diff --git a/gcc/testsuite/gcc.dg/tree-ssa/pr72835.c 
b/gcc/testsuite/gcc.dg/tree-ssa/pr72835.c
index e69de29..d7d0a8d 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/pr72835.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/pr72835.c
@@ -0,0 +1,36 @@
+/* PR tree-optimization/72835 */
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+struct struct_1 {
+    unsigned int m1 : 6 ;
+    unsigned int m2 : 24 ;
+    unsigned int m3 : 6 ;
+};
+
+unsigned short var_32 = 0x2d10;
+
+struct struct_1 s1;
+
+void init ()
+{
+  s1.m1 = 4;
+  s1.m2 = 0x7ca4b8;
+  s1.m3 = 24;
+}
+
+void foo ()
+{
+  unsigned int c =
+    ((unsigned int) s1.m2) * (-((unsigned int) s1.m3))
+    + (var_32) * (-((unsigned int) (s1.m1)));
+  if (c != 4098873984)
+    __builtin_abort ();
+}
+
+int main ()
+{
+    init ();
+    foo ();
+    return 0;
+}
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 7fd7550..c5641fe 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -1038,7 +1038,7 @@ eliminate_using_constants (enum tree_code opcode,
 }
 
 
-static void linearize_expr_tree (vec<operand_entry *> *, gimple *,
+static bool linearize_expr_tree (vec<operand_entry *> *, gimple *,
                                 bool, bool);
 
 /* Structure for tracking and counting operands.  */
@@ -4456,12 +4456,16 @@ acceptable_pow_call (gcall *stmt, tree *base, 
HOST_WIDE_INT *exponent)
 }
 
 /* Try to derive and add operand entry for OP to *OPS.  Return false if
-   unsuccessful.  */
+   unsuccessful.  If we changed the operands such that the (intermediate)
+   results can be different (as in the case of NEGATE_EXPR converted to
+   multiplication by -1), set changed to true so that we will not reuse the
+   SSA (PR72835).  */
 
 static bool
 try_special_add_to_ops (vec<operand_entry *> *ops,
                        enum tree_code code,
-                       tree op, gimple* def_stmt)
+                       tree op, gimple* def_stmt,
+                       bool *changed)
 {
   tree base = NULL_TREE;
   HOST_WIDE_INT exponent = 0;
@@ -4492,6 +4496,7 @@ try_special_add_to_ops (vec<operand_entry *> *ops,
       add_to_ops_vec (ops, rhs1);
       add_to_ops_vec (ops, cst);
       gimple_set_visited (def_stmt, true);
+      *changed = true;
       return true;
     }
 
@@ -4499,9 +4504,10 @@ try_special_add_to_ops (vec<operand_entry *> *ops,
 }
 
 /* Recursively linearize a binary expression that is the RHS of STMT.
-   Place the operands of the expression tree in the vector named OPS.  */
+   Place the operands of the expression tree in the vector named OPS.
+   Return TRUE if try_special_add_to_ops has set changed to TRUE.  */
 
-static void
+static bool
 linearize_expr_tree (vec<operand_entry *> *ops, gimple *stmt,
                     bool is_associative, bool set_visited)
 {
@@ -4512,6 +4518,7 @@ linearize_expr_tree (vec<operand_entry *> *ops, gimple 
*stmt,
   bool binrhsisreassoc = false;
   enum tree_code rhscode = gimple_assign_rhs_code (stmt);
   struct loop *loop = loop_containing_stmt (stmt);
+  bool changed = false;
 
   if (set_visited)
     gimple_set_visited (stmt, true);
@@ -4542,18 +4549,20 @@ linearize_expr_tree (vec<operand_entry *> *ops, gimple 
*stmt,
       if (!is_associative)
        {
          add_to_ops_vec (ops, binrhs);
-         return;
+         return changed;
        }
 
       if (!binrhsisreassoc)
        {
-         if (!try_special_add_to_ops (ops, rhscode, binrhs, binrhsdef))
+         if (!try_special_add_to_ops (ops, rhscode, binrhs,
+                                      binrhsdef, &changed))
            add_to_ops_vec (ops, binrhs);
 
-         if (!try_special_add_to_ops (ops, rhscode, binlhs, binlhsdef))
+         if (!try_special_add_to_ops (ops, rhscode, binlhs,
+                                      binlhsdef, &changed))
            add_to_ops_vec (ops, binlhs);
 
-         return;
+         return changed;
        }
 
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -4587,11 +4596,13 @@ linearize_expr_tree (vec<operand_entry *> *ops, gimple 
*stmt,
   gcc_assert (TREE_CODE (binrhs) != SSA_NAME
              || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
                                      rhscode, loop));
-  linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
-                      is_associative, set_visited);
+  changed |= linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
+                                 is_associative, set_visited);
 
-  if (!try_special_add_to_ops (ops, rhscode, binrhs, binrhsdef))
+  if (!try_special_add_to_ops (ops, rhscode, binrhs,
+                              binrhsdef, &changed))
     add_to_ops_vec (ops, binrhs);
+  return changed;
 }
 
 /* Repropagate the negates back into subtracts, since no other pass
@@ -5250,6 +5261,7 @@ reassociate_bb (basic_block bb)
   basic_block son;
   gimple *stmt = last_stmt (bb);
   bool cfg_cleanup_needed = false;
+  bool changed = false;
 
   if (stmt && !gimple_visited_p (stmt))
     cfg_cleanup_needed |= maybe_optimize_range_tests (stmt);
@@ -5323,7 +5335,7 @@ reassociate_bb (basic_block bb)
                continue;
 
              gimple_set_visited (stmt, true);
-             linearize_expr_tree (&ops, stmt, true, true);
+             changed = linearize_expr_tree (&ops, stmt, true, true);
              ops.qsort (sort_by_operand_rank);
              optimize_ops_list (rhs_code, &ops);
              if (undistribute_ops_list (rhs_code, &ops,
@@ -5415,7 +5427,7 @@ reassociate_bb (basic_block bb)
 
                      new_lhs = rewrite_expr_tree (stmt, 0, ops,
                                                   powi_result != NULL
-                                                  || negate_result);
+                                                  || changed);
                     }
 
                  /* If we combined some repeated factors into a 

Reply via email to