This patch removes copyprop passes run immediately after CCP.  This
uncovered that I failed to implement copyprop properly for PHI nodes
(due to the "clever" use of UNDEFINED as the initial value to meet PHI
args into, and my not wanting to enable optimistic copy-prop just yet).

Fixed together with the pass removal.

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to trunk.

Richard.

2015-04-23  Richard Biener  <rguent...@suse.de>

        * passes.def: Remove copy propagation passes run directly after CCP.
        * tree-ssa-ccp.c (get_value_for_expr): Fall back to a COPY for
        SSA names.
        (ccp_visit_phi_node): Rework to handle first executable edge
        specially.

Index: gcc/passes.def
===================================================================
--- gcc/passes.def      (revision 222267)
+++ gcc/passes.def      (working copy)
@@ -158,7 +158,6 @@ along with GCC; see the file COPYING3.
       NEXT_PASS (pass_ccp);
       /* After CCP we rewrite no longer addressed locals into SSA
         form if possible.  */
-      NEXT_PASS (pass_copy_prop);
       NEXT_PASS (pass_complete_unrolli);
       NEXT_PASS (pass_phiprop);
       NEXT_PASS (pass_forwprop);
@@ -210,7 +209,6 @@ along with GCC; see the file COPYING3.
       NEXT_PASS (pass_ccp);
       /* After CCP we rewrite no longer addressed locals into SSA
         form if possible.  */
-      NEXT_PASS (pass_copy_prop);
       NEXT_PASS (pass_cse_sincos);
       NEXT_PASS (pass_optimize_bswap);
       NEXT_PASS (pass_split_crit_edges);
Index: gcc/tree-ssa-ccp.c
===================================================================
*** gcc/tree-ssa-ccp.c  (revision 222269)
--- gcc/tree-ssa-ccp.c  (working copy)
*************** set_lattice_value (tree var, ccp_prop_va
*** 539,547 ****
    if (old_val->lattice_val != new_val.lattice_val
        || (new_val.lattice_val == CONSTANT
          && (TREE_CODE (new_val.value) != TREE_CODE (old_val->value)
-             || simple_cst_equal (new_val.value, old_val->value) != 1
              || (TREE_CODE (new_val.value) == INTEGER_CST
!                 && new_val.mask != old_val->mask))))
      {
        /* ???  We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */
--- 539,552 ----
    if (old_val->lattice_val != new_val.lattice_val
        || (new_val.lattice_val == CONSTANT
          && (TREE_CODE (new_val.value) != TREE_CODE (old_val->value)
              || (TREE_CODE (new_val.value) == INTEGER_CST
!                 && (new_val.mask != old_val->mask
!                     || (wi::bit_and_not (wi::to_widest (old_val->value),
!                                          new_val.mask)
!                         != wi::bit_and_not (wi::to_widest (new_val.value),
!                                             new_val.mask))))
!             || (TREE_CODE (new_val.value) != INTEGER_CST
!                 && !operand_equal_p (new_val.value, old_val->value, 0)))))
      {
        /* ???  We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */
*************** get_value_for_expr (tree expr, bool for_
*** 623,628 ****
--- 628,642 ----
          && val.lattice_val == CONSTANT
          && TREE_CODE (val.value) == ADDR_EXPR)
        val = get_value_from_alignment (val.value);
+       /* Fall back to a copy value.  */
+       if (!for_bits_p
+         && val.lattice_val == VARYING
+         && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
+       {
+         val.lattice_val = CONSTANT;
+         val.value = expr;
+         val.mask = -1;
+       }
      }
    else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
*************** static enum ssa_prop_result
*** 1068,1074 ****
  ccp_visit_phi_node (gphi *phi)
  {
    unsigned i;
!   ccp_prop_value_t *old_val, new_val;
  
    if (dump_file && (dump_flags & TDF_DETAILS))
      {
--- 1082,1088 ----
  ccp_visit_phi_node (gphi *phi)
  {
    unsigned i;
!   ccp_prop_value_t new_val;
  
    if (dump_file && (dump_flags & TDF_DETAILS))
      {
*************** ccp_visit_phi_node (gphi *phi)
*** 1076,1100 ****
        print_gimple_stmt (dump_file, phi, 0, dump_flags);
      }
  
!   old_val = get_value (gimple_phi_result (phi));
!   switch (old_val->lattice_val)
!     {
!     case VARYING:
!       return SSA_PROP_VARYING;
! 
!     case CONSTANT:
!       new_val = *old_val;
!       break;
! 
!     case UNDEFINED:
!       new_val.lattice_val = UNDEFINED;
!       new_val.value = NULL_TREE;
!       break;
! 
!     default:
!       gcc_unreachable ();
!     }
  
    for (i = 0; i < gimple_phi_num_args (phi); i++)
      {
        /* Compute the meet operator over all the PHI arguments flowing
--- 1090,1100 ----
        print_gimple_stmt (dump_file, phi, 0, dump_flags);
      }
  
!   new_val.lattice_val = UNDEFINED;
!   new_val.value = NULL_TREE;
!   new_val.mask = 0;
  
+   bool first = true;
    for (i = 0; i < gimple_phi_num_args (phi); i++)
      {
        /* Compute the meet operator over all the PHI arguments flowing
*************** ccp_visit_phi_node (gphi *phi)
*** 1116,1122 ****
          tree arg = gimple_phi_arg (phi, i)->def;
          ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
  
!         ccp_lattice_meet (gimple_bb (phi), &new_val, &arg_val);
  
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
--- 1116,1128 ----
          tree arg = gimple_phi_arg (phi, i)->def;
          ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
  
!         if (first)
!           {
!             new_val = arg_val;
!             first = false;
!           }
!         else
!           ccp_lattice_meet (gimple_bb (phi), &new_val, &arg_val);
  
          if (dump_file && (dump_flags & TDF_DETAILS))
            {

Reply via email to