The following speeds up one of the bottlenecks of SCCVN/PRE when
compiling aermod.f90.  It introduces a double_int::rshift function
that works on the whole representation and avoids some redundant type
construction.  It also avoids re-allocating the vector of ops
by using the available shared vector and copying it after construction.

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to trunk.

Richard.

2013-05-07  Richard Biener  <rguent...@suse.de>

        * double-int.h (rshift): New overload.
        * double-int.c (rshift): New function.
        * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Optimize.
        (create_reference_ops_from_ref): Remove.
        (vn_reference_insert): Use shared ops for constructing the
        reference and copy it.

Index: gcc/double-int.h
===================================================================
*** gcc/double-int.h    (revision 198625)
--- gcc/double-int.h    (working copy)
*************** struct double_int
*** 130,135 ****
--- 130,136 ----
  
    double_int lshift (HOST_WIDE_INT count) const;
    double_int lshift (HOST_WIDE_INT count, unsigned int prec, bool arith) 
const;
+   double_int rshift (HOST_WIDE_INT count) const;
    double_int rshift (HOST_WIDE_INT count, unsigned int prec, bool arith) 
const;
    double_int alshift (HOST_WIDE_INT count, unsigned int prec) const;
    double_int arshift (HOST_WIDE_INT count, unsigned int prec) const;
Index: gcc/double-int.c
===================================================================
*** gcc/double-int.c    (revision 198625)
--- gcc/double-int.c    (working copy)
*************** double_int::lshift (HOST_WIDE_INT count)
*** 1116,1121 ****
--- 1116,1154 ----
    return ret;
  }
  
+ /* Shift A right by COUNT places.  */
+ 
+ double_int
+ double_int::rshift (HOST_WIDE_INT count) const
+ {
+   double_int ret;
+ 
+   gcc_checking_assert (count >= 0);
+ 
+   if (count >= HOST_BITS_PER_DOUBLE_INT)
+     {
+       /* Shifting by the host word size is undefined according to the
+        ANSI standard, so we must handle this as a special case.  */
+       ret.high = 0;
+       ret.low = 0;
+     }
+   else if (count >= HOST_BITS_PER_WIDE_INT)
+     {
+       ret.high = 0;
+       ret.low
+       = (unsigned HOST_WIDE_INT) (high >> (count - HOST_BITS_PER_WIDE_INT));
+     }
+   else
+     {
+       ret.high = high >> count;
+       ret.low = ((low >> count)
+                | ((unsigned HOST_WIDE_INT) high
+                   << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
+     }
+ 
+   return ret;
+ }
+ 
  /* Shift A left by COUNT places keeping only PREC bits of result.  Shift
     right if COUNT is negative.  ARITH true specifies arithmetic shifting;
     otherwise use logical shift.  */
Index: gcc/tree-ssa-sccvn.c
===================================================================
*** gcc/tree-ssa-sccvn.c        (revision 198665)
--- gcc/tree-ssa-sccvn.c        (working copy)
*************** copy_reference_ops_from_ref (tree ref, v
*** 728,733 ****
--- 728,735 ----
      {
        vn_reference_op_s temp;
  
+       result->reserve (3);
+ 
        memset (&temp, 0, sizeof (temp));
        temp.type = TREE_TYPE (ref);
        temp.opcode = TREE_CODE (ref);
*************** copy_reference_ops_from_ref (tree ref, v
*** 735,755 ****
        temp.op1 = TMR_STEP (ref);
        temp.op2 = TMR_OFFSET (ref);
        temp.off = -1;
!       result->safe_push (temp);
  
        memset (&temp, 0, sizeof (temp));
        temp.type = NULL_TREE;
        temp.opcode = ERROR_MARK;
        temp.op0 = TMR_INDEX2 (ref);
        temp.off = -1;
!       result->safe_push (temp);
  
        memset (&temp, 0, sizeof (temp));
        temp.type = NULL_TREE;
        temp.opcode = TREE_CODE (TMR_BASE (ref));
        temp.op0 = TMR_BASE (ref);
        temp.off = -1;
!       result->safe_push (temp);
        return;
      }
  
--- 737,757 ----
        temp.op1 = TMR_STEP (ref);
        temp.op2 = TMR_OFFSET (ref);
        temp.off = -1;
!       result->quick_push (temp);
  
        memset (&temp, 0, sizeof (temp));
        temp.type = NULL_TREE;
        temp.opcode = ERROR_MARK;
        temp.op0 = TMR_INDEX2 (ref);
        temp.off = -1;
!       result->quick_push (temp);
  
        memset (&temp, 0, sizeof (temp));
        temp.type = NULL_TREE;
        temp.opcode = TREE_CODE (TMR_BASE (ref));
        temp.op0 = TMR_BASE (ref);
        temp.off = -1;
!       result->quick_push (temp);
        return;
      }
  
*************** copy_reference_ops_from_ref (tree ref, v
*** 802,810 ****
                    double_int off
                      = tree_to_double_int (this_offset)
                        + tree_to_double_int (bit_offset)
!                         .arshift (BITS_PER_UNIT == 8
!                                   ? 3 : exact_log2 (BITS_PER_UNIT),
!                                   HOST_BITS_PER_DOUBLE_INT);
                    if (off.fits_shwi ())
                      temp.off = off.low;
                  }
--- 804,811 ----
                    double_int off
                      = tree_to_double_int (this_offset)
                        + tree_to_double_int (bit_offset)
!                       .rshift (BITS_PER_UNIT == 8
!                                  ? 3 : exact_log2 (BITS_PER_UNIT));
                    if (off.fits_shwi ())
                      temp.off = off.low;
                  }
*************** copy_reference_ops_from_ref (tree ref, v
*** 846,852 ****
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
!         temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
--- 847,853 ----
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
!         temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
*************** copy_reference_ops_from_call (gimple cal
*** 1114,1131 ****
      }
  }
  
- /* Create a vector of vn_reference_op_s structures from REF, a
-    REFERENCE_CLASS_P tree.  The vector is not shared. */
- 
- static vec<vn_reference_op_s> 
- create_reference_ops_from_ref (tree ref)
- {
-   vec<vn_reference_op_s> result = vNULL;
- 
-   copy_reference_ops_from_ref (ref, &result);
-   return result;
- }
- 
  /* Create a vector of vn_reference_op_s structures from CALL, a
     call statement.  The vector is not shared.  */
  
--- 1115,1120 ----
*************** vn_reference_insert (tree op, tree resul
*** 2096,2101 ****
--- 2085,2091 ----
  {
    vn_reference_s **slot;
    vn_reference_t vr1;
+   bool tem;
  
    vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
    if (TREE_CODE (result) == SSA_NAME)
*************** vn_reference_insert (tree op, tree resul
*** 2103,2109 ****
    else
      vr1->value_id = get_or_alloc_constant_value_id (result);
    vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
!   vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
    vr1->type = TREE_TYPE (op);
    vr1->set = get_alias_set (op);
    vr1->hashcode = vn_reference_compute_hash (vr1);
--- 2093,2099 ----
    else
      vr1->value_id = get_or_alloc_constant_value_id (result);
    vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
!   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
    vr1->type = TREE_TYPE (op);
    vr1->set = get_alias_set (op);
    vr1->hashcode = vn_reference_compute_hash (vr1);

Reply via email to