From: Ju-Zhe Zhong <juzhe.zh...@rivai.ai>

Hi, since we are going to have LEN_MASK_{LOAD,STORE} in the loop vectorizer,
gimple-fold should handle them as well.

Currently,
1. we can fold MASK_{LOAD,STORE} into a MEM when the mask is all ones.
2. we can fold LEN_{LOAD,STORE} into a MEM when (len - bias) equals the
   full vector size (see the sketch below).
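For illustration, here is a hand-written sketch (not real dump output;
operand order and types are simplified) of the existing MASK_LOAD fold:

  _1 = .MASK_LOAD (ptr_2, align, { -1, -1, -1, -1 });

becomes, when the mask is all ones:

  _1 = MEM <vector(4) int> [(int *) ptr_2];

and LEN_{LOAD,STORE} fold the same way when (len - bias) equals the
full vector size.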

Now, I think it makes sense to also support folding
LEN_MASK_{LOAD,STORE} into a MEM when both the mask is all ones and
(len - bias) equals the full vector size, as sketched below.
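Under the same simplified conventions as the sketch above (operand
order is illustrative only), the new fold would look like:

  _1 = .LEN_MASK_LOAD (ptr_2, align, len_3, bias, { -1, -1, -1, -1 });

becomes, when the mask is all ones and (len_3 - bias) equals the full
vector size:

  _1 = MEM <vector(4) int> [(int *) ptr_2];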
         
gcc/ChangeLog:

        * gimple-fold.cc (gimple_fold_partial_load_store_mem_ref): Apply
        LEN_MASK_{LOAD,STORE}.
        (gimple_fold_partial_store): Ditto.
        (gimple_fold_call): Ditto.

---
 gcc/gimple-fold.cc | 23 ++++++++++++++++++-----
 1 file changed, 18 insertions(+), 5 deletions(-)

diff --git a/gcc/gimple-fold.cc b/gcc/gimple-fold.cc
index 55e80567708..3d46b76edeb 100644
--- a/gcc/gimple-fold.cc
+++ b/gcc/gimple-fold.cc
@@ -5370,10 +5370,10 @@ arith_overflowed_p (enum tree_code code, const_tree type,
   return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
 }
 
-/* If IFN_{MASK,LEN}_LOAD/STORE call CALL is unconditional, return a MEM_REF
-   for the memory it references, otherwise return null.  VECTYPE is the
-   type of the memory vector.  MASK_P indicates it's for MASK if true,
-   otherwise it's for LEN.  */
+/* If IFN_{MASK,LEN,LEN_MASK}_LOAD/STORE call CALL is unconditional,
+   return a MEM_REF for the memory it references, otherwise return null.
+   VECTYPE is the type of the memory vector.  MASK_P indicates it's for
+   MASK if true, otherwise it's for LEN.  */
 
 static tree
 gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
@@ -5400,6 +5400,16 @@ gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
       if (maybe_ne (wi::to_poly_widest (basic_len) - wi::to_widest (bias),
                    GET_MODE_SIZE (TYPE_MODE (vectype))))
        return NULL_TREE;
+
+      /* For LEN_MASK_{LOAD,STORE}, we should also check whether
+         the mask is an all-ones mask.  */
+      internal_fn ifn = gimple_call_internal_fn (call);
+      if (ifn == IFN_LEN_MASK_LOAD || ifn == IFN_LEN_MASK_STORE)
+       {
+         tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
+         if (!integer_all_onesp (mask))
+           return NULL_TREE;
+       }
     }
 
   unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
@@ -5438,7 +5448,8 @@ static bool
 gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
                           bool mask_p)
 {
-  tree rhs = gimple_call_arg (call, 3);
+  internal_fn ifn = gimple_call_internal_fn (call);
+  tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
   if (tree lhs
       = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs), mask_p))
     {
@@ -5676,9 +5687,11 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
          changed |= gimple_fold_partial_store (gsi, stmt, true);
          break;
        case IFN_LEN_LOAD:
+       case IFN_LEN_MASK_LOAD:
          changed |= gimple_fold_partial_load (gsi, stmt, false);
          break;
        case IFN_LEN_STORE:
+       case IFN_LEN_MASK_STORE:
          changed |= gimple_fold_partial_store (gsi, stmt, false);
          break;
        default:
-- 
2.36.3

