Hi,

This is a follow-up to the internal function patch that adds widening and narrowing patterns. This patch improves the inliner's cost estimation for internal functions.

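For context, here is a minimal sketch (a hypothetical example, not part of the patch or its testsuite) of the kind of callee whose estimate changes: __builtin_add_overflow is lowered to the internal call .ADD_OVERFLOW during gimplification, which estimate_num_insns previously costed at 0 and now costs 1 through the default case of the new switch.

/* Hypothetical illustration only.  The __builtin_add_overflow call below
   is lowered to the internal call .ADD_OVERFLOW, so the body of this
   inline candidate no longer appears to be free to the inliner.  */
static inline int
add_checked (int a, int b, int *res)
{
  return __builtin_add_overflow (a, b, res);
}
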
Bootstrapped and regression tested on aarch64-unknown-linux-gnu.

gcc/ChangeLog:

        * ipa-fnsummary.cc (analyze_function_body): Correctly handle
        internal functions with non-zero cost.
        * tree-inline.cc (estimate_num_insns): Improve costing for internal
        functions.

diff --git a/gcc/ipa-fnsummary.cc b/gcc/ipa-fnsummary.cc
index b328bb8ce14b0725f6e5607da9d1e2f61e9baf62..449961fe44e4d86bf61e625dff0759d58e1e80ba 100644
--- a/gcc/ipa-fnsummary.cc
+++ b/gcc/ipa-fnsummary.cc
@@ -2862,16 +2862,19 @@ analyze_function_body (struct cgraph_node *node, bool early)
                 to happen, but we cannot do that for call statements
                 because edges are accounted specially.  */
 
-             if (*(is_gimple_call (stmt) ? &bb_predicate : &p) != false)
+             if (*(is_gimple_call (stmt) && !gimple_call_internal_p (stmt)
+                   ? &bb_predicate : &p) != false)
                {
                  time += final_time;
                  size += this_size;
                }
 
              /* We account everything but the calls.  Calls have their own
-                size/time info attached to cgraph edges.  This is necessary
-                in order to make the cost disappear after inlining.  */
-             if (!is_gimple_call (stmt))
+                size/time info attached to cgraph edges.  This is necessary
+                in order to make the cost disappear after inlining.  The only
+                exceptions are internal calls.  */
+             if (!is_gimple_call (stmt)
+                 || gimple_call_internal_p (stmt))
                {
                  if (prob)
                    {
diff --git a/gcc/tree-inline.cc b/gcc/tree-inline.cc
index 99efddc36c8906a797583a569424336e961c35d1..bac84d277254703369c27993dcad048de8d4ff70 100644
--- a/gcc/tree-inline.cc
+++ b/gcc/tree-inline.cc
@@ -4427,7 +4427,48 @@ estimate_num_insns (gimple *stmt, eni_weights *weights)
        tree decl;
 
        if (gimple_call_internal_p (stmt))
-         return 0;
+         {
+           switch (gimple_call_internal_fn (stmt))
+             {
+             default:
+               return 1;
+
+             case IFN_GOMP_TARGET_REV:
+             case IFN_GOMP_USE_SIMT:
+             case IFN_GOMP_SIMT_ENTER_ALLOC:
+             case IFN_GOMP_SIMT_EXIT:
+             case IFN_GOMP_SIMT_LANE:
+             case IFN_GOMP_SIMT_VF:
+             case IFN_GOMP_SIMT_LAST_LANE:
+             case IFN_GOMP_SIMT_ORDERED_PRED:
+             case IFN_GOMP_SIMT_VOTE_ANY:
+             case IFN_GOMP_SIMT_XCHG_BFLY:
+             case IFN_GOMP_SIMT_XCHG_IDX:
+             case IFN_GOMP_SIMD_LANE:
+             case IFN_GOMP_SIMD_VF:
+             case IFN_GOMP_SIMD_LAST_LANE:
+             case IFN_GOMP_SIMD_ORDERED_START:
+             case IFN_GOMP_SIMD_ORDERED_END:
+             case IFN_BUILTIN_EXPECT:
+             case IFN_ANNOTATE:
+             case IFN_NOP:
+             case IFN_UNIQUE:
+             case IFN_DEFERRED_INIT:
+             case IFN_ASSUME:
+               return 0;
+
+             case IFN_UBSAN_NULL:
+             case IFN_UBSAN_BOUNDS:
+             case IFN_UBSAN_VPTR:
+             case IFN_UBSAN_CHECK_ADD:
+             case IFN_UBSAN_CHECK_SUB:
+             case IFN_UBSAN_CHECK_MUL:
+             case IFN_UBSAN_PTR:
+             case IFN_UBSAN_OBJECT_SIZE:
+               /* Estimating a compare and jump.  */
+               return 2;
+             }
+         }
        else if ((decl = gimple_call_fndecl (stmt))
                 && fndecl_built_in_p (decl))
          {
