Hi,
this finishes the planned ipa-inline.c breakup.  The transformation code
is actually quite self-contained and separate from the heuristics, so
ipa-inline.c now hopefully reads pretty well.
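
To make the new interface concrete, here is a minimal sketch (an
illustration only, not part of the patch; the helper name is made up)
of how a heuristic now drives the transformation side: decisions are
recorded on call edges with inline_call, and the bodies are only
rewritten later when the pass manager runs inline_transform on each
function:

  /* Illustration only, not GCC source: a made-up heuristic that
     records an "inline this" decision for every direct call of NODE
     that is not inlined yet.  inline_call only updates the callgraph
     (the inline plan); the gimple bodies are rewritten later by
     inline_transform.  A real heuristic would of course consult
     can_inline_edge_p and friends first.  */

  static void
  example_mark_all_calls_inline (struct cgraph_node *node)
  {
    struct cgraph_edge *e;

    for (e = node->callees; e; e = e->next_callee)
      if (e->inline_failed && !e->callee->global.inlined_to)
        /* No indirect-edge collection, no overall size bookkeeping.  */
        inline_call (e, true, NULL, NULL);
  }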

Along the way I also revisited the comments and dropped the cgraph_ prefixes,
as elsewhere.

Bootstrapped/regtested x86_64-linux, committed.

Honza

        * cgraph.h (save_inline_function_body): Remove.
        * ipa-inline-transform.c: New file, broken out of...
        * ipa-inline.c: ... this one; Update toplevel comment.
        (ncalls_inlined, nfunctions_inlined): Move to ipa-inline-transform.c;
        make global.
        (update_noncloned_frequencies): Move to ipa-inline-transform.c.
        (cgraph_mark_inline_edge): Rename to inline_call; move to
        ipa-inline-transform.c.
        (cgraph_clone_inlined_nodes): Rename to clone_inlined_nodes;
        move to ipa-inline-transform.c.
        (recursive_inlining, inline_small_functions, flatten_function,
        ipa_inline, inline_always_inline_functions,
        early_inline_small_functions): Update.
        (inline_transform): Move to ipa-inline-transform.c.
        * ipa-inline.h (inline_call, inline_transform, clone_inlined_nodes):
        Declare.
        * Makefile.in (ipa-inline-transform.o): New file.
        * cgraphunit.c (save_inline_function_body): Move to
        ipa-inline-transform.c.
Index: cgraph.h
===================================================================
*** cgraph.h    (revision 172711)
--- cgraph.h    (working copy)
*************** struct cgraph_node *cgraph_function_vers
*** 595,601 ****
                                                const char *);
  void tree_function_versioning (tree, tree, VEC (ipa_replace_map_p,gc)*, bool, bitmap,
                               bitmap, basic_block);
- struct cgraph_node *save_inline_function_body (struct cgraph_node *);
  void record_references_in_initializer (tree, bool);
  bool cgraph_process_new_functions (void);
  
--- 595,600 ----
*************** cgraph_edge_recursive_p (struct cgraph_e
*** 927,933 ****
      return e->caller->decl == e->callee->decl;
  }
  
- 
  /* FIXME: inappropriate dependency of cgraph on IPA.  */
  #include "ipa-ref-inline.h"
  
--- 926,931 ----
Index: ipa-inline-transform.c
===================================================================
*** ipa-inline-transform.c      (revision 0)
--- ipa-inline-transform.c      (revision 0)
***************
*** 0 ****
--- 1,328 ----
+ /* Callgraph transformations to handle inlining
+    Copyright (C) 2003, 2004, 2007, 2008, 2009, 2010, 2011
+    Free Software Foundation, Inc.
+    Contributed by Jan Hubicka
+ 
+ This file is part of GCC.
+ 
+ GCC is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free
+ Software Foundation; either version 3, or (at your option) any later
+ version.
+ 
+ GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ for more details.
+ 
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3.  If not see
+ <http://www.gnu.org/licenses/>.  */
+ 
+ /* The inline decisions are stored in the callgraph as an "inline plan"
+    and are applied later.
+ 
+    To mark a given call inlined, use the inline_call function.  The
+    function marks the edge inlined and, if necessary, produces a virtual
+    clone in the callgraph representing the new copy of the callee's
+    function body.
+ 
+    The inline plan is applied to a given function body by inline_transform.  */
+ 
+ #include "config.h"
+ #include "system.h"
+ #include "coretypes.h"
+ #include "tm.h"
+ #include "tree.h"
+ #include "langhooks.h"
+ #include "cgraph.h"
+ #include "timevar.h"
+ #include "output.h"
+ #include "intl.h"
+ #include "coverage.h"
+ #include "ggc.h"
+ #include "tree-flow.h"
+ #include "ipa-prop.h"
+ #include "ipa-inline.h"
+ #include "tree-inline.h"
+ 
+ int ncalls_inlined;
+ int nfunctions_inlined;
+ 
+ /* Scale frequency of NODE edges by FREQ_SCALE and increase loop nest
+    by NEST.  */
+ 
+ static void
+ update_noncloned_frequencies (struct cgraph_node *node,
+                             int freq_scale, int nest)
+ {
+   struct cgraph_edge *e;
+ 
+   /* We do not want to ignore high loop nest after freq drops to 0.  */
+   if (!freq_scale)
+     freq_scale = 1;
+   for (e = node->callees; e; e = e->next_callee)
+     {
+       e->loop_nest += nest;
+       e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
+       if (e->frequency > CGRAPH_FREQ_MAX)
+         e->frequency = CGRAPH_FREQ_MAX;
+       if (!e->inline_failed)
+         update_noncloned_frequencies (e->callee, freq_scale, nest);
+     }
+ }
+ 
+ 
+ /* E is expected to be an edge being inlined.  Clone destination node of
+    the edge and redirect it to the new clone.
+    DUPLICATE is used for bookkeeping on whether we are actually creating new
+    clones or re-using the node originally representing an out-of-line
+    function call.  */
+ 
+ void
+ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
+                    bool update_original, int *overall_size)
+ {
+   HOST_WIDE_INT peak;
+   struct inline_summary *caller_info, *callee_info;
+ 
+   if (duplicate)
+     {
+       /* We may eliminate the need for out-of-line copy to be output.
+        In that case just go ahead and re-use it.  This is not just a
+        memory optimization.  Making the offline copy of a function disappear
+        from the program will improve future decisions on inlining.  */
+       if (!e->callee->callers->next_caller
+         /* Recursive inlining never wants the master clone to
+            be overwritten.  */
+         && update_original
+         /* FIXME: When address is taken of DECL_EXTERNAL function we still
+            can remove its offline copy, but we would need to keep unanalyzed
+            node in the callgraph so references can point to it.  */
+         && !e->callee->address_taken
+         && cgraph_can_remove_if_no_direct_calls_p (e->callee)
+         /* Inlining might enable more devirtualizing, so we want to remove
+            those only after all devirtualizable virtual calls are processed.
+            Lacking may edges in callgraph we just preserve them post
+            inlining.  */
+         && (!DECL_VIRTUAL_P (e->callee->decl)
+             || (!DECL_COMDAT (e->callee->decl)
+                 && !DECL_EXTERNAL (e->callee->decl)))
+         /* Don't reuse if more than one function shares a comdat group.
+            If the other function(s) are needed, we need to emit even
+            this function out of line.  */
+         && !e->callee->same_comdat_group
+         /* During early inlining some unanalyzed cgraph nodes might be in the
+            callgraph and they might refer to the function in question.  */
+         && !cgraph_new_nodes)
+       {
+         gcc_assert (!e->callee->global.inlined_to);
+         if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
+           {
+             if (overall_size)
+               *overall_size -= inline_summary (e->callee)->size;
+             nfunctions_inlined++;
+           }
+         duplicate = false;
+         e->callee->local.externally_visible = false;
+           update_noncloned_frequencies (e->callee, e->frequency, e->loop_nest);
+       }
+       else
+       {
+         struct cgraph_node *n;
+         n = cgraph_clone_node (e->callee, e->callee->decl,
+                                e->count, e->frequency, e->loop_nest,
+                                update_original, NULL);
+         cgraph_redirect_edge_callee (e, n);
+       }
+     }
+ 
+   callee_info = inline_summary (e->callee);
+   caller_info = inline_summary (e->caller);
+ 
+   if (e->caller->global.inlined_to)
+     e->callee->global.inlined_to = e->caller->global.inlined_to;
+   else
+     e->callee->global.inlined_to = e->caller;
+   callee_info->stack_frame_offset
+     = caller_info->stack_frame_offset
+       + caller_info->estimated_self_stack_size;
+   peak = callee_info->stack_frame_offset
+       + callee_info->estimated_self_stack_size;
+   if (inline_summary (e->callee->global.inlined_to)->estimated_stack_size
+       < peak)
+     inline_summary (e->callee->global.inlined_to)->estimated_stack_size = peak;
+   cgraph_propagate_frequency (e->callee);
+ 
+   /* Recursively clone all bodies.  */
+   for (e = e->callee->callees; e; e = e->next_callee)
+     if (!e->inline_failed)
+       clone_inlined_nodes (e, duplicate, update_original, overall_size);
+ }
+ 
+ 
+ /* Mark edge E as inlined and update callgraph accordingly.  UPDATE_ORIGINAL
+    specifies whether profile of original function should be updated.  If any new
+    indirect edges are discovered in the process, add them to NEW_EDGES, unless
+    it is NULL.  Return true iff any new callgraph edges were discovered as a
+    result of inlining.  */
+ 
+ bool
+ inline_call (struct cgraph_edge *e, bool update_original,
+            VEC (cgraph_edge_p, heap) **new_edges,
+            int *overall_size)
+ {
+   int old_size = 0, new_size = 0;
+   struct cgraph_node *to = NULL;
+   struct cgraph_edge *curr = e;
+   struct inline_summary *info;
+ 
+   /* Don't inline inlined edges.  */
+   gcc_assert (e->inline_failed);
+   /* Don't even think of inlining inline clone.  */
+   gcc_assert (!e->callee->global.inlined_to);
+ 
+   e->inline_failed = CIF_OK;
+   DECL_POSSIBLY_INLINED (e->callee->decl) = true;
+ 
+   clone_inlined_nodes (e, true, update_original, overall_size);
+ 
+   /* Now update size of caller and all functions caller is inlined into.  */
+   for (;e && !e->inline_failed; e = e->caller->callers)
+     {
+       to = e->caller;
+       info = inline_summary (to);
+       old_size = info->size;
+       new_size = estimate_size_after_inlining (to, curr);
+       info->size = new_size;
+       info->time = estimate_time_after_inlining (to, curr);
+     }
+   gcc_assert (curr->callee->global.inlined_to == to);
+   if (overall_size && new_size > old_size)
+     *overall_size += new_size - old_size;
+   ncalls_inlined++;
+ 
+   if (flag_indirect_inlining && optimize)
+     return ipa_propagate_indirect_call_infos (curr, new_edges);
+   else
+     return false;
+ }
+ 
+ 
+ /* Copy function body of NODE and redirect all inline clones to it.
+    This is done before inline plan is applied to NODE when there are
+    still some inline clones of it.
+ 
+    This is necessary because inline decisions are not really transitive
+    and the other inline clones may have different bodies.  */
+ 
+ static struct cgraph_node *
+ save_inline_function_body (struct cgraph_node *node)
+ {
+   struct cgraph_node *first_clone, *n;
+ 
+   if (dump_file)
+     fprintf (dump_file, "\nSaving body of %s for later reuse\n",
+            cgraph_node_name (node));
+  
+   gcc_assert (node == cgraph_get_node (node->decl));
+ 
+   /* first_clone will be turned into real function.  */
+   first_clone = node->clones;
+   first_clone->decl = copy_node (node->decl);
+   cgraph_insert_node_to_hashtable (first_clone);
+   gcc_assert (first_clone == cgraph_get_node (first_clone->decl));
+ 
+   /* Now reshape the clone tree, so all other clones descend from
+      first_clone.  */
+   if (first_clone->next_sibling_clone)
+     {
+       for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
+         n->clone_of = first_clone;
+       n->clone_of = first_clone;
+       n->next_sibling_clone = first_clone->clones;
+       if (first_clone->clones)
+         first_clone->clones->prev_sibling_clone = n;
+       first_clone->clones = first_clone->next_sibling_clone;
+       first_clone->next_sibling_clone->prev_sibling_clone = NULL;
+       first_clone->next_sibling_clone = NULL;
+       gcc_assert (!first_clone->prev_sibling_clone);
+     }
+   first_clone->clone_of = NULL;
+ 
+   /* Now node in question has no clones.  */
+   node->clones = NULL;
+ 
+   if (first_clone->clones)
+     for (n = first_clone->clones; n != first_clone;)
+       {
+         gcc_assert (n->decl == node->decl);
+       n->decl = first_clone->decl;
+       if (n->clones)
+         n = n->clones;
+       else if (n->next_sibling_clone)
+         n = n->next_sibling_clone;
+       else
+         {
+           while (n != first_clone && !n->next_sibling_clone)
+             n = n->clone_of;
+           if (n != first_clone)
+             n = n->next_sibling_clone;
+         }
+       }
+ 
+   /* Copy the OLD_VERSION_NODE function tree to the new version.  */
+   tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
+                           NULL, NULL);
+ 
+   DECL_EXTERNAL (first_clone->decl) = 0;
+   DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
+   TREE_PUBLIC (first_clone->decl) = 0;
+   DECL_COMDAT (first_clone->decl) = 0;
+   VEC_free (ipa_opt_pass, heap,
+             first_clone->ipa_transforms_to_apply);
+   first_clone->ipa_transforms_to_apply = NULL;
+ 
+ #ifdef ENABLE_CHECKING
+   verify_cgraph_node (first_clone);
+ #endif
+   return first_clone;
+ }
+ 
+ 
+ /* Apply inline plan to function.  */
+ 
+ unsigned int
+ inline_transform (struct cgraph_node *node)
+ {
+   unsigned int todo = 0;
+   struct cgraph_edge *e;
+   bool inline_p = false;
+ 
+   /* FIXME: Currently the pass manager is adding inline transform more than
+      once to some clones.  This needs revisiting after WPA cleanups.  */
+   if (cfun->after_inlining)
+     return 0;
+ 
+   /* We might need the body of this function so that we can expand
+      it inline somewhere else.  */
+   if (cgraph_preserve_function_body_p (node->decl))
+     save_inline_function_body (node);
+ 
+   for (e = node->callees; e; e = e->next_callee)
+     {
+       cgraph_redirect_edge_call_stmt_to_callee (e);
+       if (!e->inline_failed || warn_inline)
+         inline_p = true;
+     }
+ 
+   if (inline_p)
+     {
+       timevar_push (TV_INTEGRATION);
+       todo = optimize_inline_calls (current_function_decl);
+       timevar_pop (TV_INTEGRATION);
+     }
+   cfun->always_inline_functions_inlined = true;
+   cfun->after_inlining = true;
+   return todo | execute_fixup_cfg ();
+ }
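
One detail in the new file may deserve a note: the loop in
save_inline_function_body that rewrites n->decl in every clone walks the
whole clone tree without recursion, descending through
clones/next_sibling_clone and climbing back up through the clone_of
pointers instead of keeping an explicit stack.  A standalone toy model
of that traversal (not part of the patch; every name below is made up,
nothing here is GCC code) shows the shape of the walk:

  #include <stdio.h>

  /* Toy tree node: parent/first_child/next_sibling play the roles of
     clone_of/clones/next_sibling_clone in the cgraph clone tree.  */
  struct toy_node
  {
    const char *name;
    struct toy_node *parent;
    struct toy_node *first_child;
    struct toy_node *next_sibling;
  };

  /* Visit every descendant of ROOT in pre-order, climbing back up via
     the parent pointers instead of using a stack -- the same loop shape
     save_inline_function_body uses.  */
  static void
  visit_descendants (struct toy_node *root)
  {
    struct toy_node *n;

    for (n = root->first_child; n && n != root;)
      {
        printf ("visiting %s\n", n->name);
        if (n->first_child)
          n = n->first_child;
        else if (n->next_sibling)
          n = n->next_sibling;
        else
          {
            while (n != root && !n->next_sibling)
              n = n->parent;
            if (n != root)
              n = n->next_sibling;
          }
      }
  }

  int
  main (void)
  {
    struct toy_node root = { "root", 0, 0, 0 };
    struct toy_node a = { "a", &root, 0, 0 };
    struct toy_node b = { "b", &root, 0, 0 };
    struct toy_node a1 = { "a1", &a, 0, 0 };

    root.first_child = &a;
    a.next_sibling = &b;
    a.first_child = &a1;

    visit_descendants (&root);  /* prints a, a1, b */
    return 0;
  }
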
Index: cgraphunit.c
===================================================================
*** cgraphunit.c        (revision 172709)
--- cgraphunit.c        (working copy)
*************** cgraph_function_versioning (struct cgrap
*** 2094,2167 ****
    return new_version_node;
  }
  
- /* Produce separate function body for inline clones so the offline copy can be
-    modified without affecting them.  */
- struct cgraph_node *
- save_inline_function_body (struct cgraph_node *node)
- {
-   struct cgraph_node *first_clone, *n;
- 
-   gcc_assert (node == cgraph_get_node (node->decl));
- 
-   cgraph_lower_function (node);
- 
-   first_clone = node->clones;
- 
-   first_clone->decl = copy_node (node->decl);
-   cgraph_insert_node_to_hashtable (first_clone);
-   gcc_assert (first_clone == cgraph_get_node (first_clone->decl));
-   if (first_clone->next_sibling_clone)
-     {
-       for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
-         n->clone_of = first_clone;
-       n->clone_of = first_clone;
-       n->next_sibling_clone = first_clone->clones;
-       if (first_clone->clones)
-         first_clone->clones->prev_sibling_clone = n;
-       first_clone->clones = first_clone->next_sibling_clone;
-       first_clone->next_sibling_clone->prev_sibling_clone = NULL;
-       first_clone->next_sibling_clone = NULL;
-       gcc_assert (!first_clone->prev_sibling_clone);
-     }
-   first_clone->clone_of = NULL;
-   node->clones = NULL;
- 
-   if (first_clone->clones)
-     for (n = first_clone->clones; n != first_clone;)
-       {
-         gcc_assert (n->decl == node->decl);
-       n->decl = first_clone->decl;
-       if (n->clones)
-         n = n->clones;
-       else if (n->next_sibling_clone)
-         n = n->next_sibling_clone;
-       else
-         {
-           while (n != first_clone && !n->next_sibling_clone)
-             n = n->clone_of;
-           if (n != first_clone)
-             n = n->next_sibling_clone;
-         }
-       }
- 
-   /* Copy the OLD_VERSION_NODE function tree to the new version.  */
-   tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
-                           NULL, NULL);
- 
-   DECL_EXTERNAL (first_clone->decl) = 0;
-   DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
-   TREE_PUBLIC (first_clone->decl) = 0;
-   DECL_COMDAT (first_clone->decl) = 0;
-   VEC_free (ipa_opt_pass, heap,
-             first_clone->ipa_transforms_to_apply);
-   first_clone->ipa_transforms_to_apply = NULL;
- 
- #ifdef ENABLE_CHECKING
-   verify_cgraph_node (first_clone);
- #endif
-   return first_clone;
- }
- 
  /* Given virtual clone, turn it into actual clone.  */
  static void
  cgraph_materialize_clone (struct cgraph_node *node)
--- 2094,2099 ----
Index: ipa-inline.c
===================================================================
*** ipa-inline.c        (revision 172709)
--- ipa-inline.c        (working copy)
*************** along with GCC; see the file COPYING3.  
*** 23,40 ****
  
      The implementation of inliner is organized as follows:
  
-     Transformation of callgraph to represent inlining decisions.
- 
-       The inline decisions are stored in callgraph in "inline plan" and
-       all applied later.
- 
-       To mark given call inline, use cgraph_mark_inline function.
-       The function marks the edge inlinable and, if necessary, produces
-       virtual clone in the callgraph representing the new copy of callee's
-       function body.
- 
-       The inline plan is applied on given function body by inline_transform. 
- 
      inlining heuristics limits
  
        can_inline_edge_p allow to check that particular inlining is allowed
--- 23,28 ----
*************** along with GCC; see the file COPYING3.  
*** 128,290 ****
  #include "ipa-inline.h"
  
  /* Statistics we collect about inlining algorithm.  */
- static int ncalls_inlined;
- static int nfunctions_inlined;
  static int overall_size;
  static gcov_type max_count, max_benefit;
  
- /* Scale frequency of NODE edges by FREQ_SCALE and increase loop nest
-    by NEST.  */
- 
- static void
- update_noncloned_frequencies (struct cgraph_node *node,
-                             int freq_scale, int nest)
- {
-   struct cgraph_edge *e;
- 
-   /* We do not want to ignore high loop nest after freq drops to 0.  */
-   if (!freq_scale)
-     freq_scale = 1;
-   for (e = node->callees; e; e = e->next_callee)
-     {
-       e->loop_nest += nest;
-       e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
-       if (e->frequency > CGRAPH_FREQ_MAX)
-         e->frequency = CGRAPH_FREQ_MAX;
-       if (!e->inline_failed)
-         update_noncloned_frequencies (e->callee, freq_scale, nest);
-     }
- }
- 
- /* E is expected to be an edge being inlined.  Clone destination node of
-    the edge and redirect it to the new clone.
-    DUPLICATE is used for bookkeeping on whether we are actually creating new
-    clones or re-using node originally representing out-of-line function call.
-    */
- void
- cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
-                           bool update_original)
- {
-   HOST_WIDE_INT peak;
-   struct inline_summary *caller_info, *callee_info;
- 
-   if (duplicate)
-     {
-       /* We may eliminate the need for out-of-line copy to be output.
-        In that case just go ahead and re-use it.  */
-       if (!e->callee->callers->next_caller
-         /* Recursive inlining never wants the master clone to
-            be overwritten.  */
-         && update_original
-         /* FIXME: When address is taken of DECL_EXTERNAL function we still
-            can remove its offline copy, but we would need to keep unanalyzed
-            node in the callgraph so references can point to it.  */
-         && !e->callee->address_taken
-         && cgraph_can_remove_if_no_direct_calls_p (e->callee)
-         /* Inlining might enable more devirtualizing, so we want to remove
-            those only after all devirtualizable virtual calls are processed.
-            Lacking may edges in callgraph we just preserve them post
-            inlining.  */
-         && (!DECL_VIRTUAL_P (e->callee->decl)
-             || (!DECL_COMDAT (e->callee->decl)
-                 && !DECL_EXTERNAL (e->callee->decl)))
-         /* Don't reuse if more than one function shares a comdat group.
-            If the other function(s) are needed, we need to emit even
-            this function out of line.  */
-         && !e->callee->same_comdat_group
-         && !cgraph_new_nodes)
-       {
-         gcc_assert (!e->callee->global.inlined_to);
-         if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
-           {
-             overall_size -= inline_summary (e->callee)->size;
-             nfunctions_inlined++;
-           }
-         duplicate = false;
-         e->callee->local.externally_visible = false;
-           update_noncloned_frequencies (e->callee, e->frequency, e->loop_nest);
-       }
-       else
-       {
-         struct cgraph_node *n;
-         n = cgraph_clone_node (e->callee, e->callee->decl,
-                                e->count, e->frequency, e->loop_nest,
-                                update_original, NULL);
-         cgraph_redirect_edge_callee (e, n);
-       }
-     }
- 
-   callee_info = inline_summary (e->callee);
-   caller_info = inline_summary (e->caller);
- 
-   if (e->caller->global.inlined_to)
-     e->callee->global.inlined_to = e->caller->global.inlined_to;
-   else
-     e->callee->global.inlined_to = e->caller;
-   callee_info->stack_frame_offset
-     = caller_info->stack_frame_offset
-       + caller_info->estimated_self_stack_size;
-   peak = callee_info->stack_frame_offset
-       + callee_info->estimated_self_stack_size;
-   if (inline_summary (e->callee->global.inlined_to)->estimated_stack_size
-       < peak)
-     inline_summary (e->callee->global.inlined_to)->estimated_stack_size = peak;
-   cgraph_propagate_frequency (e->callee);
- 
-   /* Recursively clone all bodies.  */
-   for (e = e->callee->callees; e; e = e->next_callee)
-     if (!e->inline_failed)
-       cgraph_clone_inlined_nodes (e, duplicate, update_original);
- }
- 
- /* Mark edge E as inlined and update callgraph accordingly.  UPDATE_ORIGINAL
-    specify whether profile of original function should be updated.  If any new
-    indirect edges are discovered in the process, add them to NEW_EDGES, unless
-    it is NULL.  Return true iff any new callgraph edges were discovered as a
-    result of inlining.  */
- 
- static bool
- cgraph_mark_inline_edge (struct cgraph_edge *e, bool update_original,
-                        VEC (cgraph_edge_p, heap) **new_edges)
- {
-   int old_size = 0, new_size = 0;
-   struct cgraph_node *to = NULL;
-   struct cgraph_edge *curr = e;
-   struct inline_summary *info;
- 
-   /* Don't inline inlined edges.  */
-   gcc_assert (e->inline_failed);
-   /* Don't even think of inlining inline clone.  */
-   gcc_assert (!e->callee->global.inlined_to);
- 
-   e->inline_failed = CIF_OK;
-   DECL_POSSIBLY_INLINED (e->callee->decl) = true;
- 
-   cgraph_clone_inlined_nodes (e, true, update_original);
- 
-   /* Now update size of caller and all functions caller is inlined into.  */
-   for (;e && !e->inline_failed; e = e->caller->callers)
-     {
-       to = e->caller;
-       info = inline_summary (to);
-       old_size = info->size;
-       new_size = estimate_size_after_inlining (to, curr);
-       info->size = new_size;
-       info->time = estimate_time_after_inlining (to, curr);
-     }
-   gcc_assert (curr->callee->global.inlined_to == to);
-   if (new_size > old_size)
-     overall_size += new_size - old_size;
-   ncalls_inlined++;
- 
-   /* FIXME: We should remove the optimize check after we ensure we never run
-      IPA passes when not optimizing.  */
-   if (flag_indirect_inlining && optimize)
-     return ipa_propagate_indirect_call_infos (curr, new_edges);
-   else
-     return false;
- }
- 
  /* Return false when inlining edge E would lead to violating
     limits on function unit growth or stack usage growth.  
  
--- 116,124 ----
*************** recursive_inlining (struct cgraph_edge *
*** 1196,1206 ****
                                            false, NULL);
          for (e = master_clone->callees; e; e = e->next_callee)
            if (!e->inline_failed)
!             cgraph_clone_inlined_nodes (e, true, false);
        }
  
        cgraph_redirect_edge_callee (curr, master_clone);
!       cgraph_mark_inline_edge (curr, false, new_edges);
        lookup_recursive_calls (node, curr->callee, heap);
        n++;
      }
--- 1030,1040 ----
                                            false, NULL);
          for (e = master_clone->callees; e; e = e->next_callee)
            if (!e->inline_failed)
!             clone_inlined_nodes (e, true, false, NULL);
        }
  
        cgraph_redirect_edge_callee (curr, master_clone);
!       inline_call (curr, false, new_edges, &overall_size);
        lookup_recursive_calls (node, curr->callee, heap);
        n++;
      }
*************** inline_small_functions (void)
*** 1460,1466 ****
  
          callee = edge->callee;
          gcc_checking_assert (!callee->global.inlined_to);
!         cgraph_mark_inline_edge (edge, true, &new_indirect_edges);
          if (flag_indirect_inlining)
            add_new_edges_to_heap (heap, new_indirect_edges);
  
--- 1294,1300 ----
  
          callee = edge->callee;
          gcc_checking_assert (!callee->global.inlined_to);
!         inline_call (edge, true, &new_indirect_edges, &overall_size);
          if (flag_indirect_inlining)
            add_new_edges_to_heap (heap, new_indirect_edges);
  
*************** flatten_function (struct cgraph_node *no
*** 1588,1594 ****
                 cgraph_node_name (e->callee),
                 cgraph_node_name (e->caller));
        orig_callee = e->callee;
!       cgraph_mark_inline_edge (e, true, NULL);
        if (e->callee != orig_callee)
        orig_callee->aux = (void *) node;
        flatten_function (e->callee);
--- 1422,1428 ----
                 cgraph_node_name (e->callee),
                 cgraph_node_name (e->caller));
        orig_callee = e->callee;
!       inline_call (e, true, NULL, NULL);
        if (e->callee != orig_callee)
        orig_callee->aux = (void *) node;
        flatten_function (e->callee);
*************** ipa_inline (void)
*** 1697,1703 ****
                               inline_summary (node->callers->caller)->size);
                    }
  
!                 cgraph_mark_inline_edge (node->callers, true, NULL);
                  if (dump_file)
                    fprintf (dump_file,
                             " Inlined into %s which now has %i size\n",
--- 1531,1537 ----
                               inline_summary (node->callers->caller)->size);
                    }
  
!                 inline_call (node->callers, true, NULL, NULL);
                  if (dump_file)
                    fprintf (dump_file,
                             " Inlined into %s which now has %i size\n",
*************** inline_always_inline_functions (struct c
*** 1752,1758 ****
        fprintf (dump_file, "  Inlining %s into %s (always_inline).\n",
                 cgraph_node_name (e->callee),
                 cgraph_node_name (e->caller));
!       cgraph_mark_inline_edge (e, true, NULL);
        inlined = true;
      }
  
--- 1586,1592 ----
        fprintf (dump_file, "  Inlining %s into %s (always_inline).\n",
                 cgraph_node_name (e->callee),
                 cgraph_node_name (e->caller));
!       inline_call (e, true, NULL, NULL);
        inlined = true;
      }
  
*************** early_inline_small_functions (struct cgr
*** 1801,1807 ****
        fprintf (dump_file, " Inlining %s into %s.\n",
                 cgraph_node_name (e->callee),
                 cgraph_node_name (e->caller));
!       cgraph_mark_inline_edge (e, true, NULL);
        inlined = true;
      }
  
--- 1635,1641 ----
        fprintf (dump_file, " Inlining %s into %s.\n",
                 cgraph_node_name (e->callee),
                 cgraph_node_name (e->caller));
!       inline_call (e, true, NULL, NULL);
        inlined = true;
      }
  
*************** struct gimple_opt_pass pass_early_inline
*** 1916,1957 ****
  };
  
  
- /* Apply inline plan to function.  */
- static unsigned int
- inline_transform (struct cgraph_node *node)
- {
-   unsigned int todo = 0;
-   struct cgraph_edge *e;
-   bool inline_p = false;
- 
-   /* FIXME: Currently the pass manager is adding inline transform more than
-      once to some clones.  This needs revisiting after WPA cleanups.  */
-   if (cfun->after_inlining)
-     return 0;
- 
-   /* We might need the body of this function so that we can expand
-      it inline somewhere else.  */
-   if (cgraph_preserve_function_body_p (node->decl))
-     save_inline_function_body (node);
- 
-   for (e = node->callees; e; e = e->next_callee)
-     {
-       cgraph_redirect_edge_call_stmt_to_callee (e);
-       if (!e->inline_failed || warn_inline)
-         inline_p = true;
-     }
- 
-   if (inline_p)
-     {
-       timevar_push (TV_INTEGRATION);
-       todo = optimize_inline_calls (current_function_decl);
-       timevar_pop (TV_INTEGRATION);
-     }
-   cfun->always_inline_functions_inlined = true;
-   cfun->after_inlining = true;
-   return todo | execute_fixup_cfg ();
- }
- 
  /* When to run IPA inlining.  Inlining of always-inline functions
     happens during early inlining.  */
  
--- 1750,1755 ----
Index: ipa-inline.h
===================================================================
*** ipa-inline.h        (revision 172709)
--- ipa-inline.h        (working copy)
*************** DEF_VEC_O(inline_summary_t);
*** 63,68 ****
--- 63,69 ----
  DEF_VEC_ALLOC_O(inline_summary_t,heap);
  extern VEC(inline_summary_t,heap) *inline_summary_vec;
  
+ /* In ipa-inline-analysis.c  */
  void debug_inline_summary (struct cgraph_node *);
  void dump_inline_summaries (FILE *f);
  void inline_generate_summary (void);
*************** int estimate_time_after_inlining (struct
*** 74,79 ****
--- 75,88 ----
  int estimate_size_after_inlining (struct cgraph_node *, struct cgraph_edge *);
  int estimate_growth (struct cgraph_node *);
  
+ /* In ipa-inline-transform.c  */
+ bool inline_call (struct cgraph_edge *, bool, VEC (cgraph_edge_p, heap) **, int *);
+ unsigned int inline_transform (struct cgraph_node *);
+ void clone_inlined_nodes (struct cgraph_edge *e, bool, bool, int *);
+ 
+ extern int ncalls_inlined;
+ extern int nfunctions_inlined;
+ 
  static inline struct inline_summary *
  inline_summary (struct cgraph_node *node)
  {
Index: Makefile.in
===================================================================
*** Makefile.in (revision 172709)
--- Makefile.in (working copy)
*************** OBJS-archive = \
*** 1469,1474 ****
--- 1469,1475 ----
          ipa-split.o \
        ipa-inline.o \
        ipa-inline-analysis.o \
+       ipa-inline-transform.o \
        ipa-prop.o \
        ipa-pure-const.o \
        ipa-reference.o \
*************** ipa-inline-analysis.o : ipa-inline-analy
*** 3035,3040 ****
--- 3036,3046 ----
     $(DIAGNOSTIC_H) $(PARAMS_H) $(TIMEVAR_H) $(TREE_PASS_H) \
     $(HASHTAB_H) $(COVERAGE_H) $(GGC_H) $(TREE_FLOW_H) $(IPA_PROP_H) \
     gimple-pretty-print.h ipa-inline.h $(LTO_STREAMER_H)
+ ipa-inline-transform.o : ipa-inline-transform.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
+    $(TREE_H) langhooks.h $(TREE_INLINE_H) $(FLAGS_H) $(CGRAPH_H) intl.h \
+    $(DIAGNOSTIC_H) $(PARAMS_H) $(TIMEVAR_H) $(TREE_PASS_H) \
+    $(HASHTAB_H) $(COVERAGE_H) $(GGC_H) $(TREE_FLOW_H) $(IPA_PROP_H) \
+    gimple-pretty-print.h ipa-inline.h $(LTO_STREAMER_H)
  ipa-utils.o : ipa-utils.c $(IPA_UTILS_H) $(CONFIG_H) $(SYSTEM_H) \
     coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) $(TREE_INLINE_H) langhooks.h \
     pointer-set.h $(GGC_H) $(GIMPLE_H) $(SPLAY_TREE_H) \

Reply via email to