On Wed, 1 Aug 2018 at 16:31, Richard Biener <rguent...@suse.de> wrote:
> --- a/gcc/cfganal.c
> +++ b/gcc/cfganal.c
> @@ -1057,6 +1057,119 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
>    return pre_order_num;
>  }
>
> +/* Unline pre_and_rev_post_order_compute we fill rev_post_order backwards

s/Unline/Unlike/.

> +   so iterating in RPO order needs to start with rev_post_order[n - 1]
> +   going to rev_post_order[0].  If FOR_ITERATION is true then try to
> +   make CFG cycles fit into small contiguous regions of the RPO order.
> +   When FOR_ITERATION is true this requires up-to-date loop structures.  */
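Since the array is filled from the end, it might be worth spelling out the
expected consumption pattern somewhere; a minimal caller sketch (assuming
fn, entry and exit_bbs are in scope, and sizing the array generously at
n_basic_blocks_for_fn):

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn));
  int n = rev_post_order_and_mark_dfs_back_seme (fn, entry, exit_bbs,
						 true, rpo);
  /* Filled backwards, so RPO order runs from index n - 1 down to 0.  */
  for (int i = n - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      /* ... process BB in RPO ... */
    }
  XDELETEVEC (rpo);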
> +
> +int
> +rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
> +                                      bitmap exit_bbs, bool for_iteration,
> +                                      int *rev_post_order)
> +{
> +  int pre_order_num = 0;
> +  int rev_post_order_num = 0;
> +
> +  /* Allocate stack for back-tracking up CFG.  Worst case we need
> +     O(n^2) edges but the following should suffice in practice without
> +     a need to re-allocate.  */
> +  auto_vec<edge, 20> stack (2 * n_basic_blocks_for_fn (fn));
> +
> +  int *pre = XNEWVEC (int, 2 * last_basic_block_for_fn (fn));
> +  int *post = pre + last_basic_block_for_fn (fn);
> +
> +  /* BB flag to track nodes that have been visited.  */
> +  auto_bb_flag visited (fn);
> +  /* BB flag to track which nodes whave post[] assigned to avoid
> +     zeroing post.  */

s/whave/have/.

> +  free (pre);

XDELETEVEC (pre); to pair with the XNEWVEC allocation above.

> +
> +  /* Clear the temporarily allocated flags.  */
> +  for (int i = 0; i < rev_post_order_num; ++i)
> +    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
> +      &= ~(post_assigned|visited);
> +
> +  return rev_post_order_num;
> +}
> +
> +
> +
>  /* Compute the depth first search order on the _reverse_ graph and
>     store in the array DFS_ORDER, marking the nodes visited in VISITED.

s/store/store it/

thanks,
