From: Sergey Grechanik <mouseent...@condor.intra.ispras.ru>

Sometimes we need to be able to call sched_get_condition_with_rev from the selective scheduler for an instruction with zero luid (i.e. before h_d_i_d has been extended).  In such cases we need to bypass the caching and use the "old", uncached lookup.
The patch factors the caching logic out into a separate function and amends it to skip the cached lookup for instructions with zero luid.  It also renames INSN_COND to INSN_CACHED_COND to avoid a clash with the macro of the same name in the predication patch for sel-sched.

2011-08-04  Sergey Grechanik  <mouseent...@ispras.ru>

	* sched-deps.c (sched_get_condition_with_rev): Rename to ...
	(sched_get_condition_with_rev_uncached): ... this.  Factor out
	condition caching logic into ...
	(sched_get_condition_with_rev): ... this.  Reimplement.  Do not
	attempt to use cache for instructions with zero luid.
	(sched_analyze_insn): Use INSN_CACHED_COND instead of INSN_COND.
	* sched-int.h (INSN_COND): Rename to INSN_CACHED_COND.

diff --git a/gcc/sched-deps.c b/gcc/sched-deps.c
index 343d03c..a82df5d 100644
--- a/gcc/sched-deps.c
+++ b/gcc/sched-deps.c
@@ -488,27 +488,13 @@ deps_may_trap_p (const_rtx mem)
 
 /* Find the condition under which INSN is executed.  If REV is not NULL,
    it is set to TRUE when the returned comparison should be reversed
-   to get the actual condition.
-   We only do actual work the first time we come here for an insn; the
-   results are cached in INSN_COND and INSN_REVERSE_COND.  */
+   to get the actual condition.  */
 
 static rtx
-sched_get_condition_with_rev (const_rtx insn, bool *rev)
+sched_get_condition_with_rev_uncached (const_rtx insn, bool *rev)
 {
   rtx pat = PATTERN (insn);
   rtx src;
 
-  if (INSN_COND (insn) == const_true_rtx)
-    return NULL_RTX;
-
-  if (INSN_COND (insn) != NULL_RTX)
-    {
-      if (rev)
-	*rev = INSN_REVERSE_COND (insn);
-      return INSN_COND (insn);
-    }
-
-  INSN_COND (insn) = const_true_rtx;
-  INSN_REVERSE_COND (insn) = false;
   if (pat == 0)
     return 0;
 
@@ -516,10 +502,7 @@ sched_get_condition_with_rev (const_rtx insn, bool *rev)
     *rev = false;
 
   if (GET_CODE (pat) == COND_EXEC)
-    {
-      INSN_COND (insn) = COND_EXEC_TEST (pat);
-      return COND_EXEC_TEST (pat);
-    }
+    return COND_EXEC_TEST (pat);
 
   if (!any_condjump_p (insn) || !onlyjump_p (insn))
     return 0;
@@ -527,10 +510,7 @@ sched_get_condition_with_rev (const_rtx insn, bool *rev)
   src = SET_SRC (pc_set (insn));
 
   if (XEXP (src, 2) == pc_rtx)
-    {
-      INSN_COND (insn) = XEXP (src, 0);
-      return XEXP (src, 0);
-    }
+    return XEXP (src, 0);
   else if (XEXP (src, 1) == pc_rtx)
     {
       rtx cond = XEXP (src, 0);
@@ -541,14 +521,45 @@ sched_get_condition_with_rev (const_rtx insn, bool *rev)
 
       if (rev)
 	*rev = true;
-      INSN_COND (insn) = cond;
-      INSN_REVERSE_COND (insn) = true;
       return cond;
     }
 
   return 0;
 }
 
+/* Caching variant of sched_get_condition_with_rev_uncached.
+   We only do actual work the first time we come here for an insn; the
+   results are cached in INSN_CACHED_COND and INSN_REVERSE_COND.  */
+static rtx
+sched_get_condition_with_rev (const_rtx insn, bool *rev)
+{
+  if (INSN_LUID (insn) == 0)
+    return sched_get_condition_with_rev_uncached (insn, rev);
+
+  if (INSN_CACHED_COND (insn) == const_true_rtx)
+    return NULL_RTX;
+
+  if (INSN_CACHED_COND (insn) != NULL_RTX)
+    {
+      if (rev)
+	*rev = INSN_REVERSE_COND (insn);
+      return INSN_CACHED_COND (insn);
+    }
+
+  INSN_CACHED_COND (insn)
+    = sched_get_condition_with_rev_uncached (insn, &INSN_REVERSE_COND (insn));
+
+  if (INSN_CACHED_COND (insn) == NULL_RTX)
+    {
+      INSN_CACHED_COND (insn) = const_true_rtx;
+      return NULL_RTX;
+    }
+
+  if (rev)
+    *rev = INSN_REVERSE_COND (insn);
+  return INSN_CACHED_COND (insn);
+}
+
 /* True when we can find a condition under which INSN is executed.  */
 static bool
 sched_has_condition_p (const_rtx insn)
@@ -2884,9 +2895,9 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx insn)
 	      for (list = reg_last->uses; list; list = XEXP (list, 1))
 		{
 		  rtx other = XEXP (list, 0);
-		  if (INSN_COND (other) != const_true_rtx
-		      && refers_to_regno_p (i, i + 1, INSN_COND (other), NULL))
-		    INSN_COND (other) = const_true_rtx;
+		  if (INSN_CACHED_COND (other) != const_true_rtx
+		      && refers_to_regno_p (i, i + 1, INSN_CACHED_COND (other), NULL))
+		    INSN_CACHED_COND (other) = const_true_rtx;
 		}
 	    }
 	}
diff --git a/gcc/sched-int.h b/gcc/sched-int.h
index f310f8a..b8240d7 100644
--- a/gcc/sched-int.h
+++ b/gcc/sched-int.h
@@ -849,7 +849,7 @@ extern VEC(haifa_deps_insn_data_def, heap) *h_d_i_d;
 #define INSN_RESOLVED_FORW_DEPS(INSN) (HDID (INSN)->resolved_forw_deps)
 #define INSN_HARD_BACK_DEPS(INSN) (HDID (INSN)->hard_back_deps)
 #define INSN_SPEC_BACK_DEPS(INSN) (HDID (INSN)->spec_back_deps)
-#define INSN_COND(INSN) (HDID (INSN)->cond)
+#define INSN_CACHED_COND(INSN) (HDID (INSN)->cond)
 #define INSN_REVERSE_COND(INSN) (HDID (INSN)->reverse_cond)
 #define CANT_MOVE(INSN) (HDID (INSN)->cant_move)
 #define CANT_MOVE_BY_LUID(LUID) (VEC_index (haifa_deps_insn_data_def, h_d_i_d, \
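
For readers who want to see the control flow of the new wrapper in isolation, here is a minimal, self-contained sketch of the same caching pattern.  It is a hypothetical illustration, not GCC code: the names cond_cache, lookup_uncached, lookup_cached and the no_cond sentinel are invented, a plain array stands in for h_d_i_d, and index 0 plays the role of a zero luid.  The wrapper bypasses the cache when no slot exists, memoizes the first uncached result otherwise, and caches failed lookups behind a sentinel (as const_true_rtx does in the patch).

/* Hypothetical, standalone illustration of the caching pattern used by
   the patch; none of these names exist in GCC.  */

#include <stdio.h>

#define CACHE_SIZE 4

static const char no_cond[] = "<none>";       /* sentinel: lookup failed */
static const char *cond_cache[CACHE_SIZE];    /* NULL: not computed yet */

/* The expensive lookup, analogous to
   sched_get_condition_with_rev_uncached.  */
static const char *
lookup_uncached (int id)
{
  printf ("  (uncached lookup for %d)\n", id);
  return (id % 2) ? "x > 0" : NULL;           /* only odd ids have a cond */
}

/* The caching wrapper, analogous to sched_get_condition_with_rev.  */
static const char *
lookup_cached (int id)
{
  /* No cache slot yet -- always fall back to the uncached variant.  */
  if (id == 0)
    return lookup_uncached (id);

  /* A previous lookup already failed.  */
  if (cond_cache[id] == no_cond)
    return NULL;

  /* Cache hit.  */
  if (cond_cache[id] != NULL)
    return cond_cache[id];

  /* First time here: do the real work and memoize the result.  */
  cond_cache[id] = lookup_uncached (id);
  if (cond_cache[id] == NULL)
    {
      cond_cache[id] = no_cond;
      return NULL;
    }
  return cond_cache[id];
}

int
main (void)
{
  int pass, id;

  /* The second pass produces no uncached lookups except for id 0,
     which always bypasses the cache.  */
  for (pass = 0; pass < 2; pass++)
    for (id = 0; id < CACHE_SIZE; id++)
      {
        const char *c = lookup_cached (id);
        printf ("pass %d, id %d: %s\n", pass, id, c ? c : "(no condition)");
      }
  return 0;
}

The point mirrored here is that a failed lookup is cached too, so the expensive analysis runs at most once per instruction that has a cache slot, while instructions without a slot still get a correct (if uncached) answer.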