Hi,

The test case gcc.target/sh/pr52933-2.c has been failing for a while. This is because the sh_treg_combine pass optimizes only cbranches but not cmoves, which on SH are zero-displacement cbranches. The attached patch makes sh_treg_combine also handle cmoves. This eliminates the need for some combine patterns in sh.md and fixes the failing test case.

Tested with
  make -k check RUNTESTFLAGS="--target_board=sh-sim \{-m2/-ml,-m2/-mb,-m2a/-mb,-m4/-ml,-m4/-mb,-m4a/-ml,-m4a/-mb}"

Committed as r218850.
Cheers,
Oleg

gcc/ChangeLog:

	PR target/51244
	* config/sh/sh_treg_combine.cc (is_conditional_insn): New function.
	(cbranch_trace): Add member rtx* condition_rtx_in_insn, initialize
	it accordingly in constructor.
	(cbranch_trace::branch_condition_rtx_ref): New function.
	(cbranch_trace::branch_condition_rtx): Use branch_condition_rtx_ref.
	(sh_treg_combine::try_invert_branch_condition): Invert condition rtx
	in insn using reversed_comparison_code and validate_change instead of
	invert_jump_1.
	(sh_treg_combine::execute): Look for conditional insns in basic blocks
	in addition to conditional branches.
	* config/sh/sh.md (*movsicc_div0s): Remove combine patterns.
Index: gcc/config/sh/sh.md =================================================================== --- gcc/config/sh/sh.md (revision 218847) +++ gcc/config/sh/sh.md (working copy) @@ -1086,47 +1086,6 @@ (label_ref (match_dup 2)) (pc)))]) -;; Conditional move combine pattern for div0s comparisons. -;; This is used when TARGET_PRETEND_CMOVE is in effect. -(define_insn_and_split "*movsicc_div0s" - [(set (match_operand:SI 0 "arith_reg_dest" "") - (if_then_else:SI (ge (xor:SI (match_operand:SI 1 "arith_reg_operand" "") - (match_operand:SI 2 "arith_reg_operand" "")) - (const_int 0)) - (match_operand:SI 3 "arith_reg_operand" "") - (match_operand:SI 4 "general_movsrc_operand" ""))) - (clobber (reg:SI T_REG))] - "TARGET_PRETEND_CMOVE" - "#" - "&& 1" - [(set (reg:SI T_REG) (lt:SI (xor:SI (match_dup 1) (match_dup 2)) - (const_int 0))) - (set (match_dup 0) - (if_then_else (ne (reg:SI T_REG) (const_int 0)) - (match_dup 4) - (match_dup 3)))]) - -(define_insn_and_split "*movsicc_div0s" - [(set (match_operand:SI 0 "arith_reg_dest") - (if_then_else:SI (eq (lshiftrt:SI - (match_operand:SI 1 "arith_reg_operand") - (const_int 31)) - (lshiftrt:SI - (match_operand:SI 2 "arith_reg_operand") - (const_int 31))) - (match_operand:SI 3 "arith_reg_operand") - (match_operand:SI 4 "general_movsrc_operand"))) - (clobber (reg:SI T_REG))] - "TARGET_PRETEND_CMOVE" - "#" - "&& 1" - [(set (reg:SI T_REG) (lt:SI (xor:SI (match_dup 1) (match_dup 2)) - (const_int 0))) - (set (match_dup 0) - (if_then_else (ne (reg:SI T_REG) (const_int 0)) - (match_dup 4) - (match_dup 3)))]) - ;; ------------------------------------------------------------------------- ;; SImode unsigned integer comparisons ;; ------------------------------------------------------------------------- Index: gcc/config/sh/sh_treg_combine.cc =================================================================== --- gcc/config/sh/sh_treg_combine.cc (revision 218847) +++ gcc/config/sh/sh_treg_combine.cc (working copy) @@ -432,6 +432,16 @@ return count; 
} +static bool +is_conditional_insn (rtx_insn* i) +{ + if (! (INSN_P (i) && NONDEBUG_INSN_P (i))) + return false; + + rtx p = PATTERN (i); + return GET_CODE (p) == SET && GET_CODE (XEXP (p, 1)) == IF_THEN_ELSE; +} + // FIXME: Remove dependency on SH predicate function somehow. extern int t_reg_operand (rtx, machine_mode); extern int negt_reg_operand (rtx, machine_mode); @@ -484,6 +494,7 @@ struct cbranch_trace { rtx_insn *cbranch_insn; + rtx* condition_rtx_in_insn; branch_condition_type_t cbranch_type; // The comparison against zero right before the conditional branch. @@ -495,9 +506,14 @@ cbranch_trace (rtx_insn *insn) : cbranch_insn (insn), + condition_rtx_in_insn (NULL), cbranch_type (unknown_branch_condition), setcc () { + if (is_conditional_insn (cbranch_insn)) + condition_rtx_in_insn = &XEXP (XEXP (PATTERN (cbranch_insn), 1), 0); + else if (rtx x = pc_set (cbranch_insn)) + condition_rtx_in_insn = &XEXP (XEXP (x, 1), 0); } basic_block bb (void) const { return BLOCK_FOR_INSN (cbranch_insn); } @@ -505,9 +521,17 @@ rtx branch_condition_rtx (void) const { - rtx x = pc_set (cbranch_insn); - return x == NULL_RTX ? NULL_RTX : XEXP (XEXP (x, 1), 0); + return condition_rtx_in_insn != NULL ? *condition_rtx_in_insn : NULL; } + rtx& + branch_condition_rtx_ref (void) const + { + // Before anything gets to invoke this function, there are other checks + // in place to make sure that we have a known branch condition and thus + // the ref to the rtx in the insn. 
+ gcc_assert (condition_rtx_in_insn != NULL); + return *condition_rtx_in_insn; + } bool can_invert_condition (void) const @@ -1033,9 +1057,19 @@ { log_msg ("inverting branch condition\n"); - if (!invert_jump_1 (trace.cbranch_insn, JUMP_LABEL (trace.cbranch_insn))) - log_return (false, "invert_jump_1 failed\n"); + rtx& comp = trace.branch_condition_rtx_ref (); + rtx_code rev_cmp_code = reversed_comparison_code (comp, trace.cbranch_insn); + + if (rev_cmp_code == UNKNOWN) + log_return (false, "reversed_comparison_code = UNKNOWN\n"); + + validate_change (trace.cbranch_insn, &comp, + gen_rtx_fmt_ee (rev_cmp_code, + GET_MODE (comp), XEXP (comp, 0), + XEXP (comp, 1)), + 1); + if (verify_changes (num_validated_changes ())) confirm_change_group (); else @@ -1531,14 +1565,26 @@ log_rtx (m_ccreg); log_msg (" STORE_FLAG_VALUE = %d\n", STORE_FLAG_VALUE); - // Look for basic blocks that end with a conditional branch and try to - // optimize them. + // Look for basic blocks that end with a conditional branch or for + // conditional insns and try to optimize them. basic_block bb; FOR_EACH_BB_FN (bb, fun) { - rtx_insn *i = BB_END (bb); + rtx_insn* i = BB_END (bb); + if (i == NULL || i == PREV_INSN (BB_HEAD (bb))) + continue; + + // A conditional branch is always the last insn of a basic block. if (any_condjump_p (i) && onlyjump_p (i)) - try_optimize_cbranch (i); + { + try_optimize_cbranch (i); + i = PREV_INSN (i); + } + + // Check all insns in block for conditional insns. + for (; i != NULL && i != PREV_INSN (BB_HEAD (bb)); i = PREV_INSN (i)) + if (is_conditional_insn (i)) + try_optimize_cbranch (i); } log_msg ("\n\n");