This patch uses HWI_COMPUTABLE_MODE_P (X) instead of GET_MODE_PRECISION (X) <= HOST_BITS_PER_WIDE_INT in cases where X also needs to be a scalar integer.
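In case it helps review: HWI_COMPUTABLE_MODE_P (MODE) is true when MODE is a
scalar integer mode whose precision fits in a HOST_WIDE_INT, so at each site
touched below the macro subsumes the open-coded precision test.  The toy
program here only models that condition with made-up stand-ins (mode_info,
hwi_computable_mode_p, the constants in main); it is a sketch for
illustration, not the GCC definition:

#include <assert.h>

/* Stand-ins for GCC's real machinery; everything in this sketch is made
   up for illustration and is not the GCC macro itself.  */
#define HOST_BITS_PER_WIDE_INT 64

enum mode_class { MODE_INT, MODE_FLOAT, MODE_VECTOR_INT };

struct mode_info
{
  enum mode_class mclass;	/* what a scalar-integer check would inspect */
  unsigned int precision;	/* what GET_MODE_PRECISION would return */
};

/* Model of the condition: a scalar integer mode whose precision fits in a
   HOST_WIDE_INT.  The old open-coded tests checked only the precision half.  */
static int
hwi_computable_mode_p (struct mode_info mode)
{
  return (mode.mclass == MODE_INT
	  && mode.precision <= HOST_BITS_PER_WIDE_INT);
}

int
main (void)
{
  struct mode_info si_mode = { MODE_INT, 32 };	  /* like SImode */
  struct mode_info ti_mode = { MODE_INT, 128 };	  /* like TImode: too wide */
  struct mode_info sf_mode = { MODE_FLOAT, 32 };  /* like SFmode: not integer */

  assert (hwi_computable_mode_p (si_mode));
  assert (!hwi_computable_mode_p (ti_mode));
  assert (!hwi_computable_mode_p (sf_mode));
  return 0;
}

The only behavioural difference from the old tests is therefore for
non-scalar-integer modes, which, as described above, cannot occur at the
sites being changed.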
Tested on aarch64-linux-gnu and x86_64-linux-gnu, and by making sure
that there were no differences in testsuite assembly output for one
target per CPU.  OK to install?

Richard


2017-08-23  Richard Sandiford  <richard.sandif...@linaro.org>
	    Alan Hayward  <alan.hayw...@arm.com>
	    David Sherwood  <david.sherw...@arm.com>

gcc/
	* combine.c (simplify_comparison): Use HWI_COMPUTABLE_MODE_P.
	(record_promoted_value): Likewise.
	* expr.c (expand_expr_real_2): Likewise.
	* ree.c (update_reg_equal_equiv_notes): Likewise.
	(combine_set_extension): Likewise.
	* rtlanal.c (low_bitmask_len): Likewise.
	* simplify-rtx.c (neg_const_int): Likewise.
	(simplify_binary_operation_1): Likewise.

Index: gcc/combine.c
===================================================================
--- gcc/combine.c	2017-08-22 17:14:30.333927457 +0100
+++ gcc/combine.c	2017-08-23 10:44:17.183477418 +0100
@@ -11821,10 +11821,9 @@ simplify_comparison (enum rtx_code code,
 	      if (paradoxical_subreg_p (inner_op0)
 		  && GET_CODE (inner_op1) == SUBREG
+		  && HWI_COMPUTABLE_MODE_P (GET_MODE (SUBREG_REG (inner_op0)))
 		  && (GET_MODE (SUBREG_REG (inner_op0))
 		      == GET_MODE (SUBREG_REG (inner_op1)))
-		  && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (inner_op0)))
-		      <= HOST_BITS_PER_WIDE_INT)
 		  && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
 						  GET_MODE (SUBREG_REG (inner_op0)))))
 		  && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
@@ -13158,7 +13157,7 @@ record_promoted_value (rtx_insn *insn, r
   unsigned int regno = REGNO (SUBREG_REG (subreg));
   machine_mode mode = GET_MODE (subreg);
 
-  if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
+  if (!HWI_COMPUTABLE_MODE_P (mode))
     return;
 
   for (links = LOG_LINKS (insn); links;)
Index: gcc/expr.c
===================================================================
--- gcc/expr.c	2017-08-21 15:50:48.660709938 +0100
+++ gcc/expr.c	2017-08-23 10:44:17.184477384 +0100
@@ -8455,7 +8455,7 @@ #define REDUCE_BIT_FIELD(expr) (reduce_b
       if (modifier == EXPAND_STACK_PARM)
	target = 0;
       if (TREE_CODE (treeop0) == INTEGER_CST
-	  && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
+	  && HWI_COMPUTABLE_MODE_P (mode)
	  && TREE_CONSTANT (treeop1))
	{
	  rtx constant_part;
@@ -8478,7 +8478,7 @@ #define REDUCE_BIT_FIELD(expr) (reduce_b
	}
 
       else if (TREE_CODE (treeop1) == INTEGER_CST
-	       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
+	       && HWI_COMPUTABLE_MODE_P (mode)
	       && TREE_CONSTANT (treeop0))
	{
	  rtx constant_part;
Index: gcc/ree.c
===================================================================
--- gcc/ree.c	2017-08-22 17:14:30.338850894 +0100
+++ gcc/ree.c	2017-08-23 10:44:17.185477350 +0100
@@ -268,7 +268,7 @@ update_reg_equal_equiv_notes (rtx_insn *
       /* Update equivalency constants.  Recall that RTL constants are
	 sign-extended.  */
       if (GET_CODE (orig_src) == CONST_INT
-	  && HOST_BITS_PER_WIDE_INT >= GET_MODE_BITSIZE (new_mode))
+	  && HWI_COMPUTABLE_MODE_P (new_mode))
	{
	  if (INTVAL (orig_src) >= 0 || code == SIGN_EXTEND)
	    /* Nothing needed.  */;
@@ -336,7 +336,7 @@ combine_set_extension (ext_cand *cand, r
   /* Merge constants by directly moving the constant into the register under
      some conditions.  Recall that RTL constants are sign-extended.  */
   if (GET_CODE (orig_src) == CONST_INT
-      && HOST_BITS_PER_WIDE_INT >= GET_MODE_BITSIZE (cand->mode))
+      && HWI_COMPUTABLE_MODE_P (cand->mode))
     {
       if (INTVAL (orig_src) >= 0 || cand->code == SIGN_EXTEND)
	new_set = gen_rtx_SET (new_reg, orig_src);
Index: gcc/rtlanal.c
===================================================================
--- gcc/rtlanal.c	2017-08-22 17:14:30.340820268 +0100
+++ gcc/rtlanal.c	2017-08-23 10:44:17.187477282 +0100
@@ -5782,7 +5782,7 @@ low_bitmask_len (machine_mode mode, unsi
 {
   if (mode != VOIDmode)
     {
-      if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
+      if (!HWI_COMPUTABLE_MODE_P (mode))
	return -1;
       m &= GET_MODE_MASK (mode);
     }
Index: gcc/simplify-rtx.c
===================================================================
--- gcc/simplify-rtx.c	2017-08-22 17:14:30.340820268 +0100
+++ gcc/simplify-rtx.c	2017-08-23 10:44:17.187477282 +0100
@@ -62,7 +62,7 @@ neg_const_int (machine_mode mode, const_
 {
   unsigned HOST_WIDE_INT val = -UINTVAL (i);
 
-  if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
+  if (!HWI_COMPUTABLE_MODE_P (mode)
      && val == UINTVAL (i))
    return simplify_const_unary_operation (NEG, mode, CONST_CAST_RTX (i),
					   mode);
@@ -3351,7 +3351,8 @@ simplify_binary_operation_1 (enum rtx_co
       if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
	return op0;
       /* Rotating ~0 always results in ~0.  */
-      if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
+      if (CONST_INT_P (trueop0)
+	  && HWI_COMPUTABLE_MODE_P (mode)
	  && UINTVAL (trueop0) == GET_MODE_MASK (mode)
	  && ! side_effects_p (op1))
	return op0;
@@ -3433,7 +3434,7 @@ simplify_binary_operation_1 (enum rtx_co
      goto canonicalize_shift;
 
    case SMIN:
-      if (width <= HOST_BITS_PER_WIDE_INT
+      if (HWI_COMPUTABLE_MODE_P (mode)
	  && mode_signbit_p (mode, trueop1)
	  && ! side_effects_p (op0))
	return op1;
@@ -3445,7 +3446,7 @@ simplify_binary_operation_1 (enum rtx_co
      break;
 
    case SMAX:
-      if (width <= HOST_BITS_PER_WIDE_INT
+      if (HWI_COMPUTABLE_MODE_P (mode)
	  && CONST_INT_P (trueop1)
	  && (UINTVAL (trueop1) == GET_MODE_MASK (mode) >> 1)
	  && ! side_effects_p (op0))