On Mon, Sep 9, 2013 at 12:27 PM, Richard Sandiford
<[email protected]> wrote:
> Similar to patch 1, but here the calls are all to simplify_* routines.
> In the 7414 combine.c hunk, the code is PLUS, AND, IOR or XOR.
> In the hunk after that, the "cval |= ..." stuff is effectively doing
> a trunc_int_for_mode by hand.
>
> Tested in the same way as before. OK to install?
Ok.
Thanks,
Richard.
> Thanks,
> Richard
>
>
> gcc/
> * combine.c (simplify_set, expand_field_assignment,
> extract_left_shift)
> (force_to_mode, simplify_shift_const_1, simplify_comparison):
> Use gen_int_mode with the mode of the associated simplify_* call.
> * explow.c (probe_stack_range, anti_adjust_stack_and_probe): Likewise.
> * expmed.c (expand_shift_1): Likewise.
> * function.c (instantiate_virtual_regs_in_insn): Likewise.
> * loop-iv.c (iv_number_of_iterations): Likewise.
> * loop-unroll.c (unroll_loop_runtime_iterations): Likewise.
> * simplify-rtx.c (simplify_binary_operation_1): Likewise.
>
> Index: gcc/combine.c
> ===================================================================
> --- gcc/combine.c 2013-09-09 10:49:45.218460753 +0100
> +++ gcc/combine.c 2013-09-09 11:11:13.699055109 +0100
> @@ -6370,16 +6370,17 @@ simplify_set (rtx x)
>
> if ((recog_for_combine (&pat, other_insn, &note) < 0
> && ! check_asm_operands (pat)))
> {
> *cc_use = old_cc_use;
> other_changed = 0;
>
> - op0 = simplify_gen_binary (XOR, GET_MODE (op0),
> - op0, GEN_INT (mask));
> + op0 = simplify_gen_binary (XOR, GET_MODE (op0), op0,
> + gen_int_mode (mask,
> + GET_MODE (op0)));
> }
> }
> }
>
> if (other_changed)
> undobuf.other_insn = other_insn;
>
> @@ -6893,19 +6894,21 @@ expand_field_assignment (const_rtx x)
> else if (GET_CODE (pos) == MINUS
> && CONST_INT_P (XEXP (pos, 1))
> && (INTVAL (XEXP (pos, 1))
> == GET_MODE_PRECISION (GET_MODE (inner)) - len))
> /* If position is ADJUST - X, new position is X. */
> pos = XEXP (pos, 0);
> else
> - pos = simplify_gen_binary (MINUS, GET_MODE (pos),
> - GEN_INT (GET_MODE_PRECISION (
> - GET_MODE (inner))
> - - len),
> - pos);
> + {
> + HOST_WIDE_INT prec = GET_MODE_PRECISION (GET_MODE (inner));
> + pos = simplify_gen_binary (MINUS, GET_MODE (pos),
> + gen_int_mode (prec - len,
> + GET_MODE (pos)),
> + pos);
> + }
> }
> }
>
> /* A SUBREG between two modes that occupy the same numbers of words
> can be done by moving the SUBREG to the source. */
> else if (GET_CODE (SET_DEST (x)) == SUBREG
> /* We need SUBREGs to compute nonzero_bits properly. */
> @@ -6950,15 +6953,16 @@ expand_field_assignment (const_rtx x)
> /* Compute a mask of LEN bits, if we can do this on the host machine.  */
> if (len >= HOST_BITS_PER_WIDE_INT)
> break;
>
> /* Now compute the equivalent expression. Make a copy of INNER
> for the SET_DEST in case it is a MEM into which we will substitute;
> we don't want shared RTL in that case. */
> - mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << len) - 1);
> + mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << len) - 1,
> + compute_mode);
> cleared = simplify_gen_binary (AND, compute_mode,
> simplify_gen_unary (NOT, compute_mode,
> simplify_gen_binary (ASHIFT,
> compute_mode,
> mask, pos),
> compute_mode),
> inner);
> @@ -7414,17 +7418,19 @@ extract_left_shift (rtx x, int count)
> case PLUS: case IOR: case XOR: case AND:
> /* If we can safely shift this constant and we find the inner shift,
> make a new operation. */
> if (CONST_INT_P (XEXP (x, 1))
> && (UINTVAL (XEXP (x, 1))
> & ((((unsigned HOST_WIDE_INT) 1 << count)) - 1)) == 0
> && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
> - return simplify_gen_binary (code, mode, tem,
> - GEN_INT (INTVAL (XEXP (x, 1)) >> count));
> -
> + {
> + HOST_WIDE_INT val = INTVAL (XEXP (x, 1)) >> count;
> + return simplify_gen_binary (code, mode, tem,
> + gen_int_mode (val, mode));
> + }
> break;
>
> default:
> break;
> }
>
> return 0;
> @@ -8124,25 +8130,18 @@ force_to_mode (rtx x, enum machine_mode
> if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
> && GET_MODE_MASK (GET_MODE (x)) != mask
> && HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
> {
> unsigned HOST_WIDE_INT cval
> = UINTVAL (XEXP (x, 1))
> | (GET_MODE_MASK (GET_MODE (x)) & ~mask);
> - int width = GET_MODE_PRECISION (GET_MODE (x));
> rtx y;
>
> - /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
> - number, sign extend it. */
> - if (width > 0 && width < HOST_BITS_PER_WIDE_INT
> - && (cval & (HOST_WIDE_INT_1U << (width - 1))) != 0)
> - cval |= HOST_WIDE_INT_M1U << width;
> -
> - y = simplify_gen_binary (AND, GET_MODE (x),
> - XEXP (x, 0), GEN_INT (cval));
> + y = simplify_gen_binary (AND, GET_MODE (x), XEXP (x, 0),
> + gen_int_mode (cval, GET_MODE (x)));
> if (set_src_cost (y, optimize_this_for_speed_p)
> < set_src_cost (x, optimize_this_for_speed_p))
> x = y;
> }
>
> break;
> }
> @@ -8224,16 +8223,17 @@ force_to_mode (rtx x, enum machine_mode
> && CONST_INT_P (XEXP (x, 1))
> && ((INTVAL (XEXP (XEXP (x, 0), 1))
> + floor_log2 (INTVAL (XEXP (x, 1))))
> < GET_MODE_PRECISION (GET_MODE (x)))
> && (UINTVAL (XEXP (x, 1))
> & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
> {
> - temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
> - << INTVAL (XEXP (XEXP (x, 0), 1)));
> + temp = gen_int_mode ((INTVAL (XEXP (x, 1)) & mask)
> + << INTVAL (XEXP (XEXP (x, 0), 1)),
> + GET_MODE (x));
> temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
> XEXP (XEXP (x, 0), 0), temp);
> x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
> XEXP (XEXP (x, 0), 1));
> return force_to_mode (x, mode, mask, next_select);
> }
>
> @@ -8439,15 +8439,16 @@ force_to_mode (rtx x, enum machine_mode
> in the mode of X, compute where the bits we care about are.
> Otherwise, we can't do anything. Don't change the mode of
> the shift or propagate MODE into the shift, though. */
> if (CONST_INT_P (XEXP (x, 1))
> && INTVAL (XEXP (x, 1)) >= 0)
> {
> temp = simplify_binary_operation (code == ROTATE ? ROTATERT :
> ROTATE,
> - GET_MODE (x), GEN_INT (mask),
> + GET_MODE (x),
> + gen_int_mode (mask, GET_MODE (x)),
> XEXP (x, 1));
> if (temp && CONST_INT_P (temp))
> SUBST (XEXP (x, 0),
> force_to_mode (XEXP (x, 0), GET_MODE (x),
> INTVAL (temp), next_select));
> }
> break;
> @@ -10088,15 +10089,16 @@ simplify_shift_const_1 (enum rtx_code co
> || code == ROTATE)))
> break;
>
> /* To compute the mask to apply after the shift, shift the
> nonzero bits of the inner shift the same way the
> outer shift will. */
>
> - mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
> + mask_rtx = gen_int_mode (nonzero_bits (varop, GET_MODE (varop)),
> + result_mode);
>
> mask_rtx
> = simplify_const_binary_operation (code, result_mode,
> mask_rtx,
> GEN_INT (count));
>
> /* Give up if we can't compute an outer operation to use. */
> if (mask_rtx == 0
> @@ -10189,17 +10191,18 @@ simplify_shift_const_1 (enum rtx_code co
>
> if (CONST_INT_P (XEXP (varop, 1))
> /* We can't do this if we have (ashiftrt (xor)) and the
> constant has its sign bit set in shift_mode. */
> && !(code == ASHIFTRT && GET_CODE (varop) == XOR
> && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
> shift_mode))
> - && (new_rtx = simplify_const_binary_operation (code, result_mode,
> - XEXP (varop, 1),
> - GEN_INT (count))) != 0
> + && (new_rtx = simplify_const_binary_operation
> + (code, result_mode,
> + gen_int_mode (INTVAL (XEXP (varop, 1)), result_mode),
> + GEN_INT (count))) != 0
> && CONST_INT_P (new_rtx)
> && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
> INTVAL (new_rtx), result_mode,
> &complement_p))
> {
> varop = XEXP (varop, 0);
> continue;
> }
> @@ -11942,19 +11945,19 @@ simplify_comparison (enum rtx_code code,
> /* If this is a test for negative, we can make an explicit
> test of the sign bit. Test this first so we can use
> a paradoxical subreg to extend OP0. */
>
> if (op1 == const0_rtx && (code == LT || code == GE)
> && HWI_COMPUTABLE_MODE_P (mode))
> {
> + unsigned HOST_WIDE_INT sign
> + = (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1);
> op0 = simplify_gen_binary (AND, tmode,
> gen_lowpart (tmode, op0),
> - GEN_INT ((unsigned HOST_WIDE_INT) 1
> - << (GET_MODE_BITSIZE (mode)
> - - 1)));
> + gen_int_mode (sign, mode));
> code = (code == LT) ? NE : EQ;
> break;
> }
>
> /* If the only nonzero bits in OP0 and OP1 are those in the
> narrower mode and this is an equality or unsigned comparison,
> we can use the wider mode. Similarly for sign-extended
> Index: gcc/explow.c
> ===================================================================
> --- gcc/explow.c 2013-09-09 10:55:59.748537330 +0100
> +++ gcc/explow.c 2013-09-09 10:57:31.664292367 +0100
> @@ -1632,15 +1632,16 @@ probe_stack_range (HOST_WIDE_INT first,
> rtx end_lab = gen_label_rtx ();
>
>
> /* Step 1: round SIZE to the previous multiple of the interval. */
>
> /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
> rounded_size
> - = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
> + = simplify_gen_binary (AND, Pmode, size,
> + gen_int_mode (-PROBE_INTERVAL, Pmode));
> rounded_size_op = force_operand (rounded_size, NULL_RTX);
>
>
> /* Step 2: compute initial and final value of the loop counter. */
>
> /* TEST_ADDR = SP + FIRST. */
> test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
> @@ -1776,15 +1777,16 @@ anti_adjust_stack_and_probe (rtx size, b
> rtx end_lab = gen_label_rtx ();
>
>
> /* Step 1: round SIZE to the previous multiple of the interval. */
>
> /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
> rounded_size
> - = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
> + = simplify_gen_binary (AND, Pmode, size,
> + gen_int_mode (-PROBE_INTERVAL, Pmode));
> rounded_size_op = force_operand (rounded_size, NULL_RTX);
>
>
> /* Step 2: compute initial and final value of the loop counter. */
>
> /* SP = SP_0 + PROBE_INTERVAL. */
> anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
> Index: gcc/expmed.c
> ===================================================================
> --- gcc/expmed.c 2013-09-09 10:55:59.751537354 +0100
> +++ gcc/expmed.c 2013-09-09 10:57:31.656292301 +0100
> @@ -2210,19 +2210,18 @@ expand_shift_1 (enum tree_code code, enu
> other_amount = GEN_INT (GET_MODE_BITSIZE (mode)
> - INTVAL (op1));
> else
> {
> other_amount
> = simplify_gen_unary (NEG, GET_MODE (op1),
> op1, GET_MODE (op1));
> + HOST_WIDE_INT mask = GET_MODE_PRECISION (mode) - 1;
> other_amount
> - = simplify_gen_binary (AND, GET_MODE (op1),
> - other_amount,
> - GEN_INT (GET_MODE_PRECISION (mode)
> - - 1));
> + = simplify_gen_binary (AND, GET_MODE (op1), other_amount,
> + gen_int_mode (mask, GET_MODE (op1)));
> }
>
> shifted = force_reg (mode, shifted);
>
> temp = expand_shift_1 (left ? LSHIFT_EXPR : RSHIFT_EXPR,
> mode, shifted, new_amount, 0, 1);
> temp1 = expand_shift_1 (left ? RSHIFT_EXPR : LSHIFT_EXPR,
> Index: gcc/function.c
> ===================================================================
> --- gcc/function.c 2013-09-09 10:55:59.756537396 +0100
> +++ gcc/function.c 2013-09-09 10:57:31.666292384 +0100
> @@ -1516,15 +1516,15 @@ instantiate_virtual_regs_in_insn (rtx in
> new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
> if (new_rtx)
> {
> start_sequence ();
>
> for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx,
> NULL);
> x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
> - GEN_INT (-offset));
> + gen_int_mode (-offset, GET_MODE (new_rtx)));
> x = force_operand (x, new_rtx);
> if (x != new_rtx)
> emit_move_insn (new_rtx, x);
>
> seq = get_insns ();
> end_sequence ();
>
> Index: gcc/loop-iv.c
> ===================================================================
> --- gcc/loop-iv.c 2013-09-08 17:18:39.577589199 +0100
> +++ gcc/loop-iv.c 2013-09-09 10:57:31.667292392 +0100
> @@ -2668,19 +2668,19 @@ iv_number_of_iterations (struct loop *lo
> s /= 2;
> d *= 2;
> size--;
> }
> bound = GEN_INT (((unsigned HOST_WIDEST_INT) 1 << (size - 1 ) << 1) - 1);
>
> tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
> - tmp = simplify_gen_binary (UMOD, mode, tmp1, GEN_INT (d));
> + tmp = simplify_gen_binary (UMOD, mode, tmp1, gen_int_mode (d, mode));
> assumption = simplify_gen_relational (NE, SImode, mode, tmp,
> const0_rtx);
> desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
>
> - tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
> + tmp = simplify_gen_binary (UDIV, mode, tmp1, gen_int_mode (d, mode));
> inv = inverse (s, size);
> tmp = simplify_gen_binary (MULT, mode, tmp, gen_int_mode (inv, mode));
> desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
> }
> else
> {
> if (iv1.step == const0_rtx)
> Index: gcc/loop-unroll.c
> ===================================================================
> --- gcc/loop-unroll.c 2013-09-09 10:55:59.757537404 +0100
> +++ gcc/loop-unroll.c 2013-09-09 10:57:31.668292400 +0100
> @@ -1305,15 +1305,15 @@ unroll_loop_runtime_iterations (struct l
> /* We must be careful when updating the number of iterations due to
> preconditioning and the fact that the value must be valid at entry
> of the loop. After passing through the above code, we see that
> the correct new number of iterations is this: */
> gcc_assert (!desc->const_iter);
> desc->niter_expr =
> simplify_gen_binary (UDIV, desc->mode, old_niter,
> - GEN_INT (max_unroll + 1));
> + gen_int_mode (max_unroll + 1, desc->mode));
> loop->nb_iterations_upper_bound
> = loop->nb_iterations_upper_bound.udiv (double_int::from_uhwi (max_unroll
> + 1),
> TRUNC_DIV_EXPR);
> if (loop->any_estimate)
> loop->nb_iterations_estimate
> = loop->nb_iterations_estimate.udiv (double_int::from_uhwi (max_unroll
> Index: gcc/simplify-rtx.c
> ===================================================================
> --- gcc/simplify-rtx.c 2013-09-08 17:18:39.577589199 +0100
> +++ gcc/simplify-rtx.c 2013-09-09 10:57:31.658292318 +0100
> @@ -2814,20 +2814,21 @@ simplify_binary_operation_1 (enum rtx_co
> if (CONST_INT_P (op1)
> && (HWI_COMPUTABLE_MODE_P (mode)
> || INTVAL (op1) > 0)
> && GET_CODE (op0) == AND
> && CONST_INT_P (XEXP (op0, 1))
> && CONST_INT_P (op1)
> && (UINTVAL (XEXP (op0, 1)) & UINTVAL (op1)) != 0)
> - return simplify_gen_binary (IOR, mode,
> - simplify_gen_binary
> - (AND, mode, XEXP (op0, 0),
> - GEN_INT (UINTVAL (XEXP (op0, 1))
> - & ~UINTVAL (op1))),
> - op1);
> + {
> + rtx tmp = simplify_gen_binary (AND, mode, XEXP (op0, 0),
> + gen_int_mode (UINTVAL (XEXP (op0, 1))
> + & ~UINTVAL (op1),
> + mode));
> + return simplify_gen_binary (IOR, mode, tmp, op1);
> + }
>
> /* If OP0 is (ashiftrt (plus ...) C), it might actually be
> a (sign_extend (plus ...)). Then check if OP1 is a CONST_INT and
> the PLUS does not affect any of the bits in OP1: then we can do
> the IOR as a PLUS and we can associate. This is valid if OP1
> can be safely shifted left C bits. */
> if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT
> @@ -2949,25 +2950,27 @@ simplify_binary_operation_1 (enum rtx_co
> simplify_gen_unary (NOT, mode, a,
> mode),
> c);
> if ((~cval & bval) == 0)
> {
> /* Try to simplify ~A&C | ~B&C. */
> if (na_c != NULL_RTX)
> return simplify_gen_binary (IOR, mode, na_c,
> - GEN_INT (~bval & cval));
> + gen_int_mode (~bval & cval, mode));
> }
> else
> {
> /* If ~A&C is zero, simplify A&(~C&B) | ~B&C. */
> if (na_c == const0_rtx)
> {
> rtx a_nc_b = simplify_gen_binary (AND, mode, a,
> - GEN_INT (~cval & bval));
> + gen_int_mode (~cval & bval,
> + mode));
> return simplify_gen_binary (IOR, mode, a_nc_b,
> - GEN_INT (~bval & cval));
> + gen_int_mode (~bval & cval,
> + mode));
> }
> }
> }
>
> /* (xor (comparison foo bar) (const_int 1)) can become the reversed
> comparison if STORE_FLAG_VALUE is 1. */
> if (STORE_FLAG_VALUE == 1
> @@ -3293,15 +3296,15 @@ simplify_binary_operation_1 (enum rtx_co
> return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
> return CONST0_RTX (mode);
> }
> /* Implement modulus by power of two as AND. */
> if (CONST_INT_P (trueop1)
> && exact_log2 (UINTVAL (trueop1)) > 0)
> return simplify_gen_binary (AND, mode, op0,
> - GEN_INT (INTVAL (op1) - 1));
> + gen_int_mode (INTVAL (op1) - 1, mode));
> break;
>
> case MOD:
> /* 0%x is 0 (or x&0 if x has side-effects). */
> if (trueop0 == CONST0_RTX (mode))
> {
> if (side_effects_p (op1))