ping

This patch further improves aarch64_legitimate_constant_p.  Allow all
integer, floating point and vector constants.  Allow label references
and non-anchor symbols with an immediate offset.  This allows such
constants to be rematerialized, resulting in smaller code and fewer
stack spills.
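As an illustration of the kind of code this helps (a sketch, not output
from the patch): a floating-point constant whose register is live across
a call can now be recreated with a single instruction instead of being
spilled to the stack and reloaded.

double g (double);

double
f (double x)
{
  /* 0.5 is encodable as an fmov immediate.  A register holding it is
     live across the call to g, so without rematerialization the
     register allocator may spill it; after this patch it can simply
     be recreated with another fmov.  */
  return g (x * 0.5) * 0.5;
}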
SPEC2006 codesize reduces by 0.08%, SPEC2017 by 0.13%.

Bootstrap OK, OK for commit?

ChangeLog:
2017-07-07  Wilco Dijkstra  <wdijk...@arm.com>

	* config/aarch64/aarch64.c (aarch64_legitimate_constant_p):
	Return true for more constants, symbols and label references.
	(aarch64_valid_floating_const): Remove unused function.

--
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index a2eca64a9c13e44d223b5552c079ef4e09659e84..810c17416db01681e99a9eb8cc9f5af137ed2054 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -10173,49 +10173,46 @@ aarch64_legitimate_pic_operand_p (rtx x)
   return true;
 }
 
-/* Return true if X holds either a quarter-precision or
-     floating-point +0.0 constant.  */
-static bool
-aarch64_valid_floating_const (machine_mode mode, rtx x)
-{
-  if (!CONST_DOUBLE_P (x))
-    return false;
-
-  if (aarch64_float_const_zero_rtx_p (x))
-    return true;
-
-  /* We only handle moving 0.0 to a TFmode register.  */
-  if (!(mode == SFmode || mode == DFmode))
-    return false;
-
-  return aarch64_float_const_representable_p (x);
-}
+/* Implement TARGET_LEGITIMATE_CONSTANT_P hook.  Return true for constants
+   that should be rematerialized rather than spilled.  */
 
 static bool
 aarch64_legitimate_constant_p (machine_mode mode, rtx x)
 {
+  /* Support CSE and rematerialization of common constants.  */
+  if (CONST_INT_P (x) || CONST_DOUBLE_P (x) || GET_CODE (x) == CONST_VECTOR)
+    return true;
+
   /* Do not allow vector struct mode constants.  We could support
      0 and -1 easily, but they need support in aarch64-simd.md.  */
-  if (TARGET_SIMD && aarch64_vect_struct_mode_p (mode))
+  if (aarch64_vect_struct_mode_p (mode))
     return false;
 
-  /* This could probably go away because
-     we now decompose CONST_INTs according to expand_mov_immediate.  */
-  if ((GET_CODE (x) == CONST_VECTOR
-       && aarch64_simd_valid_immediate (x, mode, false, NULL))
-      || CONST_INT_P (x) || aarch64_valid_floating_const (mode, x))
-    return !targetm.cannot_force_const_mem (mode, x);
+  /* Do not allow wide int constants - this requires support in movti.  */
+  if (CONST_WIDE_INT_P (x))
+    return false;
 
-  if (GET_CODE (x) == HIGH
-      && aarch64_valid_symref (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
-    return true;
+  /* Do not allow const (plus (anchor_symbol, const_int)).  */
+  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
+    {
+      x = XEXP (XEXP (x, 0), 0);
+      if (SYMBOL_REF_P (x) && SYMBOL_REF_ANCHOR_P (x))
+	return false;
+    }
+
+  if (GET_CODE (x) == HIGH)
+    x = XEXP (x, 0);
 
   /* Treat symbols as constants.  Avoid TLS symbols as they are complex,
      so spilling them is better than rematerialization.  */
   if (SYMBOL_REF_P (x) && !SYMBOL_REF_TLS_MODEL (x))
     return true;
 
-  return aarch64_constant_address_p (x);
+  /* Label references are always constant.  */
+  if (GET_CODE (x) == LABEL_REF)
+    return true;
+
+  return false;
 }
 
 rtx
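For context, a sketch of the anchor case the new check rejects (the
rationale here is my reading, not stated in the patch): with
-fsection-anchors several globals are addressed as offsets from one
shared anchor symbol, i.e. (const (plus (anchor_symbol) (const_int))).
Returning false forces such constants to memory or a spill, so the
sharing of the anchor is not undone by rematerializing each offset
separately.

int a, b;

int
sum (void)
{
  /* With -fsection-anchors, a and b may both be addressed relative to
     a single section anchor; their anchor+offset addresses are the
     constants aarch64_legitimate_constant_p now rejects.  */
  return a + b;
}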