We've implemented the slli + bitwise => bitwise + slli reassociation
in r15-7062.  I'd hoped late combine could then handle

  slli.d + bitwise + add.d => bitwise + slli.d + add.d
                           => bitwise + alsl.d,

but it does not always work.  For example, with

  a |= 0xfff;
  b |= 0xfff;
  a <<= 2;
  b <<= 2;
  a += x;
  b += y;

the compiler attempts to "CSE" the materialization of the constant
(0xfff << 2), breaking our expectation.
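That is, for the snippet above we want something like this (register
numbers are only illustrative, not taken from an actual build):

  ori     $r12, $r4, 0xfff       # a | 0xfff
  ori     $r13, $r5, 0xfff       # b | 0xfff
  alsl.d  $r4, $r12, $r6, 2      # ((a | 0xfff) << 2) + x
  alsl.d  $r5, $r13, $r7, 2      # ((b | 0xfff) << 2) + y

but once the shared constant 0x3ffc is forced into a register, the
bitwise operation can no longer be folded back under the shift (the
reassociation pattern needs a const_int operand), and each value falls
back to something like a separate slli.d/or/add.d sequence.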
I tried to improve the cost model to unbreak this, but my attempts
didn't work.  So add a dedicated pattern to at least make these cases
work.

This is based on
https://gcc.gnu.org/pipermail/gcc-patches/2025-January/674263.html
with the uarch fusion stuff stripped off.  Based on the limited
information currently available, I guess a cleaner solution for uarch
fusion would be to use TARGET_SCHED_MACRO_FUSION_PAIR_P to keep
bstrpick and slli.d together, and then use a peephole2 to rewrite the
slli.d to alsl.d.  We can do that later anyway.

gcc/ChangeLog:

        * config/loongarch/loongarch.md (<optab>_alsl_reverse<X:mode>):
        New define_insn_and_split.

gcc/testsuite/ChangeLog:

        * gcc.target/loongarch/bitwise-shift-reassoc-dual.c: New test.
        * gcc.target/loongarch/bstrpick_alsl_paired.c (scan-rtl-dump):
        Match and_alsl_reversedi instead of and_shift_reversedi.
---

Bootstrapped and regtested on loongarch64-linux-gnu.  Ok for trunk?

 gcc/config/loongarch/loongarch.md                  | 38 ++++++++++++++++---
 .../loongarch/bitwise-shift-reassoc-dual.c         | 18 +++++++++
 .../loongarch/bstrpick_alsl_paired.c               |  2 +-
 3 files changed, 52 insertions(+), 6 deletions(-)
 create mode 100644 gcc/testsuite/gcc.target/loongarch/bitwise-shift-reassoc-dual.c

diff --git a/gcc/config/loongarch/loongarch.md b/gcc/config/loongarch/loongarch.md
index f01b143db43..478f859051c 100644
--- a/gcc/config/loongarch/loongarch.md
+++ b/gcc/config/loongarch/loongarch.md
@@ -3157,11 +3157,39 @@ (define_insn_and_split "<optab>_shift_reverse<X:mode>"
     }
 })
 
-;; The late_combine2 pass can handle slli.d + add.d => alsl.d, so we
-;; already have slli.d + any_bitwise + add.d => any_bitwise + slli.d +
-;; add.d => any_bitwise + alsl.d.  But late_combine2 cannot handle slli.d +
-;; add.w => alsl.w, so implement slli.d + and + add.w => and + alsl.w on
-;; our own.
+;; The late_combine2 pass can handle slli.d + add.d => alsl.d, but it does
+;; not seem to cover all cases.  So do this on our own.
+(define_insn_and_split "<optab>_alsl_reverse<X:mode>"
+  [(set (match_operand:X 0 "register_operand" "=&r")
+        (plus:X
+          (any_bitwise:X
+            (ashift:X (match_operand:X 1 "register_operand" "r0")
+                      (match_operand:SI 2 "const_immalsl_operand" "i"))
+            (match_operand:X 3 "const_int_operand" "i"))
+          (match_operand:X 4 "register_operand" "r")))]
+  "loongarch_reassoc_shift_bitwise (<is_and>, operands[2], operands[3],
+                                    <MODE>mode)"
+  "#"
+  "&& reload_completed"
+  [(set (match_dup 0) (any_bitwise:X (match_dup 1) (match_dup 3)))
+   (set (match_dup 0) (plus:X (ashift:X (match_dup 0) (match_dup 2))
+                              (match_dup 4)))]
+  {
+    operands[3] = loongarch_reassoc_shift_bitwise (<is_and>,
+                                                   operands[2],
+                                                   operands[3],
+                                                   <MODE>mode);
+
+    if (ins_zero_bitmask_operand (operands[3], <MODE>mode))
+      {
+        gcc_checking_assert (<is_and>);
+        emit_move_insn (operands[0], operands[1]);
+        operands[1] = operands[0];
+      }
+  })
+
+;; Likewise for slli.d + and + add.w => and + alsl.w; note that late
+;; combine cannot help with this at all.
 (define_insn_and_split "<optab>_alsl_reversesi_extended"
   [(set (match_operand:DI 0 "register_operand" "=&r")
         (sign_extend:DI
diff --git a/gcc/testsuite/gcc.target/loongarch/bitwise-shift-reassoc-dual.c b/gcc/testsuite/gcc.target/loongarch/bitwise-shift-reassoc-dual.c
new file mode 100644
index 00000000000..ad66daf6caa
--- /dev/null
+++ b/gcc/testsuite/gcc.target/loongarch/bitwise-shift-reassoc-dual.c
@@ -0,0 +1,18 @@
+/* { dg-do compile } */
+/* { dg-options "-O2" } */
+/* { dg-final { scan-assembler-times "bstrpick\\.\[wd\]" 2 } } */
+/* { dg-final { scan-assembler-times "alsl\\.\[wd\]" 2 } } */
+
+struct Pair { unsigned long a, b; };
+
+struct Pair
+test (struct Pair p, unsigned long x)
+{
+  p.a &= 0xfffffff;
+  p.a <<= 2;
+  p.a += x;
+  p.b &= 0xfffffff;
+  p.b <<= 2;
+  p.b += x;
+  return p;
+}
diff --git a/gcc/testsuite/gcc.target/loongarch/bstrpick_alsl_paired.c b/gcc/testsuite/gcc.target/loongarch/bstrpick_alsl_paired.c
index 900e8c9e19f..166565275da 100644
--- a/gcc/testsuite/gcc.target/loongarch/bstrpick_alsl_paired.c
+++ b/gcc/testsuite/gcc.target/loongarch/bstrpick_alsl_paired.c
@@ -1,6 +1,6 @@
 /* { dg-do compile } */
 /* { dg-options "-mabi=lp64d -O2 -fdump-rtl-combine" } */
-/* { dg-final { scan-rtl-dump "{and_shift_reversedi}" "combine" } } */
+/* { dg-final { scan-rtl-dump "{and_alsl_reversedi}" "combine" } } */
 /* { dg-final { scan-assembler-not "alsl.d\t\\\$r\[0-9\]+,\\\$r\[0-9\]+,\\\$r0" } } */
 
 struct SA
-- 
2.48.1