https://gcc.gnu.org/g:91f00a75536a553b5f95ea80694d655c5ddf34af
commit r16-3251-g91f00a75536a553b5f95ea80694d655c5ddf34af
Author: Xi Ruoyao <xry...@xry111.site>
Date:   Sat Mar 1 11:46:44 2025 +0800

    LoongArch: Allow using bstrins for masking the address in atomic_test_and_set

    We can use bstrins for masking the address here.  As people are
    already working on LA32R (which lacks bstrins instructions), for
    future-proofing we check whether (const_int -4) is an and_operand
    and force it into a register if not.

    gcc/ChangeLog:

            * config/loongarch/sync.md (atomic_test_and_set): Use bstrins
            for masking the address if possible.

Diff:
---
 gcc/config/loongarch/sync.md | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/gcc/config/loongarch/sync.md b/gcc/config/loongarch/sync.md
index 63929abba457..01346a79da22 100644
--- a/gcc/config/loongarch/sync.md
+++ b/gcc/config/loongarch/sync.md
@@ -356,12 +356,13 @@
   rtx mem = operands[1];
   rtx model = operands[2];
   rtx addr = force_reg (Pmode, XEXP (mem, 0));
-  rtx tmp_reg = gen_reg_rtx (Pmode);
-  rtx zero_reg = gen_rtx_REG (Pmode, 0);
-
+  rtx mask = gen_int_mode (-4, Pmode);
   rtx aligned_addr = gen_reg_rtx (Pmode);
-  emit_move_insn (tmp_reg, gen_rtx_PLUS (Pmode, zero_reg, GEN_INT (-4)));
-  emit_move_insn (aligned_addr, gen_rtx_AND (Pmode, addr, tmp_reg));
+
+  if (!and_operand (mask, Pmode))
+    mask = force_reg (Pmode, mask);
+
+  emit_move_insn (aligned_addr, gen_rtx_AND (Pmode, addr, mask));
 
   rtx aligned_mem = change_address (mem, SImode, aligned_addr);
   set_mem_alias_set (aligned_mem, 0);
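For context (not part of the patch): the atomic_test_and_set expander operates
on the aligned 32-bit word containing the target byte, so it must clear the
low two bits of the address, which is the AND with -4 that this change lets
bstrins perform directly instead of first materializing -4 in a register.
A minimal C test case that exercises this expander might look like the sketch
below; the variable names are illustrative assumptions, and the exact
instruction sequence emitted is whatever the target expander produces, not
something quoted from the patch.

    /* Illustrative only: __atomic_test_and_set / atomic_flag operates on a
       single byte, which the LoongArch expander implements on the enclosing
       aligned SImode word, hence the address masking in sync.md.  */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    static atomic_flag lock = ATOMIC_FLAG_INIT;

    int
    main (void)
    {
      /* First call returns false (flag was clear), second returns true.  */
      bool was_set
        = atomic_flag_test_and_set_explicit (&lock, memory_order_acquire);
      printf ("first: %d\n", was_set);
      was_set
        = atomic_flag_test_and_set_explicit (&lock, memory_order_acquire);
      printf ("second: %d\n", was_set);
      atomic_flag_clear (&lock);
      return 0;
    }

Inspecting the generated assembly of such a test (e.g. with -O2 -S) is one way
to see whether the address masking is done with a bstrins-style instruction or
with a separate constant load and AND, as on targets like LA32R where the
constant is forced into a register instead.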