On Mon, Jun 21, 2021 at 09:18:28AM +0200, Uros Bizjak via Gcc-patches wrote:
> > 2021-06-20  Roger Sayle  <[email protected]>
> >
> > gcc/ChangeLog
> > 	PR target/11877
> > 	* config/i386/i386.md: New define_peephole2s to shrink writing
> > 	1, 2 or 4 consecutive zeros to memory when optimizing for size.
> >
> > gcc/testsuite/ChangeLog
> > 	PR target/11877
> > 	* gcc.target/i386/pr11877.c: New test case.
>
> OK.
That patch unfortunately doesn't extend well to larger memory clearing: each
peephole2 application allocates its own match_scratch register and has no way
of knowing that a register chosen earlier still holds zero, so separate groups
of stores each end up with their own xor.  Consider e.g.
void
foo (int *p)
{
  p[0] = 0;
  p[7] = 0;
  p[23] = 0;
  p[41] = 0;
  p[48] = 0;
  p[59] = 0;
  p[69] = 0;
  p[78] = 0;
  p[83] = 0;
  p[89] = 0;
  p[98] = 0;
  p[121] = 0;
  p[132] = 0;
  p[143] = 0;
  p[154] = 0;
}
where with the patch we emit:
	xorl	%eax, %eax
	xorl	%edx, %edx
	xorl	%ecx, %ecx
	xorl	%esi, %esi
	xorl	%r8d, %r8d
	movl	%eax, (%rdi)
	movl	%eax, 28(%rdi)
	movl	%eax, 92(%rdi)
	movl	%eax, 164(%rdi)
	movl	%edx, 192(%rdi)
	movl	%edx, 236(%rdi)
	movl	%edx, 276(%rdi)
	movl	%edx, 312(%rdi)
	movl	%ecx, 332(%rdi)
	movl	%ecx, 356(%rdi)
	movl	%ecx, 392(%rdi)
	movl	%ecx, 484(%rdi)
	movl	%esi, 528(%rdi)
	movl	%esi, 572(%rdi)
	movl	%r8d, 616(%rdi)
Here is an incremental (so far untested) patch that emits:
	xorl	%eax, %eax
	movl	%eax, (%rdi)
	movl	%eax, 28(%rdi)
	movl	%eax, 92(%rdi)
	movl	%eax, 164(%rdi)
	movl	%eax, 192(%rdi)
	movl	%eax, 236(%rdi)
	movl	%eax, 276(%rdi)
	movl	%eax, 312(%rdi)
	movl	%eax, 332(%rdi)
	movl	%eax, 356(%rdi)
	movl	%eax, 392(%rdi)
	movl	%eax, 484(%rdi)
	movl	%eax, 528(%rdi)
	movl	%eax, 572(%rdi)
	movl	%eax, 616(%rdi)
instead.  The new define_peephole2 patterns key off a store of a register
that is already known to hold zero: ix86_zero_stores_peep2_p walks backwards
over up to 512 preceding insns in the block, accepting further zero stores
from that same register until it reaches the insn that cleared it.

2021-06-21  Jakub Jelinek  <[email protected]>

	PR target/11877
	* config/i386/i386-protos.h (ix86_zero_stores_peep2_p): Declare.
	* config/i386/i386.c (ix86_zero_stores_peep2_p): New function.
	* config/i386/i386.md (peephole2s for 1/2/4 stores of const0_rtx):
	Remove "" from match_operand.  Add peephole2s for 1/2/4 stores of
	const0_rtx following previous successful peep2s.
--- gcc/config/i386/i386-protos.h.jj 2021-06-07 09:24:57.696690116 +0200
+++ gcc/config/i386/i386-protos.h 2021-06-21 10:21:05.428887980 +0200
@@ -111,6 +111,7 @@ extern bool ix86_use_lea_for_mov (rtx_in
 extern bool ix86_avoid_lea_for_addr (rtx_insn *, rtx[]);
 extern void ix86_split_lea_for_addr (rtx_insn *, rtx[], machine_mode);
 extern bool ix86_lea_for_add_ok (rtx_insn *, rtx[]);
+extern bool ix86_zero_stores_peep2_p (rtx_insn *, rtx);
 extern bool ix86_vec_interleave_v2df_operator_ok (rtx operands[3], bool high);
 extern bool ix86_dep_by_shift_count (const_rtx set_insn, const_rtx use_insn);
 extern bool ix86_agi_dependent (rtx_insn *set_insn, rtx_insn *use_insn);
--- gcc/config/i386/i386.c.jj 2021-06-21 09:39:21.622487840 +0200
+++ gcc/config/i386/i386.c 2021-06-21 10:21:12.389794740 +0200
@@ -15186,6 +15186,33 @@ ix86_lea_for_add_ok (rtx_insn *insn, rtx
   return ix86_lea_outperforms (insn, regno0, regno1, regno2, 0, false);
 }
 
+/* Return true if the insns before FIRST_INSN (which is itself of the
+   form (set (memory) (zero_operand))) are all also either of that
+   form, or (set (zero_operand) (const_int 0)).  */
+
+bool
+ix86_zero_stores_peep2_p (rtx_insn *first_insn, rtx zero_operand)
+{
+  rtx_insn *insn = first_insn;
+  for (int count = 0; count < 512; count++)
+    {
+      insn = prev_nonnote_nondebug_insn_bb (insn);
+      if (!insn)
+	return false;
+      rtx set = single_set (insn);
+      if (!set)
+	return false;
+      if (SET_SRC (set) == const0_rtx
+	  && rtx_equal_p (SET_DEST (set), zero_operand))
+	return true;
+      if (set != PATTERN (insn)
+	  || !rtx_equal_p (SET_SRC (set), zero_operand)
+	  || !memory_operand (SET_DEST (set), VOIDmode))
+	return false;
+    }
+  return false;
+}
+
 /* Return true if destination reg of SET_BODY is shift count of
    USE_BODY.  */
 
--- gcc/config/i386/i386.md.jj 2021-06-21 09:42:04.086303699 +0200
+++ gcc/config/i386/i386.md 2021-06-21 10:21:31.932532964 +0200
@@ -19360,10 +19360,10 @@ (define_peephole2
 ;; When optimizing for size, zeroing memory should use a register.
 (define_peephole2
   [(match_scratch:SWI48 0 "r")
-   (set (match_operand:SWI48 1 "memory_operand" "") (const_int 0))
-   (set (match_operand:SWI48 2 "memory_operand" "") (const_int 0))
-   (set (match_operand:SWI48 3 "memory_operand" "") (const_int 0))
-   (set (match_operand:SWI48 4 "memory_operand" "") (const_int 0))]
+   (set (match_operand:SWI48 1 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 2 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 3 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 4 "memory_operand") (const_int 0))]
   "optimize_insn_for_size_p () && peep2_regno_dead_p (0, FLAGS_REG)"
   [(set (match_dup 1) (match_dup 0))
    (set (match_dup 2) (match_dup 0))
@@ -19375,8 +19375,8 @@ (define_peephole2
 
 (define_peephole2
   [(match_scratch:SWI48 0 "r")
-   (set (match_operand:SWI48 1 "memory_operand" "") (const_int 0))
-   (set (match_operand:SWI48 2 "memory_operand" "") (const_int 0))]
+   (set (match_operand:SWI48 1 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 2 "memory_operand") (const_int 0))]
   "optimize_insn_for_size_p () && peep2_regno_dead_p (0, FLAGS_REG)"
   [(set (match_dup 1) (match_dup 0))
    (set (match_dup 2) (match_dup 0))]
@@ -19386,13 +19386,48 @@ (define_peephole2
 
 (define_peephole2
   [(match_scratch:SWI48 0 "r")
-   (set (match_operand:SWI48 1 "memory_operand" "") (const_int 0))]
+   (set (match_operand:SWI48 1 "memory_operand") (const_int 0))]
   "optimize_insn_for_size_p () && peep2_regno_dead_p (0, FLAGS_REG)"
   [(set (match_dup 1) (match_dup 0))]
 {
   ix86_expand_clear (operands[0]);
 })
 
+(define_peephole2
+  [(set (match_operand:SWI48 5 "memory_operand")
+	(match_operand:SWI48 0 "general_reg_operand"))
+   (set (match_operand:SWI48 1 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 2 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 3 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 4 "memory_operand") (const_int 0))]
+  "optimize_insn_for_size_p ()
+   && ix86_zero_stores_peep2_p (peep2_next_insn (0), operands[0])"
+  [(set (match_dup 5) (match_dup 0))
+   (set (match_dup 1) (match_dup 0))
+   (set (match_dup 2) (match_dup 0))
+   (set (match_dup 3) (match_dup 0))
+   (set (match_dup 4) (match_dup 0))])
+
+(define_peephole2
+  [(set (match_operand:SWI48 3 "memory_operand")
+	(match_operand:SWI48 0 "general_reg_operand"))
+   (set (match_operand:SWI48 1 "memory_operand") (const_int 0))
+   (set (match_operand:SWI48 2 "memory_operand") (const_int 0))]
+  "optimize_insn_for_size_p ()
+   && ix86_zero_stores_peep2_p (peep2_next_insn (0), operands[0])"
+  [(set (match_dup 3) (match_dup 0))
+   (set (match_dup 1) (match_dup 0))
+   (set (match_dup 2) (match_dup 0))])
+
+(define_peephole2
+  [(set (match_operand:SWI48 2 "memory_operand")
+	(match_operand:SWI48 0 "general_reg_operand"))
+   (set (match_operand:SWI48 1 "memory_operand") (const_int 0))]
+  "optimize_insn_for_size_p ()
+   && ix86_zero_stores_peep2_p (peep2_next_insn (0), operands[0])"
+  [(set (match_dup 2) (match_dup 0))
+   (set (match_dup 1) (match_dup 0))])
+
 ;; Reload dislikes loading constants directly into class_likely_spilled
 ;; hard registers.  Try to tidy things up here.
 (define_peephole2
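
BTW, a testcase along these lines could verify that the whole sequence in
foo above is now served by a single xor; this is just a sketch (the file
name, options and scan pattern are my guesses, not part of the patch):

/* PR target/11877 */
/* { dg-do compile } */
/* { dg-options "-Os" } */
/* With the chained peephole2s, all fifteen zero stores should reuse
   one cleared register, so expect exactly one xor.  */
/* { dg-final { scan-assembler-times "xorl" 1 } } */

void
foo (int *p)
{
  p[0] = 0;
  p[7] = 0;
  p[23] = 0;
  p[41] = 0;
  p[48] = 0;
  p[59] = 0;
  p[69] = 0;
  p[78] = 0;
  p[83] = 0;
  p[89] = 0;
  p[98] = 0;
  p[121] = 0;
  p[132] = 0;
  p[143] = 0;
  p[154] = 0;
}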
Jakub