The AArch64 instruction patterns for atomic operations on memory use the same immediate constraints for the arithmetic and the logical operations, even though the arithmetic instructions accept a more restricted range of immediates. An immediate that is only valid for the logical operations can therefore reach an arithmetic pattern, causing an ICE in some circumstances.
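For reference, a minimal sketch of the failure mode, distilled from the new tests added by this patch; the variable and function names here are illustrative only and are not part of the patch:

  /* 536870912 (0x20000000) is a valid AArch64 logical immediate but not
     a valid add/sub immediate, so the shared 'lconst_atomic' constraint
     let it reach the arithmetic patterns.  */
  long a;
  int v;

  void
  trigger (void)
  {
    /* The kind of operation that ICEd (see pr67143.c below).  */
    __sync_add_and_fetch (&a, 536870912);
    /* The new aarch64 tests also cover negative arithmetic immediates.  */
    __atomic_fetch_add (&v, -4096, __ATOMIC_RELAXED);
  }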
This patch backports the fix from trunk to the GCC-5 branch.  The
original patch is at
https://gcc.gnu.org/ml/gcc-patches/2015-08/msg00518.html and the commit
is at https://gcc.gnu.org/ml/gcc-cvs/2015-08/msg00437.html.

Tested for aarch64-none-elf with cross-compiled check-gcc and for
aarch64-none-linux-gnu with native bootstrap and make check.

Ok for the branch?
Matthew

gcc/
2015-09-09  Matthew Wahab  <matthew.wa...@arm.com>

	Backport from mainline
	2015-08-14  Matthew Wahab  <matthew.wa...@arm.com>

	PR target/67143
	* config/aarch64/atomics.md (atomic_<optab><mode>): Replace
	'lconst_atomic' with 'const_atomic'.
	(atomic_fetch_<optab><mode>): Likewise.
	(atomic_<optab>_fetch<mode>): Likewise.
	* config/aarch64/iterators.md (lconst_atomic): Move below
	'const_atomic'.
	(const_atomic): New.

gcc/testsuite/
2015-09-09  Matthew Wahab  <matthew.wa...@arm.com>

	Backport from mainline
	2015-08-14  Matthew Wahab  <matthew.wa...@arm.com>
		    Matthias Klose  <d...@debian.org>

	PR target/67143
	* gcc.c-torture/compile/pr67143.c: New.
	* gcc.target/aarch64/atomic-op-imm.c
	(atomic_fetch_add_negative_RELAXED): New.
	(atomic_fetch_sub_negative_ACQUIRE): New.
diff --git a/gcc/config/aarch64/atomics.md b/gcc/config/aarch64/atomics.md
index 1a38ac0..6e6be99 100644
--- a/gcc/config/aarch64/atomics.md
+++ b/gcc/config/aarch64/atomics.md
@@ -119,7 +119,7 @@
  [(set (match_operand:ALLI 0 "aarch64_sync_memory_operand" "+Q")
     (unspec_volatile:ALLI
       [(atomic_op:ALLI (match_dup 0)
-	(match_operand:ALLI 1 "<atomic_op_operand>" "r<lconst_atomic>"))
+	(match_operand:ALLI 1 "<atomic_op_operand>" "r<const_atomic>"))
        (match_operand:SI 2 "const_int_operand")]	;; model
       UNSPECV_ATOMIC_OP))
    (clobber (reg:CC CC_REGNUM))
@@ -164,7 +164,7 @@
    (set (match_dup 1)
     (unspec_volatile:ALLI
       [(atomic_op:ALLI (match_dup 1)
-	(match_operand:ALLI 2 "<atomic_op_operand>" "r<lconst_atomic>"))
+	(match_operand:ALLI 2 "<atomic_op_operand>" "r<const_atomic>"))
        (match_operand:SI 3 "const_int_operand")]	;; model
       UNSPECV_ATOMIC_OP))
    (clobber (reg:CC CC_REGNUM))
@@ -209,7 +209,7 @@
  [(set (match_operand:ALLI 0 "register_operand" "=&r")
     (atomic_op:ALLI
      (match_operand:ALLI 1 "aarch64_sync_memory_operand" "+Q")
-     (match_operand:ALLI 2 "<atomic_op_operand>" "r<lconst_atomic>")))
+     (match_operand:ALLI 2 "<atomic_op_operand>" "r<const_atomic>")))
    (set (match_dup 1)
     (unspec_volatile:ALLI
       [(match_dup 1) (match_dup 2)
diff --git a/gcc/config/aarch64/iterators.md b/gcc/config/aarch64/iterators.md
index 498358a..7c8bbfa 100644
--- a/gcc/config/aarch64/iterators.md
+++ b/gcc/config/aarch64/iterators.md
@@ -342,9 +342,6 @@
 ;; Attribute to describe constants acceptable in logical operations
 (define_mode_attr lconst [(SI "K") (DI "L")])
 
-;; Attribute to describe constants acceptable in atomic logical operations
-(define_mode_attr lconst_atomic [(QI "K") (HI "K") (SI "K") (DI "L")])
-
 ;; Map a mode to a specific constraint character.
 (define_mode_attr cmode [(QI "q") (HI "h") (SI "s") (DI "d")])
 
@@ -845,6 +842,16 @@
    (plus "aarch64_plus_operand")
    (minus "aarch64_plus_operand")])
 
+;; Constants acceptable for atomic operations.
+;; This definition must appear in this file before the iterators it refers to.
+(define_code_attr const_atomic
+ [(plus "IJ") (minus "IJ")
+  (xor "<lconst_atomic>") (ior "<lconst_atomic>")
+  (and "<lconst_atomic>")])
+
+;; Attribute to describe constants acceptable in atomic logical operations
+(define_mode_attr lconst_atomic [(QI "K") (HI "K") (SI "K") (DI "L")])
+
 ;; -------------------------------------------------------------------
 ;; Int Iterators.
 ;; -------------------------------------------------------------------
diff --git a/gcc/testsuite/gcc.c-torture/compile/pr67143.c b/gcc/testsuite/gcc.c-torture/compile/pr67143.c
new file mode 100644
index 0000000..62c4186
--- /dev/null
+++ b/gcc/testsuite/gcc.c-torture/compile/pr67143.c
@@ -0,0 +1,21 @@
+long a, c;
+int b;
+int d;
+void ut_dbg_assertion_failed() __attribute__((noreturn));
+long dict_index_is_spatial(int *);
+void btr_block_get_func(char *);
+long btr_page_get_level_low(unsigned char *);
+void btr_validate_level(long p1) {
+  unsigned char *e;
+  while (p1 != btr_page_get_level_low(e)) {
+    if (__builtin_expect(b, 0))
+      ut_dbg_assertion_failed();
+    if (dict_index_is_spatial(&d))
+      while (c != 5535) {
+        __sync_add_and_fetch(&a, 536870912);
+        btr_block_get_func("");
+      }
+  }
+  for (long i; i; ++i)
+    btr_validate_level(-i);
+}
diff --git a/gcc/testsuite/gcc.target/aarch64/atomic-op-imm.c b/gcc/testsuite/gcc.target/aarch64/atomic-op-imm.c
index 6c6f7e1..47d7a96 100644
--- a/gcc/testsuite/gcc.target/aarch64/atomic-op-imm.c
+++ b/gcc/testsuite/gcc.target/aarch64/atomic-op-imm.c
@@ -16,6 +16,18 @@ atomic_fetch_sub_ACQUIRE ()
 }
 
 int
+atomic_fetch_add_negative_RELAXED ()
+{
+  return __atomic_fetch_add (&v, -4096, __ATOMIC_RELAXED);
+}
+
+int
+atomic_fetch_sub_negative_ACQUIRE ()
+{
+  return __atomic_fetch_sub (&v, -4096, __ATOMIC_ACQUIRE);
+}
+
+int
 atomic_fetch_and_SEQ_CST ()
 {
   return __atomic_fetch_and (&v, 4096, __ATOMIC_SEQ_CST);
@@ -75,4 +87,4 @@ atomic_or_fetch_CONSUME ()
   return __atomic_or_fetch (&v, 4096, __ATOMIC_CONSUME);
 }
 
-/* { dg-final { scan-assembler-times "\tw\[0-9\]+, w\[0-9\]+, #*4096" 12 } } */
+/* { dg-final { scan-assembler-times "\tw\[0-9\]+, w\[0-9\]+, #*4096" 14 } } */