This also makes the existing intrinsics tests exercise the new patterns.
Tested on aarch64-none-elf.
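
For reference, the rewrite relies on GCC's generic vector-extension
semantics: a lane-wise comparison yields all-ones (-1) for a true lane and
0 for a false one, which is exactly the mask the old element-wise ?: form
built by hand.  A minimal standalone sketch of the idiom (illustration
only, not part of the patch; v1di and the function names are made up here):

  /* One 64-bit lane, like int64x1_t.  */
  typedef long long v1di __attribute__ ((vector_size (8)));

  /* Each lane of the result is (a[i] == b[i]) ? -1 : 0, so the vector
     comparison is a drop-in replacement for the old scalar form.  */
  v1di
  eq_mask (v1di a, v1di b)
  {
    return a == b;
  }

  /* vtst uses the same idea: the bit-test becomes a comparison of
     (a & b) against zero, with the scalar 0 broadcast across lanes.  */
  v1di
  tst_mask (v1di a, v1di b)
  {
    return (a & b) != 0;
  }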
gcc/ChangeLog:

	* config/aarch64/arm_neon.h (vceq_s64, vceq_u64, vceqz_s64, vceqz_u64,
	vcge_s64, vcge_u64, vcgez_s64, vcgt_s64, vcgt_u64, vcgtz_s64, vcle_s64,
	vcle_u64, vclez_s64, vclt_s64, vclt_u64, vcltz_s64, vtst_s64,
	vtst_u64): Rewrite using gcc vector extensions.

gcc/testsuite/ChangeLog:

	* gcc.target/aarch64/singleton_intrinsics_1.c: Generalize regex to
	allow cmlt or sshr.
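
The regex change reflects that, for a 64-bit lane, an arithmetic shift
right by 63 broadcasts the sign bit and therefore produces the same
all-ones/all-zero mask as a compare-less-than-zero, so either instruction
is a correct match for the scan.  A scalar sketch of the equivalence
(illustration only; right-shifting a negative value is
implementation-defined in ISO C but is an arithmetic shift in GCC):

  /* -1 if x < 0, else 0: the same mask cmlt #0 or sshr #63 computes on a
     D register.  */
  long long
  ltz_mask (long long x)
  {
    return x >> 63;
  }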
diff --git a/gcc/config/aarch64/arm_neon.h b/gcc/config/aarch64/arm_neon.h
index 319cd8c1a0a441831a037e9c063badce7565f97c..02cdc7852d92e30e38c9c62ed09137b0d96cf6a6 100644
--- a/gcc/config/aarch64/arm_neon.h
+++ b/gcc/config/aarch64/arm_neon.h
@@ -12367,7 +12367,7 @@ vceq_s32 (int32x2_t __a, int32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceq_s64 (int64x1_t __a, int64x1_t __b)
{
- return (uint64x1_t) {__a[0] == __b[0] ? -1ll : 0ll};
+ return (uint64x1_t) (__a == __b);
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
@@ -12391,7 +12391,7 @@ vceq_u32 (uint32x2_t __a, uint32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceq_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) {__a[0] == __b[0] ? -1ll : 0ll};
+ return (__a == __b);
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -12527,7 +12527,7 @@ vceqz_s32 (int32x2_t __a)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceqz_s64 (int64x1_t __a)
{
- return (uint64x1_t) {__a[0] == 0ll ? -1ll : 0ll};
+ return (uint64x1_t) (__a == __AARCH64_INT64_C (0));
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
@@ -12551,7 +12551,7 @@ vceqz_u32 (uint32x2_t __a)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vceqz_u64 (uint64x1_t __a)
{
- return (uint64x1_t) {__a[0] == 0ll ? -1ll : 0ll};
+ return (__a == __AARCH64_UINT64_C (0));
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -12681,7 +12681,7 @@ vcge_s32 (int32x2_t __a, int32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcge_s64 (int64x1_t __a, int64x1_t __b)
{
- return (uint64x1_t) {__a[0] >= __b[0] ? -1ll : 0ll};
+ return (uint64x1_t) (__a >= __b);
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
@@ -12705,7 +12705,7 @@ vcge_u32 (uint32x2_t __a, uint32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcge_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) {__a[0] >= __b[0] ? -1ll : 0ll};
+ return (__a >= __b);
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -12829,7 +12829,7 @@ vcgez_s32 (int32x2_t __a)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgez_s64 (int64x1_t __a)
{
- return (uint64x1_t) {__a[0] >= 0ll ? -1ll : 0ll};
+ return (uint64x1_t) (__a >= __AARCH64_INT64_C (0));
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -12923,7 +12923,7 @@ vcgt_s32 (int32x2_t __a, int32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgt_s64 (int64x1_t __a, int64x1_t __b)
{
- return (uint64x1_t) (__a[0] > __b[0] ? -1ll : 0ll);
+ return (uint64x1_t) (__a > __b);
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
@@ -12947,7 +12947,7 @@ vcgt_u32 (uint32x2_t __a, uint32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgt_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) (__a[0] > __b[0] ? -1ll : 0ll);
+ return (__a > __b);
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -13071,7 +13071,7 @@ vcgtz_s32 (int32x2_t __a)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcgtz_s64 (int64x1_t __a)
{
- return (uint64x1_t) {__a[0] > 0ll ? -1ll : 0ll};
+ return (uint64x1_t) (__a > __AARCH64_INT64_C (0));
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -13165,7 +13165,7 @@ vcle_s32 (int32x2_t __a, int32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcle_s64 (int64x1_t __a, int64x1_t __b)
{
- return (uint64x1_t) {__a[0] <= __b[0] ? -1ll : 0ll};
+ return (uint64x1_t) (__a <= __b);
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
@@ -13189,7 +13189,7 @@ vcle_u32 (uint32x2_t __a, uint32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcle_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) {__a[0] <= __b[0] ? -1ll : 0ll};
+ return (__a <= __b);
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -13313,7 +13313,7 @@ vclez_s32 (int32x2_t __a)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vclez_s64 (int64x1_t __a)
{
- return (uint64x1_t) {__a[0] <= 0ll ? -1ll : 0ll};
+ return (uint64x1_t) (__a <= __AARCH64_INT64_C (0));
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -13407,7 +13407,7 @@ vclt_s32 (int32x2_t __a, int32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vclt_s64 (int64x1_t __a, int64x1_t __b)
{
- return (uint64x1_t) {__a[0] < __b[0] ? -1ll : 0ll};
+ return (uint64x1_t) (__a < __b);
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
@@ -13431,7 +13431,7 @@ vclt_u32 (uint32x2_t __a, uint32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vclt_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) {__a[0] < __b[0] ? -1ll : 0ll};
+ return (__a < __b);
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -13555,7 +13555,7 @@ vcltz_s32 (int32x2_t __a)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vcltz_s64 (int64x1_t __a)
{
- return (uint64x1_t) {__a[0] < 0ll ? -1ll : 0ll};
+ return (uint64x1_t) (__a < __AARCH64_INT64_C (0));
}
__extension__ static __inline uint32x4_t __attribute__ ((__always_inline__))
@@ -24083,7 +24083,7 @@ vtst_s32 (int32x2_t __a, int32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vtst_s64 (int64x1_t __a, int64x1_t __b)
{
- return (uint64x1_t) {(__a[0] & __b[0]) ? -1ll : 0ll};
+ return (uint64x1_t) ((__a & __b) != __AARCH64_INT64_C (0));
}
__extension__ static __inline uint8x8_t __attribute__ ((__always_inline__))
@@ -24107,7 +24107,7 @@ vtst_u32 (uint32x2_t __a, uint32x2_t __b)
__extension__ static __inline uint64x1_t __attribute__ ((__always_inline__))
vtst_u64 (uint64x1_t __a, uint64x1_t __b)
{
- return (uint64x1_t) {(__a[0] & __b[0]) ? -1ll : 0ll};
+ return ((__a & __b) != __AARCH64_UINT64_C (0));
}
__extension__ static __inline uint8x16_t __attribute__ ((__always_inline__))
diff --git a/gcc/testsuite/gcc.target/aarch64/singleton_intrinsics_1.c b/gcc/testsuite/gcc.target/aarch64/singleton_intrinsics_1.c
index 4a0934b01f9442b7f1324a1f4528d45022daf9b8..633a0d24eade982181d972b915f303b06e5087c4 100644
--- a/gcc/testsuite/gcc.target/aarch64/singleton_intrinsics_1.c
+++ b/gcc/testsuite/gcc.target/aarch64/singleton_intrinsics_1.c
@@ -235,8 +235,8 @@ test_vrshl_u64 (uint64x1_t a, int64x1_t b)
return vrshl_u64 (a, b);
}
-/* For int64x1_t, sshr...#63 is output instead of the equivalent cmlt...#0. */
-/* { dg-final { scan-assembler-times "\\tsshr\\td\[0-9\]+" 2 } } */
+/* For int64x1_t, sshr...#63 may be used instead of the equivalent cmlt...#0.  */
+/* { dg-final { scan-assembler-times "\\t(?:sshr|cmlt)\\td\[0-9\]+" 2 } } */
int64x1_t
test_vshr_n_s64 (int64x1_t a)