While working on the `a ^ CST` patch, I noticed that bitwise_inverted_equal_p
checks INTEGER_CST directly and does not handle vector csts that are uniform.
This moves over to using uniform_integer_cst_p instead of checking INTEGER_CST
directly.
Bootstrapped and tested on x86_64-linux-gnu with no regressions.
PR tree-optimization/115238
gcc/ChangeLog:
* generic-match-head.cc (bitwise_inverted_equal_p): Use
uniform_integer_cst_p instead of checking INTEGER_CST.
* gimple-match-head.cc (gimple_bitwise_inverted_equal_p): Likewise.
gcc/testsuite/ChangeLog:
* gcc.dg/tree-ssa/bitops-9.c: New test.
Signed-off-by: Andrew Pinski <[email protected]>
---
gcc/generic-match-head.cc | 6 ++++--
gcc/gimple-match-head.cc | 6 ++++--
gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c | 15 +++++++++++++++
3 files changed, 23 insertions(+), 4 deletions(-)
create mode 100644 gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c
diff --git a/gcc/generic-match-head.cc b/gcc/generic-match-head.cc
index 3709fe5456d..641d8e9b2de 100644
--- a/gcc/generic-match-head.cc
+++ b/gcc/generic-match-head.cc
@@ -146,8 +146,10 @@ bitwise_inverted_equal_p (tree expr1, tree expr2, bool &wascmp)
return false;
if (!tree_nop_conversion_p (TREE_TYPE (expr1), TREE_TYPE (expr2)))
return false;
- if (TREE_CODE (expr1) == INTEGER_CST && TREE_CODE (expr2) == INTEGER_CST)
- return wi::to_wide (expr1) == ~wi::to_wide (expr2);
+ tree cst1 = uniform_integer_cst_p (expr1);
+ tree cst2 = uniform_integer_cst_p (expr2);
+ if (cst1 && cst2)
+ return wi::to_wide (cst1) == ~wi::to_wide (cst2);
if (operand_equal_p (expr1, expr2, 0))
return false;
if (TREE_CODE (expr1) == BIT_NOT_EXPR
diff --git a/gcc/gimple-match-head.cc b/gcc/gimple-match-head.cc
index d5908f4e9a6..e26fa0860ee 100644
--- a/gcc/gimple-match-head.cc
+++ b/gcc/gimple-match-head.cc
@@ -295,8 +295,10 @@ gimple_bitwise_inverted_equal_p (tree expr1, tree expr2, bool &wascmp, tree (*va
return false;
if (!tree_nop_conversion_p (TREE_TYPE (expr1), TREE_TYPE (expr2)))
return false;
- if (TREE_CODE (expr1) == INTEGER_CST && TREE_CODE (expr2) == INTEGER_CST)
- return wi::to_wide (expr1) == ~wi::to_wide (expr2);
+ tree cst1 = uniform_integer_cst_p (expr1);
+ tree cst2 = uniform_integer_cst_p (expr2);
+ if (cst1 && cst2)
+ return wi::to_wide (cst1) == ~wi::to_wide (cst2);
if (operand_equal_p (expr1, expr2, 0))
return false;
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c b/gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c
new file mode 100644
index 00000000000..a18b6bf3214
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/bitops-9.c
@@ -0,0 +1,15 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized-raw" } */
+/* PR tree-optimization/115238 */
+
+
+#define vector8 __attribute__((vector_size(2*sizeof(int))))
+
+void f(int a, vector8 int *b)
+{
+ a = 1;
+ *b = a | ((~a) ^ *b);
+}
+/* { dg-final { scan-tree-dump-not "bit_xor_expr, " "optimized" } } */
+/* { dg-final { scan-tree-dump-times "bit_ior_expr, " 1 "optimized" } } */
+/* { dg-final { scan-tree-dump-times "bit_not_expr, " 1 "optimized" } } */
--
2.43.0