The following adjusts leftover BIT_FIELD_REF special-casing so that it only
covers the cases the general code doesn't handle.
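
For reference, a minimal sketch (not part of the patch; struct S, dst and
copy_whole are made-up names) of roughly the kind of source that exercises
this path: a whole-object copy of a packed bit-field struct, as in the
testcase below, where SRA ends up looking at BIT_FIELD_REF reads of the
source aggregate, the case sra_handled_bf_read_p covers.

/* Illustrative only; not taken from the patch or the testsuite.  */
#pragma pack(1)
struct S { int a : 4; int b : 11; int c : 2; int d : 5; };

struct S dst;

void
copy_whole (struct S src)
{
  /* The aggregate assignment is what SRA decomposes into bit-field
     sized pieces of the source.  */
  dst = src;
}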

Bootstrapped and tested on x86_64-unknown-linux-gnu, pushed.

        PR tree-optimization/111916
        * tree-sra.cc (sra_modify_assign): Do not lower all
        BIT_FIELD_REF reads that are sra_handled_bf_read_p.

        * gcc.dg/torture/pr111916.c: New testcase.
---
 gcc/testsuite/gcc.dg/torture/pr111916.c | 16 ++++++++++++++++
 gcc/tree-sra.cc                         |  3 ++-
 2 files changed, 18 insertions(+), 1 deletion(-)
 create mode 100644 gcc/testsuite/gcc.dg/torture/pr111916.c

diff --git a/gcc/testsuite/gcc.dg/torture/pr111916.c b/gcc/testsuite/gcc.dg/torture/pr111916.c
new file mode 100644
index 00000000000..2873045aaa4
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr111916.c
@@ -0,0 +1,16 @@
+/* { dg-do run } */
+
+#pragma pack(1)
+struct A {
+  int b : 4;
+  int c : 11;
+  int d : 2;
+  int e : 5;
+} f;
+int main()
+{
+  struct A g = {1, 1, 1, 1};
+  while (!g.b)
+    f = g;
+  return 0;
+}
diff --git a/gcc/tree-sra.cc b/gcc/tree-sra.cc
index f8dff8b27d7..b985dee6964 100644
--- a/gcc/tree-sra.cc
+++ b/gcc/tree-sra.cc
@@ -4275,7 +4275,8 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
 
   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
-      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
+      || (TREE_CODE (rhs) == BIT_FIELD_REF && !sra_handled_bf_read_p (rhs))
+      || TREE_CODE (lhs) == BIT_FIELD_REF)
     {
       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
                                          gsi, false);
-- 
2.35.3
