The following avoids lowering a volatile bitfield access.  It also
scraps the if-conversion result when, because of enabled simplifications,
the if-converted and original loops end up in different outer loops,
since that is not how the vectorizer expects the loops to be laid out.
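To illustrate why the first hunk bails out on volatile bitfield accesses,
here is a rough sketch (not GCC code; the 3-bit field, the little-endian bit
layout and the manual extraction are assumptions made for the example) of
what lowering a bitfield read to a load of its representative conceptually
amounts to:

struct s
{
  volatile signed b : 3;	/* volatile bitfield, as in the PR testcase */
};

/* Original access: a single volatile read of the 3-bit field.  */
static int
read_field (struct s *p)
{
  return p->b;
}

/* Lowered form (sketch): load the underlying representative and extract
   the field by hand.  */
static int
read_lowered (struct s *p)
{
  volatile unsigned char *rep = (volatile unsigned char *) p;
  unsigned char v = *rep;			/* representative load */
  return ((int) (signed char) (v << 5)) >> 5;	/* sign-extend bits 0..2 */
}

For a non-volatile field the two forms are equivalent and the lowered one is
what the vectorizer can work with; for a volatile field the transformation
would change the width and type of the access, hence the new early-out in
bitfields_to_lower_p.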

Bootstrapped and tested on x86_64-unknown-linux-gnu, pushed.

        PR tree-optimization/114197
        * tree-if-conv.cc (bitfields_to_lower_p): Do not lower if
        there are volatile bitfield accesses.
        (pass_if_conversion::execute): Throw away result if the
        if-converted and original loops are not nested as expected.

        * gcc.dg/torture/pr114197.c: New testcase.
---
 gcc/testsuite/gcc.dg/torture/pr114197.c | 15 +++++++++++++++
 gcc/tree-if-conv.cc                     | 23 +++++++++++++++++++----
 2 files changed, 34 insertions(+), 4 deletions(-)
 create mode 100644 gcc/testsuite/gcc.dg/torture/pr114197.c

diff --git a/gcc/testsuite/gcc.dg/torture/pr114197.c b/gcc/testsuite/gcc.dg/torture/pr114197.c
new file mode 100644
index 00000000000..fb7e2fb712c
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr114197.c
@@ -0,0 +1,15 @@
+/* { dg-do compile } */
+
+#pragma pack(push)
+struct a {
+  volatile signed b : 8;
+};
+#pragma pack(pop)
+int c;
+static struct a d = {5};
+void e() {
+f:
+  for (c = 8; c < 55; ++c)
+    if (!d.b)
+      goto f;
+}
diff --git a/gcc/tree-if-conv.cc b/gcc/tree-if-conv.cc
index db0d0f4a497..09d99fb9dda 100644
--- a/gcc/tree-if-conv.cc
+++ b/gcc/tree-if-conv.cc
@@ -3701,6 +3701,14 @@ bitfields_to_lower_p (class loop *loop,
              if (dump_file && (dump_flags & TDF_DETAILS))
                print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
 
+             if (TREE_THIS_VOLATILE (op))
+               {
+                 if (dump_file && (dump_flags & TDF_DETAILS))
+                   fprintf (dump_file, "\t Bitfield NO OK to lower,"
+                                       " the access is volatile.\n");
+                 return false;
+               }
+
              if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
@@ -4031,20 +4039,27 @@ pass_if_conversion::execute (function *fun)
   if (todo & TODO_update_ssa_any)
     update_ssa (todo & TODO_update_ssa_any);
 
-  /* If if-conversion elided the loop fall back to the original one.  */
+  /* If if-conversion elided the loop fall back to the original one.  Likewise
+     if the loops are not nested in the same outer loop.  */
   for (unsigned i = 0; i < preds.length (); ++i)
     {
       gimple *g = preds[i];
       if (!gimple_bb (g))
        continue;
-      unsigned ifcvt_loop = tree_to_uhwi (gimple_call_arg (g, 0));
-      unsigned orig_loop = tree_to_uhwi (gimple_call_arg (g, 1));
-      if (!get_loop (fun, ifcvt_loop) || !get_loop (fun, orig_loop))
+      auto ifcvt_loop = get_loop (fun, tree_to_uhwi (gimple_call_arg (g, 0)));
+      auto orig_loop = get_loop (fun, tree_to_uhwi (gimple_call_arg (g, 1)));
+      if (!ifcvt_loop || !orig_loop)
        {
          if (dump_file)
            fprintf (dump_file, "If-converted loop vanished\n");
          fold_loop_internal_call (g, boolean_false_node);
        }
+      else if (loop_outer (ifcvt_loop) != loop_outer (orig_loop))
+       {
+         if (dump_file)
+           fprintf (dump_file, "If-converted loop in different outer loop\n");
+         fold_loop_internal_call (g, boolean_false_node);
+       }
     }
 
   return 0;
-- 
2.35.3
