Hi!

The eh pass lowers try { } finally { } statements and in the process handles
e.g. GIMPLE_GOTOs or GIMPLE_CONDs which jump from within the try block to
somewhere outside of it, by redirecting those jumps to an artificial label
with code that performs the cleanups/destructors and then continues the
jump, ultimately to the original label.
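
To illustrate what that means, here is a minimal hand-written sketch (not
part of the patch nor of the testcase below):

/* A goto leaving a scope with a cleanup.  The eh pass can't leave the
   jump going straight to out; it redirects it to an artificial label
   that runs my_cleanup (&x) and only then continues to out.  */
static void my_cleanup (int *p) { (void) p; }

void
f (int flag)
{
  {
    int x __attribute__((cleanup (my_cleanup))) = 0;
    if (flag)
      goto out;   /* GIMPLE_COND/GIMPLE_GOTO jumping out of the try block.  */
  }               /* implicitly try { ... } finally { my_cleanup (&x); }  */
out:
  return;
}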

Now, for computed gotos and non-local gotos we document that we don't
invoke destructors (nor cleanups), because that is something we really
can't handle, and the same holds for longjmp.
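
E.g. nothing changes for a computed goto like the following (again just an
illustrative snippet, where the cleanup is documented not to run):

static void my_cleanup (int *p) { (void) p; }

void
g (void)
{
  void *dest = &&out;
  {
    int x __attribute__((cleanup (my_cleanup))) = 0;
    goto *dest;   /* computed goto: cleanup documented not to be invoked.  */
  }
out:
  return;
}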

This PR is about asm goto though, and in that case I don't see why
we shouldn't be performing the cleanups.  While the user doesn't
specify which particular label will be jumped to, so it is more like
GIMPLE_COND (i.e. conditional goto) than an unconditional GIMPLE_GOTO,
possibly with more than two potential destinations, there is still a
list of the potential labels, and we can adjust some or all of them to
artificial labels which perform the cleanups and continue the jump
towards the user label: we know where the jumps come from (the asm goto)
and where they can go to (the different LABEL_DECLs).
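
Roughly speaking (a hand-written C approximation of the intended lowering
rather than actual compiler output, with made-up label names and assuming
a single cleanup in scope), an asm goto out of such a scope then behaves
as if it had been rewritten to:

static void my_cleanup (int *p) { (void) p; }

void
h (void)
{
  int x = 0;
  asm goto ("" :::: art_lab);   /* label operand rewritten to art_lab */
  my_cleanup (&x);              /* finally sequence on the fallthrough path */
  goto bypass;
art_lab:
  my_cleanup (&x);              /* copy of the finally sequence */
  goto user_lab;                /* continue towards the original user label */
bypass:
  return;
user_lab:
  return;
}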

So, the following patch handles asm goto in the eh pass similarly to
GIMPLE_COND and GIMPLE_GOTO.

Bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

2025-11-25  Jakub Jelinek  <[email protected]>

        PR middle-end/122835
        * tree-eh.cc (replace_goto_queue_1): Handle GIMPLE_ASM.
        (maybe_record_in_goto_queue): Likewise.
        (lower_eh_constructs_2): Likewise.

        * gcc.dg/torture/pr122835.c: New test.

--- gcc/tree-eh.cc.jj   2025-09-04 18:51:34.130709784 +0200
+++ gcc/tree-eh.cc      2025-11-25 17:13:37.724534641 +0100
@@ -517,6 +517,48 @@ replace_goto_queue_1 (gimple *stmt, stru
        }
       break;
 
+    case GIMPLE_ASM:
+      if (int n = gimple_asm_nlabels (as_a <gasm *> (stmt)))
+       {
+         temp.g = stmt;
+         gasm *asm_stmt = as_a <gasm *> (stmt);
+         location_t loc = gimple_location (stmt);
+         tree bypass_label = NULL_TREE;
+         for (int i = 0; i < n; ++i)
+           {
+             tree elt = gimple_asm_label_op (asm_stmt, i);
+             temp.tp = &TREE_VALUE (elt);
+             seq = find_goto_replacement (tf, temp);
+             if (!seq)
+               continue;
+             if (gimple_seq_singleton_p (seq)
+                 && gimple_code (gimple_seq_first_stmt (seq)) == GIMPLE_GOTO)
+               {
+                 TREE_VALUE (elt)
+                   = gimple_goto_dest (gimple_seq_first_stmt (seq));
+                 continue;
+               }
+
+             if (bypass_label == NULL_TREE)
+               {
+                 bypass_label = create_artificial_label (loc);
+                 gsi_insert_after (gsi, gimple_build_goto (bypass_label),
+                                   GSI_CONTINUE_LINKING);
+               }
+
+             tree label = create_artificial_label (loc);
+             TREE_VALUE (elt) = label;
+             gsi_insert_after (gsi, gimple_build_label (label),
+                               GSI_CONTINUE_LINKING);
+             gsi_insert_seq_after (gsi, gimple_seq_copy (seq),
+                                   GSI_CONTINUE_LINKING);
+           }
+         if (bypass_label)
+           gsi_insert_after (gsi, gimple_build_label (bypass_label),
+                             GSI_CONTINUE_LINKING);
+       }
+      break;
+
     case GIMPLE_COND:
       replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
       replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
@@ -685,12 +727,28 @@ maybe_record_in_goto_queue (struct leh_s
                                    EXPR_LOCATION (*new_stmt.tp));
       }
       break;
+
     case GIMPLE_GOTO:
       new_stmt.g = stmt;
       record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
                                  gimple_location (stmt));
       break;
 
+    case GIMPLE_ASM:
+      if (int n = gimple_asm_nlabels (as_a <gasm *> (stmt)))
+       {
+         new_stmt.g = stmt;
+         gasm *asm_stmt = as_a <gasm *> (stmt);
+         for (int i = 0; i < n; ++i)
+           {
+             tree elt = gimple_asm_label_op (asm_stmt, i);
+             new_stmt.tp = &TREE_VALUE (elt);
+             record_in_goto_queue_label (tf, new_stmt, TREE_VALUE (elt),
+                                         gimple_location (stmt));
+           }
+       }
+      break;
+
     case GIMPLE_RETURN:
       tf->may_return = true;
       new_stmt.g = stmt;
@@ -2082,6 +2140,7 @@ lower_eh_constructs_2 (struct leh_state
     case GIMPLE_COND:
     case GIMPLE_GOTO:
     case GIMPLE_RETURN:
+    case GIMPLE_ASM:
       maybe_record_in_goto_queue (state, stmt);
       break;
 
--- gcc/testsuite/gcc.dg/torture/pr122835.c.jj  2025-11-25 17:26:19.533199921 +0100
+++ gcc/testsuite/gcc.dg/torture/pr122835.c     2025-11-25 19:11:01.639094677 +0100
@@ -0,0 +1,79 @@
+/* PR middle-end/122835 */
+/* { dg-do run { target i?86-*-* x86_64-*-* aarch64-*-* arm*-*-* powerpc*-*-* s390*-*-* } } */
+
+#if defined(__x86_64__) || defined(__i386__)
+#define JMP "jmp"
+#elif defined(__aarch64__) || defined(__arm__) || defined(__powerpc__)
+#define JMP "b"
+#elif defined(__s390__)
+#define JMP "j"
+#endif
+
+int cnt;
+
+static void
+my_cleanup (int *p)
+{
+  ++cnt;
+}
+
+__attribute__((noipa)) static void
+my_abort (void)
+{
+  __builtin_abort ();
+}
+
+int
+main ()
+{
+  {
+    int x __attribute__((cleanup (my_cleanup))) = 0;
+
+    asm goto (JMP "\t%l0" :::: l1);
+
+    my_abort ();
+  }
+
+l1:
+  if (cnt != 1)
+    __builtin_abort ();
+
+  {
+    int x __attribute__((cleanup (my_cleanup))) = 0;
+
+    {
+      int y __attribute__((cleanup (my_cleanup))) = 0;
+
+      asm goto (JMP "\t%l1" :::: l2, l3);
+
+      my_abort ();
+    }
+l2:
+    __builtin_abort ();
+  }
+l3:
+  if (cnt != 3)
+    __builtin_abort ();
+
+  {
+    int x __attribute__((cleanup (my_cleanup))) = 0;
+
+    {
+      int y __attribute__((cleanup (my_cleanup))) = 0;
+
+      asm goto (JMP "\t%l0" :::: l4, l5);
+
+      my_abort ();
+    }
+l4:
+    if (cnt != 4)
+      __builtin_abort ();
+  }
+  if (0)
+    {
+l5:
+      __builtin_abort ();
+    }
+  if (cnt != 5)
+    __builtin_abort ();
+}



        Jakub
