From: Jakub Jelinek
Date: Wed, 26 Nov 2025 10:05:42 +0000 (+0100)
Subject: eh: Invoke cleanups/destructors in asm goto jumps [PR122835]
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=f68fe3ddda40d4c238095ff8c76c473b0d1e4827;p=thirdparty%2Fgcc.git

eh: Invoke cleanups/destructors in asm goto jumps [PR122835]

The eh pass lowers try { } finally { } stmts and handles in there e.g.
GIMPLE_GOTOs or GIMPLE_CONDs which jump from within the try block out of
it, by redirecting the jumps to an artificial label with code that
performs the cleanups/destructors and then continues the goto, ultimately
to the original label.

Now, for computed gotos and non-local gotos we document that we don't
invoke destructors (nor cleanups); that is something we really can't
handle, and the same goes for longjmp.  This PR is about asm goto though,
and in that case I don't see why we shouldn't be performing the cleanups.
While the user doesn't specify which particular label will be jumped to,
so it is more like GIMPLE_COND (i.e. conditional goto) rather than an
unconditional GIMPLE_GOTO, even with potentially more possible jump
targets there is still a list of the potential labels, and we can adjust
some or all of them to artificial labels that perform the cleanups and
continue the jump towards the user label; we know from where the jumps go
(the asm goto) and to where (the different LABEL_DECLs).

So, the following patch handles asm goto in the eh pass similarly to
GIMPLE_COND and GIMPLE_GOTO.
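To illustrate at the source level what the new handling achieves, here is a
hand-written sketch (not actual compiler output; do_cleanup, cleanup_then_out
and bypass are made-up names, and the x86 "jmp" mnemonic is used only for
brevity).  An asm goto that leaves a scope with a cleanup variable now behaves
as if its label operand had been redirected to an artificial label that runs
the cleanup first and only then continues to the user label:

extern void do_cleanup (int *);

void
f (void)
{
  {
    int x __attribute__((cleanup (do_cleanup))) = 0;
    asm goto ("jmp\t%l0" :::: out);	/* may leave the scope of x */
  }
 out:;
}

/* ... is conceptually treated as if the user had written: */

void
f_lowered (void)
{
  {
    int x = 0;
    asm goto ("jmp\t%l0" :::: cleanup_then_out);
    goto bypass;		/* fallthrough path skips the stub below */
  cleanup_then_out:
    do_cleanup (&x);		/* run the queued cleanup */
    goto out;			/* then continue to the user label */
  bypass:
    do_cleanup (&x);		/* cleanup on the normal path, as emitted by
				   the usual try/finally lowering */
  }
 out:;
}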
2025-11-26  Jakub Jelinek

	PR middle-end/122835
	* tree-eh.cc (replace_goto_queue_1): Handle GIMPLE_ASM.
	(maybe_record_in_goto_queue): Likewise.
	(lower_eh_constructs_2): Likewise.

	* gcc.dg/torture/pr122835.c: New test.

---

diff --git a/gcc/testsuite/gcc.dg/torture/pr122835.c b/gcc/testsuite/gcc.dg/torture/pr122835.c
new file mode 100644
index 00000000000..03efdfa045c
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr122835.c
@@ -0,0 +1,79 @@
+/* PR middle-end/122835 */
+/* { dg-do run { target i?86-*-* x86_64-*-* aarch64-*-* arm*-*-* powerpc*-*-* s390*-*-* } } */
+
+#if defined(__x86_64__) || defined(__i386__)
+#define JMP "jmp"
+#elif defined(__aarch64__) || defined(__arm__) || defined(__powerpc__)
+#define JMP "b"
+#elif defined(__s390__)
+#define JMP "j"
+#endif
+
+int cnt;
+
+static void
+my_cleanup (int *p)
+{
+  ++cnt;
+}
+
+__attribute__((noipa)) static void
+my_abort (void)
+{
+  __builtin_abort ();
+}
+
+int
+main ()
+{
+  {
+    int x __attribute__((cleanup (my_cleanup))) = 0;
+
+    asm goto (JMP "\t%l0" :::: l1);
+
+    my_abort ();
+  }
+
+l1:
+  if (cnt != 1)
+    __builtin_abort ();
+
+  {
+    int x __attribute__((cleanup (my_cleanup))) = 0;
+
+    {
+      int y __attribute__((cleanup (my_cleanup))) = 0;
+
+      asm goto (JMP "\t%l1" :::: l2, l3);
+
+      my_abort ();
+    }
+l2:
+    __builtin_abort ();
+  }
+l3:
+  if (cnt != 3)
+    __builtin_abort ();
+
+  {
+    int x __attribute__((cleanup (my_cleanup))) = 0;
+
+    {
+      int y __attribute__((cleanup (my_cleanup))) = 0;
+
+      asm goto (JMP "\t%l0" :::: l4, l5);
+
+      my_abort ();
+    }
+l4:
+    if (cnt != 4)
+      __builtin_abort ();
+  }
+  if (0)
+    {
+l5:
+      __builtin_abort ();
+    }
+  if (cnt != 5)
+    __builtin_abort ();
+}
diff --git a/gcc/tree-eh.cc b/gcc/tree-eh.cc
index 5c62e6bcc38..be85444cd51 100644
--- a/gcc/tree-eh.cc
+++ b/gcc/tree-eh.cc
@@ -517,6 +517,48 @@ replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
         }
       break;
 
+    case GIMPLE_ASM:
+      if (int n = gimple_asm_nlabels (as_a <gasm *> (stmt)))
+        {
+          temp.g = stmt;
+          gasm *asm_stmt = as_a <gasm *> (stmt);
+          location_t loc = gimple_location (stmt);
+          tree bypass_label = NULL_TREE;
+          for (int i = 0; i < n; ++i)
+            {
+              tree elt = gimple_asm_label_op (asm_stmt, i);
+              temp.tp = &TREE_VALUE (elt);
+              seq = find_goto_replacement (tf, temp);
+              if (!seq)
+                continue;
+              if (gimple_seq_singleton_p (seq)
+                  && gimple_code (gimple_seq_first_stmt (seq)) == GIMPLE_GOTO)
+                {
+                  TREE_VALUE (elt)
+                    = gimple_goto_dest (gimple_seq_first_stmt (seq));
+                  continue;
+                }
+
+              if (bypass_label == NULL_TREE)
+                {
+                  bypass_label = create_artificial_label (loc);
+                  gsi_insert_after (gsi, gimple_build_goto (bypass_label),
+                                    GSI_CONTINUE_LINKING);
+                }
+
+              tree label = create_artificial_label (loc);
+              TREE_VALUE (elt) = label;
+              gsi_insert_after (gsi, gimple_build_label (label),
+                                GSI_CONTINUE_LINKING);
+              gsi_insert_seq_after (gsi, gimple_seq_copy (seq),
+                                    GSI_CONTINUE_LINKING);
+            }
+          if (bypass_label)
+            gsi_insert_after (gsi, gimple_build_label (bypass_label),
+                              GSI_CONTINUE_LINKING);
+        }
+      break;
+
     case GIMPLE_COND:
       replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
       replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
@@ -685,12 +727,28 @@ maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
                                        EXPR_LOCATION (*new_stmt.tp));
         }
       break;
+
     case GIMPLE_GOTO:
       new_stmt.g = stmt;
       record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
                                   gimple_location (stmt));
       break;
 
+    case GIMPLE_ASM:
+      if (int n = gimple_asm_nlabels (as_a <gasm *> (stmt)))
+        {
+          new_stmt.g = stmt;
+          gasm *asm_stmt = as_a <gasm *> (stmt);
+          for (int i = 0; i < n; ++i)
+            {
+              tree elt = gimple_asm_label_op (asm_stmt, i);
+              new_stmt.tp = &TREE_VALUE (elt);
+              record_in_goto_queue_label (tf, new_stmt, TREE_VALUE (elt),
+                                          gimple_location (stmt));
+            }
+        }
+      break;
+
     case GIMPLE_RETURN:
       tf->may_return = true;
       new_stmt.g = stmt;
@@ -2082,6 +2140,7 @@ lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
     case GIMPLE_COND:
     case GIMPLE_GOTO:
     case GIMPLE_RETURN:
+    case GIMPLE_ASM:
       maybe_record_in_goto_queue (state, stmt);
       break;