From: Andrew Pinski
Date: Tue, 23 Sep 2025 18:37:35 +0000 (-0700)
Subject: fab/forwprop: Move optimize stack restore to forwprop [PR121762]
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=a8326b0c20383b4f10c8a5a3c8c4887fd54d4335;p=thirdparty%2Fgcc.git

fab/forwprop: Move optimize stack restore to forwprop [PR121762]

This moves the removal of redundant stack restores from fab to forwprop.
Some of the redundant stack restores could also be removed before the last
folding, but that is left for a different patch.

Changes since v1:
* v2: Additional comment change.

Bootstrapped and tested on x86_64-linux-gnu.

        PR tree-optimization/121762

gcc/ChangeLog:

        * tree-ssa-ccp.cc (optimize_stack_restore): Move to
        tree-ssa-forwprop.cc.
        (pass_fold_builtins::execute): Don't call optimize_stack_restore.
        * tree-ssa-forwprop.cc (optimize_stack_restore): New function,
        moved from tree-ssa-ccp.cc.  Return bool instead of a value; use
        replace_call_with_value instead of returning integer_zero_node.
        (simplify_builtin_call): Call optimize_stack_restore.

Signed-off-by: Andrew Pinski
---

diff --git a/gcc/tree-ssa-ccp.cc b/gcc/tree-ssa-ccp.cc
index 51e133e860d..739d3be9129 100644
--- a/gcc/tree-ssa-ccp.cc
+++ b/gcc/tree-ssa-ccp.cc
@@ -3085,115 +3085,6 @@ make_pass_ccp (gcc::context *ctxt)
   return new pass_ccp (ctxt);
 }
 
-
-
-/* Try to optimize out __builtin_stack_restore.  Optimize it out
-   if there is another __builtin_stack_restore in the same basic
-   block and no calls or ASM_EXPRs are in between, or if this block's
-   only outgoing edge is to EXIT_BLOCK and there are no calls or
-   ASM_EXPRs after this __builtin_stack_restore.
-   Note restore right before a noreturn function is not needed.
-   And skip some cheap calls that will most likely become an instruction.
-   Restoring the stack before a call is important to be able to keep
-   stack usage down so that call does not run out of stack.  */
-
-
-static tree
-optimize_stack_restore (gimple_stmt_iterator i)
-{
-  tree callee;
-  gimple *stmt;
-
-  basic_block bb = gsi_bb (i);
-  gimple *call = gsi_stmt (i);
-
-  if (gimple_code (call) != GIMPLE_CALL
-      || gimple_call_num_args (call) != 1
-      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
-      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
-    return NULL_TREE;
-
-  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
-    {
-      stmt = gsi_stmt (i);
-      if (is_a <gasm *> (stmt))
-        return NULL_TREE;
-      gcall *call = dyn_cast <gcall *> (stmt);
-      if (!call)
-        continue;
-
-      /* We can remove the restore in front of noreturn
-         calls.  Since the restore will happen either
-         via an unwind/longjmp or not at all.  */
-      if (gimple_call_noreturn_p (call))
-        break;
-
-      /* Internal calls are ok, to bypass
-         check first since fndecl will be null.  */
-      if (gimple_call_internal_p (call))
-        continue;
-
-      callee = gimple_call_fndecl (call);
-      /* Non-builtin calls are not ok.  */
-      if (!callee
-          || !fndecl_built_in_p (callee))
-        return NULL_TREE;
-
-      /* Do not remove stack updates before strub leave.  */
-      if (fndecl_built_in_p (callee, BUILT_IN___STRUB_LEAVE)
-          /* Alloca calls are not ok either.  */
-          || fndecl_builtin_alloc_p (callee))
-        return NULL_TREE;
-
-      if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
-        goto second_stack_restore;
-
-      /* If not a simple or inexpensive builtin, then it is not ok either.  */
-      if (!is_simple_builtin (callee)
-          && !is_inexpensive_builtin (callee))
-        return NULL_TREE;
-    }
-
-  /* Allow one successor of the exit block, or zero successors.  */
-  switch (EDGE_COUNT (bb->succs))
-    {
-    case 0:
-      break;
-    case 1:
-      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
-        return NULL_TREE;
-      break;
-    default:
-      return NULL_TREE;
-    }
- second_stack_restore:
-
-  /* If there's exactly one use, then zap the call to __builtin_stack_save.
-     If there are multiple uses, then the last one should remove the call.
-     In any case, whether the call to __builtin_stack_save can be removed
-     or not is irrelevant to removing the call to __builtin_stack_restore.  */
-  if (has_single_use (gimple_call_arg (call, 0)))
-    {
-      gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
-      if (is_gimple_call (stack_save))
-        {
-          callee = gimple_call_fndecl (stack_save);
-          if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
-            {
-              gimple_stmt_iterator stack_save_gsi;
-              tree rhs;
-
-              stack_save_gsi = gsi_for_stmt (stack_save);
-              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
-              replace_call_with_value (&stack_save_gsi, rhs);
-            }
-        }
-    }
-
-  /* No effect, so the statement will be deleted.  */
-  return integer_zero_node;
-}
-
 /* If va_list type is a simple pointer and nothing special is needed,
    optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
    __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
@@ -4345,13 +4236,6 @@ pass_fold_builtins::execute (function *fun)
 
           switch (DECL_FUNCTION_CODE (callee))
             {
-            case BUILT_IN_STACK_RESTORE:
-              result = optimize_stack_restore (i);
-              if (result)
-                break;
-              gsi_next (&i);
-              continue;
-
             case BUILT_IN_UNREACHABLE:
               if (optimize_unreachable (i))
                 cfg_changed = true;
diff --git a/gcc/tree-ssa-forwprop.cc b/gcc/tree-ssa-forwprop.cc
index 917f3d90f6c..941b01f3d0e 100644
--- a/gcc/tree-ssa-forwprop.cc
+++ b/gcc/tree-ssa-forwprop.cc
@@ -2132,6 +2132,116 @@ simplify_builtin_memcpy_memset (gimple_stmt_iterator *gsi_p, gcall *stmt2)
     }
 }
 
+
+/* Try to optimize out __builtin_stack_restore.  Optimize it out
+   if there is another __builtin_stack_restore in the same basic
+   block and no calls or ASM_EXPRs are in between, or if this block's
+   only outgoing edge is to EXIT_BLOCK and there are no calls or
+   ASM_EXPRs after this __builtin_stack_restore.
+   Note restore right before a noreturn function is not needed.
+   And skip some cheap calls that will most likely become an instruction.
+   Restoring the stack before a call is important to be able to keep
+   stack usage down so that call does not run out of stack.  */
+
+
+static bool
+optimize_stack_restore (gimple_stmt_iterator *gsi, gimple *call)
+{
+  if (!(cfun->curr_properties & PROP_last_full_fold))
+    return false;
+  tree callee;
+  gimple *stmt;
+
+  basic_block bb = gsi_bb (*gsi);
+
+  if (gimple_call_num_args (call) != 1
+      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
+      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
+    return false;
+
+  gimple_stmt_iterator i = *gsi;
+  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
+    {
+      stmt = gsi_stmt (i);
+      if (is_a <gasm *> (stmt))
+        return false;
+      gcall *call = dyn_cast <gcall *> (stmt);
+      if (!call)
+        continue;
+
+      /* We can remove the restore in front of noreturn
+         calls.  Since the restore will happen either
+         via an unwind/longjmp or not at all.  */
+      if (gimple_call_noreturn_p (call))
+        break;
+
+      /* Internal calls are ok, to bypass
+         check first since fndecl will be null.  */
+      if (gimple_call_internal_p (call))
+        continue;
+
+      callee = gimple_call_fndecl (call);
+      /* Non-builtin calls are not ok.  */
+      if (!callee
+          || !fndecl_built_in_p (callee))
+        return false;
+
+      /* Do not remove stack updates before strub leave.  */
+      if (fndecl_built_in_p (callee, BUILT_IN___STRUB_LEAVE)
+          /* Alloca calls are not ok either.  */
+          || fndecl_builtin_alloc_p (callee))
+        return false;
+
+      if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
+        goto second_stack_restore;
+
+      /* If not a simple or inexpensive builtin, then it is not ok either.  */
+      if (!is_simple_builtin (callee)
+          && !is_inexpensive_builtin (callee))
+        return false;
+    }
+
+  /* Allow one successor of the exit block, or zero successors.  */
+  switch (EDGE_COUNT (bb->succs))
+    {
+    case 0:
+      break;
+    case 1:
+      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
+        return false;
+      break;
+    default:
+      return false;
+    }
+ second_stack_restore:
+
+  /* If there's exactly one use, then zap the call to __builtin_stack_save.
+     If there are multiple uses, then the last one should remove the call.
+     In any case, whether the call to __builtin_stack_save can be removed
+     or not is irrelevant to removing the call to __builtin_stack_restore.  */
+  if (has_single_use (gimple_call_arg (call, 0)))
+    {
+      gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
+      if (is_gimple_call (stack_save))
+        {
+          callee = gimple_call_fndecl (stack_save);
+          if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
+            {
+              gimple_stmt_iterator stack_save_gsi;
+              tree rhs;
+
+              stack_save_gsi = gsi_for_stmt (stack_save);
+              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
+              replace_call_with_value (&stack_save_gsi, rhs);
+            }
+        }
+    }
+
+  /* No effect, so the statement will be deleted.  */
+  replace_call_with_value (gsi, NULL_TREE);
+  return true;
+}
+
 /* *GSI_P is a GIMPLE_CALL to a builtin function.
    Optimize
    memcpy (p, "abcd", 4);
@@ -2163,6 +2273,8 @@ simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2, bool full_walk
 
   switch (DECL_FUNCTION_CODE (callee2))
     {
+    case BUILT_IN_STACK_RESTORE:
+      return optimize_stack_restore (gsi_p, as_a <gcall *> (stmt2));
     case BUILT_IN_MEMCMP:
     case BUILT_IN_MEMCMP_EQ:
       return simplify_builtin_memcmp (gsi_p, as_a <gcall *> (stmt2));
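
For illustration, a minimal sketch (not part of the patch; the function names
foo and bar are made up) of the kind of input whose trailing
__builtin_stack_restore the last forwprop pass can now remove.  Gimplification
brackets a VLA scope with __builtin_stack_save/__builtin_stack_restore, and
when nothing but the function exit follows the restore it is redundant:

    void bar (char);

    void foo (int n)
    {
      {
        char a[n];   /* VLA scope: __builtin_stack_save is emitted on entry
                        and __builtin_stack_restore on exit.  */
        bar (a[0]);
      }
      /* No calls or asm statements follow the restore and the block's only
         successor is the exit block, so the restore (and, when the saved
         pointer has no other uses, the matching __builtin_stack_save) can
         be removed.  */
    }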