return new pass_ccp (ctxt);
}
-
-
-/* Try to optimize out __builtin_stack_restore. Optimize it out
- if there is another __builtin_stack_restore in the same basic
- block and no calls or ASM_EXPRs are in between, or if this block's
- only outgoing edge is to EXIT_BLOCK and there are no calls or
- ASM_EXPRs after this __builtin_stack_restore.
- Note restore right before a noreturn function is not needed.
- And skip some cheap calls that will most likely become an instruction.
- Restoring the stack before a call is important to be able to keep
- stack usage down so that call does not run out of stack. */
-
-
-static tree
-optimize_stack_restore (gimple_stmt_iterator i)
-{
- tree callee;
- gimple *stmt;
-
- basic_block bb = gsi_bb (i);
- gimple *call = gsi_stmt (i);
-
- if (gimple_code (call) != GIMPLE_CALL
- || gimple_call_num_args (call) != 1
- || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
- || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
- return NULL_TREE;
-
- for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
- {
- stmt = gsi_stmt (i);
- if (is_a<gasm*> (stmt))
- return NULL_TREE;
- gcall *call = dyn_cast<gcall*>(stmt);
- if (!call)
- continue;
-
- /* We can remove the restore in front of noreturn
- calls. Since the restore will happen either
- via an unwind/longjmp or not at all. */
- if (gimple_call_noreturn_p (call))
- break;
-
- /* Internal calls are ok, to bypass
- check first since fndecl will be null. */
- if (gimple_call_internal_p (call))
- continue;
-
- callee = gimple_call_fndecl (call);
- /* Non-builtin calls are not ok. */
- if (!callee
- || !fndecl_built_in_p (callee))
- return NULL_TREE;
-
- /* Do not remove stack updates before strub leave. */
- if (fndecl_built_in_p (callee, BUILT_IN___STRUB_LEAVE)
- /* Alloca calls are not ok either. */
- || fndecl_builtin_alloc_p (callee))
- return NULL_TREE;
-
- if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
- goto second_stack_restore;
-
- /* If not a simple or inexpensive builtin, then it is not ok either. */
- if (!is_simple_builtin (callee)
- && !is_inexpensive_builtin (callee))
- return NULL_TREE;
- }
-
- /* Allow one successor of the exit block, or zero successors. */
- switch (EDGE_COUNT (bb->succs))
- {
- case 0:
- break;
- case 1:
- if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
- return NULL_TREE;
- break;
- default:
- return NULL_TREE;
- }
- second_stack_restore:
-
- /* If there's exactly one use, then zap the call to __builtin_stack_save.
- If there are multiple uses, then the last one should remove the call.
- In any case, whether the call to __builtin_stack_save can be removed
- or not is irrelevant to removing the call to __builtin_stack_restore. */
- if (has_single_use (gimple_call_arg (call, 0)))
- {
- gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
- if (is_gimple_call (stack_save))
- {
- callee = gimple_call_fndecl (stack_save);
- if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
- {
- gimple_stmt_iterator stack_save_gsi;
- tree rhs;
-
- stack_save_gsi = gsi_for_stmt (stack_save);
- rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
- replace_call_with_value (&stack_save_gsi, rhs);
- }
- }
- }
-
- /* No effect, so the statement will be deleted. */
- return integer_zero_node;
-}
-
/* If va_list type is a simple pointer and nothing special is needed,
optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
__builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
switch (DECL_FUNCTION_CODE (callee))
{
- case BUILT_IN_STACK_RESTORE:
- result = optimize_stack_restore (i);
- if (result)
- break;
- gsi_next (&i);
- continue;
-
case BUILT_IN_UNREACHABLE:
if (optimize_unreachable (i))
cfg_changed = true;
}
}
+
+/* Try to optimize out the __builtin_stack_restore call CALL at *GSI.
+ It can be removed if there is another __builtin_stack_restore in the
+ same basic block and no calls or ASM_EXPRs are in between, or if this
+ block's only outgoing edge is to EXIT_BLOCK and there are no calls or
+ ASM_EXPRs after this __builtin_stack_restore.
+ Note that a restore right before a noreturn call is not needed either,
+ and cheap calls that will most likely expand to a single instruction
+ are skipped.
+ Restoring the stack before other calls is important to keep stack
+ usage down so that the call does not run out of stack.
+ Return true if the restore was removed, false otherwise. */
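+
+/* A sketch of the first case, in illustrative made-up GIMPLE (names
+ are hypothetical, not from a real dump), e.g. for two nested
+ variable-length-array scopes:
+
+ saved_1 = __builtin_stack_save ();
+ ...
+ saved_2 = __builtin_stack_save ();
+ ...
+ __builtin_stack_restore (saved_2); <- redundant, removed here
+ __builtin_stack_restore (saved_1); <- kept; restoring to SAVED_1
+ also undoes SAVED_2. */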
+
+
+static bool
+optimize_stack_restore (gimple_stmt_iterator *gsi, gimple *call)
+{
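+ /* Only try this during the last full fold of the function
+ (PROP_last_full_fold); otherwise leave the restore alone. */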
+ if (!(cfun->curr_properties & PROP_last_full_fold))
+ return false;
+ tree callee;
+ gimple *stmt;
+
+ basic_block bb = gsi_bb (*gsi);
+
+ if (gimple_call_num_args (call) != 1
+ || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
+ || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
+ return false;
+
+ gimple_stmt_iterator i = *gsi;
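+ /* Scan the rest of the block for a reason the restore must stay,
+ or for a second restore that makes this one redundant. */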
+ for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
+ {
+ stmt = gsi_stmt (i);
+ if (is_a<gasm*> (stmt))
+ return false;
+ gcall *call = dyn_cast<gcall*>(stmt);
+ if (!call)
+ continue;
+
+ /* We can remove the restore in front of noreturn calls,
+ since the restore will happen either via an
+ unwind/longjmp or not at all. */
+ if (gimple_call_noreturn_p (call))
+ break;
+
+ /* Internal calls are ok; check for them before the fndecl
+ checks below, since fndecl will be null for them. */
+ if (gimple_call_internal_p (call))
+ continue;
+
+ callee = gimple_call_fndecl (call);
+ /* Non-builtin calls are not ok. */
+ if (!callee
+ || !fndecl_built_in_p (callee))
+ return false;
+
+ /* Do not remove stack updates before strub leave. */
+ if (fndecl_built_in_p (callee, BUILT_IN___STRUB_LEAVE)
+ /* Alloca calls are not ok either. */
+ || fndecl_builtin_alloc_p (callee))
+ return false;
+
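+ /* A second stack restore in the same block, with nothing
+ problematic in between, makes this restore redundant. */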
+ if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
+ goto second_stack_restore;
+
+ /* If not a simple or inexpensive builtin, then it is not ok either. */
+ if (!is_simple_builtin (callee)
+ && !is_inexpensive_builtin (callee))
+ return false;
+ }
+
+ /* Allow a single successor that is the exit block, or no
+ successors at all. */
+ switch (EDGE_COUNT (bb->succs))
+ {
+ case 0:
+ break;
+ case 1:
+ if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
+ return false;
+ break;
+ default:
+ return false;
+ }
+ second_stack_restore:
+
+ /* If there's exactly one use, then zap the call to __builtin_stack_save.
+ If there are multiple uses, then the last one should remove the call.
+ In any case, whether the call to __builtin_stack_save can be removed
+ or not is irrelevant to removing the call to __builtin_stack_restore. */
+ if (has_single_use (gimple_call_arg (call, 0)))
+ {
+ gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
+ if (is_gimple_call (stack_save))
+ {
+ callee = gimple_call_fndecl (stack_save);
+ if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
+ {
+ gimple_stmt_iterator stack_save_gsi;
+ tree rhs;
+
+ stack_save_gsi = gsi_for_stmt (stack_save);
+ rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
+ replace_call_with_value (&stack_save_gsi, rhs);
+ }
+ }
+ }
+
+ /* The restore has no effect here; replace the call with a nop
+ so it gets removed. */
+ replace_call_with_value (gsi, NULL_TREE);
+ return true;
+}
+
/* *GSI_P is a GIMPLE_CALL to a builtin function.
Optimize
memcpy (p, "abcd", 4);
switch (DECL_FUNCTION_CODE (callee2))
{
+ case BUILT_IN_STACK_RESTORE:
+ return optimize_stack_restore (gsi_p, as_a<gcall*>(stmt2));
case BUILT_IN_MEMCMP:
case BUILT_IN_MEMCMP_EQ:
return simplify_builtin_memcmp (gsi_p, as_a<gcall*>(stmt2));