/* RTL dead store elimination.
- Copyright (C) 2005-2017 Free Software Foundation, Inc.
+ Copyright (C) 2005-2020 Free Software Foundation, Inc.
Contributed by Richard Sandiford <rsandifor@codesourcery.com>
and Kenneth Zadeck <zadeck@naturalbridge.com>
#include "explow.h"
#include "expr.h"
#include "dbgcnt.h"
-#include "params.h"
#include "rtl-iter.h"
#include "cfgcleanup.h"
+#include "calls.h"
/* This file contains three techniques for performing Dead Store
Elimination (dse).
struct insn_info_type;
/* This structure holds information about a candidate store. */
-struct store_info
+class store_info
{
+public:
/* False means this is a clobber. */
bool is_set;
} positions_needed;
/* The next store info for this insn. */
- struct store_info *next;
+ class store_info *next;
/* The right hand side of the store. This is used if there is a
subsequent reload of the mems address somewhere later in the
/* This structure holds information about a load. These are only
built for rtx bases. */
-struct read_info_type
+class read_info_type
{
+public:
/* The id of the mem group of the base address. */
int group_id;
rtx mem;
/* The next read_info for this insn. */
- struct read_info_type *next;
+ class read_info_type *next;
};
-typedef struct read_info_type *read_info_t;
+typedef class read_info_type *read_info_t;
static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");
for (cur = new_insn; cur; cur = NEXT_INSN (cur))
{
info.current = cur;
- note_stores (PATTERN (cur), note_add_store, &info);
+ note_stores (cur, note_add_store, &info);
}
/* If a failure was flagged above, return 1 so that for_each_inc_dec will
if (note)
return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
insn_info) == 0;
+
+ /* Punt on stack pushes; those don't have REG_INC notes, and we are
+ unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
+ {
+ const_rtx x = *iter;
+ if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
+ return false;
+ }
+
return true;
}
if (note)
return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
&insn_info) == 0;
+
+ /* Punt on stack pushes; those don't have REG_INC notes, and we are
+ unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
+ {
+ const_rtx x = *iter;
+ if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
+ return false;
+ }
+
return true;
}
return 0;
}
+ if (known_eq (width, 0))
+ return 0;
+
if (group_id >= 0)
{
/* In the restrictive case where the base is a constant or the
while (ptr)
{
insn_info_t next = ptr->next_local_store;
- struct store_info *s_info = ptr->store_rec;
+ class store_info *s_info = ptr->store_rec;
bool del = true;
/* Skip the clobbers. We delete the active insn if this insn
&& known_subrange_p (offset, width,
s_info->offset, s_info->width)
&& all_positions_needed_p (s_info, offset - s_info->offset,
- width))
+ width)
+ /* We can only remove the later store if the earlier store
+ aliases at least all the accesses that the later one does. */
+ && ((MEM_ALIAS_SET (mem) == MEM_ALIAS_SET (s_info->mem)
+ || alias_set_subset_of (MEM_ALIAS_SET (mem),
+ MEM_ALIAS_SET (s_info->mem)))
+ && (!MEM_EXPR (s_info->mem)
+ || refs_same_for_tbaa_p (MEM_EXPR (s_info->mem),
+ MEM_EXPR (mem)))))
{
if (GET_MODE (mem) == BLKmode)
{
/* Try a wider mode if truncating the store mode to NEW_MODE
requires a real instruction. */
- if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
+ if (maybe_lt (GET_MODE_SIZE (new_mode), GET_MODE_SIZE (store_mode))
&& !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
continue;
else
gap = read_offset - store_info->offset;
- if (maybe_ne (gap, 0))
+ if (gap.is_constant () && maybe_ne (gap, 0))
{
poly_int64 shift = gap * BITS_PER_UNIT;
poly_int64 access_size = GET_MODE_SIZE (read_mode) + gap;
point. This does occasionally happen, see PR 37922. */
bitmap regs_set = BITMAP_ALLOC (®_obstack);
- for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
- note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
+ for (this_insn = insns;
+ this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
+ {
+ if (insn_invalid_p (this_insn, false))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, " -- replacing the loaded MEM with ");
+ print_simple_rtl (dump_file, read_reg);
+ fprintf (dump_file, " led to an invalid instruction\n");
+ }
+ BITMAP_FREE (regs_set);
+ return false;
+ }
+ note_stores (this_insn, look_for_hardregs, regs_set);
+ }
bitmap_and_into (regs_set, regs_live);
if (!bitmap_empty_p (regs_set))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file,
- "abandoning replacement because sequence clobbers live hardregs:");
+ fprintf (dump_file, "abandoning replacement because sequence "
+ "clobbers live hardregs:");
df_print_regset (dump_file, regs_set);
}
BITMAP_FREE (regs_set);
}
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
+ {
+ const_rtx x = *iter;
+ if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " -- replacing the MEM failed due to address "
+ "side-effects\n");
+ return false;
+ }
+ }
+
if (validate_change (read_insn->insn, loc, read_reg, 0))
{
deferred_change *change = deferred_change_pool.allocate ();
insn_info = bb_info->last_insn;
if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
- || (MEM_VOLATILE_P (mem)))
+ || MEM_VOLATILE_P (mem))
{
+ if (crtl->stack_protect_guard
+ && (MEM_EXPR (mem) == crtl->stack_protect_guard
+ || (crtl->stack_protect_guard_decl
+ && MEM_EXPR (mem) == crtl->stack_protect_guard_decl))
+ && MEM_VOLATILE_P (mem))
+ {
+ /* This is either the stack protector canary on the stack,
+ which ought to be written by a MEM_VOLATILE_P store and
+ thus shouldn't be deleted and is read at the very end of
+ the function, but shouldn't conflict with any other store.
+ Or it is the __stack_chk_guard variable or TLS or whatever
+ else MEM holding the canary value, which really should
+ never be modified in -fstack-protector* protected functions,
+ otherwise the prologue store wouldn't match the epilogue
+ check. */
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " stack protector canary read ignored.\n");
+ insn_info->cannot_delete = true;
+ return;
+ }
+
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " adding wild read, volatile or barrier.\n");
add_wild_read (bb_info);
if (!is_int_mode (TYPE_MODE (TREE_VALUE (arg)), &mode))
return false;
- reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
+ function_arg_info arg (mode, /*named=*/true);
+ reg = targetm.calls.function_arg (args_so_far, arg);
if (!reg || !REG_P (reg) || GET_MODE (reg) != mode)
return false;
if (tmp)
args[idx] = tmp;
- targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
+ targetm.calls.function_arg_advance (args_so_far, arg);
}
if (arg != void_list_node || idx != nargs)
return false;
bitmap ret;
ret = ALLOC_REG_SET (NULL);
- bitmap_and (ret, in, fixed_reg_set_regset);
+ bitmap_and (ret, in, bitmap_view<HARD_REG_SET> (fixed_reg_set));
return ret;
}
non-register target. */
static void
-scan_insn (bb_info_t bb_info, rtx_insn *insn)
+scan_insn (bb_info_t bb_info, rtx_insn *insn, int max_active_local_stores)
{
rtx body;
insn_info_type *insn_info = insn_info_type_pool.allocate ();
&& GET_CODE (sym) == SYMBOL_REF
&& SYMBOL_REF_DECL (sym)
&& TREE_CODE (SYMBOL_REF_DECL (sym)) == FUNCTION_DECL
- && DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (sym)) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (sym)) == BUILT_IN_MEMSET)
+ && fndecl_built_in_p (SYMBOL_REF_DECL (sym), BUILT_IN_MEMSET))
memset_call = SYMBOL_REF_DECL (sym);
if (const_call || memset_call)
fprintf (dump_file, "handling memset as BLKmode store\n");
if (mems_found == 1)
{
- if (active_local_stores_len++
- >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
+ if (active_local_stores_len++ >= max_active_local_stores)
{
active_local_stores_len = 1;
active_local_stores = NULL;
clear_rhs_from_active_local_stores ();
}
}
- else if (SIBLING_CALL_P (insn) && reload_completed)
+ else if (SIBLING_CALL_P (insn)
+ && (reload_completed || HARD_FRAME_POINTER_IS_ARG_POINTER))
/* Arguments for a sibling call that are pushed to memory are passed
using the incoming argument pointer of the current function. After
- reload that might be (and likely is) frame pointer based. */
+ reload that might be (and likely is) frame pointer based. And, if
+ it is a frame pointer on the target, even before reload we need to
+ kill frame pointer based stores. */
add_wild_read (bb_info);
else
/* Every other call, including pure functions, may read any memory
it as cannot delete. This simplifies the processing later. */
if (mems_found == 1)
{
- if (active_local_stores_len++
- >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
+ if (active_local_stores_len++ >= max_active_local_stores)
{
active_local_stores_len = 1;
active_local_stores = NULL;
bool del = false;
/* If ANY of the store_infos match the cselib group that is
- being deleted, then the insn can not be deleted. */
+ being deleted, then the insn cannot be deleted. */
while (store_info)
{
if ((store_info->group_id == -1)
bitmap_set_bit (all_blocks, ENTRY_BLOCK);
bitmap_set_bit (all_blocks, EXIT_BLOCK);
+ /* For -O1 reduce the maximum number of active local stores for RTL DSE
+ since this can consume huge amounts of memory (PR89115). */
+ int max_active_local_stores = param_max_dse_active_local_stores;
+ if (optimize < 2)
+ max_active_local_stores /= 10;
+
FOR_ALL_BB_FN (bb, cfun)
{
insn_info_t ptr;
FOR_BB_INSNS (bb, insn)
{
if (INSN_P (insn))
- scan_insn (bb_info, insn);
+ scan_insn (bb_info, insn, max_active_local_stores);
cselib_process_insn (insn);
if (INSN_P (insn))
df_simulate_one_insn_forwards (bb, insn, regs_live);
if ((locally_deleted || globally_deleted)
&& cfun->can_throw_non_call_exceptions
&& purge_all_dead_edges ())
- cleanup_cfg (0);
+ {
+ free_dominance_info (CDI_DOMINATORS);
+ cleanup_cfg (0);
+ }
return 0;
}