+2014-08-22 David Malcolm <dmalcolm@redhat.com>
+
+ * postreload-gcse.c (struct occr): Strengthen field "insn" from
+ rtx to rtx_insn *.
+ (struct unoccr): Likewise.
+ (struct modifies_mem): Likewise.
+ (alloc_mem): Likewise for local "insn".
+ (insert_expr_in_table): Likewise for param "insn".
+ (dump_expr_hash_table_entry): Likewise for local "insn".
+ (oprs_unchanged_p): Likewise for param "insn".
+ (load_killed_in_block_p): Likewise for local "setter".
+ (record_last_reg_set_info): Likewise for param "insn".
+ (record_last_reg_set_info_regno): Likewise.
+ (record_last_mem_set_info): Likewise.
+ (record_last_set_info): Likewise for local "last_set_insn".
+ (record_opr_changes): Likewise for param "insn".
+ (hash_scan_set): Likewise.
+ (compute_hash_table): Likewise for local "insn".
+ (get_avail_load_store_reg): Likewise for param "insn".
+ (eliminate_partially_redundant_load): Likewise, also for locals
+ "avail_insn", "next_pred_bb_end". Replace use of NULL_RTX with
+ NULL for insns.
+ (eliminate_partially_redundant_loads): Likewise for local "insn".
+
2014-08-22 David Malcolm <dmalcolm@redhat.com>
* optabs.c (expand_doubleword_shift): Strengthen local "insn" from
/* Next occurrence of this expression. */
struct occr *next;
/* The insn that computes the expression. */
- rtx insn;
+ rtx_insn *insn;
/* Nonzero if this [anticipatable] occurrence has been deleted. */
char deleted_p;
};
{
struct unoccr *next;
edge pred;
- rtx insn;
+ rtx_insn *insn;
};
static struct obstack unoccr_obstack;
/* A list of insns that may modify memory within the current basic block. */
struct modifies_mem
{
- rtx insn;
+ rtx_insn *insn;
struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;
static void free_mem (void);
/* Support for hash table construction and transformations. */
-static bool oprs_unchanged_p (rtx, rtx, bool);
-static void record_last_reg_set_info (rtx, rtx);
-static void record_last_reg_set_info_regno (rtx, int);
-static void record_last_mem_set_info (rtx);
+static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
+static void record_last_reg_set_info (rtx_insn *, rtx);
+static void record_last_reg_set_info_regno (rtx_insn *, int);
+static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
-static void record_opr_changes (rtx);
+static void record_opr_changes (rtx_insn *);
static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
/* Hash table support. */
static hashval_t hash_expr (rtx, int *);
-static void insert_expr_in_table (rtx, rtx);
+static void insert_expr_in_table (rtx, rtx_insn *);
static struct expr *lookup_expr_in_table (rtx);
static void dump_hash_table (FILE *);
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);
-static rtx get_avail_load_store_reg (rtx);
+static rtx get_avail_load_store_reg (rtx_insn *);
static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *);
-static void hash_scan_set (rtx);
+static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);
/* The work horses of this pass. */
static void eliminate_partially_redundant_load (basic_block,
- rtx,
+ rtx_insn *,
struct expr *);
static void eliminate_partially_redundant_loads (void);
\f
{
int i;
basic_block bb;
- rtx insn;
+ rtx_insn *insn;
/* Find the largest UID and create a mapping from UIDs to CUIDs. */
uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
basic block. */
static void
-insert_expr_in_table (rtx x, rtx insn)
+insert_expr_in_table (rtx x, rtx_insn *insn)
{
int do_not_record_p;
hashval_t hash;
occr = exprs->avail_occr;
while (occr)
{
- rtx insn = occr->insn;
+ rtx_insn *insn = occr->insn;
print_rtl_single (file, insn);
fprintf (file, "\n");
occr = occr->next;
2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */
static bool
-oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
+oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
{
int i, j;
enum rtx_code code;
while (list_entry)
{
- rtx setter = list_entry->insn;
+ rtx_insn *setter = list_entry->insn;
/* Ignore entries in the list that do not apply. */
if ((after_insn
/* Record register first/last/block set information for REGNO in INSN. */
static inline void
-record_last_reg_set_info (rtx insn, rtx reg)
+record_last_reg_set_info (rtx_insn *insn, rtx reg)
{
unsigned int regno, end_regno;
}
static inline void
-record_last_reg_set_info_regno (rtx insn, int regno)
+record_last_reg_set_info_regno (rtx_insn *insn, int regno)
{
reg_avail_info[regno] = INSN_CUID (insn);
}
a CALL_INSN). We merely need to record which insns modify memory. */
static void
-record_last_mem_set_info (rtx insn)
+record_last_mem_set_info (rtx_insn *insn)
{
struct modifies_mem *list_entry;
static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
- rtx last_set_insn = (rtx) data;
+ rtx_insn *last_set_insn = (rtx_insn *) data;
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
This data is used by oprs_unchanged_p. */
static void
-record_opr_changes (rtx insn)
+record_opr_changes (rtx_insn *insn)
{
rtx note;
After reload we are interested in loads/stores only. */
static void
-hash_scan_set (rtx insn)
+hash_scan_set (rtx_insn *insn)
{
rtx pat = PATTERN (insn);
rtx src = SET_SRC (pat);
FOR_EACH_BB_FN (bb, cfun)
{
- rtx insn;
+ rtx_insn *insn;
/* First pass over the instructions records information used to
determine when registers and memory are last set.
/* Return the loaded/stored register of a load/store instruction. */
static rtx
-get_avail_load_store_reg (rtx insn)
+get_avail_load_store_reg (rtx_insn *insn)
{
if (REG_P (SET_DEST (PATTERN (insn))))
/* A load. */
a redundancy is also worth doing, assuming it is possible. */
static void
-eliminate_partially_redundant_load (basic_block bb, rtx insn,
+eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
struct expr *expr)
{
edge pred;
- rtx avail_insn = NULL_RTX;
+ rtx_insn *avail_insn = NULL;
rtx avail_reg;
rtx dest, pat;
struct occr *a_occr;
/* Check potential for replacing load with copy for predecessors. */
FOR_EACH_EDGE (pred, ei, bb->preds)
{
- rtx next_pred_bb_end;
+ rtx_insn *next_pred_bb_end;
- avail_insn = NULL_RTX;
+ avail_insn = NULL;
avail_reg = NULL_RTX;
pred_bb = pred->src;
next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
not_ok_count += pred->count;
unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
sizeof (struct unoccr));
- unoccr->insn = NULL_RTX;
+ unoccr->insn = NULL;
unoccr->pred = pred;
unoccr->next = unavail_occrs;
unavail_occrs = unoccr;
static void
eliminate_partially_redundant_loads (void)
{
- rtx insn;
+ rtx_insn *insn;
basic_block bb;
/* Note we start at block 1. */