+2014-06-15 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * df.h (DF_INSN_INFO_MWS, FOR_EACH_INSN_INFO_DEF): New macros.
+ (FOR_EACH_INSN_INFO_USE, FOR_EACH_INSN_INFO_EQ_USE): Likewise.
+ (FOR_EACH_INSN_DEF, FOR_EACH_INSN_USE, FOR_EACH_INSN_EQ_USE): Likewise.
+ * auto-inc-dec.c (find_inc, merge_in_block): Use them.
+ * combine.c (create_log_links): Likewise.
+ * compare-elim.c (find_flags_uses_in_insn): Likewise.
+ (try_eliminate_compare): Likewise.
+ * cprop.c (make_set_regs_unavailable, mark_oprs_set): Likewise.
+ * dce.c (deletable_insn_p, find_call_stack_args): Likewise.
+ (remove_reg_equal_equiv_notes_for_defs): Likewise.
+ (reset_unmarked_insns_debug_uses, mark_reg_dependencies): Likewise.
+ (word_dce_process_block, dce_process_block): Likewise.
+ * ddg.c (def_has_ccmode_p): Likewise.
+ * df-core.c (df_bb_regno_first_def_find): Likewise.
+ (df_bb_regno_last_def_find, df_find_def, df_find_use): Likewise.
+ * df-problems.c (df_rd_simulate_one_insn): Likewise.
+ (df_lr_bb_local_compute, df_live_bb_local_compute): Likewise.
+ (df_chain_remove_problem, df_chain_insn_top_dump): Likewise.
+ (df_chain_insn_bottom_dump, df_word_lr_bb_local_compute): Likewise.
+ (df_word_lr_simulate_defs, df_word_lr_simulate_uses): Likewise.
+ (df_remove_dead_eq_notes, df_note_bb_compute): Likewise.
+ (df_simulate_find_defs, df_simulate_find_uses): Likewise.
+ (df_simulate_find_noclobber_defs, df_simulate_defs): Likewise.
+ (df_simulate_uses, df_md_simulate_one_insn): Likewise.
+ * df-scan.c (df_reorganize_refs_by_reg_by_insn): Likewise.
+ * fwprop.c (local_ref_killed_between_p): Likewise.
+ (all_uses_available_at, free_load_extend): Likewise.
+ * gcse.c (update_bb_reg_pressure, calculate_bb_reg_pressure): Likewise.
+ * hw-doloop.c (scan_loop): Likewise.
+ * ifcvt.c (dead_or_predicable): Likewise.
+ * init-regs.c (initialize_uninitialized_regs): Likewise.
+ * ira-lives.c (mark_hard_reg_early_clobbers): Likewise.
+ (process_bb_node_lives): Likewise.
+ * ira.c (compute_regs_asm_clobbered, build_insn_chain): Likewise.
+ (find_moveable_pseudos): Likewise.
+ * loop-invariant.c (check_dependencies, record_uses): Likewise.
+ * recog.c (peep2_find_free_register): Likewise.
+ * ree.c (get_defs): Likewise.
+ * regstat.c (regstat_bb_compute_ri): Likewise.
+ (regstat_bb_compute_calls_crossed): Likewise.
+ * sched-deps.c (find_inc, find_mem): Likewise.
+	* sel-sched-ir.c (maybe_downgrade_id_to_use, setup_id_reg_sets): Likewise.
+ * shrink-wrap.c (requires_stack_frame_p): Likewise.
+ (prepare_shrink_wrap): Likewise.
+ * store-motion.c (compute_store_table, build_store_vectors): Likewise.
+ * web.c (union_defs, pass_web::execute): Likewise.
+ * config/i386/i386.c (increase_distance, insn_defines_reg): Likewise.
+ (insn_uses_reg_mem, ix86_ok_to_clobber_flags): Likewise.
+
2014-06-13 Vladimir Makarov <vmakarov@redhat.com>
* lra-assign.c (assign_by_spills): Add code to assign vector regs
rtx insn;
basic_block bb = BLOCK_FOR_INSN (mem_insn.insn);
rtx other_insn;
- df_ref *def_rec;
+ df_ref def;
/* Make sure this reg appears only once in this insn. */
if (count_occurrences (PATTERN (mem_insn.insn), mem_insn.reg0, 1) != 1)
/* Need to assure that none of the operands of the inc instruction are
assigned to by the mem insn. */
- for (def_rec = DF_INSN_DEFS (mem_insn.insn); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, mem_insn.insn)
{
- df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if ((regno == REGNO (inc_insn.reg0))
|| (regno == REGNO (inc_insn.reg_res)))
FOR_BB_INSNS_REVERSE_SAFE (bb, insn, curr)
{
- unsigned int uid = INSN_UID (insn);
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
bool insn_is_add_or_inc = true;
if (!NONDEBUG_INSN_P (insn))
/* If the inc insn was merged with a mem, the inc insn is gone
and there is noting to update. */
- if (DF_INSN_UID_GET (uid))
+ if (insn_info)
{
- df_ref *def_rec;
- df_ref *use_rec;
+ df_ref def, use;
+
/* Need to update next use. */
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
reg_next_use[DF_REF_REGNO (def)] = NULL;
reg_next_inc_use[DF_REF_REGNO (def)] = NULL;
reg_next_def[DF_REF_REGNO (def)] = insn;
}
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *use_rec;
reg_next_use[DF_REF_REGNO (use)] = insn;
if (insn_is_add_or_inc)
reg_next_inc_use[DF_REF_REGNO (use)] = insn;
{
basic_block bb;
rtx *next_use, insn;
- df_ref *def_vec, *use_vec;
+ df_ref def, use;
next_use = XCNEWVEC (rtx, max_reg_num ());
/* Log links are created only once. */
gcc_assert (!LOG_LINKS (insn));
- for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_vec;
int regno = DF_REF_REGNO (def);
rtx use_insn;
next_use[regno] = NULL_RTX;
}
- for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
+ FOR_EACH_INSN_USE (use, insn)
{
- df_ref use = *use_vec;
int regno = DF_REF_REGNO (use);
/* Do not consider the usage of the stack pointer
static void
find_flags_uses_in_insn (struct comparison *cmp, rtx insn)
{
- df_ref *use_rec, use;
+ df_ref use;
/* If we've already lost track of uses, don't bother collecting more. */
if (cmp->missing_uses)
return;
/* Find a USE of the flags register. */
- for (use_rec = DF_INSN_USES (insn); (use = *use_rec) != NULL; use_rec++)
+ FOR_EACH_INSN_USE (use, insn)
if (DF_REF_REGNO (use) == targetm.flags_regnum)
{
rtx x, *loc;
| DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
| DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
| DF_REF_PRE_POST_MODIFY);
- df_ref *def_rec, def;
+ df_ref def;
/* Note that the BB_HEAD is always either a note or a label, but in
any case it means that IN_A is defined outside the block. */
continue;
/* Find a possible def of IN_A in INSN. */
- for (def_rec = DF_INSN_DEFS (insn); (def = *def_rec) != NULL; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_REGNO (def) == REGNO (in_a))
break;
static unsigned int
increase_distance (rtx prev, rtx next, unsigned int distance)
{
- df_ref *use_rec;
- df_ref *def_rec;
+ df_ref def, use;
if (!prev || !next)
return distance + (distance & 1) + 2;
if (!DF_INSN_USES (next) || !DF_INSN_DEFS (prev))
return distance + 1;
- for (use_rec = DF_INSN_USES (next); *use_rec; use_rec++)
- for (def_rec = DF_INSN_DEFS (prev); *def_rec; def_rec++)
- if (!DF_REF_IS_ARTIFICIAL (*def_rec)
- && DF_REF_REGNO (*use_rec) == DF_REF_REGNO (*def_rec))
+ FOR_EACH_INSN_USE (use, next)
+ FOR_EACH_INSN_DEF (def, prev)
+ if (!DF_REF_IS_ARTIFICIAL (def)
+ && DF_REF_REGNO (use) == DF_REF_REGNO (def))
return distance + (distance & 1) + 2;
return distance + 1;
insn_defines_reg (unsigned int regno1, unsigned int regno2,
rtx insn)
{
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (DF_REF_REG_DEF_P (*def_rec)
- && !DF_REF_IS_ARTIFICIAL (*def_rec)
- && (regno1 == DF_REF_REGNO (*def_rec)
- || regno2 == DF_REF_REGNO (*def_rec)))
- {
- return true;
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_REG_DEF_P (def)
+ && !DF_REF_IS_ARTIFICIAL (def)
+ && (regno1 == DF_REF_REGNO (def)
+ || regno2 == DF_REF_REGNO (def)))
+ return true;
return false;
}
static bool
insn_uses_reg_mem (unsigned int regno, rtx insn)
{
- df_ref *use_rec;
+ df_ref use;
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
- if (DF_REF_REG_MEM_P (*use_rec) && regno == DF_REF_REGNO (*use_rec))
+ FOR_EACH_INSN_USE (use, insn)
+ if (DF_REF_REG_MEM_P (use) && regno == DF_REF_REGNO (use))
return true;
return false;
ix86_ok_to_clobber_flags (rtx insn)
{
basic_block bb = BLOCK_FOR_INSN (insn);
- df_ref *use;
+ df_ref use;
bitmap live;
while (insn)
{
if (NONDEBUG_INSN_P (insn))
{
- for (use = DF_INSN_USES (insn); *use; use++)
- if (DF_REF_REG_USE_P (*use) && DF_REF_REGNO (*use) == FLAGS_REG)
+ FOR_EACH_INSN_USE (use, insn)
+ if (DF_REF_REG_USE_P (use) && DF_REF_REGNO (use) == FLAGS_REG)
return false;
if (insn_defines_reg (FLAGS_REG, INVALID_REGNUM, insn))
static void
make_set_regs_unavailable (rtx insn)
{
- struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
- SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
+ FOR_EACH_INSN_DEF (def, insn)
+ SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def));
}
/* Top level function to create an assignment hash table.
static void
mark_oprs_set (rtx insn)
{
- struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
- SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
+ FOR_EACH_INSN_DEF (def, insn)
+ SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def));
}
\f
/* Compute copy/constant propagation working variables. */
{
rtx body, x;
int i;
+ df_ref def;
if (CALL_P (insn)
/* We cannot delete calls inside of the recursive dce because
return false;
/* If INSN sets a global_reg, leave it untouched. */
- for (df_ref *def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (HARD_REGISTER_NUM_P (DF_REF_REGNO (*def_rec))
- && global_regs[DF_REF_REGNO (*def_rec)])
+ FOR_EACH_INSN_DEF (def, insn)
+ if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
+ && global_regs[DF_REF_REGNO (def)])
return false;
body = PATTERN (insn);
sp + offset. */
if (!fast)
{
- df_ref *use_rec;
+ df_ref use;
struct df_link *defs;
rtx set;
- for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
- if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
+ FOR_EACH_INSN_USE (use, call_insn)
+ if (rtx_equal_p (addr, DF_REF_REG (use)))
break;
- if (*use_rec == NULL)
+ if (use == NULL)
return false;
- for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
+ for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
if (! DF_REF_IS_ARTIFICIAL (defs->ref))
break;
}
if (addr != stack_pointer_rtx)
{
- df_ref *use_rec;
+ df_ref use;
struct df_link *defs;
rtx set;
- for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
- if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
+ FOR_EACH_INSN_USE (use, call_insn)
+ if (rtx_equal_p (addr, DF_REF_REG (use)))
break;
- for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
+ for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
if (! DF_REF_IS_ARTIFICIAL (defs->ref))
break;
break;
if (!fast)
{
- df_ref *use_rec;
+ df_ref use;
struct df_link *defs;
rtx set;
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
- if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
+ FOR_EACH_INSN_USE (use, insn)
+ if (rtx_equal_p (addr, DF_REF_REG (use)))
break;
- if (*use_rec == NULL)
+ if (use == NULL)
break;
- for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
+ for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
if (! DF_REF_IS_ARTIFICIAL (defs->ref))
break;
static void
remove_reg_equal_equiv_notes_for_defs (rtx insn)
{
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
+ FOR_EACH_INSN_DEF (def, insn)
+ remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}
/* Scan all BBs for debug insns and reset those that reference values
FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
if (DEBUG_INSN_P (insn))
{
- df_ref *use_rec;
+ df_ref use;
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
+ FOR_EACH_INSN_USE (use, insn)
{
- df_ref use = *use_rec;
struct df_link *defs;
for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
{
mark_reg_dependencies (rtx insn)
{
struct df_link *defs;
- df_ref *use_rec;
+ df_ref use;
if (DEBUG_INSN_P (insn))
return;
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
+ FOR_EACH_INSN_USE (use, insn)
{
- df_ref use = *use_rec;
if (dump_file)
{
fprintf (dump_file, "Processing use of ");
FOR_BB_INSNS_REVERSE (bb, insn)
if (DEBUG_INSN_P (insn))
{
- df_ref *use_rec;
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
- if (DF_REF_REGNO (*use_rec) >= FIRST_PSEUDO_REGISTER
- && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (*use_rec)))
+ df_ref use;
+ FOR_EACH_INSN_USE (use, insn)
+ if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
+ && (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use)))
== 2 * UNITS_PER_WORD)
- && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec))
- && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec) + 1))
- dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
+ && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
+ && !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
+ dead_debug_add (&debug, use, DF_REF_REGNO (use));
}
else if (INSN_P (insn))
{
death. */
if (debug.used && !bitmap_empty_p (debug.used))
{
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
+ FOR_EACH_INSN_DEF (def, insn)
+ dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
marked_insn_p (insn)
&& !control_flow_insn_p (insn)
? DEBUG_TEMP_AFTER_WITH_REG_FORCE
bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
- df_ref *def_rec;
+ df_ref def;
struct dead_debug_local debug;
if (redo_out)
FOR_BB_INSNS_REVERSE (bb, insn)
if (DEBUG_INSN_P (insn))
{
- df_ref *use_rec;
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
- if (!bitmap_bit_p (local_live, DF_REF_REGNO (*use_rec))
- && !bitmap_bit_p (au, DF_REF_REGNO (*use_rec)))
- dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
+ df_ref use;
+ FOR_EACH_INSN_USE (use, insn)
+ if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
+ && !bitmap_bit_p (au, DF_REF_REGNO (use)))
+ dead_debug_add (&debug, use, DF_REF_REGNO (use));
}
else if (INSN_P (insn))
{
/* The insn is needed if there is someone who uses the output. */
if (!needed)
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
- || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
+ FOR_EACH_INSN_DEF (def, insn)
+ if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
+ || bitmap_bit_p (au, DF_REF_REGNO (def)))
{
needed = true;
mark_insn (insn, true);
was marked, in case the debug use was after the point of
death. */
if (debug.used && !bitmap_empty_p (debug.used))
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
+ FOR_EACH_INSN_DEF (def, insn)
+ dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
needed && !control_flow_insn_p (insn)
? DEBUG_TEMP_AFTER_WITH_REG_FORCE
: DEBUG_TEMP_BEFORE_WITH_VALUE);
static bool
def_has_ccmode_p (rtx insn)
{
- df_ref *def;
+ df_ref def;
- for (def = DF_INSN_DEFS (insn); *def; def++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- enum machine_mode mode = GET_MODE (DF_REF_REG (*def));
+ enum machine_mode mode = GET_MODE (DF_REF_REG (def));
if (GET_MODE_CLASS (mode) == MODE_CC)
return true;
df_bb_regno_first_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
- df_ref *def_rec;
- unsigned int uid;
+ df_ref def;
FOR_BB_INSNS (bb, insn)
{
if (!INSN_P (insn))
continue;
- uid = INSN_UID (insn);
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_REGNO (def) == regno)
- return def;
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_REGNO (def) == regno)
+ return def;
}
return NULL;
}
df_bb_regno_last_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
- df_ref *def_rec;
- unsigned int uid;
+ df_ref def;
FOR_BB_INSNS_REVERSE (bb, insn)
{
if (!INSN_P (insn))
continue;
- uid = INSN_UID (insn);
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_REGNO (def) == regno)
- return def;
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_REGNO (def) == regno)
+ return def;
}
return NULL;
df_ref
df_find_def (rtx insn, rtx reg)
{
- unsigned int uid;
- df_ref *def_rec;
+ df_ref def;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);
gcc_assert (REG_P (reg));
- uid = INSN_UID (insn);
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_REGNO (def) == REGNO (reg))
- return def;
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_REGNO (def) == REGNO (reg))
+ return def;
return NULL;
}
df_ref
df_find_use (rtx insn, rtx reg)
{
- unsigned int uid;
- df_ref *use_rec;
+ df_ref use;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);
gcc_assert (REG_P (reg));
- uid = INSN_UID (insn);
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (DF_REF_REGNO (use) == REGNO (reg))
+ return use;
+ if (df->changeable_flags & DF_EQ_NOTES)
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (DF_REF_REGNO (use) == REGNO (reg))
return use;
- }
- if (df->changeable_flags & DF_EQ_NOTES)
- for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) == REGNO (reg))
- return use;
- }
return NULL;
}
df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
bitmap local_rd)
{
- unsigned uid = INSN_UID (insn);
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ df_ref def, use;
/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
-
if (!NONDEBUG_INSN_P (insn))
continue;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (&bb_info->def, dregno);
- bitmap_clear_bit (&bb_info->use, dregno);
- }
- }
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (&bb_info->def, dregno);
+ bitmap_clear_bit (&bb_info->use, dregno);
+ }
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- /* Add use to set of uses in this BB. */
- bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
- }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ /* Add use to set of uses in this BB. */
+ bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
}
/* Process the registers set in an exception handler or the hard
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
- df_ref *def_rec;
+ df_ref def, *def_rec;
int luid = 0;
FOR_BB_INSNS (bb, insn)
continue;
luid++;
- for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if (DF_REF_FLAGS_IS_SET (def,
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ df_ref def, use;
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (df_chain_problem_p (DF_DU_CHAIN))
DF_REF_CHAIN (*use_rec) = NULL;
FOR_BB_INSNS (bb, insn)
- {
- unsigned int uid = INSN_UID (insn);
-
- if (INSN_P (insn))
- {
- if (df_chain_problem_p (DF_DU_CHAIN))
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- DF_REF_CHAIN (*def_rec) = NULL;
- if (df_chain_problem_p (DF_UD_CHAIN))
- {
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
- for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
- }
- }
- }
+ if (INSN_P (insn))
+ {
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ DF_REF_CHAIN (def) = NULL;
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ {
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ DF_REF_CHAIN (use) = NULL;
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ DF_REF_CHAIN (use) = NULL;
+ }
+ }
}
bitmap_clear (df_chain->out_of_date_transfer_functions);
if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *use_rec = DF_INSN_INFO_USES (insn_info);
- df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
+ df_ref use;
+
fprintf (file, ";; UD chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
- if (*use_rec || *eq_use_rec)
- {
- while (*use_rec)
- {
- df_ref use = *use_rec;
- if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
- || !(df->changeable_flags & DF_NO_HARD_REGS))
- {
- fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
- if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- }
- use_rec++;
- }
- while (*eq_use_rec)
- {
- df_ref use = *eq_use_rec;
- if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
- || !(df->changeable_flags & DF_NO_HARD_REGS))
- {
- fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- }
- eq_use_rec++;
- }
- }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
+ || !(df->changeable_flags & DF_NO_HARD_REGS))
+ {
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ }
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
+ || !(df->changeable_flags & DF_NO_HARD_REGS))
+ {
+ fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ }
}
}
if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *def_rec = DF_INSN_INFO_DEFS (insn_info);
+ df_ref def;
fprintf (file, ";; DU chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
- if (*def_rec)
- {
- while (*def_rec)
- {
- df_ref def = *def_rec;
- if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
- || !(df->changeable_flags & DF_NO_HARD_REGS))
- {
- fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
- if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
- df_chain_dump (DF_REF_CHAIN (def), file);
- fprintf (file, "\n");
- }
- def_rec++;
- }
- }
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
+ || !(df->changeable_flags & DF_NO_HARD_REGS))
+ {
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
+ if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (def), file);
+ fprintf (file, "\n");
+ }
fprintf (file, "\n");
}
}
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ df_ref def, use;
/* Ensure that artificial refs don't contain references to pseudos. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
-
if (!NONDEBUG_INSN_P (insn))
continue;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
- {
- df_word_lr_mark_ref (def, true, &bb_info->def);
- df_word_lr_mark_ref (def, false, &bb_info->use);
- }
- }
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- df_word_lr_mark_ref (use, true, &bb_info->use);
- }
+
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
+ {
+ df_word_lr_mark_ref (def, true, &bb_info->def);
+ df_word_lr_mark_ref (def, false, &bb_info->use);
+ }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ df_word_lr_mark_ref (use, true, &bb_info->use);
}
}
df_word_lr_simulate_defs (rtx insn, bitmap live)
{
bool changed = false;
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
- changed = true;
- else
- changed |= df_word_lr_mark_ref (*def_rec, false, live);
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
+ changed = true;
+ else
+ changed |= df_word_lr_mark_ref (def, false, live);
return changed;
}
void
df_word_lr_simulate_uses (rtx insn, bitmap live)
{
- df_ref *use_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref use;
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- df_word_lr_mark_ref (*use_rec, true, live);
+ FOR_EACH_INSN_USE (use, insn)
+ df_word_lr_mark_ref (use, true, live);
}
\f
/*----------------------------------------------------------------------------
one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
so we need to purge the complete EQ_USES vector when removing
the note using df_notes_rescan. */
- df_ref *use_rec;
+ df_ref use;
bool deleted = false;
- for (use_rec = DF_INSN_EQ_USES (insn); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
- && DF_REF_LOC (use)
- && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
- && ! bitmap_bit_p (live, DF_REF_REGNO (use))
- && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
- {
- deleted = true;
- break;
- }
- }
+ FOR_EACH_INSN_EQ_USE (use, insn)
+ if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
+ && DF_REF_LOC (use)
+ && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
+ && !bitmap_bit_p (live, DF_REF_REGNO (use))
+ && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
+ {
+ deleted = true;
+ break;
+ }
if (deleted)
{
rtx next;
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ df_ref def, use;
struct dead_debug_local debug;
dead_debug_local_init (&debug, NULL, NULL);
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
struct df_mw_hardreg **mws_rec;
int debug_insn;
{
if (REG_DEAD_DEBUGGING && dump_file)
{
- fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
+ fprintf (dump_file, "processing call %d\n live =",
+ INSN_UID (insn));
df_print_regset (dump_file, live);
}
/* We only care about real sets for calls. Clobbers cannot
be depended on to really die. */
- mws_rec = DF_INSN_UID_MWS (uid);
+ mws_rec = DF_INSN_INFO_MWS (insn_info);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{
else
{
/* Regular insn. */
- mws_rec = DF_INSN_UID_MWS (uid);
+ mws_rec = DF_INSN_INFO_MWS (insn_info);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
mws_rec++;
}
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
df_create_unused_note (insn,
def, live, artificial_uses, &debug);
}
/* Process the uses. */
- mws_rec = DF_INSN_UID_MWS (uid);
+ mws_rec = DF_INSN_INFO_MWS (insn_info);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
mws_rec++;
}
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
void
df_simulate_find_defs (rtx insn, bitmap defs)
{
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- bitmap_set_bit (defs, DF_REF_REGNO (def));
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
}
/* Find the set of uses for INSN. This includes partial defs. */
static void
df_simulate_find_uses (rtx insn, bitmap uses)
{
- df_ref *rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def, use;
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
- {
- df_ref def = *rec;
- if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
- bitmap_set_bit (uses, DF_REF_REGNO (def));
- }
- for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
- {
- df_ref use = *rec;
- bitmap_set_bit (uses, DF_REF_REGNO (use));
- }
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
+ bitmap_set_bit (uses, DF_REF_REGNO (def));
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ bitmap_set_bit (uses, DF_REF_REGNO (use));
}
/* Find the set of real DEFs, which are not clobbers, for INSN. */
void
df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
{
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (defs, DF_REF_REGNO (def));
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
}
void
df_simulate_defs (rtx insn, bitmap live)
{
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
/* If the def is to only part of the reg, it does
void
df_simulate_uses (rtx insn, bitmap live)
{
- df_ref *use_rec;
- unsigned int uid = INSN_UID (insn);
+ df_ref use;
if (DEBUG_INSN_P (insn))
return;
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- /* Add use to set of uses in this BB. */
- bitmap_set_bit (live, DF_REF_REGNO (use));
- }
+ FOR_EACH_INSN_USE (use, insn)
+ /* Add use to set of uses in this BB. */
+ bitmap_set_bit (live, DF_REF_REGNO (use));
}
void
df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
- bitmap local_md)
+ bitmap local_md)
{
- unsigned uid = INSN_UID (insn);
- df_ref *def_rec;
+ df_ref def;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *ref_rec;
+ df_ref def, use;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
{
if (INSN_P (insn))
{
- unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
if (include_defs)
- for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- unsigned int regno = DF_REF_REGNO (*ref_rec);
+ unsigned int regno = DF_REF_REGNO (def);
ref_info->count[regno]++;
}
if (include_uses)
- for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- unsigned int regno = DF_REF_REGNO (*ref_rec);
+ unsigned int regno = DF_REF_REGNO (use);
ref_info->count[regno]++;
}
if (include_eq_uses)
- for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
- unsigned int regno = DF_REF_REGNO (*ref_rec);
+ unsigned int regno = DF_REF_REGNO (use);
ref_info->count[regno]++;
}
}
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *ref_rec;
+ df_ref def, use;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
{
if (INSN_P (insn))
{
- unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
if (include_defs)
- for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref ref = *ref_rec;
- unsigned int regno = DF_REF_REGNO (ref);
+ unsigned int regno = DF_REF_REGNO (def);
if (regno >= start)
{
unsigned int id
= ref_info->begin[regno] + ref_info->count[regno]++;
- DF_REF_ID (ref) = id;
- ref_info->refs[id] = ref;
+ DF_REF_ID (def) = id;
+ ref_info->refs[id] = def;
}
}
if (include_uses)
- for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref ref = *ref_rec;
- unsigned int regno = DF_REF_REGNO (ref);
+ unsigned int regno = DF_REF_REGNO (use);
if (regno >= start)
{
unsigned int id
= ref_info->begin[regno] + ref_info->count[regno]++;
- DF_REF_ID (ref) = id;
- ref_info->refs[id] = ref;
+ DF_REF_ID (use) = id;
+ ref_info->refs[id] = use;
}
}
if (include_eq_uses)
- for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
- df_ref ref = *ref_rec;
- unsigned int regno = DF_REF_REGNO (ref);
+ unsigned int regno = DF_REF_REGNO (use);
if (regno >= start)
{
unsigned int id
= ref_info->begin[regno] + ref_info->count[regno]++;
- DF_REF_ID (ref) = id;
- ref_info->refs[id] = ref;
+ DF_REF_ID (use) = id;
+ ref_info->refs[id] = use;
}
}
}
#define DF_INSN_INFO_DEFS(II) ((II)->defs)
#define DF_INSN_INFO_USES(II) ((II)->uses)
#define DF_INSN_INFO_EQ_USES(II) ((II)->eq_uses)
+#define DF_INSN_INFO_MWS(II) ((II)->mw_hardregs)
#define DF_INSN_LUID(INSN) (DF_INSN_INFO_LUID (DF_INSN_INFO_GET (INSN)))
#define DF_INSN_DEFS(INSN) (DF_INSN_INFO_DEFS (DF_INSN_INFO_GET (INSN)))
#define DF_INSN_UID_EQ_USES(INSN) (DF_INSN_UID_GET (INSN)->eq_uses)
#define DF_INSN_UID_MWS(INSN) (DF_INSN_UID_GET (INSN)->mw_hardregs)
+#define FOR_EACH_INSN_INFO_DEF(ITER, INSN) \
+ for (df_ref *ITER##_ = DF_INSN_INFO_DEFS (INSN); (ITER = *ITER##_); \
+ ++ITER##_)
+
+#define FOR_EACH_INSN_INFO_USE(ITER, INSN) \
+ for (df_ref *ITER##_ = DF_INSN_INFO_USES (INSN); (ITER = *ITER##_); \
+ ++ITER##_)
+
+#define FOR_EACH_INSN_INFO_EQ_USE(ITER, INSN) \
+ for (df_ref *ITER##_ = DF_INSN_INFO_EQ_USES (INSN); (ITER = *ITER##_); \
+ ++ITER##_)
+
+#define FOR_EACH_INSN_DEF(ITER, INSN) \
+ FOR_EACH_INSN_INFO_DEF(ITER, DF_INSN_INFO_GET (INSN))
+
+#define FOR_EACH_INSN_USE(ITER, INSN) \
+ FOR_EACH_INSN_INFO_USE(ITER, DF_INSN_INFO_GET (INSN))
+
+#define FOR_EACH_INSN_EQ_USE(ITER, INSN) \
+ FOR_EACH_INSN_INFO_EQ_USE(ITER, DF_INSN_INFO_GET (INSN))
+
/* An obstack for bitmap not related to specific dataflow problems.
This obstack should e.g. be used for bitmaps with a short life time
such as temporary bitmaps. This obstack is declared in df-core.c. */
for (insn = from; insn != to; insn = NEXT_INSN (insn))
{
- df_ref *def_rec;
+ df_ref def;
if (!INSN_P (insn))
continue;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_REGNO (ref) == DF_REF_REGNO (def))
- return true;
- }
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_REGNO (ref) == DF_REF_REGNO (def))
+ return true;
}
return false;
}
static bool
all_uses_available_at (rtx def_insn, rtx target_insn)
{
- df_ref *use_rec;
+ df_ref use;
struct df_insn_info *insn_info = DF_INSN_INFO_GET (def_insn);
rtx def_set = single_set (def_insn);
rtx next;
/* If the insn uses the reg that it defines, the substitution is
invalid. */
- for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (rtx_equal_p (DF_REF_REG (use), def_reg))
- return false;
- }
- for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (rtx_equal_p (DF_REF_REG (use), def_reg))
- return false;
- }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (rtx_equal_p (DF_REF_REG (use), def_reg))
+ return false;
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ if (rtx_equal_p (DF_REF_REG (use), def_reg))
+ return false;
}
else
{
/* Look at all the uses of DEF_INSN, and see if they are not
killed between DEF_INSN and TARGET_INSN. */
- for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *use_rec;
if (def_reg && rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
if (use_killed_between (use, def_insn, target_insn))
return false;
}
- for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
- df_ref use = *use_rec;
if (def_reg && rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
if (use_killed_between (use, def_insn, target_insn))
free_load_extend (rtx src, rtx insn)
{
rtx reg;
- df_ref *use_vec;
- df_ref use = 0, def;
+ df_ref def, use;
reg = XEXP (src, 0);
#ifdef LOAD_EXTEND_OP
#endif
return false;
- for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
- {
- use = *use_vec;
-
- if (!DF_REF_IS_ARTIFICIAL (use)
- && DF_REF_TYPE (use) == DF_REF_REG_USE
- && DF_REF_REG (use) == reg)
- break;
- }
+ FOR_EACH_INSN_USE (use, insn)
+ if (!DF_REF_IS_ARTIFICIAL (use)
+ && DF_REF_TYPE (use) == DF_REF_REG_USE
+ && DF_REF_REG (use) == reg)
+ break;
if (!use)
return false;
{
rtx dreg, insn;
basic_block succ_bb;
- df_ref *op, op_ref;
+ df_ref use, op_ref;
edge succ;
edge_iterator ei;
int decreased_pressure = 0;
int nregs;
enum reg_class pressure_class;
-
- for (op = DF_INSN_USES (from); *op; op++)
+
+ FOR_EACH_INSN_USE (use, from)
{
- dreg = DF_REF_REAL_REG (*op);
+ dreg = DF_REF_REAL_REG (use);
/* The live range of register is shrunk only if it isn't:
1. referred on any path from the end of this block to EXIT, or
2. referred by insns other than FROM in this block. */
{
rtx dreg;
int regno;
- df_ref *def_rec, *use_rec;
+ df_ref def, use;
if (! NONDEBUG_INSN_P (insn))
continue;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- dreg = DF_REF_REAL_REG (*def_rec);
+ dreg = DF_REF_REAL_REG (def);
gcc_assert (REG_P (dreg));
regno = REGNO (dreg);
- if (!(DF_REF_FLAGS (*def_rec)
+ if (!(DF_REF_FLAGS (def)
& (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
if (bitmap_clear_bit (curr_regs_live, regno))
}
}
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
+ FOR_EACH_INSN_USE (use, insn)
{
- dreg = DF_REF_REAL_REG (*use_rec);
+ dreg = DF_REF_REAL_REG (use);
gcc_assert (REG_P (dreg));
regno = REGNO (dreg);
if (bitmap_set_bit (curr_regs_live, regno))
insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
- df_ref *def_rec;
+ df_ref def;
HARD_REG_SET set_this_insn;
if (!NONDEBUG_INSN_P (insn))
loop->has_asm = true;
CLEAR_HARD_REG_SET (set_this_insn);
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- rtx dreg = DF_REF_REG (*def_rec);
+ rtx dreg = DF_REF_REG (def);
if (!REG_P (dreg))
continue;
FOR_BB_INSNS_REVERSE (new_dest, insn)
if (NONDEBUG_INSN_P (insn))
{
- df_ref *def_rec;
- unsigned int uid = INSN_UID (insn);
-
- /* If this insn sets any reg in return_regs.. */
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- unsigned r = DF_REF_REGNO (def);
-
- if (bitmap_bit_p (return_regs, r))
+ df_ref def;
+
+ /* If this insn sets any reg in return_regs, add all
+ reg uses to the set of regs we're interested in. */
+ FOR_EACH_INSN_DEF (def, insn)
+ if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
+ {
+ df_simulate_uses (insn, return_regs);
break;
- }
- /* ..then add all reg uses to the set of regs
- we're interested in. */
- if (*def_rec)
- df_simulate_uses (insn, return_regs);
+ }
}
if (bitmap_intersect_p (merge_set, return_regs))
{
FOR_BB_INSNS (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
- df_ref *use_rec;
+ df_ref use;
if (!NONDEBUG_INSN_P (insn))
continue;
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ FOR_EACH_INSN_USE (use, insn)
{
- df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
/* Only do this for the pseudos. */
if (dump_file)
fprintf (dump_file,
"adding initialization in %s of reg %d at in block %d for insn %d.\n",
- current_function_name (), regno, bb->index, uid);
+ current_function_name (), regno, bb->index,
+ INSN_UID (insn));
}
}
}
static bool
mark_hard_reg_early_clobbers (rtx insn, bool live_p)
{
- df_ref *def_rec;
+ df_ref def;
bool set_p = false;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER))
+ FOR_EACH_INSN_DEF (def, insn)
+ if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
{
- rtx dreg = DF_REF_REG (*def_rec);
+ rtx dreg = DF_REF_REG (def);
if (GET_CODE (dreg) == SUBREG)
dreg = SUBREG_REG (dreg);
because there is no way to say that non-operand hard
register clobbers are not early ones. */
if (live_p)
- mark_ref_live (*def_rec);
+ mark_ref_live (def);
else
- mark_ref_dead (*def_rec);
+ mark_ref_dead (def);
set_p = true;
}
pessimistic, but it probably doesn't matter much in practice. */
FOR_BB_INSNS_REVERSE (bb, insn)
{
- df_ref *def_rec, *use_rec;
+ df_ref def, use;
bool call_p;
if (!NONDEBUG_INSN_P (insn))
live would stop us from allocating it to a call-crossing
allocno. */
call_p = CALL_P (insn);
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
- mark_ref_live (*def_rec);
+ FOR_EACH_INSN_DEF (def, insn)
+ if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
+ mark_ref_live (def);
/* If INSN has multiple outputs, then any value used in one
of the outputs conflicts with the other outputs. Model this
to the same hard register as an unused output we could
set the hard register before the output reload insn. */
if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
+ FOR_EACH_INSN_USE (use, insn)
{
int i;
rtx reg;
- reg = DF_REF_REG (*use_rec);
+ reg = DF_REF_REG (use);
for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
{
rtx set;
{
/* After the previous loop, this is a no-op if
REG is contained within SET_DEST (SET). */
- mark_ref_live (*use_rec);
+ mark_ref_live (use);
break;
}
}
process_single_reg_class_operands (false, freq);
/* See which defined values die here. */
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
- mark_ref_dead (*def_rec);
+ FOR_EACH_INSN_DEF (def, insn)
+ if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
+ mark_ref_dead (def);
if (call_p)
{
curr_point++;
/* Mark each used value as live. */
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
- mark_ref_live (*use_rec);
+ FOR_EACH_INSN_USE (use, insn)
+ mark_ref_live (use);
process_single_reg_class_operands (true, freq);
/* Mark each hard reg as live again. For example, a
hard register can be in clobber and in an insn
input. */
- for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
+ FOR_EACH_INSN_USE (use, insn)
{
- rtx ureg = DF_REF_REG (*use_rec);
+ rtx ureg = DF_REF_REG (use);
if (GET_CODE (ureg) == SUBREG)
ureg = SUBREG_REG (ureg);
if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
continue;
- mark_ref_live (*use_rec);
+ mark_ref_live (use);
}
}
rtx insn;
FOR_BB_INSNS_REVERSE (bb, insn)
{
- df_ref *def_rec;
+ df_ref def;
if (insn_contains_asm (insn))
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (HARD_REGISTER_NUM_P (dregno))
add_to_hard_reg_set (&crtl->asm_clobbers,
{
if (!NOTE_P (insn) && !BARRIER_P (insn))
{
- unsigned int uid = INSN_UID (insn);
- df_ref *def_rec;
- df_ref *use_rec;
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref def, use;
c = new_insn_chain ();
c->next = next;
c->block = bb->index;
if (NONDEBUG_INSN_P (insn))
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
/* Ignore may clobbers because these are generated
bitmap_copy (&c->live_throughout, live_relevant_regs);
if (NONDEBUG_INSN_P (insn))
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
rtx reg = DF_REF_REG (use);
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn))
{
+ df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *u_rec, *d_rec;
+ df_ref def, use;
uid_luid[INSN_UID (insn)] = i++;
- u_rec = DF_INSN_USES (insn);
- d_rec = DF_INSN_DEFS (insn);
+ u_rec = DF_INSN_INFO_USES (insn_info);
+ d_rec = DF_INSN_INFO_DEFS (insn_info);
if (d_rec[0] != NULL && d_rec[1] == NULL
&& u_rec[0] != NULL && u_rec[1] == NULL
&& DF_REF_REGNO (*u_rec) == DF_REF_REGNO (*d_rec)
bitmap_clear_bit (transp, regno);
continue;
}
- while (*u_rec)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- unsigned regno = DF_REF_REGNO (*u_rec);
+ unsigned regno = DF_REF_REGNO (use);
bitmap_set_bit (&used, regno);
if (bitmap_clear_bit (moveable, regno))
bitmap_clear_bit (transp, regno);
- u_rec++;
}
- while (*d_rec)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- unsigned regno = DF_REF_REGNO (*d_rec);
+ unsigned regno = DF_REF_REGNO (def);
bitmap_set_bit (&set, regno);
bitmap_clear_bit (transp, regno);
bitmap_clear_bit (moveable, regno);
- d_rec++;
}
}
}
bitmap def_bb_transp = bb_transp_live + def_block->index;
bool local_to_bb_p = bitmap_bit_p (def_bb_local, i);
rtx use_insn = closest_uses[i];
- df_ref *def_insn_use_rec = DF_INSN_USES (def_insn);
+ df_ref use;
bool all_ok = true;
bool all_transp = true;
if (dump_file)
fprintf (dump_file, "Examining insn %d, def for %d\n",
INSN_UID (def_insn), i);
- while (*def_insn_use_rec != NULL)
+ FOR_EACH_INSN_USE (use, def_insn)
{
- df_ref use = *def_insn_use_rec;
unsigned regno = DF_REF_REGNO (use);
if (bitmap_bit_p (&unusable_as_input, regno))
{
else
all_transp = false;
}
-
- def_insn_use_rec++;
}
if (!all_ok)
continue;
check_dependencies (rtx insn, bitmap depends_on)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *use_rec;
+ df_ref use;
basic_block bb = BLOCK_FOR_INSN (insn);
- for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
- if (!check_dependency (bb, *use_rec, depends_on))
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (!check_dependency (bb, use, depends_on))
return false;
- for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
- if (!check_dependency (bb, *use_rec, depends_on))
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ if (!check_dependency (bb, use, depends_on))
return false;
return true;
record_uses (rtx insn)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_ref *use_rec;
+ df_ref use;
struct invariant *inv;
- for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *use_rec;
inv = invariant_for_use (use);
if (inv)
record_use (inv->def, use);
}
- for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
- df_ref use = *use_rec;
inv = invariant_for_use (use);
if (inv)
record_use (inv->def, use);
{
enum reg_class cl;
HARD_REG_SET live;
- df_ref *def_rec;
+ df_ref def;
int i;
gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
/* Don't use registers set or clobbered by the insn. */
- for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
- *def_rec; def_rec++)
- SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
+ FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
+ SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
from = peep2_buf_position (from + 1);
}
static struct df_link *
get_defs (rtx insn, rtx reg, vec<rtx> *dest)
{
- df_ref reg_info, *uses;
+ df_ref use;
struct df_link *ref_chain, *ref_link;
- reg_info = NULL;
-
- for (uses = DF_INSN_USES (insn); *uses; uses++)
+ FOR_EACH_INSN_USE (use, insn)
{
- reg_info = *uses;
- if (GET_CODE (DF_REF_REG (reg_info)) == SUBREG)
+ if (GET_CODE (DF_REF_REG (use)) == SUBREG)
return NULL;
- if (REGNO (DF_REF_REG (reg_info)) == REGNO (reg))
- break;
+ if (REGNO (DF_REF_REG (use)) == REGNO (reg))
+ break;
}
- gcc_assert (reg_info != NULL && uses != NULL);
+ gcc_assert (use != NULL);
- ref_chain = DF_REF_CHAIN (reg_info);
+ ref_chain = DF_REF_CHAIN (use);
for (ref_link = ref_chain; ref_link; ref_link = ref_link->next)
{
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ df_ref def, use;
int luid = 0;
bitmap_iterator bi;
unsigned int regno;
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
bitmap_iterator bi;
struct df_mw_hardreg **mws_rec;
rtx link;
/* We only care about real sets for calls. Clobbers cannot
be depended on.
Only do this if the value is totally dead. */
- for (mws_rec = DF_INSN_UID_MWS (uid); *mws_rec; mws_rec++)
+ for (mws_rec = DF_INSN_INFO_MWS (insn_info); *mws_rec; mws_rec++)
{
struct df_mw_hardreg *mws = *mws_rec;
if (DF_MWS_REG_DEF_P (mws))
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
if ((!CALL_P (insn))
|| (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
{
}
}
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if (uregno >= FIRST_PSEUDO_REGISTER)
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ df_ref def, use;
bitmap_copy (live, df_get_live_out (bb));
FOR_BB_INSNS_REVERSE (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
unsigned int regno;
if (!INSN_P (insn))
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *def_rec;
if ((!CALL_P (insn))
|| (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
{
}
}
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- bitmap_set_bit (live, DF_REF_REGNO (use));
- }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ bitmap_set_bit (live, DF_REF_REGNO (use));
}
}
if (parse_add_or_inc (mii, inc_cand, backwards))
{
struct dep_replacement *desc;
- df_ref *def_rec;
+ df_ref def;
rtx newaddr, newmem;
if (sched_verbose >= 5)
/* Need to assure that none of the operands of the inc
instruction are assigned to by the mem insn. */
- for (def_rec = DF_INSN_DEFS (mii->mem_insn); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (reg_overlap_mentioned_p (DF_REF_REG (def), mii->inc_input)
- || reg_overlap_mentioned_p (DF_REF_REG (def), mii->mem_reg0))
- {
- if (sched_verbose >= 5)
- fprintf (sched_dump,
- "inc conflicts with store failure.\n");
- goto next;
- }
- }
+ FOR_EACH_INSN_DEF (def, mii->mem_insn)
+ if (reg_overlap_mentioned_p (DF_REF_REG (def), mii->inc_input)
+ || reg_overlap_mentioned_p (DF_REF_REG (def), mii->mem_reg0))
+ {
+ if (sched_verbose >= 5)
+ fprintf (sched_dump,
+ "inc conflicts with store failure.\n");
+ goto next;
+ }
newaddr = mii->inc_input;
if (mii->mem_index != NULL_RTX)
newaddr = gen_rtx_PLUS (GET_MODE (newaddr), newaddr,
}
if (REG_P (reg0))
{
- df_ref *def_rec;
+ df_ref use;
int occurrences = 0;
/* Make sure this reg appears only once in this insn. Can't use
count_occurrences since that only works for pseudos. */
- for (def_rec = DF_INSN_USES (mii->mem_insn); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (reg_overlap_mentioned_p (reg0, DF_REF_REG (def)))
- if (++occurrences > 1)
- {
- if (sched_verbose >= 5)
- fprintf (sched_dump, "mem count failure\n");
- return false;
- }
- }
+ FOR_EACH_INSN_USE (use, mii->mem_insn)
+ if (reg_overlap_mentioned_p (reg0, DF_REF_REG (use)))
+ if (++occurrences > 1)
+ {
+ if (sched_verbose >= 5)
+ fprintf (sched_dump, "mem count failure\n");
+ return false;
+ }
mii->mem_reg0 = reg0;
return find_inc (mii, true) || find_inc (mii, false);
maybe_downgrade_id_to_use (idata_t id, insn_t insn)
{
bool must_be_use = false;
- unsigned uid = INSN_UID (insn);
- df_ref *rec;
+ df_ref def;
rtx lhs = IDATA_LHS (id);
rtx rhs = IDATA_RHS (id);
return;
}
- for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- df_ref def = *rec;
-
if (DF_REF_INSN (def)
&& DF_REF_FLAGS_IS_SET (def, DF_REF_PRE_POST_MODIFY)
&& loc_mentioned_in_p (DF_REF_LOC (def), IDATA_RHS (id)))
static void
setup_id_reg_sets (idata_t id, insn_t insn)
{
- unsigned uid = INSN_UID (insn);
- df_ref *rec;
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref def, use;
regset tmp = get_clear_regset_from_pool ();
- for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
- df_ref def = *rec;
unsigned int regno = DF_REF_REGNO (def);
/* Post modifies are treated like clobbers by sched-deps.c. */
bitmap_set_bit (tmp, regno);
}
- for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
{
- df_ref use = *rec;
unsigned int regno = DF_REF_REGNO (use);
/* When these refs are met for the first time, skip them, as
requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
HARD_REG_SET set_up_by_prologue)
{
- df_ref *df_rec;
+ df_ref def, use;
HARD_REG_SET hardregs;
unsigned regno;
return true;
CLEAR_HARD_REG_SET (hardregs);
- for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- rtx dreg = DF_REF_REG (*df_rec);
+ rtx dreg = DF_REF_REG (def);
if (!REG_P (dreg))
continue;
&& df_regs_ever_live_p (regno))
return true;
- for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
+ FOR_EACH_INSN_USE (use, insn)
{
- rtx reg = DF_REF_REG (*df_rec);
+ rtx reg = DF_REF_REG (use);
if (!REG_P (reg))
continue;
{
rtx insn, curr, x;
HARD_REG_SET uses, defs;
- df_ref *ref;
+ df_ref def, use;
bool split_p = false;
if (JUMP_P (BB_END (entry_block)))
&split_p))
{
/* Add all defined registers to DEFs. */
- for (ref = DF_INSN_DEFS (insn); *ref; ref++)
+ FOR_EACH_INSN_DEF (def, insn)
{
- x = DF_REF_REG (*ref);
+ x = DF_REF_REG (def);
if (REG_P (x) && HARD_REGISTER_P (x))
SET_HARD_REG_BIT (defs, REGNO (x));
}
/* Add all used registers to USESs. */
- for (ref = DF_INSN_USES (insn); *ref; ref++)
+ FOR_EACH_INSN_USE (use, insn)
{
- x = DF_REF_REG (*ref);
+ x = DF_REF_REG (use);
if (REG_P (x) && HARD_REGISTER_P (x))
SET_HARD_REG_BIT (uses, REGNO (x));
}
unsigned regno;
#endif
rtx insn, tmp;
- df_ref *def_rec;
+ df_ref def;
int *last_set_in, *already_set;
struct st_expr * ptr, **prev_next_ptr_ptr;
unsigned int max_gcse_regno = max_reg_num ();
if (! NONDEBUG_INSN_P (insn))
continue;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- last_set_in[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
+ FOR_EACH_INSN_DEF (def, insn)
+ last_set_in[DF_REF_REGNO (def)] = INSN_UID (insn);
}
/* Now find the stores. */
if (! NONDEBUG_INSN_P (insn))
continue;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- already_set[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
+ FOR_EACH_INSN_DEF (def, insn)
+ already_set[DF_REF_REGNO (def)] = INSN_UID (insn);
/* Now that we've marked regs, look for stores. */
find_moveable_store (insn, already_set, last_set_in);
/* Unmark regs that are no longer set. */
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (last_set_in[DF_REF_REGNO (*def_rec)] == INSN_UID (insn))
- last_set_in[DF_REF_REGNO (*def_rec)] = 0;
+ FOR_EACH_INSN_DEF (def, insn)
+ if (last_set_in[DF_REF_REGNO (def)] == INSN_UID (insn))
+ last_set_in[DF_REF_REGNO (def)] = 0;
}
#ifdef ENABLE_CHECKING
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn))
{
- df_ref *def_rec;
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
+ df_ref def;
+ FOR_EACH_INSN_DEF (def, insn)
{
- unsigned int ref_regno = DF_REF_REGNO (*def_rec);
+ unsigned int ref_regno = DF_REF_REGNO (def);
if (ref_regno < max_gcse_regno)
- regs_set_in_block[DF_REF_REGNO (*def_rec)] = 1;
+ regs_set_in_block[DF_REF_REGNO (def)] = 1;
}
}
{
struct df_insn_info *insn_info = DF_REF_INSN_INFO (use);
struct df_link *link = DF_REF_CHAIN (use);
- df_ref *eq_use_link;
- df_ref *def_link;
rtx set;
if (insn_info)
{
- rtx insn = insn_info->insn;
- eq_use_link = DF_INSN_INFO_EQ_USES (insn_info);
- def_link = DF_INSN_INFO_DEFS (insn_info);
- set = single_set (insn);
+ df_ref eq_use;
+
+ set = single_set (insn_info->insn);
+ FOR_EACH_INSN_INFO_EQ_USE (eq_use, insn_info)
+ if (use != eq_use
+ && DF_REF_REAL_REG (use) == DF_REF_REAL_REG (eq_use))
+ (*fun) (use_entry + DF_REF_ID (use), use_entry + DF_REF_ID (eq_use));
}
else
- {
- /* An artificial use. It links up with nothing. */
- eq_use_link = NULL;
- def_link = NULL;
- set = NULL;
- }
+ set = NULL;
/* Union all occurrences of the same register in reg notes. */
- if (eq_use_link)
- while (*eq_use_link)
- {
- if (use != *eq_use_link
- && DF_REF_REAL_REG (use) == DF_REF_REAL_REG (*eq_use_link))
- (*fun) (use_entry + DF_REF_ID (use),
- use_entry + DF_REF_ID (*eq_use_link));
- eq_use_link++;
- }
-
/* Recognize trivial noop moves and attempt to keep them as noop. */
if (set
&& SET_SRC (set) == DF_REF_REG (use)
&& SET_SRC (set) == SET_DEST (set))
{
- if (def_link)
- while (*def_link)
- {
- if (DF_REF_REAL_REG (use) == DF_REF_REAL_REG (*def_link))
- (*fun) (use_entry + DF_REF_ID (use),
- def_entry + DF_REF_ID (*def_link));
- def_link++;
- }
+ df_ref def;
+
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ if (DF_REF_REAL_REG (use) == DF_REF_REAL_REG (def))
+ (*fun) (use_entry + DF_REF_ID (use), def_entry + DF_REF_ID (def));
}
/* UD chains of uninitialized REGs are empty. Keeping all uses of
/* A READ_WRITE use requires the corresponding def to be in the same
register. Find it and union. */
if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
- {
- df_ref *link;
+ if (insn_info)
+ {
+ df_ref def;
- if (insn_info)
- link = DF_INSN_INFO_DEFS (insn_info);
- else
- link = NULL;
-
- if (link)
- while (*link)
- {
- if (DF_REF_REAL_REG (*link) == DF_REF_REAL_REG (use))
- (*fun) (use_entry + DF_REF_ID (use),
- def_entry + DF_REF_ID (*link));
- link++;
- }
- }
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ if (DF_REF_REAL_REG (use) == DF_REF_REAL_REG (def))
+ (*fun) (use_entry + DF_REF_ID (use), def_entry + DF_REF_ID (def));
+ }
}
/* Find the corresponding register for the given entry. */
FOR_ALL_BB_FN (bb, fun)
FOR_BB_INSNS (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
if (NONDEBUG_INSN_P (insn))
{
- df_ref *use_rec;
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
- DF_REF_ID (use) = uses_num++;
- }
- for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
- DF_REF_ID (use) = uses_num++;
- }
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref use;
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
+ DF_REF_ID (use) = uses_num++;
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
+ DF_REF_ID (use) = uses_num++;
}
}
/* Produce the web. */
FOR_ALL_BB_FN (bb, fun)
FOR_BB_INSNS (bb, insn)
- {
- unsigned int uid = INSN_UID (insn);
if (NONDEBUG_INSN_P (insn))
{
- df_ref *use_rec;
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref use;
union_match_dups (insn, def_entry, use_entry, unionfind_union);
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
- union_defs (use, def_entry, used, use_entry, unionfind_union);
- }
- for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
- union_defs (use, def_entry, used, use_entry, unionfind_union);
- }
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
+ union_defs (use, def_entry, used, use_entry, unionfind_union);
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
+ union_defs (use, def_entry, used, use_entry, unionfind_union);
}
- }
/* Update the instruction stream, allocating new registers for split pseudos
in progress. */
FOR_ALL_BB_FN (bb, fun)
FOR_BB_INSNS (bb, insn)
- {
- unsigned int uid = INSN_UID (insn);
-
if (NONDEBUG_INSN_P (insn)
/* Ignore naked clobber. For example, reg 134 in the second insn
of the following sequence will not be replaced.
Thus the later passes can optimize them away. */
&& GET_CODE (PATTERN (insn)) != CLOBBER)
{
- df_ref *use_rec;
- df_ref *def_rec;
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
- replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
- }
- for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
- {
- df_ref use = *use_rec;
- if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
- replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
- }
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if (DF_REF_REGNO (def) >= FIRST_PSEUDO_REGISTER)
- replace_ref (def, entry_register (def_entry + DF_REF_ID (def), def, used));
- }
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref def, use;
+ FOR_EACH_INSN_INFO_USE (use, insn_info)
+ if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
+ replace_ref (use, entry_register (use_entry + DF_REF_ID (use),
+ use, used));
+ FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
+ if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
+ replace_ref (use, entry_register (use_entry + DF_REF_ID (use),
+ use, used));
+ FOR_EACH_INSN_INFO_DEF (def, insn_info)
+ if (DF_REF_REGNO (def) >= FIRST_PSEUDO_REGISTER)
+ replace_ref (def, entry_register (def_entry + DF_REF_ID (def),
+ def, used));
}
- }
free (def_entry);
free (use_entry);