+2008-10-11  Kenneth Zadeck  <zadeck@naturalbridge.com>
+
+ PR rtl-optimization/37448
+ * df.h (df_ref_class): New enum.
+ (DF_REF_TYPE_NAMES, df_ref_extract): Removed.
+ (struct df_ref): Replaced with union df_ref_d.
+ (df_base_ref, df_artificial_ref, df_regular_ref, df_extract_ref):
+ New members of df_ref_d union.
+ (DF_REF_REAL_REG, DF_REF_REGNO, DF_REF_REAL_LOC, DF_REF_REG,
+ DF_REF_LOC, DF_REF_BB, DF_REF_INSN_INFO, DF_REF_INSN,
+ DF_REF_CLASS, DF_REF_TYPE, DF_REF_CHAIN, DF_REF_ID, DF_REF_FLAGS,
+ DF_REF_ORDER, DF_REF_IS_ARTIFICIAL, DF_REF_NEXT_REG,
+ DF_REF_PREV_REG, DF_REF_EXTRACT_WIDTH, DF_REF_EXTRACT_OFFSET,
+ DF_REF_EXTRACT_MODE): Replaced definition to access union
+ df_ref_d.
+ (DF_MWS_REG_DEF_P, DF_MWS_REG_USE_P, DF_MWS_TYPE): New macros.
+ (df_scan_bb_info, df_bb_regno_first_def_find,
+ df_bb_regno_last_def_find, df_find_def, df_find_use,
+ df_refs_chain_dump, df_regs_chain_dump, df_ref_debug,
+ debug_df_ref, df_chain_create, df_chain_unlink, df_chain_copy,
+ df_ref_create, df_ref_remove, df_compute_accessed_bytes,
+ df_get_artificial_defs, df_get_artificial_uses, union_defs):
+ Replaced struct df_ref * with df_ref.
+ * df-scan.c (df_collection_rec, df_null_ref_rec,
+ df_ref_chain_delete_du_chain, df_ref_chain_delete, df_install_ref,
+ df_grow_ref_info, df_ref_create, df_reg_chain_unlink,
+ df_ref_compress_rec, df_ref_remove, df_ref_chain_delete_du_chain,
+ df_ref_chain_delete, df_free_collection_rec, df_insn_rescan,
+ df_reorganize_refs_by_reg_by_reg,
+ df_reorganize_refs_by_reg_by_insn, df_reorganize_refs_by_reg,
+ df_ref_change_reg_with_loc_1, df_notes_rescan, df_swap_refs,
+ df_sort_and_compress_refs, df_install_ref, df_install_refs,
+ df_ref_record, df_get_conditional_uses, df_get_call_refs,
+ df_bb_refs_record, df_exit_block_uses_collect,
+ df_record_exit_block_uses, df_reg_chain_mark,
+ df_reg_chain_verify_unmarked, df_refs_verify): Replaced struct
+ df_ref * with df_ref.
+ (df_ref_record, df_uses_record, df_ref_create_structure): Added
+ df_ref_class parameter.
+ (df_scan_problem_data): Added new pools for different types of
+ refs.
+ (df_scan_free_internal, df_scan_alloc, df_free_ref,
+ df_ref_create_structure): Processed new ref pools.
+ (df_scan_start_dump): Added counts of refs and insns.
+ (df_ref_create, df_notes_rescan, df_def_record_1, df_uses_record,
+ df_get_call_refs, df_insn_refs_collect, df_bb_refs_collect,
+ df_entry_block_defs_collect, df_exit_block_uses_collect): Added
+ code to pass df_ref_class down to ref creation functions.
+ (df_reg_chain_unlink, df_ref_remove, df_ref_change_reg_with_loc_1,
+ df_reg_chain_mark): Use macros to hide references to df_refs.
+ (df_ref_chain_change_bb): Removed.
+ (df_insn_change_bb): Remove calls to df_ref_chain_change_bb.
+ (df_ref_equal_p, df_ref_compare, df_ref_create_structure):
+ Enhanced to understand df_ref union structure.
+ * fwprop.c (local_ref_killed_between_p, use_killed_between,
+ all_uses_available_at, update_df, try_fwprop_subst,
+ forward_propagate_subreg, forward_propagate_and_simplify,
+ forward_propagate_into, fwprop, fwprop_addr): Replaced struct
+ df_ref * with df_ref.
+ (use_killed_between, all_uses_available_at): Use macros to hide
+ references to df_refs.
+ * regstat.c (regstat_bb_compute_ri,
+ regstat_bb_compute_calls_crossed): Replaced struct df_ref * with
+ df_ref.
+ * see.c (see_handle_relevant_defs, see_handle_relevant_uses,
+ see_handle_relevant_refs, see_analyze_one_def,
+ see_update_relevancy, see_propagate_extensions_to_uses): Replaced
+ struct df_ref * with df_ref.
+ * ra-conflict.c (record_one_conflict, clear_reg_in_live,
+ global_conflicts): Replaced struct df_ref * with df_ref.
+ * ddg.c (create_ddg_dep_from_intra_loop_link,
+ add_cross_iteration_register_deps, build_inter_loop_deps):
+ Replaced struct df_ref * with df_ref.
+ (create_ddg_dep_from_intra_loop_link,
+ add_cross_iteration_register_deps): Use macros to hide references
+ to df_refs.
+ * auto-inc-dec.c (find_inc, merge_in_block): Replaced struct
+ df_ref * with df_ref.
+ * df-core.c (df_bb_regno_first_def_find,
+ df_bb_regno_last_def_find, df_find_def, df_find_use,
+ df_refs_chain_dump, df_regs_chain_dump, df_ref_debug,
+ debug_df_ref): Replaced struct df_ref * with df_ref.
+ (df_mws_dump, df_ref_debug): Use macros to hide references to
+ df_refs.
+ * cse.c (cse_extended_basic_block): Replaced struct df_ref * with
+ df_ref.
+ * web.c (union_defs, entry_register, replace_ref, web_main):
+ Replaced struct df_ref * with df_ref.
+ (union_defs, replace_ref): Use macros to hide references to
+ df_refs.
+ * global.c (compute_regs_asm_clobbered, build_insn_chain):
+ Replaced struct df_ref * with df_ref.
+ * ifcvt.c (dead_or_predicable): Replaced struct df_ref * with
+ df_ref.
+ * sel-sched-ir.c (maybe_downgrade_id_to_use, setup_id_reg_sets):
+ Replaced struct df_ref * with df_ref.
+ * ira-lives.c (mark_ref_live, def_conflicts_with_inputs_p,
+ mark_ref_dead, process_bb_node_lives): Replaced struct df_ref *
+ with df_ref.
+ * local-alloc.c (block_alloc): Replaced struct df_ref * with
+ df_ref.
+ * df-byte-scan.c (df_compute_accessed_bytes_extract,
+ df_compute_accessed_bytes_strict_low_part,
+ df_compute_accessed_bytes_subreg, df_compute_accessed_bytes):
+ Replaced struct df_ref * with df_ref.
+ (df_compute_accessed_bytes): Use macros to hide references to
+ df_refs.
+ * init-regs.c (initialize_uninitialized_regs): Replaced struct
+ df_ref * with df_ref.
+ * loop-invariant.c (invariant_for_use, hash_invariant_expr_1,
+ check_dependency, check_dependencies, record_uses): Replaced
+ struct df_ref * with df_ref.
+ (invariant_for_use, check_dependency): Use macros to hide
+ references to df_refs.
+ * loop-iv.c (iv_analysis_loop_init, iv_get_reaching_def,
+ get_biv_step_1, get_biv_step, record_iv, iv_analyze_def,
+ iv_analyze, biv_p): Replaced struct df_ref * with df_ref.
+ (iv_analysis_loop_init, iv_get_reaching_def): Use macros to hide
+ references to df_refs.
+ * ira.c (compute_regs_asm_clobbered): Replaced struct df_ref *
+ with df_ref.
+ * combine.c (create_log_links): Replaced struct df_ref * with
+ df_ref.
+ * df-problems.c (df_rd_bb_local_compute_process_def,
+ df_lr_bb_local_compute, df_live_bb_local_compute, df_chain_create,
+ df_chain_unlink_1, df_chain_unlink, df_chain_copy,
+ df_chain_remove_problem, df_chain_create_bb_process_use,
+ df_chain_create_bb, df_chain_top_dump, df_chain_bottom_dump,
+ df_byte_lr_check_regs, df_byte_lr_bb_local_compute,
+ df_byte_lr_simulate_defs, df_byte_lr_simulate_uses,
+ df_byte_lr_simulate_artificial_refs_at_top,
+ df_byte_lr_simulate_artificial_refs_at_end, df_create_unused_note,
+ df_note_bb_compute, df_note_add_problem, df_simulate_defs,
+ df_simulate_uses, df_simulate_artificial_refs_at_end,
+ df_simulate_artificial_refs_at_top): Replaced struct df_ref *
+ with df_ref.
+ (df_chain_dump): Use macros to hide references to df_refs.
+ * config/mips/mips.c (r10k_simplify_address): Replaced struct
+ df_ref * with df_ref.
+ * dce.c (mark_nonreg_stores, delete_corresponding_reg_eq_notes,
+ mark_artificial_uses, mark_reg_dependencies,
+ byte_dce_process_block): Replaced struct df_ref * with df_ref.
+
2008-10-11  Eric Botcazou  <ebotcazou@adacore.com>

* tree.h (contains_packed_reference): Mention ARRAY_RANGE_REF in
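The df.h hunk itself is not part of this excerpt. For orientation with the
hunks that follow: per the ChangeLog above, struct df_ref is replaced by
union df_ref_d, with one member per df_ref_class, and df_ref becomes a
pointer typedef. A reconstructed sketch in C follows; the member names come
from the ChangeLog and from accesses in the hunks below (for example
this_ref->artificial_ref.bb in df_ref_create_structure), while field order,
packing, and any field not implied by a DF_REF_* accessor are assumptions:

    /* Sketch only; not the df.h text.  */
    enum df_ref_class { DF_REF_BASE, DF_REF_ARTIFICIAL,
                        DF_REF_REGULAR, DF_REF_EXTRACT };

    union df_ref_d;

    struct df_base_ref                /* Fields shared by every ref.  */
    {
      enum df_ref_class cl;           /* Selects the union member.  */
      enum df_ref_type type;          /* Def or use (DF_REF_TYPE).  */
      enum df_ref_flags flags;        /* DF_REF_FLAGS.  */
      rtx reg;                        /* Referenced reg (DF_REF_REG).  */
      struct df_link *chain;          /* du/ud chain (DF_REF_CHAIN).  */
      struct df_insn_info *insn_info; /* NULL for artificial refs.  */
      union df_ref_d *next_reg;       /* DF_REF_NEXT_REG.  */
      union df_ref_d *prev_reg;       /* DF_REF_PREV_REG.  */
      unsigned int regno;             /* DF_REF_REGNO.  */
      int id;                         /* DF_REF_ID.  */
      int ref_order;                  /* DF_REF_ORDER.  */
    };

    struct df_artificial_ref  /* Block-boundary refs have no insn, so
                                 the basic block is stored directly.  */
    {
      struct df_base_ref base;
      basic_block bb;
    };

    struct df_regular_ref     /* In-insn refs; LOC points at the use or
                                 def site (DF_REF_LOC).  */
    {
      struct df_base_ref base;
      rtx *loc;
    };

    struct df_extract_ref     /* Sign/zero extracts also carry
                                 DF_REF_EXTRACT_{WIDTH,OFFSET,MODE}.  */
    {
      struct df_regular_ref base;
      int width;
      int offset;
      enum machine_mode mode;
    };

    union df_ref_d            /* Replaces struct df_ref.  */
    {
      struct df_base_ref base;
      struct df_artificial_ref artificial_ref;
      struct df_regular_ref regular_ref;
      struct df_extract_ref extract_ref;
    };
    typedef union df_ref_d *df_ref;

The pointer typedef is why the hunks mechanically respell the old
"struct df_ref *" as "df_ref" and "struct df_ref **" as "df_ref *".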
rtx insn;
basic_block bb = BASIC_BLOCK (BLOCK_NUM (mem_insn.insn));
rtx other_insn;
- struct df_ref **def_rec;
+ df_ref *def_rec;
/* Make sure this reg appears only once in this insn. */
if (count_occurrences (PATTERN (mem_insn.insn), mem_insn.reg0, 1) != 1)
assigned to by the mem insn. */
for (def_rec = DF_INSN_DEFS (mem_insn.insn); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if ((regno == REGNO (inc_insn.reg0))
|| (regno == REGNO (inc_insn.reg_res)))
 and there is nothing to update. */
if (DF_INSN_UID_GET(uid))
{
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
/* Need to update next use. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
reg_next_use[DF_REF_REGNO (def)] = NULL;
reg_next_inc_use[DF_REF_REGNO (def)] = NULL;
reg_next_def[DF_REF_REGNO (def)] = insn;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
reg_next_use[DF_REF_REGNO (use)] = insn;
if (insn_is_add_or_inc)
reg_next_inc_use[DF_REF_REGNO (use)] = insn;
{
basic_block bb;
rtx *next_use, insn;
- struct df_ref **def_vec, **use_vec;
+ df_ref *def_vec, *use_vec;
next_use = XCNEWVEC (rtx, max_reg_num ());
for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
{
- struct df_ref *def = *def_vec;
+ df_ref def = *def_vec;
int regno = DF_REF_REGNO (def);
rtx use_insn;
for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
{
- struct df_ref *use = *use_vec;
+ df_ref use = *use_vec;
int regno = DF_REF_REGNO (use);
/* Do not consider the usage of the stack pointer
r10k_simplify_address (rtx x, rtx insn)
{
rtx newx, op0, op1, set, def_insn, note;
- struct df_ref *use, *def;
+ df_ref use, def;
struct df_link *defs;
newx = NULL_RTX;
edge pointing to that bb. */
if (bb_has_eh_pred (bb))
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
}
static void
delete_corresponding_reg_eq_notes (rtx insn)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
/* This loop is a little tricky. We cannot just go down the
chain because it is being modified by the actions in the
anyway. */
while (DF_REG_EQ_USE_CHAIN (regno))
{
- struct df_ref *eq_use = DF_REG_EQ_USE_CHAIN (regno);
+ df_ref eq_use = DF_REG_EQ_USE_CHAIN (regno);
rtx noted_insn = DF_REF_INSN (eq_use);
rtx note = find_reg_note (noted_insn, REG_EQUAL, NULL_RTX);
if (!note)
{
basic_block bb;
struct df_link *defs;
- struct df_ref **use_rec;
+ df_ref *use_rec;
FOR_ALL_BB (bb)
{
mark_reg_dependencies (rtx insn)
{
struct df_link *defs;
- struct df_ref **use_rec;
+ df_ref *use_rec;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (dump_file)
{
fprintf (dump_file, "Processing use of ");
bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
- struct df_ref **def_rec;
+ df_ref *def_rec;
if (redo_out)
{
/* The insn is needed if there is someone who uses the output. */
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int last;
unsigned int dregno = DF_REF_REGNO (def);
unsigned int start = df_byte_lr_get_regno_start (dregno);
bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
- struct df_ref **def_rec;
+ df_ref *def_rec;
if (redo_out)
{
if (set && REG_P (SET_DEST (set)))
{
int regno = REGNO (SET_DEST (set));
- struct df_ref *first_def;
+ df_ref first_def;
struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
first_def = df_bb_regno_first_def_find (g->bb, regno);
gcc_assert (first_def);
- if (bitmap_bit_p (bb_info->gen, first_def->id))
+ if (bitmap_bit_p (bb_info->gen, DF_REF_ID (first_def)))
return;
}
}
and anti-dependences from its uses in the current iteration to the
first definition in the next iteration. */
static void
-add_cross_iteration_register_deps (ddg_ptr g, struct df_ref *last_def)
+add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
int regno = DF_REF_REGNO (last_def);
struct df_link *r_use;
#ifdef ENABLE_CHECKING
struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
- struct df_ref *first_def = df_bb_regno_first_def_find (g->bb, regno);
+ df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);
gcc_assert (last_def_node);
gcc_assert (first_def);
#ifdef ENABLE_CHECKING
- if (last_def->id != first_def->id)
- gcc_assert (!bitmap_bit_p (bb_info->gen, first_def->id));
+ if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
+ gcc_assert (!bitmap_bit_p (bb_info->gen, DF_REF_ID (first_def)));
#endif
/* Create inter-loop true dependences and anti dependences. */
gcc_assert (first_def_node);
- if (last_def->id != first_def->id
+ if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
|| !flag_modulo_sched_allow_regmoves)
create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
REG_DEP, 1);
{
ddg_node_ptr dest_node;
- if (last_def->id == first_def->id)
+ if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
return;
dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
/* Find inter-loop register output, true and anti deps. */
EXECUTE_IF_SET_IN_BITMAP (rd_bb_info->gen, 0, rd_num, bi)
{
- struct df_ref *rd = DF_DEFS_GET (rd_num);
+ df_ref rd = DF_DEFS_GET (rd_num);
add_cross_iteration_register_deps (g, rd);
}
df_compute_accessed_bytes for a description of MM. */
static bool
-df_compute_accessed_bytes_extract (struct df_ref *ref,
+df_compute_accessed_bytes_extract (df_ref ref,
 enum df_mm mm,
unsigned int *start_byte,
unsigned int *last_byte)
otherwise and set START_BYTE and LAST_BYTE. */
static bool
-df_compute_accessed_bytes_strict_low_part (struct df_ref *ref,
+df_compute_accessed_bytes_strict_low_part (df_ref ref,
unsigned int *start_byte,
unsigned int *last_byte)
{
otherwise and set START_BYTE and LAST_BYTE. */
static bool
-df_compute_accessed_bytes_subreg (struct df_ref *ref, unsigned int *start_byte,
+df_compute_accessed_bytes_subreg (df_ref ref, unsigned int *start_byte,
unsigned int *last_byte)
{
return true;
 /* Defs and uses are different in the amount of the reg that they touch. */
- if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (ref))
{
/* This is an lvalue. */
This means that this use can be ignored. */
bool
-df_compute_accessed_bytes (struct df_ref *ref, enum df_mm mm,
+df_compute_accessed_bytes (df_ref ref, enum df_mm mm,
unsigned int *start_byte,
unsigned int *last_byte)
{
if (!dbg_cnt (df_byte_scan))
return true;
- if (DF_REF_TYPE (ref) != DF_REF_REG_DEF
+ if (!DF_REF_REG_DEF_P (ref)
&& DF_REF_FLAGS_IS_SET (ref, DF_REF_READ_WRITE))
{
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_PRE_POST_MODIFY))
/* Return first def of REGNO within BB. */
-struct df_ref *
+df_ref
df_bb_regno_first_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid;
FOR_BB_INSNS (bb, insn)
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_REGNO (def) == regno)
return def;
}
/* Return last def of REGNO within BB. */
-struct df_ref *
+df_ref
df_bb_regno_last_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid;
FOR_BB_INSNS_REVERSE (bb, insn)
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_REGNO (def) == regno)
return def;
}
/* Finds the reference corresponding to the definition of REG in INSN.
DF is the dataflow object. */
-struct df_ref *
+df_ref
df_find_def (rtx insn, rtx reg)
{
unsigned int uid;
- struct df_ref **def_rec;
+ df_ref *def_rec;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (rtx_equal_p (DF_REF_REAL_REG (def), reg))
return def;
}
/* Finds the reference corresponding to the use of REG in INSN.
DF is the dataflow object. */
-struct df_ref *
+df_ref
df_find_use (rtx insn, rtx reg)
{
unsigned int uid;
- struct df_ref **use_rec;
+ df_ref *use_rec;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);
uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (rtx_equal_p (DF_REF_REAL_REG (use), reg))
return use;
}
if (df->changeable_flags & DF_EQ_NOTES)
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (rtx_equal_p (DF_REF_REAL_REG (use), reg))
return use;
}
void
-df_refs_chain_dump (struct df_ref **ref_rec, bool follow_chain, FILE *file)
+df_refs_chain_dump (df_ref *ref_rec, bool follow_chain, FILE *file)
{
fprintf (file, "{ ");
while (*ref_rec)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
fprintf (file, "%c%d(%d)",
DF_REF_REG_DEF_P (ref) ? 'd' : (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
DF_REF_ID (ref),
/* Dump either a ref-def or reg-use chain. */
void
-df_regs_chain_dump (struct df_ref *ref, FILE *file)
+df_regs_chain_dump (df_ref ref, FILE *file)
{
fprintf (file, "{ ");
while (ref)
DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
DF_REF_ID (ref),
DF_REF_REGNO (ref));
- ref = ref->next_reg;
+ ref = DF_REF_NEXT_REG (ref);
}
fprintf (file, "}");
}
while (*mws)
{
fprintf (file, "mw %c r[%d..%d]\n",
- ((*mws)->type == DF_REF_REG_DEF) ? 'd' : 'u',
+ (DF_MWS_REG_DEF_P (*mws)) ? 'd' : 'u',
(*mws)->start_regno, (*mws)->end_regno);
mws++;
}
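DF_MWS_TYPE, DF_MWS_REG_DEF_P and DF_MWS_REG_USE_P are among the macros the
ChangeLog adds to df.h. Their definitions are not in this excerpt, but since
they only hide the type field of struct df_mw_hardreg (compare the old
(*mws)->type test above), they are presumably close to:

    /* Assumed definitions; the df.h hunk is not shown here.  */
    #define DF_MWS_TYPE(MREF)       ((MREF)->type)
    #define DF_MWS_REG_DEF_P(MREF)  (DF_MWS_TYPE (MREF) == DF_REF_REG_DEF)
    #define DF_MWS_REG_USE_P(MREF)  (DF_MWS_TYPE (MREF) == DF_REF_REG_USE)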
void
-df_ref_debug (struct df_ref *ref, FILE *file)
+df_ref_debug (df_ref ref, FILE *file)
{
fprintf (file, "%c%d ",
DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
fprintf (file, "reg %d bb %d insn %d flag 0x%x type 0x%x ",
DF_REF_REGNO (ref),
DF_REF_BBNO (ref),
- DF_REF_INSN_INFO (ref) ? INSN_UID (DF_REF_INSN (ref)) : -1,
+ DF_REF_IS_ARTIFICIAL (ref) ? -1 : DF_REF_INSN_UID (ref),
DF_REF_FLAGS (ref),
DF_REF_TYPE (ref));
if (DF_REF_LOC (ref))
void
-debug_df_ref (struct df_ref *ref)
+debug_df_ref (df_ref ref)
{
df_ref_debug (ref, stderr);
}
DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
DF_REF_ID (link->ref),
DF_REF_BBNO (link->ref),
- DF_REF_INSN_INFO (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
+ DF_REF_IS_ARTIFICIAL (link->ref) ? -1 : DF_REF_INSN_UID (link->ref));
}
fprintf (file, "}");
}
static void
df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
- struct df_ref **def_rec,
+ df_ref *def_rec,
enum df_ref_flags top_flag)
{
while (*def_rec)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
{
unsigned int regno = DF_REF_REGNO (def);
basic_block bb = BASIC_BLOCK (bb_index);
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the hardware registers that are always live. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
}
goto. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
basic_block bb = BASIC_BLOCK (bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
+ df_ref *def_rec;
int luid = 0;
FOR_BB_INSNS (bb, insn)
luid++;
for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if (DF_REF_FLAGS_IS_SET (def,
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
}
}
/* Create a du or ud chain from SRC to DST and link it into SRC. */
struct df_link *
-df_chain_create (struct df_ref *src, struct df_ref *dst)
+df_chain_create (df_ref src, df_ref dst)
{
struct df_link *head = DF_REF_CHAIN (src);
struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
/* Delete any du or ud chains that start at REF and point to
TARGET. */
static void
-df_chain_unlink_1 (struct df_ref *ref, struct df_ref *target)
+df_chain_unlink_1 (df_ref ref, df_ref target)
{
struct df_link *chain = DF_REF_CHAIN (ref);
struct df_link *prev = NULL;
/* Delete a du or ud chain that leave or point to REF. */
void
-df_chain_unlink (struct df_ref *ref)
+df_chain_unlink (df_ref ref)
{
struct df_link *chain = DF_REF_CHAIN (ref);
while (chain)
TO_REF. */
void
-df_chain_copy (struct df_ref *to_ref,
+df_chain_copy (df_ref to_ref,
struct df_link *from_ref)
{
while (from_ref)
EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
{
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
basic_block bb = BASIC_BLOCK (bb_index);
if (df_chain_problem_p (DF_DU_CHAIN))
static void
df_chain_create_bb_process_use (bitmap local_rd,
- struct df_ref **use_rec,
+ df_ref *use_rec,
enum df_ref_flags top_flag)
{
bitmap_iterator bi;
while (*use_rec)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (uregno >= FIRST_PSEUDO_REGISTER))
EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
{
- struct df_ref *def;
+ df_ref def;
if (def_index > last_index)
break;
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
bitmap cpy = BITMAP_ALLOC (NULL);
- struct df_ref **def_rec;
+ df_ref *def_rec;
bitmap_copy (cpy, bb_info->in);
bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the regular instructions next. */
FOR_BB_INSNS (bb, insn)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
if (!INSN_P (insn))
pass only changes the bits in cpy. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
if (df_chain_problem_p (DF_DU_CHAIN))
{
rtx insn;
- struct df_ref **def_rec = df_get_artificial_defs (bb->index);
+ df_ref *def_rec = df_get_artificial_defs (bb->index);
if (*def_rec)
{
fprintf (file, ";; DU chains for artificial defs\n");
while (*def_rec)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
while (*def_rec)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
- if (def->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
if (df_chain_problem_p (DF_UD_CHAIN))
{
rtx insn;
- struct df_ref **use_rec = df_get_artificial_uses (bb->index);
+ df_ref *use_rec = df_get_artificial_uses (bb->index);
if (*use_rec)
{
fprintf (file, ";; UD chains for artificial uses\n");
while (*use_rec)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
if (INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- struct df_ref **eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
+ df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
use_rec = DF_INSN_INFO_USES (insn_info);
if (*use_rec || *eq_use_rec)
{
while (*use_rec)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
- if (use->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
while (*eq_use_rec)
{
- struct df_ref *use = *eq_use_rec;
+ df_ref use = *eq_use_rec;
fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
extracts, subregs or strict_low_parts. */
static void
-df_byte_lr_check_regs (struct df_ref **ref_rec)
+df_byte_lr_check_regs (df_ref *ref_rec)
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
for (; *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT
| DF_REF_ZERO_EXTRACT
| DF_REF_STRICT_LOW_PART)
basic_block bb = BASIC_BLOCK (bb_index);
struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the hardware registers that are always live. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
unsigned int start = problem_data->regno_start[uregno];
unsigned int len = problem_data->regno_len[uregno];
goto. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
{
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **use_rec;
+ df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
unsigned int start = problem_data->regno_start[uregno];
unsigned int len = problem_data->regno_len[uregno];
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **def_rec;
+ df_ref *def_rec;
#ifdef EH_USES
- struct df_ref **use_rec;
+ df_ref *use_rec;
#endif
int bb_index = bb->index;
#ifdef EH_USES
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
{
unsigned int uregno = DF_REF_REGNO (use);
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
int bb_index = bb->index;
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int uregno = DF_REF_REGNO (use);
LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
static rtx
-df_create_unused_note (rtx insn, rtx old, struct df_ref *def,
+df_create_unused_note (rtx insn, rtx old, df_ref def,
bitmap live, bitmap artificial_uses)
{
unsigned int dregno = DF_REF_REGNO (def);
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
bitmap_copy (live, df_get_live_out (bb));
bitmap_clear (artificial_uses);
to begin processing. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int regno = DF_REF_REGNO (use);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if ((mws->type == DF_REF_REG_DEF)
+ if ((DF_MWS_REG_DEF_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if (mws->type == DF_REF_REG_DEF)
+ if (DF_MWS_REG_DEF_P (mws))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
mws, live, do_not_gen,
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
old_unused_notes
= df_create_unused_note (insn, old_unused_notes,
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if ((mws->type != DF_REF_REG_DEF)
+ if ((DF_MWS_REG_USE_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
old_dead_notes
= df_set_dead_notes_for_mw (insn, old_dead_notes,
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
#ifdef REG_DEAD_DEBUGGING
void
df_simulate_find_defs (rtx insn, bitmap defs)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
void
df_simulate_defs (rtx insn, bitmap live)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
/* If the def is to only part of the reg, it does
void
df_simulate_uses (rtx insn, bitmap live)
{
- struct df_ref **use_rec;
+ df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (live, DF_REF_REGNO (use));
}
void
df_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
{
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
int bb_index = bb->index;
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
bitmap_set_bit (live, DF_REF_REGNO (use));
}
void
df_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
#ifdef EH_USES
- struct df_ref **use_rec;
+ df_ref *use_rec;
#endif
int bb_index = bb->index;
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
#ifdef EH_USES
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
bitmap_set_bit (live, DF_REF_REGNO (use));
}
struct df_collection_rec
{
- struct df_ref ** def_vec;
+ df_ref * def_vec;
unsigned int next_def;
- struct df_ref ** use_vec;
+ df_ref * use_vec;
unsigned int next_use;
- struct df_ref ** eq_use_vec;
+ df_ref * eq_use_vec;
unsigned int next_eq_use;
struct df_mw_hardreg **mw_vec;
unsigned int next_mw;
};
-static struct df_ref * df_null_ref_rec[1];
+static df_ref df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];
-static void df_ref_record (struct df_collection_rec *,
+static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
rtx, rtx *,
basic_block, struct df_insn_info *,
enum df_ref_type, enum df_ref_flags,
static void df_defs_record (struct df_collection_rec *, rtx,
basic_block, struct df_insn_info *,
enum df_ref_flags);
-static void df_uses_record (struct df_collection_rec *,
+static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
rtx *, enum df_ref_type,
basic_block, struct df_insn_info *,
enum df_ref_flags,
int, int, enum machine_mode);
-static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
- basic_block, struct df_insn_info *,
- enum df_ref_type, enum df_ref_flags,
- int, int, enum machine_mode);
+static df_ref df_ref_create_structure (enum df_ref_class,
+ struct df_collection_rec *, rtx, rtx *,
+ basic_block, struct df_insn_info *,
+ enum df_ref_type, enum df_ref_flags,
+ int, int, enum machine_mode);
static void df_insn_refs_collect (struct df_collection_rec*,
basic_block, struct df_insn_info *);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
-static void df_ref_chain_delete_du_chain (struct df_ref **);
-static void df_ref_chain_delete (struct df_ref **);
+static void df_ref_chain_delete_du_chain (df_ref *);
+static void df_ref_chain_delete (df_ref *);
static void df_refs_add_to_chains (struct df_collection_rec *,
basic_block, rtx);
static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
-static void df_install_ref (struct df_ref *, struct df_reg_info *,
+static void df_install_ref (df_ref, struct df_reg_info *,
struct df_ref_info *, bool);
static int df_ref_compare (const void *, const void *);
/* Problem data for the scanning dataflow function. */
struct df_scan_problem_data
{
- alloc_pool ref_pool;
+ alloc_pool ref_base_pool;
+ alloc_pool ref_artificial_pool;
+ alloc_pool ref_regular_pool;
alloc_pool ref_extract_pool;
alloc_pool insn_pool;
alloc_pool reg_pool;
BITMAP_FREE (df->insns_to_notes_rescan);
free_alloc_pool (df_scan->block_pool);
- free_alloc_pool (problem_data->ref_pool);
+ free_alloc_pool (problem_data->ref_base_pool);
+ free_alloc_pool (problem_data->ref_artificial_pool);
+ free_alloc_pool (problem_data->ref_regular_pool);
free_alloc_pool (problem_data->ref_extract_pool);
free_alloc_pool (problem_data->insn_pool);
free_alloc_pool (problem_data->reg_pool);
df_scan->problem_data = problem_data;
df_scan->computed = true;
- problem_data->ref_pool
- = create_alloc_pool ("df_scan_ref pool",
- sizeof (struct df_ref), block_size);
+ problem_data->ref_base_pool
+ = create_alloc_pool ("df_scan ref base",
+ sizeof (struct df_base_ref), block_size);
+ problem_data->ref_artificial_pool
+ = create_alloc_pool ("df_scan ref artificial",
+ sizeof (struct df_artificial_ref), block_size);
+ problem_data->ref_regular_pool
+ = create_alloc_pool ("df_scan ref regular",
+ sizeof (struct df_regular_ref), block_size);
problem_data->ref_extract_pool
- = create_alloc_pool ("df_scan_ref extract pool",
- sizeof (struct df_ref_extract), block_size);
+ = create_alloc_pool ("df_scan ref extract",
+ sizeof (struct df_extract_ref), block_size);
problem_data->insn_pool
- = create_alloc_pool ("df_scan_insn pool",
+ = create_alloc_pool ("df_scan insn",
sizeof (struct df_insn_info), block_size);
problem_data->reg_pool
- = create_alloc_pool ("df_scan_reg pool",
+ = create_alloc_pool ("df_scan reg",
sizeof (struct df_reg_info), block_size);
problem_data->mw_reg_pool
- = create_alloc_pool ("df_scan_mw_reg pool",
+ = create_alloc_pool ("df_scan mw_reg",
sizeof (struct df_mw_hardreg), block_size);
bitmap_obstack_initialize (&problem_data->reg_bitmaps);
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
int i;
+ int dcount = 0;
+ int ucount = 0;
+ int ecount = 0;
+ int icount = 0;
+ int ccount = 0;
+ basic_block bb;
+ rtx insn;
fprintf (file, ";; invalidated by call \t");
df_print_regset (file, df_invalidated_by_call);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (df_regs_ever_live_p (i))
fprintf (file, " %d[%s]", i, reg_names[i]);
+ fprintf (file, "\n;; ref usage \t");
+
+ for (i = 0; i < (int)df->regs_inited; i++)
+ if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
+ {
+ const char * sep = "";
+
+ fprintf (file, "r%d={", i);
+ if (DF_REG_DEF_COUNT (i))
+ {
+ fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
+ sep = ",";
+ dcount += DF_REG_DEF_COUNT (i);
+ }
+ if (DF_REG_USE_COUNT (i))
+ {
+ fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
+ sep = ",";
+ ucount += DF_REG_USE_COUNT (i);
+ }
+ if (DF_REG_EQ_USE_COUNT (i))
+ {
+ fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
+ ecount += DF_REG_EQ_USE_COUNT (i);
+ }
+ fprintf (file, "} ");
+ }
- fprintf (file, "\n");
+ FOR_EACH_BB (bb)
+ FOR_BB_INSNS (bb, insn)
+ if (INSN_P (insn))
+ {
+ if (CALL_P (insn))
+ ccount++;
+ else
+ icount++;
+ }
+
+ fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
+ dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
}
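Each register with any refs now prints as a {defs,uses,eq-uses} triple,
followed by a single summary line. A hypothetical dump fragment, with counts
invented purely for illustration:

    ;; ref usage  r0={2d,4u} r6={1d,7u,1e} r67={3d,3u}
    ;; total ref usage 21{6d,14u,1e} in 9{8 regular + 1 call} insns.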
/* Dump the bb_info for a given basic block. */
{
if (ref_info->refs_size < new_size)
{
- ref_info->refs = XRESIZEVEC (struct df_ref *, ref_info->refs, new_size);
+ ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
memset (ref_info->refs + ref_info->refs_size, 0,
- (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
+ (new_size - ref_info->refs_size) *sizeof (df_ref));
ref_info->refs_size = new_size;
}
}
fields if they were constants. Otherwise they should be -1 if
those flags were set. */
-struct df_ref *
+df_ref
df_ref_create (rtx reg, rtx *loc, rtx insn,
basic_block bb,
enum df_ref_type ref_type,
enum df_ref_flags ref_flags,
int width, int offset, enum machine_mode mode)
{
- struct df_ref *ref;
+ df_ref ref;
struct df_reg_info **reg_info;
struct df_ref_info *ref_info;
- struct df_ref **ref_rec;
- struct df_ref ***ref_rec_ptr;
+ df_ref *ref_rec;
+ df_ref **ref_rec_ptr;
unsigned int count = 0;
bool add_to_table;
+ enum df_ref_class cl;
df_grow_reg_info ();
/* You cannot hack artificial refs. */
gcc_assert (insn);
- ref = df_ref_create_structure (NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
+
+ if (width != -1 || offset != -1)
+ cl = DF_REF_EXTRACT;
+ else if (loc)
+ cl = DF_REF_REGULAR;
+ else
+ cl = DF_REF_BASE;
+ ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
ref_type, ref_flags,
width, offset, mode);
- if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (ref))
{
reg_info = df->def_regs;
ref_info = &df->def_info;
ref_rec = *ref_rec_ptr;
if (count)
{
- ref_rec = XRESIZEVEC (struct df_ref *, ref_rec, count+2);
+ ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
*ref_rec_ptr = ref_rec;
ref_rec[count] = ref;
ref_rec[count+1] = NULL;
- qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
+ qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
}
else
{
- struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
+ df_ref *ref_rec = XNEWVEC (df_ref, 2);
ref_rec[0] = ref;
ref_rec[1] = NULL;
*ref_rec_ptr = ref_rec;
----------------------------------------------------------------------------*/
static void
-df_free_ref (struct df_ref *ref)
+df_free_ref (df_ref ref)
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- pool_free (problem_data->ref_extract_pool, (struct df_ref_extract *)ref);
- else
- pool_free (problem_data->ref_pool, ref);
+ switch (DF_REF_CLASS (ref))
+ {
+ case DF_REF_BASE:
+ pool_free (problem_data->ref_base_pool, ref);
+ break;
+
+ case DF_REF_ARTIFICIAL:
+ pool_free (problem_data->ref_artificial_pool, ref);
+ break;
+
+ case DF_REF_REGULAR:
+ pool_free (problem_data->ref_regular_pool, ref);
+ break;
+
+ case DF_REF_EXTRACT:
+ pool_free (problem_data->ref_extract_pool, ref);
+ break;
+ }
}
Also delete the def-use or use-def chain if it exists. */
static void
-df_reg_chain_unlink (struct df_ref *ref)
+df_reg_chain_unlink (df_ref ref)
{
- struct df_ref *next = DF_REF_NEXT_REG (ref);
- struct df_ref *prev = DF_REF_PREV_REG (ref);
+ df_ref next = DF_REF_NEXT_REG (ref);
+ df_ref prev = DF_REF_PREV_REG (ref);
int id = DF_REF_ID (ref);
struct df_reg_info *reg_info;
- struct df_ref **refs = NULL;
+ df_ref *refs = NULL;
- if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (ref))
{
- reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
+ int regno = DF_REF_REGNO (ref);
+ reg_info = DF_REG_DEF_GET (regno);
refs = df->def_info.refs;
}
else
{
if (df->analyze_subset)
{
- if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
+ if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
refs[id] = NULL;
}
else
/* Remove REF from VEC. */
static void
-df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
+df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
- struct df_ref **vec = *vec_ptr;
+ df_ref *vec = *vec_ptr;
if (vec[1])
{
/* Unlink REF from all def-use/use-def chains, etc. */
void
-df_ref_remove (struct df_ref *ref)
+df_ref_remove (df_ref ref)
{
#if 0
if (dump_file)
if (DF_REF_IS_ARTIFICIAL (ref))
{
struct df_scan_bb_info *bb_info
- = df_scan_get_bb_info (DF_REF_BB (ref)->index);
+ = df_scan_get_bb_info (DF_REF_BBNO (ref));
df_ref_compress_rec (&bb_info->artificial_defs, ref);
}
else
if (DF_REF_IS_ARTIFICIAL (ref))
{
struct df_scan_bb_info *bb_info
- = df_scan_get_bb_info (DF_REF_BB (ref)->index);
+ = df_scan_get_bb_info (DF_REF_BBNO (ref));
df_ref_compress_rec (&bb_info->artificial_uses, ref);
}
else
/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
static void
-df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
+df_ref_chain_delete_du_chain (df_ref *ref_rec)
{
while (*ref_rec)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
/* CHAIN is allocated by DF_CHAIN. So make sure to
pass df_scan instance for the problem. */
if (DF_REF_CHAIN (ref))
/* Delete all refs in the ref chain. */
static void
-df_ref_chain_delete (struct df_ref **ref_rec)
+df_ref_chain_delete (df_ref *ref_rec)
{
- struct df_ref **start = ref_rec;
+ df_ref *start = ref_rec;
while (*ref_rec)
{
df_reg_chain_unlink (*ref_rec);
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- struct df_ref **ref;
+ df_ref *ref;
struct df_mw_hardreg **mw;
if (collection_rec->def_vec)
struct df_insn_info *insn_info = NULL;
basic_block bb = BLOCK_FOR_INSN (insn);
struct df_collection_rec collection_rec;
- collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
- collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
- collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
if ((!df) || (!INSN_P (insn)))
ref_info->begin[regno] = offset;
if (include_defs)
{
- struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
+ df_ref ref = DF_REG_DEF_CHAIN (regno);
while (ref)
{
ref_info->refs[offset] = ref;
}
if (include_uses)
{
- struct df_ref *ref = DF_REG_USE_CHAIN (regno);
+ df_ref ref = DF_REG_USE_CHAIN (regno);
while (ref)
{
ref_info->refs[offset] = ref;
}
if (include_eq_uses)
{
- struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
+ df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
while (ref)
{
ref_info->refs[offset] = ref;
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **ref_rec;
+ df_ref *ref_rec;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **ref_rec;
+ df_ref *ref_rec;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_uses)
for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_defs)
for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_uses)
for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_eq_uses)
for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
static unsigned int
df_add_refs_to_table (unsigned int offset,
struct df_ref_info *ref_info,
- struct df_ref **ref_vec)
+ df_ref *ref_vec)
{
while (*ref_vec)
{
- struct df_ref *ref = *ref_vec;
+ df_ref ref = *ref_vec;
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
{
}
-/* Change the BB of all refs in the ref chain from OLD_BB to NEW_BB.
- Assumes that all refs in the chain have the same BB. */
-
-static void
-df_ref_chain_change_bb (struct df_ref **ref_rec,
- basic_block old_bb,
- basic_block new_bb)
-{
- while (*ref_rec)
- {
- struct df_ref *ref = *ref_rec;
-
- gcc_assert (DF_REF_BB (ref) == old_bb);
- DF_REF_BB (ref) = new_bb;
- ref_rec++;
- }
-}
-
-
/* Change all of the basic block references in INSN to use the insn's
current basic block. This function is called from routines that move
instructions from one block to another. */
if (!INSN_P (insn))
return;
- df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
- df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
- df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
-
df_set_bb_dirty (new_bb);
if (old_bb)
{
/* Helper function for df_ref_change_reg_with_loc. */
static void
-df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df, struct df_reg_info *new_df,
+df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
+ struct df_reg_info *new_df,
int new_regno, rtx loc)
{
- struct df_ref *the_ref = old_df->reg_chain;
+ df_ref the_ref = old_df->reg_chain;
while (the_ref)
{
- if (DF_REF_LOC(the_ref) && (*DF_REF_LOC(the_ref) == loc))
+ if ((!DF_REF_IS_ARTIFICIAL (the_ref))
+ && (DF_REF_LOC (the_ref))
+ && (*DF_REF_LOC (the_ref) == loc))
{
- struct df_ref *next_ref = the_ref->next_reg;
- struct df_ref *prev_ref = the_ref->prev_reg;
- struct df_ref **ref_vec, **ref_vec_t;
+ df_ref next_ref = DF_REF_NEXT_REG (the_ref);
+ df_ref prev_ref = DF_REF_PREV_REG (the_ref);
+ df_ref *ref_vec, *ref_vec_t;
+ struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
unsigned int count = 0;
DF_REF_REGNO (the_ref) = new_regno;
/* Pull the_ref out of the old regno chain. */
if (prev_ref)
- prev_ref->next_reg = next_ref;
+ DF_REF_NEXT_REG (prev_ref) = next_ref;
else
old_df->reg_chain = next_ref;
if (next_ref)
- next_ref->prev_reg = prev_ref;
+ DF_REF_PREV_REG (next_ref) = prev_ref;
old_df->n_refs--;
/* Put the ref into the new regno chain. */
- the_ref->prev_reg = NULL;
- the_ref->next_reg = new_df->reg_chain;
+ DF_REF_PREV_REG (the_ref) = NULL;
+ DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
if (new_df->reg_chain)
- new_df->reg_chain->prev_reg = the_ref;
+ DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
new_df->reg_chain = the_ref;
new_df->n_refs++;
df_set_bb_dirty (DF_REF_BB (the_ref));
- /* Need to resort the record that the ref was in because the
- regno is a sorting key. First, find the right record. */
- if (DF_REF_IS_ARTIFICIAL (the_ref))
- {
- unsigned int bb_index = DF_REF_BB (the_ref)->index;
- if (DF_REF_REG_DEF_P (the_ref))
- ref_vec = df_get_artificial_defs (bb_index);
- else
- ref_vec = df_get_artificial_uses (bb_index);
- }
+ /* Re-sort the vector the ref was in, since the regno is a
+ sorting key. First, find the right vector. */
+ if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
+ ref_vec = insn_info->eq_uses;
else
- {
- struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
- if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
- ref_vec = insn_info->eq_uses;
- else
- ref_vec = insn_info->uses;
- if (dump_file)
- fprintf (dump_file, "changing reg in insn %d\n",
- INSN_UID (DF_REF_INSN (the_ref)));
- }
+ ref_vec = insn_info->uses;
+ if (dump_file)
+ fprintf (dump_file, "changing reg in insn %d\n",
+ DF_REF_INSN_UID (the_ref));
+
ref_vec_t = ref_vec;
-
+
/* Find the length. */
while (*ref_vec_t)
{
count++;
ref_vec_t++;
}
- qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
+ qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
the_ref = next_ref;
}
else
- the_ref = the_ref->next_reg;
+ the_ref = DF_REF_NEXT_REG (the_ref);
}
}
/* Change the regno of all refs that contained LOC from OLD_REGNO to
- NEW_REGNO. Refs that do not match LOC are not changed. This call
- is to support the SET_REGNO macro. */
+ NEW_REGNO. Refs that do not match LOC are not changed which means
+ that artificial refs are not changed since they have no loc. This
+ call is to support the SET_REGNO macro. */
void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
unsigned int num_deleted;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 1000);
num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
{
case REG_EQUIV:
case REG_EQUAL:
- df_uses_record (&collection_rec,
+ df_uses_record (DF_REF_REGULAR, &collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
bb, insn_info, DF_REF_IN_NOTE, -1, -1, 0);
default:
It ignores DF_REF_MARKER. */
static bool
-df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
+df_ref_equal_p (df_ref ref1, df_ref ref2)
{
if (!ref2)
return false;
+
+ if (ref1 == ref2)
+ return true;
+
+ if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
+ || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
+ || DF_REF_REG (ref1) != DF_REF_REG (ref2)
+ || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
+ || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
+ != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
+ || DF_REF_BB (ref1) != DF_REF_BB (ref2)
+ || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
+ return false;
+
+ switch (DF_REF_CLASS (ref1))
+ {
+ case DF_REF_ARTIFICIAL:
+ case DF_REF_BASE:
+ return true;
- /* The two flag tests here are only to make sure we do not look at
- the offset and width if they are not there. The flags are
- compared in the next set of tests. */
- if ((DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- && (DF_REF_FLAGS_IS_SET (ref2, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- && ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
+ case DF_REF_EXTRACT:
+ if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
|| (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
- || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))))
- return false;
+ || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
+ return false;
+ /* fallthru. */
+
+ case DF_REF_REGULAR:
+ return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
- return (ref1 == ref2) ||
- (DF_REF_REG (ref1) == DF_REF_REG (ref2)
- && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
- && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
- && DF_REF_INSN_INFO (ref1) == DF_REF_INSN_INFO (ref2)
- && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
- && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
- == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
- && DF_REF_BB (ref1) == DF_REF_BB (ref2));
+ default:
+ gcc_unreachable ();
+ }
+ return false;
}
static int
df_ref_compare (const void *r1, const void *r2)
{
- const struct df_ref *const ref1 = *(const struct df_ref *const*)r1;
- const struct df_ref *const ref2 = *(const struct df_ref *const*)r2;
+ const df_ref ref1 = *(const df_ref *)r1;
+ const df_ref ref2 = *(const df_ref *)r2;
if (ref1 == ref2)
return 0;
+ if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
+ return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
+
if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
- if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
- || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
+ if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
+ return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
+
+ /* Cannot look at the LOC field on artificial refs. */
+ if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
+ && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
return 1;
}
- /* The flags are the same at this point so it is safe to only look
+ /* The classes are the same at this point so it is safe to only look
at ref1. */
- if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
+ if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
{
- if (DF_REF_EXTRACT_OFFSET_CONST (ref1) != DF_REF_EXTRACT_OFFSET_CONST (ref2))
- return DF_REF_EXTRACT_OFFSET_CONST (ref1) - DF_REF_EXTRACT_OFFSET_CONST (ref2);
- if (DF_REF_EXTRACT_WIDTH_CONST (ref1) != DF_REF_EXTRACT_WIDTH_CONST (ref2))
- return DF_REF_EXTRACT_WIDTH_CONST (ref1) - DF_REF_EXTRACT_WIDTH_CONST (ref2);
- if (DF_REF_EXTRACT_MODE_CONST (ref1) != DF_REF_EXTRACT_MODE_CONST (ref2))
- return DF_REF_EXTRACT_MODE_CONST (ref1) - DF_REF_EXTRACT_MODE_CONST (ref2);
+ if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
+ return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
+ if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
+ return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
+ if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
+ return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
}
return 0;
}
static void
-df_swap_refs (struct df_ref **ref_vec, int i, int j)
+df_swap_refs (df_ref *ref_vec, int i, int j)
{
- struct df_ref *tmp = ref_vec[i];
+ df_ref tmp = ref_vec[i];
ref_vec[i] = ref_vec[j];
ref_vec[j] = tmp;
}
/* Sort and compress a set of refs. */
static unsigned int
-df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
+df_sort_and_compress_refs (df_ref *ref_vec, unsigned int count)
{
unsigned int i;
unsigned int dist = 0;
of DF_REF_COMPARE. */
if (i == count - 1)
return count;
- qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
+ qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
}
for (i=0; i<count-dist; i++)
/* Add the new df_ref to appropriate reg_info/ref_info chains. */
static void
-df_install_ref (struct df_ref *this_ref,
+df_install_ref (df_ref this_ref,
struct df_reg_info *reg_info,
struct df_ref_info *ref_info,
bool add_to_table)
{
unsigned int regno = DF_REF_REGNO (this_ref);
/* Add the ref to the reg_{def,use,eq_use} chain. */
- struct df_ref *head = reg_info->reg_chain;
+ df_ref head = reg_info->reg_chain;
reg_info->reg_chain = this_ref;
reg_info->n_refs++;
eq_uses) and installs the entire group into the insn. It also adds
each of these refs into the appropriate chains. */
-static struct df_ref **
+static df_ref *
df_install_refs (basic_block bb,
- struct df_ref **old_vec, unsigned int count,
+ df_ref *old_vec, unsigned int count,
struct df_reg_info **reg_info,
struct df_ref_info *ref_info,
bool is_notes)
if (count)
{
unsigned int i;
- struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
+ df_ref *new_vec = XNEWVEC (df_ref, count + 1);
bool add_to_table;
switch (ref_info->ref_order)
for (i = 0; i < count; i++)
{
- struct df_ref *this_ref = old_vec[i];
+ df_ref this_ref = old_vec[i];
new_vec[i] = this_ref;
df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
ref_info, add_to_table);
if they were constants. Otherwise they should be -1 if those flags
were set. */
-static struct df_ref *
-df_ref_create_structure (struct df_collection_rec *collection_rec,
+static df_ref
+df_ref_create_structure (enum df_ref_class cl,
+ struct df_collection_rec *collection_rec,
rtx reg, rtx *loc,
basic_block bb, struct df_insn_info *info,
enum df_ref_type ref_type,
enum df_ref_flags ref_flags,
int width, int offset, enum machine_mode mode)
{
- struct df_ref *this_ref;
+ df_ref this_ref = NULL;
int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- if (ref_flags & (DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
+ switch (cl)
{
- this_ref = (struct df_ref *) pool_alloc (problem_data->ref_extract_pool);
+ case DF_REF_BASE:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
+ gcc_assert (loc == NULL);
+ break;
+
+ case DF_REF_ARTIFICIAL:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
+ this_ref->artificial_ref.bb = bb;
+ gcc_assert (loc == NULL);
+ break;
+
+ case DF_REF_REGULAR:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
+ this_ref->regular_ref.loc = loc;
+ gcc_assert (loc);
+ break;
+
+ case DF_REF_EXTRACT:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
DF_REF_EXTRACT_WIDTH (this_ref) = width;
DF_REF_EXTRACT_OFFSET (this_ref) = offset;
DF_REF_EXTRACT_MODE (this_ref) = mode;
+ this_ref->regular_ref.loc = loc;
+ gcc_assert (loc);
+ break;
}
- else
- this_ref = (struct df_ref *) pool_alloc (problem_data->ref_pool);
+
+ DF_REF_CLASS (this_ref) = cl;
DF_REF_ID (this_ref) = -1;
DF_REF_REG (this_ref) = reg;
DF_REF_REGNO (this_ref) = regno;
- DF_REF_LOC (this_ref) = loc;
+ DF_REF_TYPE (this_ref) = ref_type;
DF_REF_INSN_INFO (this_ref) = info;
DF_REF_CHAIN (this_ref) = NULL;
- DF_REF_TYPE (this_ref) = ref_type;
DF_REF_FLAGS (this_ref) = ref_flags;
- DF_REF_BB (this_ref) = bb;
DF_REF_NEXT_REG (this_ref) = NULL;
DF_REF_PREV_REG (this_ref) = NULL;
DF_REF_ORDER (this_ref) = df->ref_order++;
if ((regno < FIRST_PSEUDO_REGISTER)
&& (!DF_REF_IS_ARTIFICIAL (this_ref)))
{
- if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (this_ref))
{
if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
if (collection_rec)
{
- if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (this_ref))
collection_rec->def_vec[collection_rec->next_def++] = this_ref;
else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
static void
-df_ref_record (struct df_collection_rec *collection_rec,
+df_ref_record (enum df_ref_class cl,
+ struct df_collection_rec *collection_rec,
rtx reg, rtx *loc,
basic_block bb, struct df_insn_info *insn_info,
enum df_ref_type ref_type,
= (struct df_scan_problem_data *) df_scan->problem_data;
unsigned int i;
unsigned int endregno;
- struct df_ref *ref;
+ df_ref ref;
if (GET_CODE (reg) == SUBREG)
{
for (i = regno; i < endregno; i++)
{
- ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
+ ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
bb, insn_info, ref_type, ref_flags,
width, offset, mode);
}
else
{
- struct df_ref *ref;
- ref = df_ref_create_structure (collection_rec, reg, loc, bb, insn_info,
- ref_type, ref_flags, width, offset, mode);
+ df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
+ ref_type, ref_flags, width, offset, mode);
}
}
int offset = -1;
int width = -1;
enum machine_mode mode = 0;
+ enum df_ref_class cl = DF_REF_REGULAR;
/* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
construct. */
width = INTVAL (XEXP (dst, 1));
offset = INTVAL (XEXP (dst, 2));
mode = GET_MODE (dst);
+ cl = DF_REF_EXTRACT;
}
loc = &XEXP (dst, 0);
/* At this point if we do not have a reg or a subreg, just return. */
if (REG_P (dst))
{
- df_ref_record (collection_rec,
+ df_ref_record (cl, collection_rec,
dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
width, offset, mode);
/* We want to keep sp alive everywhere - by making all
writes to sp also use of sp. */
if (REGNO (dst) == STACK_POINTER_REGNUM)
- df_ref_record (collection_rec,
+ df_ref_record (DF_REF_BASE, collection_rec,
dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
width, offset, mode);
}
flags |= DF_REF_SUBREG;
- df_ref_record (collection_rec,
+ df_ref_record (cl, collection_rec,
dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
width, offset, mode);
}
those flags were set. */
static void
-df_uses_record (struct df_collection_rec *collection_rec,
+df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
rtx *loc, enum df_ref_type ref_type,
basic_block bb, struct df_insn_info *insn_info,
enum df_ref_flags flags,
/* If we are clobbering a MEM, mark any registers inside the address
as being used. */
if (MEM_P (XEXP (x, 0)))
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
&XEXP (XEXP (x, 0), 0),
DF_REF_REG_MEM_STORE,
bb, insn_info,
return;
case MEM:
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
&XEXP (x, 0), DF_REF_REG_MEM_LOAD,
bb, insn_info, flags & DF_REF_IN_NOTE,
width, offset, mode);
if (!REG_P (SUBREG_REG (x)))
{
loc = &SUBREG_REG (x);
- df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags,
+ df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
width, offset, mode);
return;
}
/* ... Fall through ... */
case REG:
- df_ref_record (collection_rec,
+ df_ref_record (cl, collection_rec,
x, loc, bb, insn_info,
ref_type, flags,
width, offset, mode);
else
flags |= DF_REF_SIGN_EXTRACT;
- df_uses_record (collection_rec,
+ df_uses_record (DF_REF_EXTRACT, collection_rec,
&XEXP (x, 0), ref_type, bb, insn_info, flags,
width, offset, mode);
return;
{
rtx dst = SET_DEST (x);
gcc_assert (!(flags & DF_REF_IN_NOTE));
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
width, offset, mode);
case SUBREG:
if (df_read_modify_subreg_p (dst))
{
- df_uses_record (collection_rec, &SUBREG_REG (dst),
+ df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
DF_REF_REG_USE, bb, insn_info,
flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
width, offset, mode);
case CC0:
break;
case MEM:
- df_uses_record (collection_rec, &XEXP (dst, 0),
+ df_uses_record (cl, collection_rec, &XEXP (dst, 0),
DF_REF_REG_MEM_STORE, bb, insn_info, flags,
width, offset, mode);
break;
/* A strict_low_part uses the whole REG and not just the
SUBREG. */
dst = XEXP (dst, 0);
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
DF_REF_REG_USE, bb, insn_info,
DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
width = INTVAL (XEXP (dst, 1));
offset = INTVAL (XEXP (dst, 2));
mode = GET_MODE (dst);
+ df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
+ width, offset, mode);
}
else
{
- df_uses_record (collection_rec, &XEXP (dst, 1),
+ df_uses_record (cl, collection_rec, &XEXP (dst, 1),
DF_REF_REG_USE, bb, insn_info, flags,
width, offset, mode);
- df_uses_record (collection_rec, &XEXP (dst, 2),
+ df_uses_record (cl, collection_rec, &XEXP (dst, 2),
DF_REF_REG_USE, bb, insn_info, flags,
width, offset, mode);
+ df_uses_record (cl, collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
+ width, offset, mode);
}
- df_uses_record (collection_rec, &XEXP (dst, 0),
- DF_REF_REG_USE, bb, insn_info,
- DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
- width, offset, mode);
}
break;
int j;
for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
- df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
+ df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
DF_REF_REG_USE, bb, insn_info, flags,
width, offset, mode);
return;
case PRE_MODIFY:
case POST_MODIFY:
/* Catch the def of the register being modified. */
- df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0),
+ df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
bb, insn_info,
DF_REF_REG_DEF,
flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
loc = &XEXP (x, 0);
goto retry;
}
- df_uses_record (collection_rec, &XEXP (x, i), ref_type,
+ df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
bb, insn_info, flags,
width, offset, mode);
}
{
int j;
for (j = 0; j < XVECLEN (x, i); j++)
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
&XVECEXP (x, i, j), ref_type,
bb, insn_info, flags,
width, offset, mode);
unsigned int i;
for (i = 0; i < collection_rec->next_def; i++)
{
- struct df_ref *ref = collection_rec->def_vec[i];
+ df_ref ref = collection_rec->def_vec[i];
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
{
int width = -1;
int offset = -1;
enum machine_mode mode = 0;
- struct df_ref *use;
+ df_ref use;
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
{
mode = DF_REF_EXTRACT_MODE (ref);
}
- use = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
+ use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
DF_REF_LOC (ref), DF_REF_BB (ref),
DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
depending on which def is seen first. */
for (i=0; i<collection_rec->next_def; i++)
{
- struct df_ref *def = collection_rec->def_vec[i];
+ df_ref def = collection_rec->def_vec[i];
bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
}
note = XEXP (note, 1))
{
if (GET_CODE (XEXP (note, 0)) == USE)
- df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
+ df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
DF_REF_REG_USE, bb, insn_info, flags, -1, -1, 0);
else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
{
insn_info, flags);
}
else
- df_uses_record (collection_rec, &XEXP (note, 0),
+ df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
DF_REF_REG_USE, bb, insn_info, flags, -1, -1, 0);
}
}
/* The stack ptr is used (honorarily) by a CALL insn. */
- df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
NULL, bb, insn_info, DF_REF_REG_USE,
DF_REF_CALL_STACK_USAGE | flags,
-1, -1, 0);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
{
- df_ref_record (collection_rec, regno_reg_rtx[i],
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1, 0);
- df_ref_record (collection_rec, regno_reg_rtx[i],
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1, 0);
}
|| !bitmap_bit_p (df->exit_block_uses, ui)
|| refers_to_regno_p (ui, ui+1,
crtl->return_rtx, NULL)))
- df_ref_record (collection_rec, regno_reg_rtx[ui],
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
NULL, bb, insn_info, DF_REF_REG_DEF,
DF_REF_MAY_CLOBBER | flags,
-1, -1, 0);
{
case REG_EQUIV:
case REG_EQUAL:
- df_uses_record (collection_rec,
+ df_uses_record (DF_REF_REGULAR, collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
bb, insn_info, DF_REF_IN_NOTE, -1, -1, 0);
break;
case REG_NON_LOCAL_GOTO:
/* The frame ptr is used by a non-local goto. */
- df_ref_record (collection_rec,
+ df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[FRAME_POINTER_REGNUM],
NULL, bb, insn_info,
DF_REF_REG_USE, 0, -1, -1, 0);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- df_ref_record (collection_rec,
+ df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
NULL, bb, insn_info,
DF_REF_REG_USE, 0, -1, -1, 0);
(is_cond_exec) ? DF_REF_CONDITIONAL : 0);
/* Record the register uses. */
- df_uses_record (collection_rec,
+ df_uses_record (DF_REF_REGULAR, collection_rec,
&PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
-1, -1, 0);
unsigned regno = EH_RETURN_DATA_REGNO (i);
if (regno == INVALID_REGNUM)
break;
- df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
}
}
eh-receiver for all of the edges at once. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (EH_USES (i))
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP, -1, -1, 0);
}
#endif
/* Add the hard_frame_pointer if this block is the target of a
non-local goto. */
if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
- df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
/* Add the artificial uses. */
EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
{
- df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
bb, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
}
}
int luid = 0;
struct df_scan_bb_info *bb_info;
struct df_collection_rec collection_rec;
- collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
- collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
- collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
if (!df)
EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
{
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1, 0);
}
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = XALLOCAVEC (struct df_ref *, FIRST_PSEUDO_REGISTER);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
df_entry_block_defs_collect (&collection_rec, entry_block_defs);
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
&& !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
&& bb_has_eh_pred (EXIT_BLOCK_PTR)
&& fixed_regs[ARG_POINTER_REGNUM])
- df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
#endif
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.use_vec = XALLOCAVEC (struct df_ref *, FIRST_PSEUDO_REGISTER);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
df_exit_block_uses_collect (&collection_rec, exit_block_uses);
are in the correct chain. */
static unsigned int
-df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
+df_reg_chain_mark (df_ref refs, unsigned int regno,
bool is_def, bool is_eq_use)
{
unsigned int count = 0;
- struct df_ref *ref;
+ df_ref ref;
for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
{
gcc_assert (!DF_REF_IS_REG_MARKED (ref));
/* Check to make sure the ref is in the correct chain. */
gcc_assert (DF_REF_REGNO (ref) == regno);
if (is_def)
- gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
+ gcc_assert (DF_REF_REG_DEF_P (ref));
else
- gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
+ gcc_assert (!DF_REF_REG_DEF_P (ref));
if (is_eq_use)
gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
else
gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
- if (ref->next_reg)
- gcc_assert (ref->next_reg->prev_reg == ref);
+ if (DF_REF_NEXT_REG (ref))
+ gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
count++;
DF_REF_REG_MARK (ref);
}
/* Verify that all of the registers in the chain are unmarked. */
static void
-df_reg_chain_verify_unmarked (struct df_ref *refs)
+df_reg_chain_verify_unmarked (df_ref refs)
{
- struct df_ref *ref;
+ df_ref ref;
for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
-df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
+df_refs_verify (df_ref *new_rec, df_ref *old_rec,
bool abort_if_fail)
{
while ((*new_rec) && (*old_rec))
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
- collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
- collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
gcc_assert (bb_info);
- /* Scan the block an insn at a time from beginning to end. */
+  /* Scan the block, one insn at a time, from end to beginning. */
FOR_BB_INSNS_REVERSE (bb, insn)
{
if (!INSN_P (insn))
struct df;
struct df_problem;
struct df_link;
+struct df_insn_info;
+union df_ref_d;
/* Data flow problems. All problems must have a unique id here. */
DF_MM_MUST
};
-/* The first of these is a set of a register. The remaining three are
- all uses of a register (the mem_load and mem_store relate to how
- the register as an addressing operand). */
-enum df_ref_type {DF_REF_REG_DEF, DF_REF_REG_USE, DF_REF_REG_MEM_LOAD,
- DF_REF_REG_MEM_STORE};
+/* Discriminator for the various df_ref types. */
+enum df_ref_class {DF_REF_BASE, DF_REF_ARTIFICIAL, DF_REF_REGULAR, DF_REF_EXTRACT};
-#define DF_REF_TYPE_NAMES {"def", "use", "mem load", "mem store"}
+/* The first of these is a set of a register.  The remaining three
+   are all uses of a register (the mem_load and mem_store relate to
+   how the register is used as an addressing operand). */
+enum df_ref_type {DF_REF_REG_DEF, DF_REF_REG_USE,
+ DF_REF_REG_MEM_LOAD, DF_REF_REG_MEM_STORE};
enum df_ref_flags
{
};
-/* One of these structures is allocated for every insn. */
-struct df_insn_info
-{
- rtx insn; /* The insn this info comes from. */
- struct df_ref **defs; /* Head of insn-def chain. */
- struct df_ref **uses; /* Head of insn-use chain. */
- /* Head of insn-use chain for uses in REG_EQUAL/EQUIV notes. */
- struct df_ref **eq_uses;
- struct df_mw_hardreg **mw_hardregs;
- /* The logical uid of the insn in the basic block. This is valid
- after any call to df_analyze but may rot after insns are added,
- deleted or moved. */
- int luid;
-};
-
-
/* Define a register reference structure. One of these is allocated
- for every register reference (use or def). Note some register
- references (e.g., post_inc, subreg) generate both a def and a use. */
-struct df_ref
+ for every register reference (use or def). Note some register
+ references (e.g., post_inc, subreg) generate both a def and a use. */
+struct df_base_ref
{
- rtx reg; /* The register referenced. */
- basic_block bb; /* Basic block containing the instruction. */
-
- /* Insn info for the insn containing ref. This will be null if this is
- an artificial reference. */
- struct df_insn_info *insn_info;
+  /* These three bitfields are intentionally oversized, in the hope that
+     accesses to 8-bit and 16-bit fields will usually be quicker.  */
+ ENUM_BITFIELD(df_ref_class) cl : 8;
- rtx *loc; /* The location of the reg. */
+ ENUM_BITFIELD(df_ref_type) type : 8;
+ /* Type of ref. */
+ ENUM_BITFIELD(df_ref_flags) flags : 16;
+ /* Various flags. */
+ rtx reg; /* The register referenced. */
struct df_link *chain; /* Head of def-use, use-def. */
+ /* Pointer to the insn info of the containing instruction. FIXME!
+ Currently this is NULL for artificial refs but this will be used
+ when FUDs are added. */
+ struct df_insn_info *insn_info;
+  /* For each regno, there are three chains of refs, one for the uses,
+     the eq_uses and the defs.  These chains go through the refs
+     themselves rather than using an external structure. */
+ union df_ref_d *next_reg; /* Next ref with same regno and type. */
+ union df_ref_d *prev_reg; /* Prev ref with same regno and type. */
+ unsigned int regno; /* The register number referenced. */
/* Location in the ref table. This is only valid after a call to
df_maybe_reorganize_[use,def]_refs which is an expensive operation. */
int id;
/* The index at which the operand was scanned in the insn. This is
used to totally order the refs in an insn. */
unsigned int ref_order;
+};
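
To illustrate the reg chains described above, here is a minimal
sketch (not part of the patch; walk_defs_of_regno is a hypothetical
helper) of walking every def of a regno through the embedded next_reg
links, using the DF_REG_DEF_CHAIN and DF_REF_NEXT_REG macros from
this header:

/* Visit every def of REGNO by following the next_reg links threaded
   through the refs themselves.  Assumes df_scan has run.  */
static void
walk_defs_of_regno (unsigned int regno, void (*visit) (df_ref))
{
  df_ref def;
  for (def = DF_REG_DEF_CHAIN (regno); def; def = DF_REF_NEXT_REG (def))
    visit (def);
}
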
- unsigned int regno; /* The register number referenced. */
- /* These two bitfields are intentionally oversized, in the hope that
- accesses to 16-bit fields will usually be quicker. */
- ENUM_BITFIELD(df_ref_type) type : 16;
- /* Type of ref. */
- ENUM_BITFIELD(df_ref_flags) flags : 16;
- /* Various flags. */
- /* For each regno, there are three chains of refs, one for the uses,
- the eq_uses and the defs. These chains go thru the refs
- themselves rather than using an external structure. */
- struct df_ref *next_reg; /* Next ref with same regno and type. */
- struct df_ref *prev_reg; /* Prev ref with same regno and type. */
+/* The three types of df_refs.  Note that df_extract_ref is an
+   extension of df_regular_ref, not of df_base_ref. */
+struct df_artificial_ref
+{
+ struct df_base_ref base;
+
+  /* Artificial refs do not have an insn, so to get the basic block,
+     it must be stored explicitly here.  */
+ basic_block bb;
+};
+
+
+struct df_regular_ref
+{
+ struct df_base_ref base;
+  /* The loc is the address in the insn of the reg.  It is not
+     defined for the special refs, such as the clobbers and stack
+     pointer uses attached to call insns; those refs just use the
+     base. */
+ rtx *loc;
};
+
/* A df_ref_extract is just a df_ref with a width and offset field at
the end of it. It is used to hold this information if the ref was
wrapped by a SIGN_EXTRACT or a ZERO_EXTRACT and to pass this info
to passes that wish to process partial regs precisely. */
-struct df_ref_extract
+struct df_extract_ref
{
- struct df_ref ref;
+ struct df_regular_ref base;
int width;
int offset;
enum machine_mode mode;
};
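
Since width and offset name the bit range selected by the
SIGN_EXTRACT or ZERO_EXTRACT, a pass can reason about partial-reg
overlap directly; a sketch under that assumption (extract_refs_overlap
is a hypothetical helper, not part of the patch):

/* Return true if extract refs A and B touch overlapping bit ranges
   [offset, offset + width) of their registers.  Both refs must have
   class DF_REF_EXTRACT.  */
static bool
extract_refs_overlap (df_ref a, df_ref b)
{
  int a_lo = DF_REF_EXTRACT_OFFSET (a);
  int a_hi = a_lo + DF_REF_EXTRACT_WIDTH (a);
  int b_lo = DF_REF_EXTRACT_OFFSET (b);
  int b_hi = b_lo + DF_REF_EXTRACT_WIDTH (b);
  return a_lo < b_hi && b_lo < a_hi;
}
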
+/* Union of the different kinds of defs/uses placeholders. */
+union df_ref_d
+{
+ struct df_base_ref base;
+ struct df_regular_ref regular_ref;
+ struct df_artificial_ref artificial_ref;
+ struct df_extract_ref extract_ref;
+};
+typedef union df_ref_d *df_ref;
+
+
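
A minimal sketch of how the discriminated union is meant to be read
(ref_loc_or_null is a hypothetical helper; the real interface is the
DF_REF_LOC macro below): variant-specific fields are valid only for
the matching class, and df_extract_ref shares the loc field because
it extends df_regular_ref.

/* Return the location of REF in its insn, or NULL for classes that
   carry no loc.  Mirrors what the DF_REF_LOC macro expands to.  */
static rtx *
ref_loc_or_null (df_ref ref)
{
  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_REGULAR:
    case DF_REF_EXTRACT:
      return ref->regular_ref.loc;
    case DF_REF_BASE:
    case DF_REF_ARTIFICIAL:
      return NULL;
    default:
      gcc_unreachable ();
    }
}
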
+/* One of these structures is allocated for every insn. */
+struct df_insn_info
+{
+ rtx insn; /* The insn this info comes from. */
+ df_ref *defs; /* Head of insn-def chain. */
+ df_ref *uses; /* Head of insn-use chain. */
+ /* Head of insn-use chain for uses in REG_EQUAL/EQUIV notes. */
+ df_ref *eq_uses;
+ struct df_mw_hardreg **mw_hardregs;
+ /* The logical uid of the insn in the basic block. This is valid
+ after any call to df_analyze but may rot after insns are added,
+ deleted or moved. */
+ int luid;
+};
+
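
The defs, uses and eq_uses members are NULL-terminated vectors of
df_ref, which is why every consumer converted by this patch iterates
them with a *rec end test; a minimal sketch of the idiom
(count_insn_defs is a hypothetical helper):

/* Count the defs recorded for INSN.  DF_INSN_DEFS yields a
   NULL-terminated vector, so *def_rec tests for the end.  */
static unsigned int
count_insn_defs (rtx insn)
{
  df_ref *def_rec;
  unsigned int n = 0;
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
    n++;
  return n;
}
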
/* These links are used for ref-ref chains. Currently only DEF-USE and
USE-DEF chains can be built by DF. */
struct df_link
{
- struct df_ref *ref;
+ df_ref ref;
struct df_link *next;
};
boundary of the df_set_blocks if that has been defined. */
struct df_ref_info
{
- struct df_ref **refs; /* Ref table, indexed by id. */
+ df_ref *refs; /* Ref table, indexed by id. */
unsigned int *begin; /* First ref_index for this pseudo. */
unsigned int *count; /* Count of refs for this pseudo. */
unsigned int refs_size; /* Size of currently allocated refs table. */
struct df_reg_info
{
/* Head of chain for refs of that type and regno. */
- struct df_ref *reg_chain;
+ df_ref reg_chain;
/* Number of refs in the chain. */
unsigned int n_refs;
};
/* Macros to access the elements within the ref structure. */
-#define DF_REF_REAL_REG(REF) (GET_CODE ((REF)->reg) == SUBREG \
- ? SUBREG_REG ((REF)->reg) : ((REF)->reg))
-#define DF_REF_REGNO(REF) ((REF)->regno)
-#define DF_REF_REAL_LOC(REF) (GET_CODE (*((REF)->loc)) == SUBREG \
- ? &SUBREG_REG (*((REF)->loc)) : ((REF)->loc))
-#define DF_REF_REG(REF) ((REF)->reg)
-#define DF_REF_LOC(REF) ((REF)->loc)
-#define DF_REF_BB(REF) ((REF)->bb)
+#define DF_REF_REAL_REG(REF) (GET_CODE ((REF)->base.reg) == SUBREG \
+ ? SUBREG_REG ((REF)->base.reg) : ((REF)->base.reg))
+#define DF_REF_REGNO(REF) ((REF)->base.regno)
+#define DF_REF_REAL_LOC(REF) (GET_CODE (*((REF)->regular_ref.loc)) == SUBREG \
+ ? &SUBREG_REG (*((REF)->regular_ref.loc)) : ((REF)->regular_ref.loc))
+#define DF_REF_REG(REF) ((REF)->base.reg)
+#define DF_REF_LOC(REF) ((DF_REF_CLASS(REF) == DF_REF_REGULAR || DF_REF_CLASS(REF) == DF_REF_EXTRACT) ? \
+ (REF)->regular_ref.loc : NULL)
+#define DF_REF_BB(REF) (DF_REF_IS_ARTIFICIAL(REF) ? \
+ (REF)->artificial_ref.bb : BLOCK_FOR_INSN (DF_REF_INSN(REF)))
#define DF_REF_BBNO(REF) (DF_REF_BB (REF)->index)
-#define DF_REF_INSN_INFO(REF) ((REF)->insn_info)
-#define DF_REF_INSN(REF) ((REF)->insn_info->insn)
+#define DF_REF_INSN_INFO(REF) ((REF)->base.insn_info)
+#define DF_REF_INSN(REF) ((REF)->base.insn_info->insn)
#define DF_REF_INSN_UID(REF) (INSN_UID (DF_REF_INSN(REF)))
-#define DF_REF_TYPE(REF) ((REF)->type)
-#define DF_REF_CHAIN(REF) ((REF)->chain)
-#define DF_REF_ID(REF) ((REF)->id)
-#define DF_REF_FLAGS(REF) ((REF)->flags)
+#define DF_REF_CLASS(REF) ((REF)->base.cl)
+#define DF_REF_TYPE(REF) ((REF)->base.type)
+#define DF_REF_CHAIN(REF) ((REF)->base.chain)
+#define DF_REF_ID(REF) ((REF)->base.id)
+#define DF_REF_FLAGS(REF) ((REF)->base.flags)
#define DF_REF_FLAGS_IS_SET(REF, v) ((DF_REF_FLAGS (REF) & (v)) != 0)
#define DF_REF_FLAGS_SET(REF, v) (DF_REF_FLAGS (REF) |= (v))
#define DF_REF_FLAGS_CLEAR(REF, v) (DF_REF_FLAGS (REF) &= ~(v))
-#define DF_REF_ORDER(REF) ((REF)->ref_order)
-/* If DF_REF_IS_ARTIFICIAL () is true, this is not a real definition/use,
- but an artificial one created to model
- always live registers, eh uses, etc.
- ARTIFICIAL refs has NULL insn. */
-#define DF_REF_IS_ARTIFICIAL(REF) ((REF)->insn_info == NULL)
+#define DF_REF_ORDER(REF) ((REF)->base.ref_order)
+/* If DF_REF_IS_ARTIFICIAL () is true, this is not a real
+ definition/use, but an artificial one created to model always live
+ registers, eh uses, etc. */
+#define DF_REF_IS_ARTIFICIAL(REF) (DF_REF_CLASS(REF) == DF_REF_ARTIFICIAL)
#define DF_REF_REG_MARK(REF) (DF_REF_FLAGS_SET ((REF),DF_REF_REG_MARKER))
#define DF_REF_REG_UNMARK(REF) (DF_REF_FLAGS_CLEAR ((REF),DF_REF_REG_MARKER))
#define DF_REF_IS_REG_MARKED(REF) (DF_REF_FLAGS_IS_SET ((REF),DF_REF_REG_MARKER))
-#define DF_REF_NEXT_REG(REF) ((REF)->next_reg)
-#define DF_REF_PREV_REG(REF) ((REF)->prev_reg)
+#define DF_REF_NEXT_REG(REF) ((REF)->base.next_reg)
+#define DF_REF_PREV_REG(REF) ((REF)->base.prev_reg)
/* The following two macros may only be applied if one of
DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT is true. */
-#define DF_REF_EXTRACT_WIDTH(REF) (((struct df_ref_extract *)(REF))->width)
-#define DF_REF_EXTRACT_WIDTH_CONST(REF) (((const struct df_ref_extract *)(REF))->width)
-#define DF_REF_EXTRACT_OFFSET(REF) (((struct df_ref_extract *)(REF))->offset)
-#define DF_REF_EXTRACT_OFFSET_CONST(REF) (((const struct df_ref_extract *)(REF))->offset)
-#define DF_REF_EXTRACT_MODE(REF) (((struct df_ref_extract *)(REF))->mode)
-#define DF_REF_EXTRACT_MODE_CONST(REF) (((const struct df_ref_extract *)(REF))->mode)
-/* Macros to determine the reference type. */
+#define DF_REF_EXTRACT_WIDTH(REF) ((REF)->extract_ref.width)
+#define DF_REF_EXTRACT_OFFSET(REF) ((REF)->extract_ref.offset)
+#define DF_REF_EXTRACT_MODE(REF) ((REF)->extract_ref.mode)
+/* Macros to determine the reference type. */
#define DF_REF_REG_DEF_P(REF) (DF_REF_TYPE (REF) == DF_REF_REG_DEF)
#define DF_REF_REG_USE_P(REF) ((REF) && !DF_REF_REG_DEF_P (REF))
#define DF_REF_REG_MEM_STORE_P(REF) (DF_REF_TYPE (REF) == DF_REF_REG_MEM_STORE)
#define DF_REF_REG_MEM_P(REF) (DF_REF_REG_MEM_STORE_P (REF) \
|| DF_REF_REG_MEM_LOAD_P (REF))
+#define DF_MWS_REG_DEF_P(MREF) (DF_MWS_TYPE (MREF) == DF_REF_REG_DEF)
+#define DF_MWS_REG_USE_P(MREF) ((MREF) && !DF_MWS_REG_DEF_P (MREF))
+#define DF_MWS_TYPE(MREF) ((MREF)->type)
+
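
A usage note on the conditional accessors above: DF_REF_LOC now
returns NULL for base and artificial refs, and DF_REF_BB dispatches
on the ref class, so callers should guard accordingly.  An
illustrative sketch (describe_ref is a hypothetical helper, not part
of the patch):

/* Print a one-line description of REF.  DF_REF_BBNO works for both
   artificial refs (explicit bb) and insn-bound refs (bb of the insn);
   DF_REF_LOC is non-NULL only for regular and extract refs.  */
static void
describe_ref (df_ref ref, FILE *file)
{
  fprintf (file, "reg %u in bb %d", DF_REF_REGNO (ref), DF_REF_BBNO (ref));
  if (DF_REF_LOC (ref))
    fprintf (file, ", loc %p", (void *) DF_REF_LOC (ref));
  if (DF_REF_IS_ARTIFICIAL (ref))
    fprintf (file, " (artificial)");
  fputc ('\n', file);
}
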
/* Macros to get the refs out of def_info or use_info refs table. If
the focus of the dataflow has been set to some subset of blocks
with df_set_blocks, these macros will only find the uses and defs
Blocks that are the targets of non-local goto's have the hard
frame pointer defined at the top of the block. */
- struct df_ref **artificial_defs;
+ df_ref *artificial_defs;
/* Blocks that are targets of exception edges may have some
artificial uses. These are logically at the top of the block.
Most blocks have artificial uses at the bottom of the block. */
- struct df_ref **artificial_uses;
+ df_ref *artificial_uses;
};
#ifdef DF_DEBUG_CFG
extern void df_check_cfg_clean (void);
#endif
-extern struct df_ref *df_bb_regno_first_def_find (basic_block, unsigned int);
-extern struct df_ref *df_bb_regno_last_def_find (basic_block, unsigned int);
-extern struct df_ref *df_find_def (rtx, rtx);
+extern df_ref df_bb_regno_first_def_find (basic_block, unsigned int);
+extern df_ref df_bb_regno_last_def_find (basic_block, unsigned int);
+extern df_ref df_find_def (rtx, rtx);
extern bool df_reg_defined (rtx, rtx);
-extern struct df_ref *df_find_use (rtx, rtx);
+extern df_ref df_find_use (rtx, rtx);
extern bool df_reg_used (rtx, rtx);
extern void df_worklist_dataflow (struct dataflow *,bitmap, int *, int);
extern void df_print_regset (FILE *file, bitmap r);
extern void df_dump_start (FILE *);
extern void df_dump_top (basic_block, FILE *);
extern void df_dump_bottom (basic_block, FILE *);
-extern void df_refs_chain_dump (struct df_ref **, bool, FILE *);
-extern void df_regs_chain_dump (struct df_ref *, FILE *);
+extern void df_refs_chain_dump (df_ref *, bool, FILE *);
+extern void df_regs_chain_dump (df_ref, FILE *);
extern void df_insn_debug (rtx, bool, FILE *);
extern void df_insn_debug_regno (rtx, FILE *);
extern void df_regno_debug (unsigned int, FILE *);
-extern void df_ref_debug (struct df_ref *, FILE *);
+extern void df_ref_debug (df_ref, FILE *);
extern void debug_df_insn (rtx);
extern void debug_df_regno (unsigned int);
extern void debug_df_reg (rtx);
extern void debug_df_defno (unsigned int);
extern void debug_df_useno (unsigned int);
-extern void debug_df_ref (struct df_ref *);
+extern void debug_df_ref (df_ref);
extern void debug_df_chain (struct df_link *);
/* Functions defined in df-problems.c. */
-extern struct df_link *df_chain_create (struct df_ref *, struct df_ref *);
-extern void df_chain_unlink (struct df_ref *);
-extern void df_chain_copy (struct df_ref *, struct df_link *);
+extern struct df_link *df_chain_create (df_ref, df_ref);
+extern void df_chain_unlink (df_ref);
+extern void df_chain_copy (df_ref, struct df_link *);
extern bitmap df_get_live_in (basic_block);
extern bitmap df_get_live_out (basic_block);
extern void df_grow_bb_info (struct dataflow *);
extern void df_grow_reg_info (void);
extern void df_grow_insn_info (void);
extern void df_scan_blocks (void);
-extern struct df_ref *df_ref_create (rtx, rtx *, rtx,basic_block,
+extern df_ref df_ref_create (rtx, rtx *, rtx,basic_block,
enum df_ref_type, enum df_ref_flags,
int, int, enum machine_mode);
-extern void df_ref_remove (struct df_ref *);
+extern void df_ref_remove (df_ref);
extern struct df_insn_info * df_insn_create_insn_record (rtx);
extern void df_insn_delete (basic_block, unsigned int);
extern void df_bb_refs_record (int, bool);
extern void df_scan_verify (void);
/* Functions defined in df-byte-scan.c. */
-extern bool df_compute_accessed_bytes (struct df_ref *, enum df_mm,
+extern bool df_compute_accessed_bytes (df_ref, enum df_mm,
unsigned int *, unsigned int *);
/* Get the artificial defs for a basic block. */
-static inline struct df_ref **
+static inline df_ref *
df_get_artificial_defs (unsigned int bb_index)
{
return df_scan_get_bb_info (bb_index)->artificial_defs;
/* Get the artificial uses for a basic block. */
-static inline struct df_ref **
+static inline df_ref *
df_get_artificial_uses (unsigned int bb_index)
{
return df_scan_get_bb_info (bb_index)->artificial_uses;
extern struct web_entry *unionfind_root (struct web_entry *);
extern bool unionfind_union (struct web_entry *, struct web_entry *);
-extern void union_defs (struct df_ref *,
+extern void union_defs (df_ref,
struct web_entry *, struct web_entry *,
bool (*fun) (struct web_entry *, struct web_entry *));
between FROM to (but not including) TO. */
static bool
-local_ref_killed_between_p (struct df_ref * ref, rtx from, rtx to)
+local_ref_killed_between_p (df_ref ref, rtx from, rtx to)
{
rtx insn;
for (insn = from; insn != to; insn = NEXT_INSN (insn))
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
if (!INSN_P (insn))
continue;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_REGNO (ref) == DF_REF_REGNO (def))
return true;
}
we check if the definition is killed after DEF_INSN or before
TARGET_INSN insn, in their respective basic blocks. */
static bool
-use_killed_between (struct df_ref *use, rtx def_insn, rtx target_insn)
+use_killed_between (df_ref use, rtx def_insn, rtx target_insn)
{
basic_block def_bb = BLOCK_FOR_INSN (def_insn);
basic_block target_bb = BLOCK_FOR_INSN (target_insn);
int regno;
- struct df_ref * def;
+ df_ref def;
/* In some obscure situations we can have a def reaching a use
that is _before_ the def. In other words the def does not
regno = DF_REF_REGNO (use);
def = DF_REG_DEF_CHAIN (regno);
if (def
- && def->next_reg == NULL
+ && DF_REF_NEXT_REG (def) == NULL
&& regno >= FIRST_PSEUDO_REGISTER)
return false;
if (single_pred_p (target_bb)
&& single_pred (target_bb) == def_bb)
{
- struct df_ref *x;
+ df_ref x;
/* See if USE is killed between DEF_INSN and the last insn in the
basic block containing DEF_INSN. */
static bool
all_uses_available_at (rtx def_insn, rtx target_insn)
{
- struct df_ref **use_rec;
+ df_ref *use_rec;
struct df_insn_info *insn_info = DF_INSN_INFO_GET (def_insn);
rtx def_set = single_set (def_insn);
invalid. */
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
}
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
- if (rtx_equal_p (use->reg, def_reg))
+ df_ref use = *use_rec;
+ if (rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
}
}
killed between DEF_INSN and TARGET_INSN. */
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (use_killed_between (use, def_insn, target_insn))
return false;
}
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (use_killed_between (use, def_insn, target_insn))
return false;
}
in the data flow object of the pass. Mark any new uses as having the
given TYPE. */
static void
-update_df (rtx insn, rtx *loc, struct df_ref **use_rec, enum df_ref_type type,
+update_df (rtx insn, rtx *loc, df_ref *use_rec, enum df_ref_type type,
int new_flags)
{
bool changed = false;
/* Add a use for the registers that were propagated. */
while (*use_rec)
{
- struct df_ref *use = *use_rec;
- struct df_ref *orig_use = use, *new_use;
+ df_ref use = *use_rec;
+ df_ref orig_use = use, new_use;
int width = -1;
int offset = -1;
enum machine_mode mode = 0;
performed. */
static bool
-try_fwprop_subst (struct df_ref *use, rtx *loc, rtx new_rtx, rtx def_insn, bool set_reg_equal)
+try_fwprop_subst (df_ref use, rtx *loc, rtx new_rtx, rtx def_insn, bool set_reg_equal)
{
rtx insn = DF_REF_INSN (use);
enum df_ref_type type = DF_REF_TYPE (use);
/* If USE is a paradoxical subreg, see if it can be replaced by a pseudo. */
static bool
-forward_propagate_subreg (struct df_ref *use, rtx def_insn, rtx def_set)
+forward_propagate_subreg (df_ref use, rtx def_insn, rtx def_set)
{
rtx use_reg = DF_REF_REG (use);
rtx use_insn, src;
result. */
static bool
-forward_propagate_and_simplify (struct df_ref *use, rtx def_insn, rtx def_set)
+forward_propagate_and_simplify (df_ref use, rtx def_insn, rtx def_set)
{
rtx use_insn = DF_REF_INSN (use);
rtx use_set = single_set (use_insn);
definition, try to forward propagate it into that insn. */
static void
-forward_propagate_into (struct df_ref *use)
+forward_propagate_into (df_ref use)
{
struct df_link *defs;
- struct df_ref *def;
+ df_ref def;
rtx def_insn, def_set, use_insn;
rtx parent;
for (i = 0; i < DF_USES_TABLE_SIZE (); i++)
{
- struct df_ref *use = DF_USES_GET (i);
+ df_ref use = DF_USES_GET (i);
if (use)
if (DF_REF_TYPE (use) == DF_REF_REG_USE
|| DF_REF_BB (use)->loop_father == NULL
for (i = 0; i < DF_USES_TABLE_SIZE (); i++)
{
- struct df_ref *use = DF_USES_GET (i);
+ df_ref use = DF_USES_GET (i);
if (use)
if (DF_REF_TYPE (use) != DF_REF_REG_USE
&& DF_REF_BB (use)->loop_father != NULL
rtx insn;
FOR_BB_INSNS_REVERSE (bb, insn)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
if (insn_contains_asm (insn))
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (dregno < FIRST_PSEUDO_REGISTER)
{
if (!NOTE_P (insn) && !BARRIER_P (insn))
{
unsigned int uid = INSN_UID (insn);
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
c = new_insn_chain ();
c->next = next;
if (INSN_P (insn))
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
/* Ignore may clobbers because these are generated
if (INSN_P (insn))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
rtx reg = DF_REF_REG (use);
if (INSN_P (insn))
{
unsigned int uid = INSN_UID (insn);
- struct df_ref **def_rec;
+ df_ref *def_rec;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
bitmap_set_bit (merge_set, DF_REF_REGNO (def));
}
}
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
- struct df_ref **use_rec;
+ df_ref *use_rec;
if (!INSN_P (insn))
continue;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
/* Only do this for the pseudos. */
/* Mark the register referenced by use or def REF as live. */
static void
-mark_ref_live (struct df_ref *ref)
+mark_ref_live (df_ref ref)
{
rtx reg;
/* Mark the register referenced by definition DEF as dead, if the
definition is a total one. */
static void
-mark_ref_dead (struct df_ref *def)
+mark_ref_dead (df_ref def)
{
rtx reg;
{
int alt;
int def;
- struct df_ref **def_rec;
+ df_ref *def_rec;
bool set_p = false;
for (def = 0; def < recog_data.n_operands; def++)
pessimistic, but it probably doesn't matter much in practice. */
FOR_BB_INSNS_REVERSE (bb, insn)
{
- struct df_ref **def_rec, **use_rec;
+ df_ref *def_rec, *use_rec;
bool call_p;
if (! INSN_P (insn))
rtx insn;
FOR_BB_INSNS_REVERSE (bb, insn)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
if (insn_contains_asm (insn))
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (dregno < FIRST_PSEUDO_REGISTER)
{
int insn_count = 0;
int max_uid = get_max_uid ();
int *qty_order;
- struct df_ref ** def_rec;
+ df_ref *def_rec;
/* Count the instructions in the basic block. */
invariant. */
static struct invariant *
-invariant_for_use (struct df_ref *use)
+invariant_for_use (df_ref use)
{
struct df_link *defs;
- struct df_ref *def;
+ df_ref def;
basic_block bb = DF_REF_BB (use), def_bb;
- if (use->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
return NULL;
defs = DF_REF_CHAIN (use);
const char *fmt;
hashval_t val = code;
int do_not_record_p;
- struct df_ref *use;
+ df_ref use;
struct invariant *inv;
switch (code)
enum rtx_code code = GET_CODE (e1);
int i, j;
const char *fmt;
- struct df_ref *use1, *use2;
+ df_ref use1, use2;
struct invariant *inv1 = NULL, *inv2 = NULL;
rtx sub1, sub2;
loop invariants, false otherwise. */
static bool
-check_dependency (basic_block bb, struct df_ref *use, bitmap depends_on)
+check_dependency (basic_block bb, df_ref use, bitmap depends_on)
{
- struct df_ref *def;
+ df_ref def;
basic_block def_bb;
struct df_link *defs;
struct def *def_data;
struct invariant *inv;
- if (use->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
return false;
defs = DF_REF_CHAIN (use);
check_dependencies (rtx insn, bitmap depends_on)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- struct df_ref **use_rec;
+ df_ref *use_rec;
basic_block bb = BLOCK_FOR_INSN (insn);
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
static void
find_invariant_insn (rtx insn, bool always_reached, bool always_executed)
{
- struct df_ref *ref;
+ df_ref ref;
struct def *def;
bitmap depends_on;
rtx set, dest;
record_uses (rtx insn)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- struct df_ref **use_rec;
+ df_ref *use_rec;
struct invariant *inv;
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
inv = invariant_for_use (use);
if (inv)
record_use (inv->def, DF_REF_REAL_LOC (use), DF_REF_INSN (use));
}
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
inv = invariant_for_use (use);
if (inv)
record_use (inv->def, DF_REF_REAL_LOC (use), DF_REF_INSN (use));
is set to NULL and true is returned. */
static bool
-latch_dominating_def (rtx reg, struct df_ref **def)
+latch_dominating_def (rtx reg, df_ref *def)
{
- struct df_ref *single_rd = NULL, *adef;
+ df_ref single_rd = NULL, adef;
unsigned regno = REGNO (reg);
struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (current_loop->latch);
- for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = adef->next_reg)
+ for (adef = DF_REG_DEF_CHAIN (regno); adef; adef = DF_REF_NEXT_REG (adef))
{
- if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (adef)->index)
+ if (!bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (adef))
|| !bitmap_bit_p (bb_info->out, DF_REF_ID (adef)))
continue;
/* Gets definition of REG reaching its use in INSN and stores it to DEF. */
static enum iv_grd_result
-iv_get_reaching_def (rtx insn, rtx reg, struct df_ref **def)
+iv_get_reaching_def (rtx insn, rtx reg, df_ref *def)
{
- struct df_ref *use, *adef;
+ df_ref use, adef;
basic_block def_bb, use_bb;
rtx def_insn;
bool dom_p;
adef = DF_REF_CHAIN (use)->ref;
/* We do not handle setting only part of the register. */
- if (adef->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (adef) & DF_REF_READ_WRITE)
return GRD_INVALID;
def_insn = DF_REF_INSN (adef);
at get_biv_step. */
static bool
-get_biv_step_1 (struct df_ref *def, rtx reg,
+get_biv_step_1 (df_ref def, rtx reg,
rtx *inner_step, enum machine_mode *inner_mode,
enum rtx_code *extend, enum machine_mode outer_mode,
rtx *outer_step)
rtx next, nextr, tmp;
enum rtx_code code;
rtx insn = DF_REF_INSN (def);
- struct df_ref *next_def;
+ df_ref next_def;
enum iv_grd_result res;
set = single_set (insn);
LAST_DEF is the definition of REG that dominates loop latch. */
static bool
-get_biv_step (struct df_ref *last_def, rtx reg, rtx *inner_step,
+get_biv_step (df_ref last_def, rtx reg, rtx *inner_step,
enum machine_mode *inner_mode, enum rtx_code *extend,
enum machine_mode *outer_mode, rtx *outer_step)
{
/* Records information that DEF is induction variable IV. */
static void
-record_iv (struct df_ref *def, struct rtx_iv *iv)
+record_iv (df_ref def, struct rtx_iv *iv)
{
struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
rtx inner_step, outer_step;
enum machine_mode inner_mode, outer_mode;
enum rtx_code extend;
- struct df_ref *last_def;
+ df_ref last_def;
if (dump_file)
{
/* Analyzes iv DEF and stores the result to *IV. */
static bool
-iv_analyze_def (struct df_ref *def, struct rtx_iv *iv)
+iv_analyze_def (df_ref def, struct rtx_iv *iv)
{
rtx insn = DF_REF_INSN (def);
rtx reg = DF_REF_REG (def);
static bool
iv_analyze_op (rtx insn, rtx op, struct rtx_iv *iv)
{
- struct df_ref *def = NULL;
+ df_ref def = NULL;
enum iv_grd_result res;
if (dump_file)
bool
iv_analyze_result (rtx insn, rtx def, struct rtx_iv *iv)
{
- struct df_ref *adef;
+ df_ref adef;
adef = df_find_def (insn, def);
if (!adef)
biv_p (rtx insn, rtx reg)
{
struct rtx_iv iv;
- struct df_ref *def, *last_def;
+ df_ref def, last_def;
if (!simple_reg_p (reg))
return false;
alloc_pool adjacency_pool;
adjacency_t **adjacency;
-typedef struct df_ref * df_ref_t;
+typedef df_ref df_ref_t;
DEF_VEC_P(df_ref_t);
DEF_VEC_ALLOC_P(df_ref_t,heap);
static void
mark_reg_store (sparseset allocnos_live,
HARD_REG_SET *hard_regs_live,
- struct df_ref *ref)
+ df_ref ref)
{
rtx reg = DF_REF_REG (ref);
unsigned int regno = DF_REF_REGNO (ref);
sbitmap *live_subregs,
int *live_subregs_used,
HARD_REG_SET *hard_regs_live,
- rtx reg, struct df_ref *def)
+ rtx reg, df_ref def)
{
unsigned int regno = (GET_CODE (reg) == SUBREG)
? REGNO (SUBREG_REG (reg)): REGNO (reg);
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
if (!INSN_P (insn))
continue;
later. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* FIXME: Ignoring may clobbers is technically the wrong
	     thing to do.  However the old version of this
/* Add the interferences for the defs. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
mark_reg_store (allocnos_live, &renumbers_live, def);
}
VEC_truncate (df_ref_t, clobbers, 0);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
{
VEC_truncate (df_ref_t, dying_regs, 0);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
bool added = false;
int renumber = reg_renumber[regno];
fprintf (dump_file, " clobber conflicts\n");
for (k = VEC_length (df_ref_t, clobbers) - 1; k >= 0; k--)
{
- struct df_ref *def = VEC_index (df_ref_t, clobbers, k);
+ df_ref def = VEC_index (df_ref_t, clobbers, k);
int j;
for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
{
- struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
+ df_ref use = VEC_index (df_ref_t, dying_regs, j);
record_one_conflict_between_regnos (GET_MODE (DF_REF_REG (def)),
DF_REF_REGNO (def),
GET_MODE (DF_REF_REG (use)),
for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
{
int used_in_output = 0;
- struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
+ df_ref use = VEC_index (df_ref_t, dying_regs, j);
rtx reg = DF_REF_REG (use);
int uregno = DF_REF_REGNO (use);
enum machine_mode umode = GET_MODE (DF_REF_REG (use));
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
int luid = 0;
bitmap_iterator bi;
unsigned int regno;
to begin processing. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
regno = DF_REF_REGNO (use);
for (mws_rec = DF_INSN_UID_MWS (uid); *mws_rec; mws_rec++)
{
struct df_mw_hardreg *mws = *mws_rec;
- if (mws->type == DF_REF_REG_DEF)
+ if (DF_MWS_REG_DEF_P (mws))
{
bool all_dead = true;
unsigned int r;
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((!CALL_P (insn))
|| (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
{
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if (uregno >= FIRST_PSEUDO_REGISTER)
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
bitmap_copy (live, df_get_live_out (bb));
to begin processing. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
bitmap_set_bit (live, DF_REF_REGNO (use));
}
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((!CALL_P (insn))
|| (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
{
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
bitmap_set_bit (live, DF_REF_REGNO (use));
}
}
happened and the optimization should be aborted. */
static int
-see_handle_relevant_defs (struct df_ref *ref, rtx insn)
+see_handle_relevant_defs (df_ref ref, rtx insn)
{
struct web_entry *root_entry = NULL;
rtx se_insn = NULL;
happened and the optimization should be aborted. */
static int
-see_handle_relevant_uses (struct df_ref *ref, rtx insn)
+see_handle_relevant_uses (df_ref ref, rtx insn)
{
struct web_entry *root_entry = NULL;
rtx se_insn = NULL;
if (INSN_P (insn))
{
- struct df_ref **use_rec;
- struct df_ref **def_rec;
+ df_ref *use_rec;
+ df_ref *def_rec;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
int result = see_handle_relevant_uses (use, insn);
if (result == -1)
return -1;
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
int result = see_handle_relevant_uses (use, insn);
if (result == -1)
return -1;
}
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
int result = see_handle_relevant_defs (def, insn);
if (result == -1)
return -1;
/* Initialized the use_entry field for REF in INSN at INDEX with ET. */
static void
-see_update_uses_relevancy (rtx insn, struct df_ref *ref,
+see_update_uses_relevancy (rtx insn, df_ref ref,
enum entry_type et, unsigned int index)
{
struct see_entry_extra_info *curr_entry_extra_info;
/* Initialized the def_entry field for REF in INSN at INDEX with ET. */
static void
-see_update_defs_relevancy (rtx insn, struct df_ref *ref,
+see_update_defs_relevancy (rtx insn, df_ref ref,
enum entry_type et,
enum machine_mode source_mode,
enum machine_mode source_mode_unsigned,
FOR_ALL_BB (bb)
{
- struct df_ref **use_rec;
- struct df_ref **def_rec;
+ df_ref *use_rec;
+ df_ref *def_rec;
rtx insn;
FOR_BB_INSNS (bb, insn)
{
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
see_update_uses_relevancy (insn, use, et, u);
u++;
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
see_update_uses_relevancy (insn, use, et, u);
u++;
}
et = see_analyze_one_def (insn, &source_mode, &source_mode_unsigned);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
see_update_defs_relevancy (insn, def, et, source_mode,
source_mode_unsigned, d);
d++;
for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
see_update_uses_relevancy (NULL, use, NOT_RELEVANT, u);
u++;
}
for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
see_update_defs_relevancy (NULL, def, NOT_RELEVANT,
MAX_MACHINE_MODE, MAX_MACHINE_MODE, d);
d++;
FOR_ALL_BB (bb)
{
rtx insn;
- struct df_ref **use_rec;
+ df_ref *use_rec;
FOR_BB_INSNS (bb, insn)
{
{
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
union_defs (use, def_entry, use_entry, see_update_leader_extra_info);
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
union_defs (use, def_entry, use_entry, see_update_leader_extra_info);
}
}
for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
union_defs (use, def_entry, use_entry, see_update_leader_extra_info);
}
}
{
bool must_be_use = false;
unsigned uid = INSN_UID (insn);
- struct df_ref **rec;
+ df_ref *rec;
rtx lhs = IDATA_LHS (id);
rtx rhs = IDATA_RHS (id);
for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
{
- struct df_ref *def = *rec;
+ df_ref def = *rec;
if (DF_REF_INSN (def)
&& DF_REF_FLAGS_IS_SET (def, DF_REF_PRE_POST_MODIFY)
setup_id_reg_sets (idata_t id, insn_t insn)
{
unsigned uid = INSN_UID (insn);
- struct df_ref **rec;
+ df_ref *rec;
regset tmp = get_clear_regset_from_pool ();
for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
{
- struct df_ref *def = *rec;
+ df_ref def = *rec;
unsigned int regno = DF_REF_REGNO (def);
/* Post modifies are treated like clobbers by sched-deps.c. */
for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
{
- struct df_ref *use = *rec;
+ df_ref use = *rec;
unsigned int regno = DF_REF_REGNO (use);
/* When these refs are met for the first time, skip them, as
#include "tree-pass.h"
-static rtx entry_register (struct web_entry *, struct df_ref *, char *);
-static void replace_ref (struct df_ref *, rtx);
+static rtx entry_register (struct web_entry *, df_ref, char *);
+static void replace_ref (df_ref, rtx);
/* Find the root of the unionfind tree (the representative of the set). */
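
As a sketch of the find operation that comment describes (a hypothetical reimplementation; the real unionfind_root in web.c is assumed to do path compression over struct web_entry's pred links):

static struct web_entry *
sketch_unionfind_root (struct web_entry *element)
{
  struct web_entry *root = element, *next;

  /* Walk up to the representative of the set.  */
  while (root->pred)
    root = root->pred;

  /* Compress the path: point every visited entry at the root so the
     next lookup is O(1).  */
  while (element->pred)
    {
      next = element->pred;
      element->pred = root;
      element = next;
    }
  return root;
}
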
FUN is the function that does the union. */
void
-union_defs (struct df_ref *use, struct web_entry *def_entry,
+union_defs (df_ref use, struct web_entry *def_entry,
struct web_entry *use_entry,
bool (*fun) (struct web_entry *, struct web_entry *))
{
struct df_insn_info *insn_info = DF_REF_INSN_INFO (use);
struct df_link *link = DF_REF_CHAIN (use);
- struct df_ref **use_link;
- struct df_ref **eq_use_link;
- struct df_ref **def_link;
+ df_ref *use_link;
+ df_ref *eq_use_link;
+ df_ref *def_link;
rtx set;
if (insn_info)
/* A READ_WRITE use requires the corresponding def to be in the same
register. Find it and union. */
- if (use->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
{
- struct df_ref **link;
+ df_ref *link;
if (insn_info)
link = DF_INSN_INFO_DEFS (insn_info);
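
The flags test above is the interesting part of this hunk: with df_ref now a union pointer, "use->flags" can no longer be written directly, so the accessor macro reaches the field through the union's common base. A hypothetical predicate showing the same idiom (assumes df.h):

static bool
ref_read_write_p (df_ref ref)
{
  /* DF_REF_FLAGS hides the union layout; the flag bits themselves
     are unchanged.  */
  return (DF_REF_FLAGS (ref) & DF_REF_READ_WRITE) != 0;
}
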
/* Find the corresponding register for the given entry. */
static rtx
-entry_register (struct web_entry *entry, struct df_ref *ref, char *used)
+entry_register (struct web_entry *entry, df_ref ref, char *used)
{
struct web_entry *root;
rtx reg, newreg;
/* Replace the reference by REG. */
static void
-replace_ref (struct df_ref *ref, rtx reg)
+replace_ref (df_ref ref, rtx reg)
{
rtx oldreg = DF_REF_REAL_REG (ref);
rtx *loc = DF_REF_REAL_LOC (ref);
- unsigned int uid = INSN_UID (DF_REF_INSN (ref));
+ unsigned int uid = DF_REF_INSN_UID (ref);
if (oldreg == reg)
return;
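
The uid change above swaps a two-step lookup for a single accessor. Assuming DF_REF_INSN_UID resolves through the ref's insn info, the two forms compute the same value; the helper below only restates the old spelling for comparison:

static unsigned int
ref_uid_old_way (df_ref ref)
{
  /* What the code used to spell out; DF_REF_INSN_UID (ref) is the
     one-step equivalent used by the patch.  */
  return INSN_UID (DF_REF_INSN (ref));
}
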
unsigned int uid = INSN_UID (insn);
if (INSN_P (insn))
{
- struct df_ref **use_rec;
+ df_ref *use_rec;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
DF_REF_ID (use) = uses_num++;
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
DF_REF_ID (use) = uses_num++;
}
unsigned int uid = INSN_UID (insn);
if (INSN_P (insn))
{
- struct df_ref **use_rec;
+ df_ref *use_rec;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
union_defs (use, def_entry, use_entry, unionfind_union);
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
union_defs (use, def_entry, use_entry, unionfind_union);
}
unsigned int uid = INSN_UID (insn);
if (INSN_P (insn))
{
- struct df_ref **use_rec;
- struct df_ref **def_rec;
+ df_ref *use_rec;
+ df_ref *def_rec;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
}
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_REGNO (def) >= FIRST_PSEUDO_REGISTER)
replace_ref (def, entry_register (def_entry + DF_REF_ID (def), def, used));
}
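
Taken together, the last hunks are the renaming half of the web pass: every pseudo-register ref is mapped through its unionfind representative and rewritten in place. A condensed sketch of the per-ref step (rename_one_ref is a hypothetical helper; entry_register and replace_ref are the functions changed above):

static void
rename_one_ref (df_ref ref, struct web_entry *entry, char *used)
{
  /* Hard registers are left alone; only pseudos are webbed.  */
  if (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER)
    replace_ref (ref, entry_register (entry + DF_REF_ID (ref),
				      ref, used));
}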