/* Scanning of rtl for dataflow analysis.
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
- 2008, 2009, 2010 Free Software Foundation, Inc.
+ 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
#include "recog.h"
#include "function.h"
#include "regs.h"
-#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
-#include "timevar.h"
+#include "dumpfile.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
-#include "tree-pass.h"
#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
DEF_VEC_P(df_ref);
rtx, rtx *,
basic_block, struct df_insn_info *,
enum df_ref_type, int ref_flags);
-static void df_def_record_1 (struct df_collection_rec *, rtx,
+static void df_def_record_1 (struct df_collection_rec *, rtx *,
basic_block, struct df_insn_info *,
int ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
basic_block, struct df_insn_info *,
int ref_flags);
+static void df_install_ref_incremental (df_ref);
static df_ref df_ref_create_structure (enum df_ref_class,
struct df_collection_rec *, rtx, rtx *,
basic_block, struct df_insn_info *,
{
struct df_scan_problem_data *problem_data;
unsigned int insn_num = get_max_uid () + 1;
- unsigned int block_size = 400;
+ unsigned int block_size = 512;
basic_block bb;
/* Given the number of pools, this is really faster than tearing
sizeof (struct df_reg_info), block_size);
problem_data->mw_reg_pool
= create_alloc_pool ("df_scan mw_reg",
- sizeof (struct df_mw_hardreg), block_size);
+ sizeof (struct df_mw_hardreg), block_size / 16);
bitmap_obstack_initialize (&problem_data->reg_bitmaps);
bitmap_obstack_initialize (&problem_data->insn_bitmaps);
}
if (DF_REG_EQ_USE_COUNT (i))
{
- fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
+ fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
ecount += DF_REG_EQ_USE_COUNT (i);
}
fprintf (file, "} ");
icount++;
}
- fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
- dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
+ fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
+ " in %d{%d regular + %d call} insns.\n",
+ dcount + ucount + ecount, dcount, ucount, ecount,
+ icount + ccount, icount, ccount);
}
/* Dump the bb_info for a given basic block. */
}
}
+/* Create new refs under address LOC within INSN. This function is
+ only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
+ depending on whether LOC is inside PATTERN (INSN) or a note. */
+
+void
+df_uses_create (rtx *loc, rtx insn, int ref_flags)
+{
+ gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
+ df_uses_record (NULL, loc, DF_REF_REG_USE,
+ BLOCK_FOR_INSN (insn),
+ DF_INSN_INFO_GET (insn),
+ ref_flags);
+}
/* Create a new ref of type DF_REF_TYPE for register REG at address
LOC within INSN of BB. This function is only used externally. */
enum df_ref_type ref_type,
int ref_flags)
{
- df_ref ref;
- struct df_reg_info **reg_info;
- struct df_ref_info *ref_info;
- df_ref *ref_rec;
- df_ref **ref_rec_ptr;
- unsigned int count = 0;
- bool add_to_table;
enum df_ref_class cl;
df_grow_reg_info ();
cl = DF_REF_REGULAR;
else
cl = DF_REF_BASE;
- ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
- ref_type, ref_flags);
+
+ return df_ref_create_structure (cl, NULL, reg, loc, bb,
+ DF_INSN_INFO_GET (insn),
+ ref_type, ref_flags);
+}
+
+static void
+df_install_ref_incremental (df_ref ref)
+{
+ struct df_reg_info **reg_info;
+ struct df_ref_info *ref_info;
+ df_ref *ref_rec;
+ df_ref **ref_rec_ptr;
+ unsigned int count = 0;
+ bool add_to_table;
+
+ rtx insn = DF_REF_INSN (ref);
+ basic_block bb = BLOCK_FOR_INSN (insn);
if (DF_REF_REG_DEF_P (ref))
{
to mark the block dirty ourselves. */
if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
df_set_bb_dirty (bb);
-
- return ref;
}
}
df_refs_add_to_chains (&collection_rec, bb, insn);
- if (DEBUG_INSN_P (insn))
- df_set_bb_dirty_nonlr (bb);
- else
+ if (!DEBUG_INSN_P (insn))
df_set_bb_dirty (bb);
VEC_free (df_ref, stack, collection_rec.def_vec);
DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
/* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
- if ((regno < FIRST_PSEUDO_REGISTER)
- && (!DF_REF_IS_ARTIFICIAL (this_ref)))
+ if (regno < FIRST_PSEUDO_REGISTER
+ && !DF_REF_IS_ARTIFICIAL (this_ref)
+ && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
{
if (DF_REF_REG_DEF_P (this_ref))
{
else
VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
}
+ else
+ df_install_ref_incremental (this_ref);
return this_ref;
}
/* If this is a multiword hardreg, we create some extra
datastructures that will enable us to easily build REG_DEAD
and REG_UNUSED notes. */
- if ((endregno != regno + 1) && insn_info)
+ if (collection_rec
+ && (endregno != regno + 1) && insn_info)
{
/* Sets to a subreg of a multiword register are partial.
Sets to a non-subreg of a multiword register are not. */
}
-/* Process all the registers defined in the rtx, X.
- Autoincrement/decrement definitions will be picked up by
- df_uses_record. */
+/* Process all the registers defined in the rtx pointed by LOC.
+ Autoincrement/decrement definitions will be picked up by df_uses_record.
+ Any change here has to be matched in df_find_hard_reg_defs_1. */
static void
df_def_record_1 (struct df_collection_rec *collection_rec,
- rtx x, basic_block bb, struct df_insn_info *insn_info,
+ rtx *loc, basic_block bb, struct df_insn_info *insn_info,
int flags)
{
- rtx *loc;
- rtx dst;
-
- /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
- construct. */
- if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
- loc = &XEXP (x, 0);
- else
- loc = &SET_DEST (x);
- dst = *loc;
+ rtx dst = *loc;
/* It is legal to have a set destination be a parallel. */
if (GET_CODE (dst) == PARALLEL)
{
int i;
-
for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
{
rtx temp = XVECEXP (dst, 0, i);
- if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
- || GET_CODE (temp) == SET)
- df_def_record_1 (collection_rec,
- temp, bb, insn_info,
- GET_CODE (temp) == CLOBBER
- ? flags | DF_REF_MUST_CLOBBER : flags);
+ gcc_assert (GET_CODE (temp) == EXPR_LIST);
+ df_def_record_1 (collection_rec, &XEXP (temp, 0),
+ bb, insn_info, flags);
}
return;
}
}
-/* Process all the registers defined in the pattern rtx, X. */
+/* Process all the registers defined in the pattern rtx, X. Any change
+ here has to be matched in df_find_hard_reg_defs. */
static void
df_defs_record (struct df_collection_rec *collection_rec,
int flags)
{
RTX_CODE code = GET_CODE (x);
+ int i;
- if (code == SET || code == CLOBBER)
- {
- /* Mark the single def within the pattern. */
- int clobber_flags = flags;
- clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
- df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
- }
- else if (code == COND_EXEC)
+ switch (code)
{
+ case SET:
+ df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
+ break;
+
+ case CLOBBER:
+ flags |= DF_REF_MUST_CLOBBER;
+ df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
+ break;
+
+ case COND_EXEC:
df_defs_record (collection_rec, COND_EXEC_CODE (x),
bb, insn_info, DF_REF_CONDITIONAL);
+ break;
+
+ case PARALLEL:
+ for (i = 0; i < XVECLEN (x, 0); i++)
+ df_defs_record (collection_rec, XVECEXP (x, 0, i),
+ bb, insn_info, flags);
+ break;
+ default:
+      /* No DEFs to record in other cases.  */
+ break;
}
- else if (code == PARALLEL)
+}
+
+/* Set bits in *DEFS for hard registers found in the rtx DST, which is the
+ destination of a set or clobber. This has to match the logic in
+   df_def_record_1.  */
+
+static void
+df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
+{
+ /* It is legal to have a set destination be a parallel. */
+ if (GET_CODE (dst) == PARALLEL)
{
int i;
+ for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
+ {
+ rtx temp = XVECEXP (dst, 0, i);
+ gcc_assert (GET_CODE (temp) == EXPR_LIST);
+ df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
+ }
+ return;
+ }
- /* Mark the multiple defs within the pattern. */
- for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
- df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
+ if (GET_CODE (dst) == STRICT_LOW_PART)
+ dst = XEXP (dst, 0);
+
+ if (GET_CODE (dst) == ZERO_EXTRACT)
+ dst = XEXP (dst, 0);
+
+ /* At this point if we do not have a reg or a subreg, just return. */
+ if (REG_P (dst) && HARD_REGISTER_P (dst))
+ SET_HARD_REG_BIT (*defs, REGNO (dst));
+ else if (GET_CODE (dst) == SUBREG
+ && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
+ SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
+}
+
+/* Set bits in *DEFS for hard registers defined in the pattern X. This
+ has to match the logic in df_defs_record. */
+
+static void
+df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
+{
+ RTX_CODE code = GET_CODE (x);
+ int i;
+
+ switch (code)
+ {
+ case SET:
+ df_find_hard_reg_defs_1 (SET_DEST (x), defs);
+ break;
+
+ case CLOBBER:
+ df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
+ break;
+
+ case COND_EXEC:
+ df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
+ break;
+
+ case PARALLEL:
+ for (i = 0; i < XVECLEN (x, 0); i++)
+ df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
+ break;
+ default:
+      /* No DEFs to record in other cases.  */
+ break;
}
}
}
case RETURN:
+ case SIMPLE_RETURN:
break;
case ASM_OPERANDS:
}
-/* Get call's extra defs and uses. */
+/* Get call's extra defs and uses (track caller-saved registers). */
static void
-df_get_call_refs (struct df_collection_rec * collection_rec,
+df_get_call_refs (struct df_collection_rec *collection_rec,
basic_block bb,
struct df_insn_info *insn_info,
int flags)
{
rtx note;
- bitmap_iterator bi;
- unsigned int ui;
bool is_sibling_call;
unsigned int i;
- df_ref def;
- bitmap_head defs_generated;
+ HARD_REG_SET defs_generated;
- bitmap_initialize (&defs_generated, &df_bitmap_obstack);
+ CLEAR_HARD_REG_SET (defs_generated);
+ df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
+ is_sibling_call = SIBLING_CALL_P (insn_info->insn);
- /* Do not generate clobbers for registers that are the result of the
- call. This causes ordering problems in the chain building code
- depending on which def is seen first. */
- FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, i, def)
- bitmap_set_bit (&defs_generated, DF_REF_REGNO (def));
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ {
+ if (i == STACK_POINTER_REGNUM)
+ /* The stack ptr is used (honorarily) by a CALL insn. */
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_USE,
+ DF_REF_CALL_STACK_USAGE | flags);
+ else if (global_regs[i])
+ {
+ /* Calls to const functions cannot access any global registers and
+ calls to pure functions cannot set them. All other calls may
+ reference any of the global registers, so they are recorded as
+ used. */
+ if (!RTL_CONST_CALL_P (insn_info->insn))
+ {
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_USE, flags);
+ if (!RTL_PURE_CALL_P (insn_info->insn))
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_DEF, flags);
+ }
+ }
+ else if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
+ /* no clobbers for regs that are the result of the call */
+ && !TEST_HARD_REG_BIT (defs_generated, i)
+ && (!is_sibling_call
+ || !bitmap_bit_p (df->exit_block_uses, i)
+ || refers_to_regno_p (i, i+1,
+ crtl->return_rtx, NULL)))
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_DEF,
+ DF_REF_MAY_CLOBBER | flags);
+ }
/* Record the registers used to pass arguments, and explicitly
noted as clobbered. */
if (REG_P (XEXP (XEXP (note, 0), 0)))
{
unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
- if (!bitmap_bit_p (&defs_generated, regno))
+ if (!TEST_HARD_REG_BIT (defs_generated, regno))
df_defs_record (collection_rec, XEXP (note, 0), bb,
insn_info, flags);
}
}
}
- /* The stack ptr is used (honorarily) by a CALL insn. */
- df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
- NULL, bb, insn_info, DF_REF_REG_USE,
- DF_REF_CALL_STACK_USAGE | flags);
-
- /* Calls may also reference any of the global registers,
- so they are recorded as used. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (global_regs[i])
- {
- df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
- NULL, bb, insn_info, DF_REF_REG_USE, flags);
- df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
- NULL, bb, insn_info, DF_REF_REG_DEF, flags);
- }
-
- is_sibling_call = SIBLING_CALL_P (insn_info->insn);
- EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
- {
- if (!global_regs[ui]
- && (!bitmap_bit_p (&defs_generated, ui))
- && (!is_sibling_call
- || !bitmap_bit_p (df->exit_block_uses, ui)
- || refers_to_regno_p (ui, ui+1,
- crtl->return_rtx, NULL)))
- df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
- NULL, bb, insn_info, DF_REF_REG_DEF,
- DF_REF_MAY_CLOBBER | flags);
- }
-
- bitmap_clear (&defs_generated);
return;
}
and reg chains. */
static void
-df_insn_refs_collect (struct df_collection_rec* collection_rec,
+df_insn_refs_collect (struct df_collection_rec *collection_rec,
basic_block bb, struct df_insn_info *insn_info)
{
rtx note;
VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
- /* Record register defs. */
- df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
-
/* Process REG_EQUIV/REG_EQUAL notes. */
for (note = REG_NOTES (insn_info->insn); note;
note = XEXP (note, 1))
}
}
+ /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
+ uses from CALL_INSN_FUNCTION_USAGE. */
if (CALL_P (insn_info->insn))
df_get_call_refs (collection_rec, bb, insn_info,
(is_cond_exec) ? DF_REF_CONDITIONAL : 0);
+ /* Record other defs. These should be mostly for DF_REF_REGULAR, so
+ that a qsort on the defs is unnecessary in most cases. */
+ df_defs_record (collection_rec,
+ PATTERN (insn_info->insn), bb, insn_info, 0);
+
/* Record the register uses. */
df_uses_record (collection_rec,
&PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
live everywhere -- which might not already be the case for
blocks within infinite loops. */
{
+ unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
+
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
- if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
- && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
- bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
+ if (picreg != INVALID_REGNUM
+ && fixed_regs[picreg])
+ bitmap_set_bit (regular_block_artificial_uses, picreg);
}
/* The all-important stack pointer must always be live. */
bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
{
bitmap_clear (eh_block_artificial_uses);
- /* The following code (down thru the arg_pointer setting APPEARS
+ /* The following code (down through the arg_pointer setting APPEARS
to be necessary because there is nothing that actually
describes what the exception handling code may actually need
to keep alive. */
bitmap_clear (entry_block_defs);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- {
- if (FUNCTION_ARG_REGNO_P (i))
-#ifdef INCOMING_REGNO
- bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
-#else
- bitmap_set_bit (entry_block_defs, i);
-#endif
- }
+ if (FUNCTION_ARG_REGNO_P (i))
+ bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
/* The always important stack pointer. */
bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
/* These registers are live everywhere. */
if (!reload_completed)
{
+#ifdef PIC_OFFSET_TABLE_REGNUM
+ unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
+#endif
+
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
#ifdef PIC_OFFSET_TABLE_REGNUM
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
- if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
- && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
- bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
+ if (picreg != INVALID_REGNUM
+ && fixed_regs[picreg])
+ bitmap_set_bit (entry_block_defs, picreg);
#endif
}
df_get_exit_block_use_set (bitmap exit_block_uses)
{
unsigned int i;
+ unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
bitmap_clear (exit_block_uses);
Assume the pic register is not in use, or will be handled by
other means, if it is not fixed. */
if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
- && (unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
- && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
- bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
+ && picreg != INVALID_REGNUM
+ && fixed_regs[picreg])
+ bitmap_set_bit (exit_block_uses, picreg);
/* Mark all global registers, and all registers used by the
epilogue as being live at the end of the function since they
if (!INSN_P (insn))
continue;
df_insn_refs_verify (&collection_rec, bb, insn, true);
- df_free_collection_rec (&collection_rec);
}
/* Do the artificial defs and uses. */
if (!is_eq && abort_if_fail)
{
- print_current_pass (stderr);
fprintf (stderr, "entry_block_defs = ");
df_print_regset (stderr, &entry_block_defs);
fprintf (stderr, "df->entry_block_defs = ");
if (!is_eq && abort_if_fail)
{
- print_current_pass (stderr);
fprintf (stderr, "exit_block_uses = ");
df_print_regset (stderr, &exit_block_uses);
fprintf (stderr, "df->exit_block_uses = ");
/* Verification is a 4 step process. */
- /* (1) All of the refs are marked by going thru the reg chains. */
+ /* (1) All of the refs are marked by going through the reg chains. */
for (i = 0; i < DF_REG_SIZE (df); i++)
{
gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)