{
/* True for input reload of matched operands. */
bool match_p;
+ /* True for input reload of inout earlyclobber operand. */
+ bool early_clobber_p;
/* Reloaded value. */
rtx input;
/* Reload pseudo used. */
@@ ... @@
/* Create a new pseudo using MODE, RCLASS, EXCLUDE_START_HARD_REGS, ORIGINAL or
reuse an existing reload pseudo. Don't reuse an existing reload pseudo if
IN_SUBREG_P is true and the reused pseudo should be wrapped up in a SUBREG.
+ EARLY_CLOBBER_P is true for input reload of inout early clobber operand.
The result pseudo is returned through RESULT_REG. Return TRUE if we created
a new pseudo, FALSE if we reused an existing reload pseudo. Use TITLE to
describe new registers for debug purposes. */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
enum reg_class rclass, HARD_REG_SET *exclude_start_hard_regs,
- bool in_subreg_p, const char *title, rtx *result_reg)
+ bool in_subreg_p, bool early_clobber_p,
+ const char *title, rtx *result_reg)
{
int i, regno;
enum reg_class new_class;
@@ ... @@
for (i = 0; i < curr_insn_input_reloads_num; i++)
{
if (! curr_insn_input_reloads[i].match_p
+ && ! curr_insn_input_reloads[i].early_clobber_p
&& rtx_equal_p (curr_insn_input_reloads[i].input, original)
&& in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
{
@@ ... @@
lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
+ curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p
+ = early_clobber_p;
curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
return true;
}
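A reader's aside, not part of the patch: below is a minimal sketch of the operand pairing the new EARLY_CLOBBER_P handling guards against, assuming LRA would otherwise generate input reloads for both operands and reuse one reload register for them; the function name, asm template, and constraints are hypothetical, not taken from the patch or the GCC testsuite.

/* Hypothetical illustration only.  Operand 0 is an inout earlyclobber
   operand ("+&r"): it may be written before the other inputs are read, so
   its register must not be shared with any input.  Operand 1 reloads the
   same value, so without the early_clobber_p check the input reload
   register created for operand 0 could be reused for operand 1, putting
   both operands in one hard register and defeating the earlyclobber
   guarantee.  */
static int
earlyclobber_inout (int x)
{
  int y = x;                    /* same value as the inout operand */
  asm ("" : "+&r" (x) : "r" (y));
  return x + y;
}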
@@ ... @@
lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
+ curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p = false;
curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
for (i = 0; (in = ins[i]) >= 0; i++)
if (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
@@ ... @@
reg = *loc;
if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
mode, reg, cl, NULL,
- subreg_p, "address", &new_reg))
+ subreg_p, false, "address", &new_reg))
before_p = true;
}
else if (new_class != NO_REGS && rclass != new_class)
@@ ... @@
= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
reg, rclass, NULL,
- true, "slow/invalid mem", &new_reg))
+ true, false, "slow/invalid mem", &new_reg))
{
bool insert_before, insert_after;
bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
@@ ... @@
= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
rclass, NULL,
- true, "slow/invalid mem", &new_reg))
+ true, false, "slow/invalid mem", &new_reg))
{
bool insert_before, insert_after;
bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
@@ ... @@
if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
rclass, NULL,
- true, "subreg reg", &new_reg))
+ true, false, "subreg reg", &new_reg))
{
bool insert_before, insert_after;
bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
@@ ... @@
if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
rclass, NULL,
- true, "paradoxical subreg", &new_reg))
+ true, false, "paradoxical subreg", &new_reg))
{
rtx subreg;
bool insert_before, insert_after;
@@ ... @@
/* This value does not matter for MODIFY. */
GET_MODE_SIZE (GET_MODE (op)));
else if (get_reload_reg (OP_IN, Pmode, *loc, rclass,
- NULL, false,
+ NULL, false, false,
"offsetable address", &new_reg))
{
rtx addr = *loc;
@@ ... @@
old = *loc;
if (get_reload_reg (type, mode, old, goal_alt[i],
&goal_alt_exclude_start_hard_regs[i],
- loc != curr_id->operand_loc[i], "", &new_reg)
+ loc != curr_id->operand_loc[i],
+ curr_static_id->operand_alternative
+ [goal_alt_number * n_operands + i].earlyclobber,
+ "", &new_reg)
&& type != OP_OUT)
{
push_to_sequence (before);