\f
//////////////////////////////////////////////////////////////////////////////
-// Split insns after peephole2 / befor avr-fuse-move.
+// Split shift insns after peephole2 / before avr-fuse-move.
+
static const pass_data avr_pass_data_split_after_peephole2 =
{
RTL_PASS, // type
} // anonymous namespace
-/* Whether some shift insn alternatives are a 3-operand insn or a
- 2-operand insn. This 3op alternatives allow the source and the
- destination register of the shift to be different right from the
- start, because the splitter will split the 3op shift into a 3op byte
- shift and a 2op residual bit shift.
- (When the residual shift has an offset of one less than the bitsize,
- then the residual shift is also a 3op insn. */
+/* Whether some shift insn alternatives are `3op' 3-operand insns.
+   These 3op alternatives allow the source and the destination register
+   of the shift to be different right from the start, because the splitter
+   will split the 3op shift into a 3-operand byte shift and a 2-operand
+   residual bit shift.  (When the residual shift has an offset of one
+   less than the bitsize, then the residual shift is also a 3op insn.) */
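+// For example, a 4-byte shift with an offset of 17 can be split into
+// a byte shift by 16 and a residual 1-bit shift.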
bool
avr_shift_is_3op ()
{
// Don't split for OPTIMIZE_SIZE_MAX (-Oz).
// For OPTIMIZE_SIZE_BALANCED (-Os), we still split because
- // the size overhead (if exists at all) is marginal.
+ // the size overhead (if any at all) is marginal.
return (avropt_split_bit_shift
&& optimize > 0
}
-/* Implement constraints `C4a', `C4l' and `C4r'.
+/* Implement constraints `C2a', `C2l', `C2r' ... `C4a', `C4l', `C4r'.
Whether we split an N_BYTES shift of code CODE in { ASHIFTRT,
LSHIFTRT, ASHIFT } into a byte shift and a residual bit shift. */
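+// For example, a 2-byte ASHIFT by 9...14 is split, whereas an offset of 15
+// is not split because a shift by one less than the bitsize is a 3op insn
+// by itself (constraint C15).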
bool
avr_split_shift_p (int n_bytes, int offset, rtx_code code)
{
- gcc_assert (n_bytes == 4);
+ gcc_assert (n_bytes == 4 || n_bytes == 3 || n_bytes == 2);
+
+ if (! avr_shift_is_3op ()
+ || offset % 8 == 0)
+ return false;
- if (avr_shift_is_3op ()
- && offset % 8 != 0)
+ if (n_bytes == 4)
return select<bool>()
- : code == ASHIFT ? IN_RANGE (offset, 17, 30)
- : code == ASHIFTRT ? IN_RANGE (offset, 9, 29)
+ : code == ASHIFT ? IN_RANGE (offset, 9, 30) && offset != 15
+ : code == ASHIFTRT ? IN_RANGE (offset, 9, 29) && offset != 15
: code == LSHIFTRT ? IN_RANGE (offset, 9, 30) && offset != 15
: bad_case<bool> ();
+ if (n_bytes == 3)
+ return select<bool>()
+ : code == ASHIFT ? IN_RANGE (offset, 9, 22) && offset != 15
+ : code == ASHIFTRT ? IN_RANGE (offset, 9, 21) && offset != 15
+ : code == LSHIFTRT ? IN_RANGE (offset, 9, 22) && offset != 15
+ : bad_case<bool> ();
+
+ if (n_bytes == 2)
+ return select<bool>()
+ : code == ASHIFT ? IN_RANGE (offset, 9, 14)
+ : code == ASHIFTRT ? IN_RANGE (offset, 9, 13)
+ : code == LSHIFTRT ? IN_RANGE (offset, 9, 14)
+ : bad_case<bool> ();
+
return false;
}
/* Emit a DEST = SRC <code> OFF shift of QImode, HImode or PSImode.
- SCRATCH is a QImode d-register, scratch:QI, or NULL_RTX. */
+ SCRATCH is a QImode d-register, scratch:QI, or NULL_RTX.
+ This function is used to emit shifts that have been split into
+ a byte shift and a residual bit shift that operates on a mode
+ strictly smaller than the original shift. */
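+// For example, a 16-bit residual ASHIFT or LSHIFTRT by 7...12 (constraint
+// C7c) is a 3-operand insn, so DEST and SRC may be different registers.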
static void
avr_emit_shift (rtx_code code, rtx dest, rtx src, int off, rtx scratch)
{
const machine_mode mode = GET_MODE (dest);
+ const int n_bits = GET_MODE_BITSIZE (mode);
rtx xoff = GEN_INT (off);
- bool is_3op = (off % 8 == 0
- || off == GET_MODE_BITSIZE (mode) - 1
- || (code == ASHIFTRT && off == GET_MODE_BITSIZE (mode) - 2)
- || (mode == HImode
- && (code == ASHIFT || code == LSHIFTRT)
- && satisfies_constraint_C7c (xoff) /* 7...12 */));
+
+ // Work out which alternatives can handle 3 operands independently
+ // of the options.
+
+ const bool b16_is_3op = select<bool>()
+ : code == ASHIFT ? satisfies_constraint_C7c (xoff) // 7...12
+ : code == LSHIFTRT ? satisfies_constraint_C7c (xoff)
+ : code == ASHIFTRT ? off == 7
+ : bad_case<bool> ();
+
+ const bool b24_is_3op = select<bool>()
+ : code == ASHIFT ? off == 15
+ : code == LSHIFTRT ? off == 15
+ : code == ASHIFTRT ? false
+ : bad_case<bool> ();
+
+ const bool is_3op = (off % 8 == 0
+ || off == n_bits - 1
+ || (code == ASHIFTRT && off == n_bits - 2)
+ || (n_bits == 16 && b16_is_3op)
+ || (n_bits == 24 && b24_is_3op));
rtx shift;
if (is_3op)
shift = gen_rtx_fmt_ee (code, mode, dest, xoff);
}
+ if (n_bits == 8)
+ // 8-bit shifts don't have a scratch operand.
+ scratch = NULL_RTX;
+ else if (! scratch && n_bits == 24)
+ // 24-bit shifts always have a scratch operand.
+ scratch = gen_rtx_SCRATCH (QImode);
+
emit_valid_move_clobbercc (dest, shift, scratch);
}
-/* Worker for define_split that runs when -msplit-bit-shift is on.
- Split a shift of code CODE into a 3op byte shift and a residual bit shift.
- Return 'true' when a split has been performed and insns have been emitted.
- Otherwise, return 'false'. */
+/* Handle the 4-byte case of avr_split_shift below:
+ Split 4-byte shift DEST = SRC <code> IOFF into a 3-operand
+ byte shift and a residual shift in a smaller mode if possible.
+ SCRATCH is a QImode upper scratch register or NULL_RTX. */
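+// For example, a 4-byte LSHIFTRT by 9 is emitted as a 4-byte shift by 8
+// followed by a 24-bit shift by 1 of the low three bytes (the 9...15
+// case below).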
-bool
-avr_split_shift (rtx xop[], rtx scratch, rtx_code code)
+static bool
+avr_split_shift4 (rtx dest, rtx src, int ioff, rtx scratch, rtx_code code)
{
- scratch = scratch && REG_P (scratch) ? scratch : NULL_RTX;
- rtx dest = xop[0];
- rtx src = xop[1];
- int ioff = INTVAL (xop[2]);
-
gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 4);
if (code == ASHIFT)
emit_valid_move_clobbercc (avr_word (dest, 0), const0_rtx);
return true;
}
+ // ...the 9...14 cases are only handled by define_split because
+ // for now, we don't exploit that the low byte is zero.
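+ // E.g. a 4-byte ASHIFT by 10 is split by that pattern into a byte
+ // shift by 8 and a residual shift by 2 (offset & ~7 and offset & 7).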
}
else if (code == ASHIFTRT
|| code == LSHIFTRT)
}
else if (IN_RANGE (ioff, 9, 15))
{
- avr_emit_shift (code, dest, src, 8, NULL_RTX);
+ avr_emit_shift (code, dest, src, 8, scratch);
rtx dst24 = avr_chunk (PSImode, dest, 0);
rtx src24 = avr_chunk (PSImode, dest, 0);
- if (! scratch)
- scratch = gen_rtx_SCRATCH (QImode);
avr_emit_shift (code, dst24, src24, ioff - 8, scratch);
return true;
}
}
+/* Handle the 3-byte case of avr_split_shift below:
+ Split 3-byte shift DEST = SRC <code> IOFF into a 3-operand
+ byte shift and a residual shift in a smaller mode if possible.
+ SCRATCH is a QImode upper scratch register or NULL_RTX. */
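+// For example, a 3-byte ASHIFT by 18 becomes  byte 2 of DEST = byte 0
+// of SRC << 2  with the low 16-bit word of DEST cleared (the 17...22
+// case below).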
+
+static bool
+avr_split_shift3 (rtx dest, rtx src, int ioff, rtx scratch, rtx_code code)
+{
+ gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 3);
+
+ if (code == ASHIFT)
+ {
+ if (IN_RANGE (ioff, 17, 22))
+ {
+ rtx dst8 = avr_byte (dest, 2);
+ rtx src8 = avr_byte (src, 0);
+ avr_emit_shift (code, dst8, src8, ioff - 16, NULL_RTX);
+ emit_valid_move_clobbercc (avr_word (dest, 0), const0_rtx);
+ return true;
+ }
+ // ...the 9...14 cases are only handled by define_split because
+ // for now, we don't exploit that the low byte is zero.
+ }
+ else if (code == ASHIFTRT
+ || code == LSHIFTRT)
+ {
+ if (IN_RANGE (ioff, 17, 22))
+ {
+ rtx dst8 = avr_byte (dest, 0);
+ rtx src8 = avr_byte (src, 2);
+ avr_emit_shift (code, dst8, src8, ioff - 16, NULL_RTX);
+ if (code == ASHIFTRT)
+ {
+ rtx signs = avr_byte (dest, 1);
+ avr_emit_shift (code, signs, src8, 7, NULL_RTX);
+ emit_valid_move_clobbercc (avr_byte (dest, 2), signs);
+ }
+ else
+ {
+ emit_valid_move_clobbercc (avr_byte (dest, 1), const0_rtx);
+ emit_valid_move_clobbercc (avr_byte (dest, 2), const0_rtx);
+ }
+ return true;
+ }
+ else if (IN_RANGE (ioff, 9, 15))
+ {
+ avr_emit_shift (code, dest, src, 8, scratch);
+ rtx dst16 = avr_chunk (HImode, dest, 0);
+ rtx src16 = avr_chunk (HImode, dest, 0);
+ avr_emit_shift (code, dst16, src16, ioff - 8, scratch);
+ return true;
+ }
+ }
+ else
+ gcc_unreachable ();
+
+ return false;
+}
+
+
+/* Handle the 2-byte case of avr_split_shift below:
+ Split 2-byte shift DEST = SRC <code> IOFF into a 3-operand
+ byte shift and a residual shift in a smaller mode if possible.
+ SCRATCH is a QImode upper scratch register or NULL_RTX. */
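+// For example, a 2-byte LSHIFTRT by 10 becomes  byte 0 of DEST = byte 1
+// of SRC >> 2  with byte 1 of DEST cleared (the 9...14 case below).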
+
+static bool
+avr_split_shift2 (rtx dest, rtx src, int ioff, rtx /*scratch*/, rtx_code code)
+{
+ gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
+
+ if (code == ASHIFT)
+ {
+ if (IN_RANGE (ioff, 9, 14))
+ {
+ rtx dst8 = avr_byte (dest, 1);
+ rtx src8 = avr_byte (src, 0);
+ avr_emit_shift (code, dst8, src8, ioff - 8, NULL_RTX);
+ emit_valid_move_clobbercc (avr_byte (dest, 0), const0_rtx);
+ return true;
+ }
+ }
+ else if (code == ASHIFTRT
+ || code == LSHIFTRT)
+ {
+ if (IN_RANGE (ioff, 9, 14))
+ {
+ rtx dst8 = avr_byte (dest, 0);
+ rtx src8 = avr_byte (src, 1);
+ rtx signs = const0_rtx;
+ avr_emit_shift (code, dst8, src8, ioff - 8, NULL_RTX);
+ if (code == ASHIFTRT)
+ {
+ signs = avr_byte (dest, 1);
+ avr_emit_shift (code, signs, src8, 7, NULL_RTX);
+ }
+ emit_valid_move_clobbercc (avr_byte (dest, 1), signs);
+ return true;
+ }
+ }
+ else
+ gcc_unreachable ();
+
+ return false;
+}
+
+
+/* Worker for a define_split that runs when -msplit-bit-shift is on.
+ Split a shift of code CODE into a 3op byte shift and a residual bit shift.
+ Return 'true' when a split has been performed and insns have been emitted.
+ Otherwise, return 'false'. */
+
+bool
+avr_split_shift (rtx xop[], rtx scratch, rtx_code code)
+{
+ scratch = scratch && REG_P (scratch) ? scratch : NULL_RTX;
+ rtx dest = xop[0];
+ rtx src = xop[1];
+ int ioff = INTVAL (xop[2]);
+ int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
+
+ return select<bool>()
+ : n_bytes == 2 ? avr_split_shift2 (dest, src, ioff, scratch, code)
+ : n_bytes == 3 ? avr_split_shift3 (dest, src, ioff, scratch, code)
+ : n_bytes == 4 ? avr_split_shift4 (dest, src, ioff, scratch, code)
+ : bad_case<bool> ();
+}
+
+
namespace
{
{
int reg0 = REGNO (op[0]);
int reg1 = REGNO (op[1]);
+ bool reg1_unused_after = reg_unused_after (insn, op[1]);
switch (INTVAL (op[2]))
{
: avr_asm_len ("clr %A0" CR_TAB
"mov %B0,%A1" CR_TAB
"mov %C0,%B1", op, plen, 3);
+ case 15:
+ avr_asm_len (reg1_unused_after
+ ? "lsr %B1"
+ : "bst %B1,0", op, plen, 1);
+ if (reg0 + 2 != reg1)
+ avr_asm_len ("mov %C0,%A1", op, plen, 1);
+ avr_asm_len ("clr %A0" CR_TAB
+ "clr %B0" CR_TAB
+ "ror %C0" CR_TAB
+ "ror %B0", op, plen, 5);
+ return reg1_unused_after
+ ? ""
+ : avr_asm_len ("bld %C0,7", op, plen, 1);
+
case 16:
if (reg0 + 2 != reg1)
avr_asm_len ("mov %C0,%A1", op, plen, 1);
{
int reg0 = true_regnum (operands[0]);
int reg1 = true_regnum (operands[1]);
- bool reg1_unused_after_p = reg_unused_after (insn, operands[1]);
+ bool reg1_unused_after = reg_unused_after (insn, operands[1]);
if (plen)
*plen = 0;
"mov %C0,%B1" CR_TAB
"mov %D0,%C1", operands, plen, 4);
case 15:
- avr_asm_len (reg1_unused_after_p
+ avr_asm_len (reg1_unused_after
? "lsr %C1"
: "bst %C1,0", operands, plen, 1);
if (reg0 + 2 != reg1)
avr_asm_len ("mov %C0,%A1" CR_TAB
"mov %D0,%B1", operands, plen, 2);
}
- return reg1_unused_after_p
- ? avr_asm_len ("clr %A0" CR_TAB
- "clr %B0" CR_TAB
- "ror %D0" CR_TAB
- "ror %C0" CR_TAB
- "ror %B0", operands, plen, 5)
- : avr_asm_len ("clr %A0" CR_TAB
- "clr %B0" CR_TAB
- "lsr %D0" CR_TAB
- "ror %C0" CR_TAB
- "ror %B0" CR_TAB
- "bld %D0,7", operands, plen, 6);
+ avr_asm_len ("clr %A0" CR_TAB
+ "clr %B0" CR_TAB
+ "ror %D0" CR_TAB
+ "ror %C0" CR_TAB
+ "ror %B0", operands, plen, 5);
+ return reg1_unused_after
+ ? ""
+ : avr_asm_len ("bld %D0,7", operands, plen, 1);
+
case 16:
if (reg0 + 2 == reg1)
return avr_asm_len ("clr %B0" CR_TAB
"rol %A0" CR_TAB
"rol %B0", operands, plen, 8);
case 7:
- return avr_asm_len ("lsl %A0" CR_TAB
- "mov %A0,%B0" CR_TAB
- "rol %A0" CR_TAB
- "sbc %B0,%B0", operands, plen, 4);
+ return reg1_unused_after
+ ? avr_asm_len ("lsl %A1" CR_TAB
+ "mov %A0,%B1" CR_TAB
+ "rol %A0" CR_TAB
+ "sbc %B0,%B0", operands, plen, 4)
+ : avr_asm_len ("mov %A0,%A1" CR_TAB
+ "lsl %A0" CR_TAB
+ "mov %A0,%B1" CR_TAB
+ "rol %A0" CR_TAB
+ "sbc %B0,%B0", operands, plen, 5);
case 8:
{
int reg0 = true_regnum (operands[0]);
"dec %C0" CR_TAB
"mov %B0,%C1" CR_TAB
"mov %A0,%B1", op, plen, 5);
+ case 15:
+ avr_asm_len (reg1_unused_after
+ ? "lsl %B1"
+ : "bst %B1,7", op, plen, 1);
+ if (dest != src + 2)
+ avr_asm_len ("mov %A0,%C1", op, plen, 1);
+ avr_asm_len ("rol %A0" CR_TAB
+ "sbc %B0,%B0" CR_TAB
+ "sbc %C0,%C0", op, plen, 3);
+ return reg1_unused_after
+ ? ""
+ : avr_asm_len ("bld %A0,0", op, plen, 1);
+
case 16:
if (dest != src + 2)
avr_asm_len ("mov %A0,%C1", op, plen, 1);
- return avr_asm_len ("clr %B0" CR_TAB
- "sbrc %A0,7" CR_TAB
- "com %B0" CR_TAB
- "mov %C0,%B0", op, plen, 4);
+ return reg1_unused_after && dest != src + 2
+ ? avr_asm_len ("rol %C1" CR_TAB
+ "sbc %B0,%B0" CR_TAB
+ "sbc %C0,%C0", op, plen, 3)
+ : avr_asm_len ("clr %B0" CR_TAB
+ "sbrc %A0,7" CR_TAB
+ "com %B0" CR_TAB
+ "mov %C0,%B0", op, plen, 4);
case 22:
{
rtx xop[2] = { op[0], op[1] };
"mov %C0,%D1" CR_TAB
"mov %B0,%C1" CR_TAB
"mov %A0,%B1", operands, plen, 6);
+ case 15:
+ avr_asm_len (reg1_unused_after
+ ? "lsl %B1"
+ : "bst %B1,7", operands, plen, 1);
+ if (reg0 != reg1 + 2)
+ {
+ if (AVR_HAVE_MOVW)
+ avr_asm_len ("movw %A0,%C1", operands, plen, 1);
+ else
+ avr_asm_len ("mov %A0,%C1" CR_TAB
+ "mov %B0,%D1", operands, plen, 2);
+ }
+ avr_asm_len ("rol %A0" CR_TAB
+ "rol %B0" CR_TAB
+ "sbc %C0,%C0" CR_TAB
+ "sbc %D0,%D0", operands, plen, 4);
+ return reg1_unused_after
+ ? ""
+ : avr_asm_len ("bld %A0,0", operands, plen, 1);
+
case 16:
if (reg0 == reg1 + 2)
return avr_asm_len ("clr %D0" CR_TAB
"sbrc %B0,7" CR_TAB
"com %D0" CR_TAB
"mov %C0,%D0", operands, plen, 4);
- return AVR_HAVE_MOVW
- ? avr_asm_len ("movw %A0,%C1" CR_TAB
- "clr %D0" CR_TAB
- "sbrc %B0,7" CR_TAB
- "com %D0" CR_TAB
+ if (AVR_HAVE_MOVW)
+ avr_asm_len ("movw %A0,%C1", operands, plen, 1);
+ else
+ avr_asm_len ("mov %B0,%D1" CR_TAB
+ "mov %A0,%C1", operands, plen, 2);
+ return reg1_unused_after
+ ? avr_asm_len ("lsl %D1" CR_TAB
+ "sbc %D0,%D0" CR_TAB
+ "mov %C0,%D0", operands, plen, 3)
+ : avr_asm_len ("clr %D0" CR_TAB
+ "sbrc %B0,7" CR_TAB
+ "com %D0" CR_TAB
+ "mov %C0,%D0", operands, plen, 4);
+ case 24:
+ return reg1_unused_after
+ ? avr_asm_len ("mov %A0,%D1" CR_TAB
+ "lsl %D1" CR_TAB
+ "sbc %D0,%D0" CR_TAB
+ "mov %B0,%D0" CR_TAB
"mov %C0,%D0", operands, plen, 5)
- : avr_asm_len ("mov %B0,%D1" CR_TAB
- "mov %A0,%C1" CR_TAB
+ : avr_asm_len ("mov %A0,%D1" CR_TAB
"clr %D0" CR_TAB
- "sbrc %B0,7" CR_TAB
+ "sbrc %A0,7" CR_TAB
"com %D0" CR_TAB
+ "mov %B0,%D0" CR_TAB
"mov %C0,%D0", operands, plen, 6);
- case 24:
- return avr_asm_len ("mov %A0,%D1" CR_TAB
- "clr %D0" CR_TAB
- "sbrc %A0,7" CR_TAB
- "com %D0" CR_TAB
- "mov %B0,%D0" CR_TAB
- "mov %C0,%D0", operands, plen, 6);
case 30:
{
rtx xop[2] = { operands[0], operands[1] };
{
int dest = REGNO (op[0]);
int src = REGNO (op[1]);
+ bool src_unused_after_p = reg_unused_after (insn, op[1]);
if (CONST_INT_P (op[2]))
{
return avr_asm_len ("clr %C0" CR_TAB
"mov %B0,%C1" CR_TAB
"mov %A0,%B1", op, plen, 3);
+ case 15:
+ avr_asm_len (src_unused_after_p
+ ? "lsl %B1"
+ : "bst %B1,7", op, plen, 1);
+ if (dest != src + 2)
+ avr_asm_len ("mov %A0,%C1", op, plen, 1);
+ avr_asm_len ("clr %C0" CR_TAB
+ "clr %B0" CR_TAB
+ "rol %A0" CR_TAB
+ "rol %B0", op, plen, 4);
+ return src_unused_after_p
+ ? ""
+ : avr_asm_len ("bld %A0,0", op, plen, 1);
case 16:
if (dest != src + 2)
{
int reg0 = true_regnum (operands[0]);
int reg1 = true_regnum (operands[1]);
- bool reg1_unused_after_p = reg_unused_after (insn, operands[1]);
+ bool reg1_unused_after = reg_unused_after (insn, operands[1]);
if (plen)
*plen = 0;
"mov %B0,%C1" CR_TAB
"mov %A0,%B1", operands, plen, 4);
case 15:
- avr_asm_len (reg1_unused_after_p
+ avr_asm_len (reg1_unused_after
? "lsl %B1"
: "bst %B1,7", operands, plen, 1);
if (reg0 != reg1 + 2)
avr_asm_len ("mov %A0,%C1" CR_TAB
"mov %B0,%D1", operands, plen, 2);
}
- return reg1_unused_after_p
- ? avr_asm_len ("clr %D0" CR_TAB
- "clr %C0" CR_TAB
- "rol %A0" CR_TAB
- "rol %B0" CR_TAB
- "rol %C0", operands, plen, 5)
- : avr_asm_len ("clr %D0" CR_TAB
- "clr %C0" CR_TAB
- "lsl %A0" CR_TAB
- "rol %B0" CR_TAB
- "rol %C0" CR_TAB
- "bld %A0,0", operands, plen, 6);
+ avr_asm_len ("clr %D0" CR_TAB
+ "clr %C0" CR_TAB
+ "rol %A0" CR_TAB
+ "rol %B0" CR_TAB
+ "rol %C0", operands, plen, 5);
+ return reg1_unused_after
+ ? ""
+ : avr_asm_len ("bld %A0,0", operands, plen, 1);
+
case 16:
if (reg0 == reg1 + 2)
return avr_asm_len ("clr %C0" CR_TAB
case ASHIFT:
switch (mode)
{
- case E_QImode:
+ case E_QImode: // ashlqi3
if (speed
&& XEXP (x, 0) == const1_rtx
&& GET_CODE (XEXP (x, 1)) == AND)
}
break;
- case E_HImode:
+ case E_HImode: // ashlhi3
if (AVR_HAVE_MUL)
{
if (const_2_to_7_operand (XEXP (x, 1), HImode)
}
break;
- case E_PSImode:
+ case E_PSImode: // ashlpsi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 6 : 73);
case 16:
*total = COSTS_N_INSNS (3);
break;
+ case 9:
+ case 15:
+ *total = COSTS_N_INSNS (6);
+ break;
case 23:
*total = COSTS_N_INSNS (5);
break;
}
break;
- case E_SImode:
+ case E_SImode: // ashlsi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 7 : 113);
case ASHIFTRT:
switch (mode)
{
- case E_QImode:
+ case E_QImode: // ashrqi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 4 : 17);
}
break;
- case E_HImode:
+ case E_HImode: // ashrhi3
if (CONST_INT_P (XEXP (x, 0))
&& INTVAL (XEXP (x, 0)) == 128
&& GET_CODE (XEXP (x, 1)) == AND)
}
break;
- case E_PSImode:
+ case E_PSImode: // ashrpsi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 6 : 73);
case 1:
*total = COSTS_N_INSNS (3);
break;
- case 16:
case 8:
+ case 15:
*total = COSTS_N_INSNS (5);
break;
+ case 16:
+ *total = COSTS_N_INSNS (4);
+ break;
case 22:
*total = COSTS_N_INSNS (6);
break;
}
break;
- case E_SImode:
+ case E_SImode: // ashrsi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 7 : 113);
*total = COSTS_N_INSNS (4);
break;
case 8:
+ *total = COSTS_N_INSNS (6);
+ break;
+ case 15:
+ *total = COSTS_N_INSNS (6 - AVR_HAVE_MOVW);
+ break;
case 16:
+ *total = COSTS_N_INSNS (4 - AVR_HAVE_MOVW);
+ break;
case 24:
- *total = COSTS_N_INSNS (6);
+ *total = COSTS_N_INSNS (5);
break;
case 2:
*total = COSTS_N_INSNS (!speed ? 7 : 8);
switch (mode)
{
- case E_QImode:
+ case E_QImode: // lshrqi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 4 : 17);
}
break;
- case E_HImode:
+ case E_HImode: // lshrhi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 5 : 41);
}
break;
- case E_PSImode:
+ case E_PSImode: // lshrpsi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 6 : 73);
case 16:
*total = COSTS_N_INSNS (3);
break;
+ case 15:
+ *total = COSTS_N_INSNS (6);
+ break;
case 23:
*total = COSTS_N_INSNS (5);
break;
}
break;
- case E_SImode:
+ case E_SImode: // lshrsi3
if (!CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (!speed ? 7 : 113);
;; The following ISA attributes are actually not architecture specific,
;; but depend on (optimization) options. This is because the "enabled"
;; attribut can't depend on more than one other attribute. This means
-;; that 2op and 3op must work for all ISAs, and hence a 'flat' attribue
-;; scheme can be used (as opposed to a true cartesian product).
+;; that 3op must work for all ISAs, and hence a 'flat' attribute scheme
+;; can be used (as opposed to a true Cartesian product).
-;; 2op : insn is a 2-operand insn 3op : insn is a 3-operand insn
+;; 3op : alternative is a 3-operand insn
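+;; For example, the C4l / C4a / C4r alternatives of the 4-byte shift insns
+;; below carry isa "3op" and are therefore only enabled when
+;; avr_shift_is_3op () returns true.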
(define_attr "isa"
"mov,movw, rjmp,jmp, ijmp,eijmp, lpm,lpmx, elpm,elpmx, no_xmega,xmega,
no_adiw,adiw,
- 2op,3op,
+ 3op,
standard"
(const_string "standard"))
(and (eq_attr "isa" "no_adiw")
(match_test "!AVR_HAVE_ADIW"))
- (and (eq_attr "isa" "2op")
- (match_test "!avr_shift_is_3op ()"))
-
(and (eq_attr "isa" "3op")
(match_test "avr_shift_is_3op ()"))
)
;; "ashlhq3" "ashluhq3"
;; "ashlha3" "ashluha3"
(define_insn_and_split "ashl<mode>3"
- [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r,r")
- (ashift:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,n,Qm")))]
+ [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashift:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,C2l,n,Qm")))]
""
"#"
"&& reload_completed"
[(parallel [(set (match_dup 0)
(ashift:ALL2 (match_dup 1)
(match_dup 2)))
- (clobber (reg:CC REG_CC))])])
+ (clobber (reg:CC REG_CC))])]
+ ""
+ [(set_attr "isa" "*,*,*,3op,*,*")])
;; "*ashlhi3"
;; "*ashlhq3" "*ashluhq3"
;; "*ashlha3" "*ashluha3"
(define_insn "*ashl<mode>3"
- [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r,r")
- (ashift:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,n,Qm")))
+ [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashift:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,C2l,n,Qm")))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashlhi3_out (insn, operands, NULL);
}
- [(set_attr "length" "10")
+ [(set_attr "isa" "*,*,*,3op,*,*")
+ (set_attr "length" "10")
(set_attr "adjust_len" "ashlhi")])
;; "ashlsq3" "ashlusq3"
;; "ashlsa3" "ashlusa3"
(define_insn_and_split "ashl<mode>3"
- [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r ,r,r")
- (ashift:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4l,C4l,n,Qm")))]
+ [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashift:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4l,n,Qm")))]
""
"#"
"&& reload_completed"
(match_dup 2)))
(clobber (reg:CC REG_CC))])]
""
- [(set_attr "isa" "*,*,*,2op,3op,*,*")])
+ [(set_attr "isa" "*,*,*,3op,*,*")])
(define_insn "*ashl<mode>3"
- [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r ,r,r")
- (ashift:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4l,C4l,n,Qm")))
+ [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashift:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4l,n,Qm")))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashlsi3_out (insn, operands, NULL);
}
- [(set_attr "length" "12")
- (set_attr "adjust_len" "ashlsi")
- (set_attr "isa" "*,*,*,2op,3op,*,*")])
+ [(set_attr "isa" "*,*,*,3op,*,*")
+ (set_attr "length" "12")
+ (set_attr "adjust_len" "ashlsi")])
;; Optimize if a scratch register from LD_REGS happens to be available.
operands[2] = avr_to_int_mode (operands[0]);
})
+;; Endow 2-byte shifts with a scratch register if one is available.
(define_peephole2 ; *ashlhi3_const *ashrhi3_const *lshrhi3_const
[(match_scratch:QI 3 "d")
(parallel [(set (match_operand:ALL2 0 "register_operand")
;; "*ashlhq3_const" "*ashluhq3_const"
;; "*ashlha3_const" "*ashluha3_const"
(define_insn "*ashl<mode>3_const"
- [(set (match_operand:ALL2 0 "register_operand" "=r ,r ,r")
- (ashift:ALL2 (match_operand:ALL2 1 "register_operand" "0 ,r ,0")
- (match_operand:QI 2 "const_int_operand" "LPK,O C7c C15,n")))
- (clobber (match_scratch:QI 3 "=X ,X ,&d"))
+ [(set (match_operand:ALL2 0 "register_operand" "=r ,r ,r ,r")
+ (ashift:ALL2 (match_operand:ALL2 1 "register_operand" "0 ,r ,r ,0")
+ (match_operand:QI 2 "const_int_operand" "LPK,O C7c C15,C2l,n")))
+ (clobber (match_scratch:QI 3 "=X ,X ,&d ,&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashlhi3_out (insn, operands, NULL);
}
- [(set_attr "length" "10")
+ [(set_attr "isa" "*,*,3op,*")
+ (set_attr "length" "10")
(set_attr "adjust_len" "ashlhi")])
-(define_code_attr constr_split_shift4
- [(ashift "C4l")
- (ashiftrt "C4a")
- (lshiftrt "C4r")])
+(define_code_attr constr_split_suffix
+ [(ashift "l")
+ (ashiftrt "a")
+ (lshiftrt "r")])
;; Split shift into a byte shift and a residual bit shift (without scratch)
(define_split
- [(parallel [(set (match_operand:ALL4 0 "register_operand")
- (any_shift:ALL4 (match_operand:ALL4 1 "register_operand")
- (match_operand:QI 2 "const_int_operand")))
+ [(parallel [(set (match_operand:ALL234 0 "register_operand")
+ (any_shift:ALL234 (match_operand:ALL234 1 "register_operand")
+ (match_operand:QI 2 "const_int_operand")))
(clobber (reg:CC REG_CC))])]
"avropt_split_bit_shift
&& n_avr_fuse_add_executed >= 1
- && satisfies_constraint_<constr_split_shift4> (operands[2])"
+ && satisfies_constraint_C<SIZE><constr_split_suffix> (operands[2])"
[(parallel [(set (match_dup 0)
- (any_shift:ALL4 (match_dup 1)
- (match_dup 3)))
+ (any_shift:ALL234 (match_dup 1)
+ (match_dup 3)))
+ (clobber (scratch:QI))
(clobber (reg:CC REG_CC))])
(parallel [(set (match_dup 0)
- (any_shift:ALL4 (match_dup 0)
- (match_dup 4)))
+ (any_shift:ALL234 (match_dup 0)
+ (match_dup 4)))
(clobber (reg:CC REG_CC))])]
{
+ int offset = INTVAL (operands[2]);
if (avr_split_shift (operands, NULL_RTX, <CODE>))
DONE;
- else if (REGNO (operands[0]) == REGNO (operands[1]))
+ else if (offset <= 8)
FAIL;
- int offset = INTVAL (operands[2]);
operands[3] = GEN_INT (offset & ~7);
operands[4] = GEN_INT (offset & 7);
})
;; Split shift into a byte shift and a residual bit shift (with scratch)
(define_split
- [(parallel [(set (match_operand:ALL4 0 "register_operand")
- (any_shift:ALL4 (match_operand:ALL4 1 "register_operand")
- (match_operand:QI 2 "const_int_operand")))
+ [(parallel [(set (match_operand:ALL234 0 "register_operand")
+ (any_shift:ALL234 (match_operand:ALL234 1 "register_operand")
+ (match_operand:QI 2 "const_int_operand")))
(clobber (match_operand:QI 3 "scratch_or_dreg_operand"))
(clobber (reg:CC REG_CC))])]
"avropt_split_bit_shift
&& n_avr_fuse_add_executed >= 1
- && satisfies_constraint_<constr_split_shift4> (operands[2])"
+ && satisfies_constraint_C<SIZE><constr_split_suffix> (operands[2])"
[(parallel [(set (match_dup 0)
- (any_shift:ALL4 (match_dup 1)
- (match_dup 4)))
+ (any_shift:ALL234 (match_dup 1)
+ (match_dup 4)))
+ (clobber (scratch:QI))
(clobber (reg:CC REG_CC))])
(parallel [(set (match_dup 0)
- (any_shift:ALL4 (match_dup 0)
- (match_dup 5)))
+ (any_shift:ALL234 (match_dup 0)
+ (match_dup 5)))
(clobber (match_dup 3))
(clobber (reg:CC REG_CC))])]
{
+ int offset = INTVAL (operands[2]);
if (avr_split_shift (operands, operands[3], <CODE>))
DONE;
- else if (REGNO (operands[0]) == REGNO (operands[1]))
+ else if (offset <= 8)
FAIL;
- int offset = INTVAL (operands[2]);
operands[4] = GEN_INT (offset & ~7);
operands[5] = GEN_INT (offset & 7);
})
;; "*ashlsq3_const" "*ashlusq3_const"
;; "*ashlsa3_const" "*ashlusa3_const"
(define_insn "*ashl<mode>3_const"
- [(set (match_operand:ALL4 0 "register_operand" "=r ,r ,r ,r ,r")
- (ashift:ALL4 (match_operand:ALL4 1 "register_operand" "0 ,r ,0 ,r ,0")
- (match_operand:QI 2 "const_int_operand" "LP,O C15 C31,C4l,C4l,n")))
- (clobber (match_operand:QI 3 "scratch_or_dreg_operand" "=X ,X ,&d ,&d ,&d"))
+ [(set (match_operand:ALL4 0 "register_operand" "=r ,r ,r ,r")
+ (ashift:ALL4 (match_operand:ALL4 1 "register_operand" "0 ,r ,r ,0")
+ (match_operand:QI 2 "const_int_operand" "LP,O C15 C31,C4l,n")))
+ (clobber (match_operand:QI 3 "scratch_or_dreg_operand" "=X ,X ,&d ,&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashlsi3_out (insn, operands, NULL);
}
- [(set_attr "length" "10")
- (set_attr "adjust_len" "ashlsi")
- (set_attr "isa" "*,*,2op,3op,*")])
+ [(set_attr "isa" "*,*,3op,*")
+ (set_attr "length" "10")
+ (set_attr "adjust_len" "ashlsi")])
(define_expand "ashlpsi3"
[(parallel [(set (match_operand:PSI 0 "register_operand" "")
})
(define_insn_and_split "*ashlpsi3_split"
- [(set (match_operand:PSI 0 "register_operand" "=r,r,r ,r")
- (ashift:PSI (match_operand:PSI 1 "register_operand" "0,0,r ,0")
- (match_operand:QI 2 "nonmemory_operand" "r,P,O C23,n")))
- (clobber (match_scratch:QI 3 "=X,X,X ,&d"))]
+ [(set (match_operand:PSI 0 "register_operand" "=r,r,r ,r ,r")
+ (ashift:PSI (match_operand:PSI 1 "register_operand" "0,0,r ,r ,0")
+ (match_operand:QI 2 "nonmemory_operand" "r,P,O C15 C23,C3l,n")))
+ (clobber (match_scratch:QI 3 "=X,X,X ,&d ,&d"))]
""
"#"
"&& reload_completed"
(ashift:PSI (match_dup 1)
(match_dup 2)))
(clobber (match_dup 3))
- (clobber (reg:CC REG_CC))])])
+ (clobber (reg:CC REG_CC))])]
+ ""
+ [(set_attr "isa" "*,*,*,3op,*")])
(define_insn "*ashlpsi3"
- [(set (match_operand:PSI 0 "register_operand" "=r,r,r ,r")
- (ashift:PSI (match_operand:PSI 1 "register_operand" "0,0,r ,0")
- (match_operand:QI 2 "nonmemory_operand" "r,P,O C23,n")))
- (clobber (match_scratch:QI 3 "=X,X,X ,&d"))
+ [(set (match_operand:PSI 0 "register_operand" "=r,r,r ,r ,r")
+ (ashift:PSI (match_operand:PSI 1 "register_operand" "0,0,r ,r ,0")
+ (match_operand:QI 2 "nonmemory_operand" "r,P,O C15 C23,C3l,n")))
+ ; "X&d" since the insn may be a split of a 4-byte shift without scratch.
+ (clobber (match_scratch:QI 3 "=X,X,X ,X&d,X&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return avr_out_ashlpsi3 (insn, operands, NULL);
}
- [(set_attr "adjust_len" "ashlpsi")])
+ [(set_attr "isa" "*,*,*,3op,*")
+ (set_attr "adjust_len" "ashlpsi")])
;; >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >>
;; arithmetic shift right
;; "ashrhq3" "ashruhq3"
;; "ashrha3" "ashruha3"
(define_insn_and_split "ashr<mode>3"
- [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r,r")
- (ashiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C14 C15,n,Qm")))]
+ [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C07 C14 C15,C2a,n,Qm")))]
""
"#"
"&& reload_completed"
[(parallel [(set (match_dup 0)
(ashiftrt:ALL2 (match_dup 1)
(match_dup 2)))
- (clobber (reg:CC REG_CC))])])
+ (clobber (reg:CC REG_CC))])]
+ ""
+ [(set_attr "isa" "*,*,*,3op,*,*")])
;; "*ashrhi3"
;; "*ashrhq3" "*ashruhq3"
;; "*ashrha3" "*ashruha3"
(define_insn "*ashr<mode>3"
- [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r,r")
- (ashiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C14 C15,n,Qm")))
+ [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C07 C14 C15,C2a,n,Qm")))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashrhi3_out (insn, operands, NULL);
}
- [(set_attr "length" "10")
+ [(set_attr "isa" "*,*,*,3op,*,*")
+ (set_attr "length" "10")
(set_attr "adjust_len" "ashrhi")])
(define_insn_and_split "ashrpsi3"
- [(set (match_operand:PSI 0 "register_operand" "=r,r ,r ,r")
- (ashiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0 ,r ,0")
- (match_operand:QI 2 "nonmemory_operand" "r,PK,O C22 C23,n")))
- (clobber (match_scratch:QI 3 "=X,X ,X ,&d"))]
+ [(set (match_operand:PSI 0 "register_operand" "=r,r ,r ,r ,r")
+ (ashiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0 ,r ,r ,0")
+ (match_operand:QI 2 "nonmemory_operand" "r,PK,O C15 C22 C23,C3a,n")))
+ (clobber (match_scratch:QI 3 "=X,X ,X ,&d ,&d"))]
""
"#"
"&& reload_completed"
(ashiftrt:PSI (match_dup 1)
(match_dup 2)))
(clobber (match_dup 3))
- (clobber (reg:CC REG_CC))])])
+ (clobber (reg:CC REG_CC))])]
+ ""
+ [(set_attr "isa" "*,*,*,3op,*")])
(define_insn "*ashrpsi3"
- [(set (match_operand:PSI 0 "register_operand" "=r,r ,r ,r")
- (ashiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0 ,r ,0")
- (match_operand:QI 2 "nonmemory_operand" "r,PK,O C22 C23,n")))
- (clobber (match_scratch:QI 3 "=X,X ,X ,&d"))
+ [(set (match_operand:PSI 0 "register_operand" "=r,r ,r ,r ,r")
+ (ashiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0 ,r ,r ,0")
+ (match_operand:QI 2 "nonmemory_operand" "r,PK,O C15 C22 C23,C3a,n")))
+ ; "X&d" since the insn may be a split of a 4-byte shift without scratch.
+ (clobber (match_scratch:QI 3 "=X,X ,X ,X&d,X&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return avr_out_ashrpsi3 (insn, operands, NULL);
}
- [(set_attr "adjust_len" "ashrpsi")])
+ [(set_attr "isa" "*,*,*,3op,*")
+ (set_attr "adjust_len" "ashrpsi")])
;; "ashrsi3"
;; "ashrsq3" "ashrusq3"
;; "ashrsa3" "ashrusa3"
(define_insn_and_split "ashr<mode>3"
- [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r ,r,r")
- (ashiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C30 C31,C4a,C4a,n,Qm")))]
+ [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C30 C31,C4a,n,Qm")))]
""
"#"
"&& reload_completed"
(match_dup 2)))
(clobber (reg:CC REG_CC))])]
""
- [(set_attr "isa" "*,*,*,2op,3op,*,*")])
+ [(set_attr "isa" "*,*,*,3op,*,*")])
(define_insn "*ashr<mode>3"
- [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r ,r,r")
- (ashiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C30 C31,C4a,C4a,n,Qm")))
+ [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (ashiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C30 C31,C4a,n,Qm")))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashrsi3_out (insn, operands, NULL);
}
- [(set_attr "isa" "*,*,*,2op,3op,*,*")
+ [(set_attr "isa" "*,*,*,3op,*,*")
(set_attr "length" "12")
(set_attr "adjust_len" "ashrsi")])
;; "*ashrhq3_const" "*ashruhq3_const"
;; "*ashrha3_const" "*ashruha3_const"
(define_insn "*ashr<mode>3_const"
- [(set (match_operand:ALL2 0 "register_operand" "=r ,r ,r")
- (ashiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0 ,r ,0")
- (match_operand:QI 2 "const_int_operand" "LPK,O C14 C15,n")))
- (clobber (match_scratch:QI 3 "=X ,X ,&d"))
+ [(set (match_operand:ALL2 0 "register_operand" "=r ,r ,r ,r")
+ (ashiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0 ,r ,r ,0")
+ (match_operand:QI 2 "const_int_operand" "LPK,O C07 C14 C15,C2a,n")))
+ (clobber (match_scratch:QI 3 "=X ,X ,&d ,&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashrhi3_out (insn, operands, NULL);
}
- [(set_attr "length" "10")
+ [(set_attr "isa" "*,*,3op,*")
+ (set_attr "length" "10")
(set_attr "adjust_len" "ashrhi")])
;; "*ashrsq3_const" "*ashrusq3_const"
;; "*ashrsa3_const" "*ashrusa3_const"
(define_insn "*ashr<mode>3_const"
- [(set (match_operand:ALL4 0 "register_operand" "=r ,r ,r ,r ,r")
- (ashiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0 ,r ,0 ,r ,0")
- (match_operand:QI 2 "const_int_operand" "LP,O C30 C31,C4a,C4a,n")))
- (clobber (match_operand:QI 3 "scratch_or_dreg_operand" "=X ,X ,&d ,&d ,&d"))
+ [(set (match_operand:ALL4 0 "register_operand" "=r ,r ,r ,r")
+ (ashiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0 ,r ,r ,0")
+ (match_operand:QI 2 "const_int_operand" "LP,O C15 C30 C31,C4a,n")))
+ (clobber (match_operand:QI 3 "scratch_or_dreg_operand" "=X ,X ,&d ,&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return ashrsi3_out (insn, operands, NULL);
}
- [(set_attr "isa" "*,*,2op,3op,*")
+ [(set_attr "isa" "*,*,3op,*")
(set_attr "length" "10")
(set_attr "adjust_len" "ashrsi")])
;; "lshrhq3" "lshruhq3"
;; "lshrha3" "lshruha3"
(define_insn_and_split "lshr<mode>3"
- [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r,r")
- (lshiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,n,Qm")))]
+ [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (lshiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,C2r,n,Qm")))]
""
"#"
"&& reload_completed"
[(parallel [(set (match_dup 0)
(lshiftrt:ALL2 (match_dup 1)
(match_dup 2)))
- (clobber (reg:CC REG_CC))])])
+ (clobber (reg:CC REG_CC))])]
+ ""
+ [(set_attr "isa" "*,*,*,3op,*,*")])
(define_insn "*lshr<mode>3"
- [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r,r")
- (lshiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,n,Qm")))
+ [(set (match_operand:ALL2 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (lshiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C7c C15,C2r,n,Qm")))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return lshrhi3_out (insn, operands, NULL);
}
- [(set_attr "adjust_len" "lshrhi")])
+ [(set_attr "isa" "*,*,*,3op,*,*")
+ (set_attr "adjust_len" "lshrhi")])
(define_insn_and_split "lshrpsi3"
- [(set (match_operand:PSI 0 "register_operand" "=r,r,r,r ,r,r")
- (lshiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0,r,r ,0,0")
- (match_operand:QI 2 "nonmemory_operand" "r,P,O,C23,K,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,X ,X,&d"))]
+ [(set (match_operand:PSI 0 "register_operand" "=r,r ,r ,r ,r")
+ (lshiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0 ,r ,r ,0")
+ (match_operand:QI 2 "nonmemory_operand" "r,PK,O C15 C23,C3r,n")))
+ (clobber (match_scratch:QI 3 "=X,X ,X ,&d ,&d"))]
""
"#"
"&& reload_completed"
(lshiftrt:PSI (match_dup 1)
(match_dup 2)))
(clobber (match_dup 3))
- (clobber (reg:CC REG_CC))])])
+ (clobber (reg:CC REG_CC))])]
+ ""
+ [(set_attr "isa" "*,*,*,3op,*")])
(define_insn "*lshrpsi3"
- [(set (match_operand:PSI 0 "register_operand" "=r,r,r,r ,r,r")
- (lshiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0,r,r ,0,0")
- (match_operand:QI 2 "nonmemory_operand" "r,P,O,C23,K,n")))
- (clobber (match_scratch:QI 3 "=X,X,X,X ,X,&d"))
+ [(set (match_operand:PSI 0 "register_operand" "=r,r ,r ,r ,r")
+ (lshiftrt:PSI (match_operand:PSI 1 "register_operand" "0,0 ,r ,r ,0")
+ (match_operand:QI 2 "nonmemory_operand" "r,PK,O C15 C23,C3r,n")))
+ ; "X&d" since the insn may be a split of a 4-byte shift without scratch.
+ (clobber (match_scratch:QI 3 "=X,X ,X ,X&d,X&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return avr_out_lshrpsi3 (insn, operands, NULL);
}
- [(set_attr "adjust_len" "lshrpsi")])
+ [(set_attr "isa" "*,*,*,3op,*")
+ (set_attr "adjust_len" "lshrpsi")])
;; "lshrsi3"
;; "lshrsq3" "lshrusq3"
;; "lshrsa3" "lshrusa3"
(define_insn_and_split "lshr<mode>3"
- [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r ,r,r")
- (lshiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4r,C4r,n,Qm")))]
+ [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (lshiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4r,n,Qm")))]
""
"#"
"&& reload_completed"
(match_dup 2)))
(clobber (reg:CC REG_CC))])]
""
- [(set_attr "isa" "*,*,*,2op,3op,*,*")])
+ [(set_attr "isa" "*,*,*,3op,*,*")])
(define_insn "*lshr<mode>3"
- [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r ,r,r")
- (lshiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,0 ,r ,0,0")
- (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4r,C4r,n,Qm")))
+ [(set (match_operand:ALL4 0 "register_operand" "=r,r ,r ,r ,r,r")
+ (lshiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0,0 ,r ,r ,0,0")
+ (match_operand:QI 2 "nop_general_operand" "r,LPK,O C15 C31,C4r,n,Qm")))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return lshrsi3_out (insn, operands, NULL);
}
- [(set_attr "isa" "*,*,*,2op,3op,*,*")
+ [(set_attr "isa" "*,*,*,3op,*,*")
(set_attr "adjust_len" "lshrsi")])
;; Optimize if a scratch register from LD_REGS happens to be available.
;; "*lshrhq3_const" "*lshruhq3_const"
;; "*lshrha3_const" "*lshruha3_const"
(define_insn "*lshr<mode>3_const"
- [(set (match_operand:ALL2 0 "register_operand" "=r ,r ,r")
- (lshiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0 ,r ,0")
- (match_operand:QI 2 "const_int_operand" "LPK,O C7c C15,n")))
- (clobber (match_scratch:QI 3 "=X ,X ,&d"))
+ [(set (match_operand:ALL2 0 "register_operand" "=r ,r ,r ,r")
+ (lshiftrt:ALL2 (match_operand:ALL2 1 "register_operand" "0 ,r ,r ,0")
+ (match_operand:QI 2 "const_int_operand" "LPK,O C7c C15,C2r,n")))
+ (clobber (match_scratch:QI 3 "=X ,X ,&d ,&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return lshrhi3_out (insn, operands, NULL);
}
- [(set_attr "adjust_len" "lshrhi")])
+ [(set_attr "isa" "*,*,3op,*")
+ (set_attr "adjust_len" "lshrhi")])
;; "*lshrsi3_const"
;; "*lshrsq3_const" "*lshrusq3_const"
;; "*lshrsa3_const" "*lshrusa3_const"
(define_insn "*lshr<mode>3_const"
- [(set (match_operand:ALL4 0 "register_operand" "=r ,r ,r ,r ,r")
- (lshiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0 ,r ,0 ,r ,0")
- (match_operand:QI 2 "const_int_operand" "LP,O C15 C31,C4r,C4r,n")))
- (clobber (match_operand:QI 3 "scratch_or_dreg_operand" "=X ,X ,&d ,&d ,&d"))
+ [(set (match_operand:ALL4 0 "register_operand" "=r ,r ,r ,r")
+ (lshiftrt:ALL4 (match_operand:ALL4 1 "register_operand" "0 ,r ,r ,0")
+ (match_operand:QI 2 "const_int_operand" "LP,O C15 C31,C4r,n")))
+ (clobber (match_operand:QI 3 "scratch_or_dreg_operand" "=X ,X ,&d ,&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
{
return lshrsi3_out (insn, operands, NULL);
}
- [(set_attr "isa" "*,*,2op,3op,*")
+ [(set_attr "isa" "*,*,3op,*")
(set_attr "adjust_len" "lshrsi")])
;; abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x)