case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
case ADJUST_LEN_FLOAD: avr_out_fload (insn, op, &len); break;
case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;
+ case ADJUST_LEN_SEXTR: avr_out_sextr (insn, op, &len); break;
case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
? INTVAL (XEXP (x, 1))
: -1;
+ if (avropt_pr118012)
+ {
+ if ((code == IOR || code == XOR || code == PLUS)
+ && GET_CODE (XEXP (x, 0)) == ASHIFT
+ && GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
+ && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == AND
+ && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == const1_rtx)
+ {
+ *total = COSTS_N_INSNS (2 + n_bytes);
+ return true;
+ }
+ }
+
switch (code)
{
case CONST_INT:
return true;
case NEG:
+ if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
+ && GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTRACT)
+ {
+ // Just a sign_extract of bit 0?
+ rtx y = XEXP (XEXP (x, 0), 0);
+ if (XEXP (y, 1) == const1_rtx
+ && XEXP (y, 2) == const0_rtx)
+ {
+ *total = COSTS_N_INSNS (1 + n_bytes
+ - (AVR_HAVE_MOVW && n_bytes == 4));
+ return true;
+ }
+ }
+
switch (mode)
{
case E_QImode:
return true;
case MULT:
+ if (avropt_pr118012)
+ {
+ if (GET_CODE (XEXP (x, 0)) == AND
+ && XEXP (XEXP (x, 0), 1) == const1_rtx)
+ {
+ // Try to defeat PR118012. The MUL variant is actually very
+ // expensive, but combine is given a pattern to transform this
+ // into something less toxic. Though this might not work
+ // for SImode, and we still have a completely ridiculous
+ // 32-bit multiplication instead of a simple bit test on
+ // devices that don't even have MUL. This is because on
+ // AVR_TINY, we'll get a libcall which we cannot undo.
+ // (On other devices that don't have MUL, the libcall is
+          // bypassed by providing mulsi3, cf. insn mulsi3_[call_]pr118012.)
+ *total = 0;
+ return true;
+ }
+ } // PR118012
+
switch (mode)
{
case E_QImode:
}
+/* Output code for XOP[0] = sign_extract (XOP[1].0) and return "".
+   Only an extract of bit 0 is supported, cf. the "const0_operand"
+   predicate of the insns with adjust_len == sextr.
+   PLEN == NULL: Output instructions.
+   PLEN != NULL: Set *PLEN to the length of the sequence in words.  */
+
+const char *
+avr_out_sextr (rtx_insn *insn, rtx *xop, int *plen)
+{
+  rtx dest = xop[0];
+  rtx src = xop[1];
+  int bit = INTVAL (xop[2]);
+  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
+
+  // Only bit number 0 is implemented.
+  gcc_assert (bit == 0);
+
+  // Shift bit 0 into the carry flag.  SRC may only be clobbered
+  // when it is unused after this insn; otherwise shift a copy
+  // held in DEST.
+  if (reg_unused_after (insn, src))
+    avr_asm_len ("lsr %1", xop, plen, -1);
+  else
+    avr_asm_len ("mov %0,%1" CR_TAB
+                 "lsr %0", xop, plen, -2);
+
+  // SBC Rd,Rd yields 0x00 when the carry is clear and 0xff when it
+  // is set, i.e. it replicates the extracted bit over a whole byte.
+  for (int i = 0; i < n_bytes; ++i)
+    {
+      rtx b = avr_byte (dest, i);
+      avr_asm_len ("sbc %0,%0", &b, plen, 1);
+      // For 4-byte destinations the high word equals the low word:
+      // one MOVW is cheaper than two more SBCs.
+      if (i == 1 && n_bytes == 4 && AVR_HAVE_MOVW)
+        return avr_asm_len ("movw %C0,%A0", xop, plen, 1);
+    }
+
+  return "";
+}
+
+
+/*
+   if (bits.bitno <eqne> 0)
+     dest = op0;
+   else
+     dest = op0 <pix> op1;
+
+   Performed as:
+
+   dest = op0;
+   if (bits.bitno <eqne> 0)
+     goto LL;
+   dest o= op1;
+LL:;  */
+
+// Emit the RTL sequence above.  PIX is the payload operation (PLUS,
+// IOR or XOR), EQNE is the skip condition (only EQ is implemented,
+// see the assert below), and BITS.BITNO is the tested bit.
+
+void
+avr_emit_skip_pixop (rtx_code pix, rtx dest, rtx op0, rtx op1,
+                     rtx_code eqne, rtx bits, int bitno)
+{
+  // Only the EQ form is required by the patterns using this helper.
+  gcc_assert (eqne == EQ);
+
+  const machine_mode mode = GET_MODE (dest);
+
+  // Get rid of early-clobbers:  DEST is written before BITS and OP1
+  // are read, hence neither may overlap DEST.
+
+  if (reg_overlap_mentioned_p (dest, bits))
+    bits = copy_to_mode_reg (GET_MODE (bits), bits);
+
+  if (reg_overlap_mentioned_p (dest, op1))
+    op1 = copy_to_mode_reg (mode, op1);
+
+  // xorqi3 has "register_operand" for op1.
+  if (mode == QImode && pix == XOR)
+    op1 = force_reg (QImode, op1);
+
+  emit_move_insn (dest, op0);
+
+  // Skip the payload if bits.bitno <eqne> 0.
+  rtx xlabel = gen_label_rtx ();
+  rtx zerox = gen_rtx_ZERO_EXTRACT (QImode, bits, const1_rtx, GEN_INT (bitno));
+  rtx cond = gen_rtx_fmt_ee (eqne, VOIDmode, zerox, const0_rtx);
+  emit (gen_sbrx_branchqi_split (cond, bits, const0_rtx, xlabel));
+
+  // Payload: plus, ior, xor for HI, PSI, SI have a scratch:QI;
+  // QI and plus:HI don't.
+  rtx src = gen_rtx_fmt_ee (pix, mode, dest, op1);
+  rtx set = gen_rtx_SET (dest, src);
+  rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (QImode));
+  bool no_scratch = mode == QImode || (mode == HImode && pix == PLUS);
+  emit (no_scratch
+        ? set
+        : gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
+
+  emit_label (xlabel);
+}
+
+
+/*
+   if (bits.bitno <eqne> 0)
+     dest = src;
+   else
+     dest = 0;
+
+   Performed as:
+
+   dest = src;
+   if (bits.bitno <eqne> 0)
+     goto LL;
+   dest = 0;
+LL:;  */
+
+// Emit the RTL sequence above.  EQNE is the skip condition and
+// BITS.BITNO is the tested bit.
+
+void
+avr_emit_skip_clear (rtx dest, rtx src, rtx_code eqne, rtx bits, int bitno)
+{
+  const machine_mode mode = GET_MODE (dest);
+
+  // Get rid of early-clobber:  BITS is read after DEST has been set,
+  // hence it must not overlap DEST.
+  if (reg_overlap_mentioned_p (dest, bits))
+    bits = copy_to_mode_reg (GET_MODE (bits), bits);
+
+  emit_move_insn (dest, src);
+
+  // Skip the clearing if bits.bitno <eqne> 0.
+  rtx xlabel = gen_label_rtx ();
+  rtx zerox = gen_rtx_ZERO_EXTRACT (QImode, bits, const1_rtx, GEN_INT (bitno));
+  rtx cond = gen_rtx_fmt_ee (eqne, VOIDmode, zerox, const0_rtx);
+  emit (gen_sbrx_branchqi_split (cond, bits, const0_rtx, xlabel));
+
+  // Payload: dest = 0;
+  emit_move_insn (dest, CONST0_RTX (mode));
+
+  emit_label (xlabel);
+}
+
+
/* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
static void
;; Otherwise do special processing depending on the attribute.
(define_attr "adjust_len"
- "out_bitop, plus, addto_sp, sext, extr, extr_not, plus_ext,
+ "out_bitop, plus, addto_sp, sext, extr, extr_not, plus_ext, sextr,
tsthi, tstpsi, tstsi, compare, compare64, call,
mov8, mov16, mov24, mov32, reload_in16, reload_in24, reload_in32,
ufract, sfract, round,
(define_code_iterator any_lshift [lshiftrt ashift]) ; logic shift
(define_code_iterator piaop [plus ior and])
+(define_code_iterator pixop [plus ior xor])
(define_code_iterator bitop [xor ior and])
(define_code_iterator xior [xor ior])
(define_code_iterator eqne [eq ne])
(match_operand:SI 2 "nonmemory_operand" "")))
(clobber (reg:HI 26))
(clobber (reg:DI 18))])]
- "AVR_HAVE_MUL"
+ "AVR_HAVE_MUL
+ || (avropt_pr118012
+ /* AVR_TINY passes args on the stack, so we cannot work
+ around PR118012 like this. */
+ && ! AVR_TINY)"
{
+ if (! AVR_HAVE_MUL)
+ {
+ emit (gen_gen_mulsi3_pr118012 (operands[0], operands[1], operands[2]));
+ DONE;
+ }
+
if (u16_operand (operands[2], SImode))
{
operands[2] = force_reg (HImode, gen_int_mode (INTVAL (operands[2]), HImode));
DONE;
})
+;; With PR118012, we do __mulsi3 as a transparent call, so insn combine
+;; can transform (mult:SI (and:SI * (const_int 1))) into something
+;; less toxic.
+;; X, Z and R18..R25 are clobbered, matching the call protocol of the
+;; insns below.
+(define_expand "gen_mulsi3_pr118012"
+  [(parallel [(set (match_operand:SI 0 "register_operand")
+                   (mult:SI (match_operand:SI 1 "register_operand")
+                            (match_operand:SI 2 "nonmemory_operand")))
+              (clobber (reg:HI 26))
+              (clobber (reg:HI 30))
+              (clobber (reg:DI 18))])]
+  "avropt_pr118012
+   && ! AVR_HAVE_MUL
+   && ! AVR_TINY"
+  {
+    // The pattern above wants both factors in registers.
+    operands[2] = force_reg (SImode, operands[2]);
+    // Presumably this re-emits with a fixed-up operand 0 (mask 1 << 0)
+    // when it overlaps the clobbered hard regs -- cf. avr_emit3_fix_outputs.
+    if (avr_emit3_fix_outputs (gen_gen_mulsi3_pr118012, operands, 1 << 0,
+          regmask (DImode, 18) | regmask (HImode, 26) | regmask (HImode, 30)))
+      DONE;
+  })
+
(define_insn_and_split "*mulsi3"
[(set (match_operand:SI 0 "pseudo_register_operand" "=r")
(mult:SI (match_operand:SI 1 "pseudo_register_operand" "r")
}
})
+;; Keep the multiplication around as a recognizable mult:SI so that
+;; combine can still simplify it, and only lower it to the __mulsi3
+;; call protocol (R22:SI * R18:SI -> R22:SI) when splitting.
+(define_insn_and_split "*mulsi3_pr118012"
+  [(set (match_operand:SI 0 "pseudo_register_operand" "=r")
+        (mult:SI (match_operand:SI 1 "pseudo_register_operand" "r")
+                 (match_operand:SI 2 "pseudo_register_operand" "r")))
+   (clobber (reg:HI 26))
+   (clobber (reg:HI 30))
+   (clobber (reg:DI 18))]
+  "avropt_pr118012
+   && ! AVR_HAVE_MUL
+   && ! AVR_TINY
+   && ! reload_completed"
+  ;; This insn is always split before output, hence no template.
+  { gcc_unreachable(); }
+  "&& 1"
+  [(set (reg:SI 18)
+        (match_dup 1))
+   (set (reg:SI 22)
+        (match_dup 2))
+   (parallel [(set (reg:SI 22)
+                   (mult:SI (reg:SI 22)
+                            (reg:SI 18)))
+              (clobber (reg:SI 18))
+              (clobber (reg:HI 26))
+              (clobber (reg:HI 30))])
+   (set (match_dup 0)
+        (reg:SI 22))])
+
+
;; "muluqisi3"
;; "muluhisi3"
(define_expand "mulu<mode>si3"
(clobber (reg:HI 26))
(clobber (reg:CC REG_CC))])])
+;; Pre-reload form of the __mulsi3 call insn below:  the split just
+;; adds the clobber of REG_CC once reload has completed.
+(define_insn_and_split "*mulsi3_call_pr118012_split"
+  [(set (reg:SI 22)
+        (mult:SI (reg:SI 22)
+                 (reg:SI 18)))
+   (clobber (reg:SI 18))
+   (clobber (reg:HI 26))
+   (clobber (reg:HI 30))]
+  "avropt_pr118012
+   && ! AVR_HAVE_MUL
+   && ! AVR_TINY"
+  "#"
+  "&& reload_completed"
+  [(parallel [(set (reg:SI 22)
+                   (mult:SI (reg:SI 22)
+                            (reg:SI 18)))
+              (clobber (reg:SI 18))
+              (clobber (reg:HI 26))
+              (clobber (reg:HI 30))
+              (clobber (reg:CC REG_CC))])])
+
(define_insn "*mulsi3_call"
[(set (reg:SI 22)
(mult:SI (reg:SI 22)
"%~call __mulsi3"
[(set_attr "type" "xcall")])
+;; Call __mulsi3 with the factors in R22:SI and R18:SI; the result
+;; is returned in R22:SI.
+(define_insn "*mulsi3_call_pr118012"
+  [(set (reg:SI 22)
+        (mult:SI (reg:SI 22)
+                 (reg:SI 18)))
+   (clobber (reg:SI 18))
+   (clobber (reg:HI 26))
+   (clobber (reg:HI 30))
+   (clobber (reg:CC REG_CC))]
+  "avropt_pr118012
+   && ! AVR_HAVE_MUL
+   && ! AVR_TINY
+   && reload_completed"
+  "%~call __mulsi3"
+  [(set_attr "type" "xcall")])
+
;; "*mulhisi3_call"
;; "*umulhisi3_call"
(define_insn_and_split "*<extend_u>mulhisi3_call_split"
;; Combine will create zero-extract patterns for single-bit tests.
;; Permit any mode in source pattern by using VOIDmode.
-(define_insn_and_split "*sbrx_branch<mode>_split"
+(define_insn_and_split "sbrx_branch<mode>_split"
[(set (pc)
(if_then_else
(match_operator 0 "eqne_operator"
[(set (pc)
(if_then_else (ge (match_operand:QI 0 "register_operand" "")
(const_int 0))
- (label_ref (match_operand 1 "" ""))
- (pc)))]
+ (label_ref (match_operand 1 "" ""))
+ (pc)))]
""
"#"
"reload_completed"
(define_insn_and_split "*extzv.qihi1"
[(set (match_operand:HI 0 "register_operand" "=r")
- (zero_extract:HI (match_operand:QI 1 "register_operand" "r")
+ (zero_extract:HI (match_operand:QIHI 1 "register_operand" "r")
(const_int 1)
- (match_operand:QI 2 "const_0_to_7_operand" "n")))]
+ (match_operand:QI 2 "const_0_to_<MSB>_operand" "n")))]
""
"#"
""
(match_dup 2)))])
+;; $0 = sign_extract ($1.0):  replicate bit 0 of operand 1 over all
+;; bits of operand 0.  Pre-reload form; the split adds the REG_CC
+;; clobber.
+(define_insn_and_split "*sextr.<QISI:mode>.<QISI2:mode>_split"
+  [(set (match_operand:QISI 0 "register_operand" "=r")
+        (sign_extract:QISI (match_operand:QISI2 1 "register_operand" "r")
+                           (const_int 1)
+                           (match_operand:QI 2 "const0_operand" "L")))]
+  ""
+  "#"
+  "&& reload_completed"
+  [(parallel [(set (match_dup 0)
+                   (sign_extract:QISI (match_dup 1)
+                                      (const_int 1)
+                                      (match_dup 2)))
+              (clobber (reg:CC REG_CC))])])
+
+;; Post-reload form; the asm sequence is emitted by avr_out_sextr.
+(define_insn "*sextr.<QISI:mode>.<QISI2:mode>"
+  [(set (match_operand:QISI 0 "register_operand" "=r")
+        (sign_extract:QISI (match_operand:QISI2 1 "register_operand" "r")
+                           (const_int 1)
+                           (match_operand:QI 2 "const0_operand" "L")))
+   (clobber (reg:CC REG_CC))]
+  "reload_completed"
+  {
+    return avr_out_sextr (insn, operands, NULL);
+  }
+  [(set_attr "adjust_len" "sextr")])
+
+
+;; neg (zero_extend (zero_extract ($1.0))) computes the same value as
+;; sign_extract ($1.0):  canonicalize to the sextr insns above.
+;; NOTE(review): the name uses <QISI:mode> while the body mixes the
+;; iterators QISI and QIPSI -- confirm this mix is intended.
+(define_insn_and_split "*neg.zextr-to-sextr.<HISI:mode>.<QISI:mode>"
+  [(set (match_operand:HISI 0 "register_operand")
+        (neg:HISI (zero_extend:HISI
+                   (zero_extract:QIPSI (match_operand:QISI 1 "register_operand")
+                                       (const_int 1)
+                                       (match_operand:QI 2 "const0_operand")))))]
+  "avropt_pr118012
+   && <HISI:SIZE> > <QIPSI:SIZE>
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(set (match_dup 0)
+        (sign_extract:HISI (match_dup 1)
+                           (const_int 1)
+                           (match_dup 2)))])
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; PR118012: match.pd's
+;;
+;; /* (zero_one == 0) ? y : z <op> y -> ((typeof(y))zero_one * z) <op> y */
+;; /* (zero_one != 0) ? z <op> y : y -> ((typeof(y))zero_one * z) <op> y */
+;;
+;; introduces a crazy "optimization" that transforms code like
+;;
+;; if (b & 1)
+;; c ^= a;
+;; to
+;;
+;; u = extract_bit0 (b);
+;; v = zero_extend (u);
+;; w = NEG v;
+;; x = a AND w
+;; c ^= x
+;;
+;; or even to
+;;
+;; u = extract_bit0 (b);
+;; v = a MULT u
+;; c ^= v
+;;
+;; even on machines that don't have MUL instructions or that
+;; have to perform the multiplication by means of a libgcc call.
+;; Try to fix that below.  Notice that on AVR_TINY no MUL insn is
+;; available since it is performed as a libgcc call from which we
+;; cannot roll back.  With ! AVR_HAVE_MUL it's a transparent call
+;; from avr.md so we can get rid of that at least.
+
+;; Map
+;; $0 = ((sign_extract ($1.0)) AND $3) <op> $4
+;; to
+;; $0 = $4
+;; if ($1.0 == 0)
+;;     goto L
+;; $0 <op>= $3
+;; L:;
+;; where <op> is one of PLUS, IOR, XOR (code iterator "pixop").
+(define_insn_and_split "*pixop-to-skip.<QISI:mode>"
+  [(set (match_operand:QISI 0 "register_operand")
+        (pixop:QISI (and:QISI (sign_extract:QISI (match_operand:QISI2 1 "register_operand")
+                                                 (const_int 1)
+                                                 (match_operand:QI 2 "const0_operand"))
+                              (match_operand:QISI 3 "nonmemory_operand"))
+                    (match_operand:QISI 4 "register_operand")))]
+  "avropt_pr118012
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_pixop (<pixop:CODE>, operands[0], operands[4], operands[3],
+                         EQ, operands[1], 0);
+    DONE;
+  })
+
+;; Map
+;; $0 = (($1 AND 1) MULT $2) o $3
+;; to
+;; $0 = $3
+;; if ($1.0 == 0)
+;;     goto L
+;; $0 o= $2
+;; L:;
+;; where "o" is one of PLUS, IOR, XOR (code iterator "pixop").
+(define_insn_and_split "*mul.and1-to-skip.<mode>"
+  [(set (match_operand:QISI 0 "register_operand")
+        (pixop:QISI (mult:QISI (and:QISI (match_operand:QISI 1 "register_operand")
+                                         (const_int 1))
+                               (match_operand:QISI 2 "nonmemory_operand"))
+                    (match_operand:QISI 3 "register_operand")))]
+  "avropt_pr118012
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_pixop (<CODE>, operands[0], operands[3], operands[2],
+                         EQ, operands[1], 0);
+    DONE;
+  })
+
+;; Like the one above, but the bit test happens in a mode narrower
+;; than the payload operation, hence the extension.
+(define_insn_and_split "*mul.ext.and1-to-skip.<HISI:mode>"
+  [(set (match_operand:HISI 0 "register_operand")
+        (pixop:HISI (mult:HISI (any_extend:HISI (and:QIPSI (match_operand:QIPSI 1 "register_operand")
+                                                           (const_int 1)))
+                               (match_operand:HISI 2 "nonmemory_operand"))
+                    (match_operand:HISI 3 "register_operand")))]
+  "avropt_pr118012
+   && <HISI:SIZE> > <QIPSI:SIZE>
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_pixop (<pixop:CODE>, operands[0], operands[3], operands[2],
+                         EQ, operands[1], 0);
+    DONE;
+  })
+
+;; Like the one above, but where $2 was a power of 2 and MULT has been
+;; transformed to ASHIFT (PR118360).
+(define_insn_and_split "*shl.ext.and1-to-skip.<HISI:mode>"
+  [(set (match_operand:HISI 0 "register_operand")
+        (pixop:HISI (ashift:HISI (any_extend:HISI (and:QIPSI (match_operand:QIPSI 1 "register_operand")
+                                                             (const_int 1)))
+                                 (match_operand:QI 2 "const_int_operand"))
+                    (match_operand:HISI 3 "register_operand")))]
+  "avropt_pr118012
+   && <HISI:SIZE> > <QIPSI:SIZE>
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    // Recover the multiplicand 1 << $2 from the shift offset.
+    rtx op2 = gen_int_mode (1u << INTVAL (operands[2]), <HISI:MODE>mode);
+    avr_emit_skip_pixop (<pixop:CODE>, operands[0], operands[3], op2,
+                         EQ, operands[1], 0);
+    DONE;
+  })
+
+;; $0 = (($1 << $4) AND $2) o $3  with  $2 == 1 << $4, i.e. the AND
+;; extracts bit 0 of $1 shifted into position $4 -- guaranteed by the
+;; exact_log2 check in the insn condition.
+(define_insn_and_split "*shl.and-to-skip.<mode>"
+  [(set (match_operand:HISI 0 "register_operand")
+        (pixop:HISI (and:HISI (ashift:HISI (match_operand:HISI 1 "register_operand")
+                                           (match_operand:QI 4 "const_0_to_<MSB>_operand"))
+                              (match_operand:HISI 2 "single_one_operand"))
+                    (match_operand:HISI 3 "register_operand")))]
+  "avropt_pr118012
+   && exact_log2 (UINTVAL (operands[2]) & GET_MODE_MASK (<MODE>mode))
+      == INTVAL (operands[4])
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_pixop (<CODE>, operands[0], operands[3], operands[2],
+                         EQ, operands[1], 0);
+    DONE;
+  })
+
+
+;; Map
+;; $0 = ($1 AND 1) MULT $2
+;; to
+;; $0 = $2
+;; if ($1.0 != 0)
+;;     goto L
+;; $0 = 0
+;; L:;
+(define_insn_and_split "*map.mul.and1-to-skip.<QISI:mode>"
+  [(set (match_operand:QISI 0 "register_operand")
+        (mult:QISI (and:QISI (match_operand:QISI2 1 "register_operand")
+                             (const_int 1))
+                   (match_operand:QISI 2 "nonmemory_operand")))]
+  ;; NOTE(review): operand 1 uses iterator QISI2 inside and:QISI, so
+  ;; this presumably only matches when both iterators pick the same
+  ;; mode -- in which case it duplicates the pattern right below.
+  ;; Confirm which of the two is intended.
+  "avropt_pr118012
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_clear (operands[0], operands[2], NE, operands[1], 0);
+    DONE;
+  })
+
+(define_insn_and_split "*map.mul.and1-to-skip.<mode>"
+  [(set (match_operand:QISI 0 "register_operand")
+        (mult:QISI (and:QISI (match_operand:QISI 1 "register_operand")
+                             (const_int 1))
+                   (match_operand:QISI 2 "nonmemory_operand")))]
+  "avropt_pr118012
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_clear (operands[0], operands[2], NE, operands[1], 0);
+    DONE;
+  })
+
+;; Like the one above, but the bit test happens in a mode narrower
+;; than the multiplication, hence the extension.
+(define_insn_and_split "*map.mul.ext.and1-to-skip.<HISI:mode>"
+  [(set (match_operand:HISI 0 "register_operand")
+        (mult:HISI (any_extend:HISI (and:QIPSI (match_operand:QIPSI 1 "register_operand")
+                                               (const_int 1)))
+                   (match_operand:HISI 2 "nonmemory_operand")))]
+  "avropt_pr118012
+   && <HISI:SIZE> > <QIPSI:SIZE>
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_clear (operands[0], operands[2], NE, operands[1], 0);
+    DONE;
+  })
+
+;; Similar, but the MULT has been turned to ASHIFT.
+(define_insn_and_split "*map.shl.ext.and1-to-skip.<HISI:mode>"
+  [(set (match_operand:HISI 0 "register_operand")
+        (ashift:HISI (any_extend:HISI (and:QIPSI (match_operand:QIPSI 1 "register_operand")
+                                                 (const_int 1)))
+                     (match_operand:QI 2 "const_0_to_<HISI:MSB>_operand")))]
+  "avropt_pr118012
+   && <HISI:SIZE> > <QIPSI:SIZE>
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    // Recover the multiplicand 1 << $2 from the shift offset.
+    rtx op2 = gen_int_mode (1u << INTVAL (operands[2]), <HISI:MODE>mode);
+    avr_emit_skip_clear (operands[0], op2, NE, operands[1], 0);
+    DONE;
+  })
+
+
+;; Map
+;; $0 = sign_extract($1.0) AND $3
+;; to
+;; $0 = $3
+;; if ($1.0 != 0)
+;;     goto L
+;; $0 = 0
+;; L:;
+;; The sign_extract is all-ones or all-zeros, so ANDing with $3
+;; selects either $3 or 0 depending on bit $1.0.
+(define_insn_and_split "*map.and1-to-skip.<QISI:mode>"
+  [(set (match_operand:QISI 0 "register_operand")
+        (and:QISI (sign_extract:QISI (match_operand:QISI2 1 "register_operand")
+                                     (const_int 1)
+                                     (match_operand:QI 2 "const0_operand"))
+                  (match_operand:QISI 3 "nonmemory_operand")))]
+  "avropt_pr118012
+   && ! reload_completed"
+  { gcc_unreachable (); }
+  "&& 1"
+  [(scratch)]
+  {
+    avr_emit_skip_clear (operands[0], operands[3], NE, operands[1], 0);
+    DONE;
+  })
+
+
;; Work around PR115307: Early passes expand isinf/f/l to a bloat.
;; These passes do not consider costs, and there is no way to
;; hook in or otherwise disable the generated bloat.