;;- Machine description for ARM for GNU compiler
-;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
+;; Copyright (C) 1991-2020 Free Software Foundation, Inc.
;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
;; and Martin Simmons (@harleqn.co.uk).
;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
(LAST_ARM_REGNUM 15) ;
(CC_REGNUM 100) ; Condition code pseudo register
(VFPCC_REGNUM 101) ; VFP Condition code pseudo register
+ (APSRQ_REGNUM 104) ; Q bit pseudo register
+ (APSRGE_REGNUM 105) ; GE bits pseudo register
]
)
;; 3rd operand to select_dominance_cc_mode
(include "marvell-pj4.md")
(include "xgene1.md")
+;; define_subst and associated attributes
+
+;; Substitution that turns a plain SET into the same SET plus a write of
+;; the Q-bit pseudo register (saturation flag).  Used to generate "_setq"
+;; variants of saturating instructions when the Q bit is live.
+(define_subst "add_setq"
+  [(set (match_operand:SI 0 "" "")
+	(match_operand:SI 1 "" ""))]
+  ""
+  [(set (match_dup 0)
+	(match_dup 1))
+   (set (reg:CC APSRQ_REGNUM)
+	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
+
+;; Name suffix and predicate strings for the subst'd pattern: the base
+;; pattern is only valid when the Q bit is not read, the "_setq" variant
+;; when it is.
+(define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
+(define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
+  "ARM_Q_BIT_READ")
\f
;;---------------------------------------------------------------------------
;; Insn patterns
if (!arm_not_operand (hi_op2, SImode))
hi_op2 = force_reg (SImode, hi_op2);
- emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
+ emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
const0_rtx);
if (hi_op2 == const0_rtx)
"
)
-(define_expand "addv<mode>4"
-  [(match_operand:SIDI 0 "register_operand")
-   (match_operand:SIDI 1 "register_operand")
-   (match_operand:SIDI 2 "register_operand")
+;; Signed add with overflow check, SImode.  Emits an add that sets V,
+;; then a branch to operands[3] taken (unlikely) on signed overflow.
+(define_expand "addvsi4"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "s_register_operand")
+   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
-  emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
+  /* Pick the immediate or register form of the V-setting add.  */
+  if (CONST_INT_P (operands[2]))
+    emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
+  else
+    emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
  DONE;
})
-(define_expand "uaddv<mode>4"
-  [(match_operand:SIDI 0 "register_operand")
-   (match_operand:SIDI 1 "register_operand")
-   (match_operand:SIDI 2 "register_operand")
+;; Signed add with overflow check, DImode.  Decomposed into a lowpart
+;; ADDS (carry out) and a highpart ADCS that sets V; branch to
+;; operands[3] on signed overflow of the 64-bit result.
+(define_expand "addvdi4"
+  [(match_operand:DI 0 "s_register_operand")
+   (match_operand:DI 1 "s_register_operand")
+   (match_operand:DI 2 "reg_or_int_operand")
+   (match_operand 3 "")]
+  "TARGET_32BIT"
+{
+  rtx lo_result, hi_result;
+  rtx lo_op1, hi_op1, lo_op2, hi_op2;
+  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
+			  &lo_op2, &hi_op2);
+  lo_result = gen_lowpart (SImode, operands[0]);
+  hi_result = gen_highpart (SImode, operands[0]);
+
+  if (lo_op2 == const0_rtx)
+    {
+      /* Low half is a plain move; only the high half can overflow.  */
+      emit_move_insn (lo_result, lo_op1);
+      if (!arm_add_operand (hi_op2, SImode))
+	hi_op2 = force_reg (SImode, hi_op2);
+
+      emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
+    }
+  else
+    {
+      if (!arm_add_operand (lo_op2, SImode))
+	lo_op2 = force_reg (SImode, lo_op2);
+      if (!arm_not_operand (hi_op2, SImode))
+	hi_op2 = force_reg (SImode, hi_op2);
+
+      /* Lowpart add, leaving the carry for the highpart.  */
+      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
+
+      if (hi_op2 == const0_rtx)
+	emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
+      else if (CONST_INT_P (hi_op2))
+	emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
+      else
+	emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
+
+      arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
+    }
+
+  DONE;
+})
+
+;; Add-with-carry-in (register form) that also sets V: the sign-extended
+;; DImode sum is compared against the sign-extended SImode sum, so the
+;; comparison is non-trivial exactly when the SImode add overflowed.
+;; match_dups 3-5 (CC reg and the carry-in LTU rtxes) are built in C.
+(define_expand "addsi3_cin_vout_reg"
+  [(parallel
+    [(set (match_dup 3)
+	  (compare:CC_V
+	   (plus:DI
+	    (plus:DI (match_dup 4)
+		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
+	    (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
+	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
+				    (match_dup 2)))))
+     (set (match_operand:SI 0 "s_register_operand")
+	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
+		   (match_dup 2)))])]
+  "TARGET_32BIT"
+  {
+    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
+    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
+    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
+    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
+  }
+)
+
+;; Matching insn: ADCS.  First alternative is the 16-bit Thumb-2
+;; encoding (destination tied to operand 1).
+(define_insn "*addsi3_cin_vout_reg_insn"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (plus:DI
+	   (match_operand:DI 3 "arm_carry_operation" "")
+	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
+	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
+	 (sign_extend:DI
+	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
+			    (match_dup 1))
+		   (match_dup 2)))))
+   (set (match_operand:SI 0 "s_register_operand" "=l,r")
+	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
+		 (match_dup 2)))]
+  "TARGET_32BIT"
+  "@
+   adcs%?\\t%0, %0, %2
+   adcs%?\\t%0, %1, %2"
+  [(set_attr "type" "alus_sreg")
+   (set_attr "arch" "t2,*")
+   (set_attr "length" "2,4")]
+)
+
+;; As addsi3_cin_vout_reg, but operand 2 is an immediate valid for ADC
+;; (or whose bitwise complement is valid, handled via SBC).
+(define_expand "addsi3_cin_vout_imm"
+  [(parallel
+    [(set (match_dup 3)
+	  (compare:CC_V
+	   (plus:DI
+	    (plus:DI (match_dup 4)
+		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
+	    (match_dup 2))
+	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
+				    (match_dup 2)))))
+     (set (match_operand:SI 0 "s_register_operand")
+	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
+		   (match_operand 2 "arm_adcimm_operand")))])]
+  "TARGET_32BIT"
+  {
+    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
+    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
+    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
+    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
+  }
+)
+
+;; ADCS with an ADC-immediate, or SBCS with the complemented immediate
+;; (%B2 prints the bitwise complement of operand 2).
+(define_insn "*addsi3_cin_vout_imm_insn"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (plus:DI
+	   (match_operand:DI 3 "arm_carry_operation" "")
+	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
+	  (match_operand 2 "arm_adcimm_operand" "I,K"))
+	 (sign_extend:DI
+	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
+			    (match_dup 1))
+		   (match_dup 2)))))
+   (set (match_operand:SI 0 "s_register_operand" "=r,r")
+	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
+		 (match_dup 2)))]
+  "TARGET_32BIT"
+  "@
+   adcs%?\\t%0, %1, %2
+   sbcs%?\\t%0, %1, #%B2"
+  [(set_attr "type" "alus_imm")]
+)
+
+;; Degenerate case of the above: the second addend is zero, so the add
+;; reduces to "operand 1 plus carry-in" (ADCS rX, rY, #0) setting V.
+(define_expand "addsi3_cin_vout_0"
+  [(parallel
+    [(set (match_dup 2)
+	  (compare:CC_V
+	   (plus:DI (match_dup 3)
+		    (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
+	   (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
+     (set (match_operand:SI 0 "s_register_operand")
+	  (plus:SI (match_dup 4) (match_dup 1)))])]
+  "TARGET_32BIT"
+  {
+    operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
+    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
+    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
+    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
+  }
+)
+
+(define_insn "*addsi3_cin_vout_0_insn"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (match_operand:DI 2 "arm_carry_operation" "")
+	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
+	 (sign_extend:DI (plus:SI
+			  (match_operand:SI 3 "arm_carry_operation" "")
+			  (match_dup 1)))))
+   (set (match_operand:SI 0 "s_register_operand" "=r")
+	(plus:SI (match_dup 3) (match_dup 1)))]
+  "TARGET_32BIT"
+  "adcs%?\\t%0, %1, #0"
+  [(set_attr "type" "alus_imm")]
+)
+
+;; Unsigned add with overflow check, SImode: overflow is simply the
+;; carry out, so reuse the C-setting add and branch on LTU.
+(define_expand "uaddvsi4"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "s_register_operand")
+   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
-  emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
+  emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
  DONE;
})
+;; Unsigned add with overflow check, DImode: lowpart ADDS then highpart
+;; ADCS; overflow is the carry out of the high word (CC_ADC compares
+;; the zero-extended sum against 2^32).
+(define_expand "uaddvdi4"
+  [(match_operand:DI 0 "s_register_operand")
+   (match_operand:DI 1 "s_register_operand")
+   (match_operand:DI 2 "reg_or_int_operand")
+   (match_operand 3 "")]
+  "TARGET_32BIT"
+{
+  rtx lo_result, hi_result;
+  rtx lo_op1, hi_op1, lo_op2, hi_op2;
+  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
+			  &lo_op2, &hi_op2);
+  lo_result = gen_lowpart (SImode, operands[0]);
+  hi_result = gen_highpart (SImode, operands[0]);
+
+  if (lo_op2 == const0_rtx)
+    {
+      /* Low half is a plain move; only the high half can carry out.
+	 Note: gen_uaddvsi4 is a define_expand, so its insns must be
+	 emitted explicitly — calling it bare would discard them.  */
+      emit_move_insn (lo_result, lo_op1);
+      if (!arm_add_operand (hi_op2, SImode))
+	hi_op2 = force_reg (SImode, hi_op2);
+
+      emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
+    }
+  else
+    {
+      if (!arm_add_operand (lo_op2, SImode))
+	lo_op2 = force_reg (SImode, lo_op2);
+      if (!arm_not_operand (hi_op2, SImode))
+	hi_op2 = force_reg (SImode, hi_op2);
+
+      /* Lowpart add, leaving the carry for the highpart.  */
+      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
+
+      if (hi_op2 == const0_rtx)
+	emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
+      else if (CONST_INT_P (hi_op2))
+	emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
+      else
+	emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
+
+      arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
+    }
+
+  DONE;
+})
+
+;; Add-with-carry-in (register form) producing a carry out: the
+;; zero-extended DImode sum is compared against 2^32, i.e. CC_ADC
+;; records whether the SImode add carried.
+(define_expand "addsi3_cin_cout_reg"
+  [(parallel
+    [(set (match_dup 3)
+	  (compare:CC_ADC
+	   (plus:DI
+	    (plus:DI (match_dup 4)
+		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
+	    (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
+	   (const_int 4294967296)))
+     (set (match_operand:SI 0 "s_register_operand")
+	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
+		   (match_dup 2)))])]
+  "TARGET_32BIT"
+  {
+    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
+    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
+    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
+    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
+  }
+)
+
+;; ADCS; first alternative is the narrow Thumb-2 encoding.
+(define_insn "*addsi3_cin_cout_reg_insn"
+  [(set (reg:CC_ADC CC_REGNUM)
+	(compare:CC_ADC
+	 (plus:DI
+	  (plus:DI
+	   (match_operand:DI 3 "arm_carry_operation" "")
+	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
+	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
+	 (const_int 4294967296)))
+   (set (match_operand:SI 0 "s_register_operand" "=l,r")
+	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
+			  (match_dup 1))
+		 (match_dup 2)))]
+  "TARGET_32BIT"
+  "@
+   adcs%?\\t%0, %0, %2
+   adcs%?\\t%0, %1, %2"
+  [(set_attr "type" "alus_sreg")
+   (set_attr "arch" "t2,*")
+   (set_attr "length" "2,4")]
+)
+
+;; Immediate form of the carry-in/carry-out add.  Operand 6 is the
+;; immediate reduced to its low 32 bits so that the DImode comparison
+;; arm of the pattern stays canonical.
+(define_expand "addsi3_cin_cout_imm"
+  [(parallel
+    [(set (match_dup 3)
+	  (compare:CC_ADC
+	   (plus:DI
+	    (plus:DI (match_dup 4)
+		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
+	    (match_dup 6))
+	   (const_int 4294967296)))
+     (set (match_operand:SI 0 "s_register_operand")
+	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
+		   (match_operand:SI 2 "arm_adcimm_operand")))])]
+  "TARGET_32BIT"
+  {
+    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
+    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
+    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
+    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
+    operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
+  }
+)
+
+;; ADCS with an ADC-immediate, or SBCS with the complemented immediate.
+;; The insn condition re-checks that operand 5 is operand 2 masked to
+;; 32 bits, keeping the two copies of the constant consistent.
+(define_insn "*addsi3_cin_cout_imm_insn"
+  [(set (reg:CC_ADC CC_REGNUM)
+	(compare:CC_ADC
+	 (plus:DI
+	  (plus:DI
+	   (match_operand:DI 3 "arm_carry_operation" "")
+	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
+	  (match_operand:DI 5 "const_int_operand" "n,n"))
+	 (const_int 4294967296)))
+   (set (match_operand:SI 0 "s_register_operand" "=r,r")
+	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
+			  (match_dup 1))
+		 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
+  "TARGET_32BIT
+   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
+  "@
+   adcs%?\\t%0, %1, %2
+   sbcs%?\\t%0, %1, #%B2"
+  [(set_attr "type" "alus_imm")]
+)
+
+;; Carry-in/carry-out add with a zero addend: ADCS rX, rY, #0.
+(define_expand "addsi3_cin_cout_0"
+  [(parallel
+    [(set (match_dup 2)
+	  (compare:CC_ADC
+	   (plus:DI (match_dup 3)
+		    (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
+	   (const_int 4294967296)))
+     (set (match_operand:SI 0 "s_register_operand")
+	  (plus:SI (match_dup 4) (match_dup 1)))])]
+  "TARGET_32BIT"
+  {
+    operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
+    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
+    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
+    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
+  }
+)
+
+(define_insn "*addsi3_cin_cout_0_insn"
+  [(set (reg:CC_ADC CC_REGNUM)
+	(compare:CC_ADC
+	 (plus:DI
+	  (match_operand:DI 2 "arm_carry_operation" "")
+	  (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
+	 (const_int 4294967296)))
+   (set (match_operand:SI 0 "s_register_operand" "=r")
+	(plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
+  "TARGET_32BIT"
+  "adcs%?\\t%0, %1, #0"
+  [(set_attr "type" "alus_imm")]
+)
+
(define_expand "addsi3"
[(set (match_operand:SI 0 "s_register_operand")
(plus:SI (match_operand:SI 1 "s_register_operand")
]
)
-(define_insn "adddi3_compareV"
-  [(set (reg:CC_V CC_REGNUM)
-	(ne:CC_V
-	  (plus:TI
-	    (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
-	    (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
-	  (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
-   (set (match_operand:DI 0 "s_register_operand" "=&r")
-	(plus:DI (match_dup 1) (match_dup 2)))]
-  "TARGET_32BIT"
-  "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
-  [(set_attr "conds" "set")
-   (set_attr "length" "8")
-   (set_attr "type" "multiple")]
-)
-
-(define_insn "addsi3_compareV"
+;; ADDS (register) that sets V: the widened sum differs from the
+;; narrow sum exactly on signed overflow.  Narrow Thumb-2 encodings
+;; in the first two alternatives.
+(define_insn "addsi3_compareV_reg"
  [(set (reg:CC_V CC_REGNUM)
-	(ne:CC_V
+	(compare:CC_V
	  (plus:DI
-	    (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
-	    (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
+	    (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
+	    (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
	  (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
-   (set (match_operand:SI 0 "register_operand" "=r")
+   (set (match_operand:SI 0 "register_operand" "=l,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "adds%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
+   (set_attr "arch" "t2,t2,*")
+   (set_attr "length" "2,2,4")
   (set_attr "type" "alus_sreg")]
)
-(define_insn "adddi3_compareC"
-  [(set (reg:CC_C CC_REGNUM)
-	(compare:CC_C
+;; V-setting add where only the flags are wanted (the sum is dead):
+;; emitted as CMN.
+(define_insn "*addsi3_compareV_reg_nosum"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
	  (plus:DI
-	    (match_operand:DI 1 "register_operand" "r")
-	    (match_operand:DI 2 "register_operand" "r"))
-	  (match_dup 1)))
-   (set (match_operand:DI 0 "register_operand" "=&r")
-	(plus:DI (match_dup 1) (match_dup 2)))]
-  "TARGET_32BIT"
-  "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
-  [(set_attr "conds" "set")
-   (set_attr "length" "8")
-   (set_attr "type" "multiple")
+	    (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
+	    (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
+	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
+  "TARGET_32BIT"
+  "cmn%?\\t%0, %1"
+  [(set_attr "conds" "set")
+   (set_attr "arch" "t2,*")
+   (set_attr "length" "2,4")
+   (set_attr "type" "alus_sreg")]
)
-(define_insn "addsi3_compareC"
-  [(set (reg:CC_C CC_REGNUM)
-	(compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
-			       (match_operand:SI 2 "register_operand" "r"))
-		      (match_dup 1)))
-   (set (match_operand:SI 0 "register_operand" "=r")
-	(plus:SI (match_dup 1) (match_dup 2)))]
+;; V-setting subtract of INT_MIN.  Kept as a dedicated pattern because
+;; -INT_MIN is not representable in SImode, so the generic negate-and-add
+;; path cannot express it; the widened addend is +2^31 while the SImode
+;; result adds -2^31.
+(define_insn "subvsi3_intmin"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (sign_extend:DI
+	   (match_operand:SI 1 "register_operand" "r"))
+	  (const_int 2147483648))
+	 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
+   (set (match_operand:SI 0 "register_operand" "=r")
+	(plus:SI (match_dup 1) (const_int -2147483648)))]
  "TARGET_32BIT"
-  "adds%?\\t%0, %1, %2"
+  "subs%?\\t%0, %1, #-2147483648"
  [(set_attr "conds" "set")
-   (set_attr "type" "alus_sreg")]
+   (set_attr "type" "alus_imm")]
+)
+
+;; ADDS/SUBS (immediate) setting V.  Alternatives cover narrow Thumb-2
+;; forms (Pd/Py/Px/Pw) and the ARM add/negated-add immediates (I/L).
+;; The condition insists the constant is stored sign-extended.
+(define_insn "addsi3_compareV_imm"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (sign_extend:DI
+	   (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
+	  (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
+	 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
+   (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
+	(plus:SI (match_dup 1) (match_dup 2)))]
+  "TARGET_32BIT
+   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
+  "@
+   adds%?\\t%0, %1, %2
+   adds%?\\t%0, %0, %2
+   subs%?\\t%0, %1, #%n2
+   subs%?\\t%0, %0, #%n2
+   adds%?\\t%0, %1, %2
+   subs%?\\t%0, %1, #%n2"
+  [(set_attr "conds" "set")
+   (set_attr "arch" "t2,t2,t2,t2,*,*")
+   (set_attr "length" "2,2,2,2,4,4")
+   (set_attr "type" "alus_imm")]
+)
+
+;; Flags-only variant: CMN/CMP against the (possibly negated) immediate.
+(define_insn "addsi3_compareV_imm_nosum"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (sign_extend:DI
+	   (match_operand:SI 0 "register_operand" "l,r,r"))
+	  (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
+	 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
+  "TARGET_32BIT
+   && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
+  "@
+   cmp%?\\t%0, #%n1
+   cmn%?\\t%0, %1
+   cmp%?\\t%0, #%n1"
+  [(set_attr "conds" "set")
+   (set_attr "arch" "t2,*,*")
+   (set_attr "length" "2,4,4")
+   (set_attr "type" "alus_imm")]
+)
+
+;; We can handle more constants efficiently if we can clobber either a scratch
+;; or the other source operand.  We deliberately leave this late as in
+;; high register pressure situations it's not worth forcing any reloads.
+;;
+;; Narrow a flags-only compareV to a 16-bit ADDS by writing the (dead)
+;; sum into a spare low register.
+(define_peephole2
+  [(match_scratch:SI 2 "l")
+   (set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (sign_extend:DI
+	   (match_operand:SI 0 "low_register_operand"))
+	  (match_operand 1 "const_int_operand"))
+	 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
+  "TARGET_THUMB2
+   && satisfies_constraint_Pd (operands[1])"
+  [(parallel[
+    (set (reg:CC_V CC_REGNUM)
+	 (compare:CC_V
+	  (plus:DI (sign_extend:DI (match_dup 0))
+		   (sign_extend:DI (match_dup 1)))
+	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
+    (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
+)
+
+;; As above, but clobber the source operand itself when it is dead.
+(define_peephole2
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (plus:DI
+	  (sign_extend:DI
+	   (match_operand:SI 0 "low_register_operand"))
+	  (match_operand 1 "const_int_operand"))
+	 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
+  "TARGET_THUMB2
+   && dead_or_set_p (peep2_next_insn (0), operands[0])
+   && satisfies_constraint_Py (operands[1])"
+  [(parallel[
+    (set (reg:CC_V CC_REGNUM)
+	 (compare:CC_V
+	  (plus:DI (sign_extend:DI (match_dup 0))
+		   (sign_extend:DI (match_dup 1)))
+	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
+    (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
)
(define_insn "addsi3_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
(match_operand:SI 2 "arm_add_operand" "I,L,r"))
(const_int 0)))
)
(define_insn "*addsi3_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
(match_operand:SI 1 "arm_add_operand" "I,L, r"))
(const_int 0)))]
;; the operands, and we know that the use of the condition code is
;; either GEU or LTU, so we can use the carry flag from the addition
;; instead of doing the compare a second time.
-(define_insn "*addsi3_compare_op1"
+;; Now named (no leading '*') so the DImode overflow expanders can call
+;; gen_addsi3_compare_op1 directly for the lowpart add.
+(define_insn "addsi3_compare_op1"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
-	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
-		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
+	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
+		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
	 (match_dup 1)))
-   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
+   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "@
   subs%?\\t%0, %1, #%n2
   subs%?\\t%0, %0, #%n2
   adds%?\\t%0, %1, %2
-   subs%?\\t%0, %1, #%n2
-   adds%?\\t%0, %1, %2"
+   subs%?\\t%0, %1, #%n2"
  [(set_attr "conds" "set")
-   (set_attr "arch" "t2,t2,t2,t2,*,*,*")
-   (set_attr "length" "2,2,2,2,4,4,4")
-   (set_attr "type"
-    "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
+   (set_attr "arch" "t2,t2,t2,t2,*,*")
+   (set_attr "length" "2,2,2,2,4,4")
+   (set (attr "type")
+	(if_then_else (match_operand 2 "const_int_operand")
+		      (const_string "alu_imm")
+		      (const_string "alu_sreg")))]
)
;; As op1, but the flags compare is against operand 2.
(define_insn "*addsi3_compare_op2"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
-	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
-		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
+	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
+		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
	 (match_dup 2)))
-   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
+   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "@
   subs%?\\t%0, %1, #%n2
   subs%?\\t%0, %0, #%n2
   adds%?\\t%0, %1, %2
-   subs%?\\t%0, %1, #%n2
-   adds%?\\t%0, %1, %2"
+   subs%?\\t%0, %1, #%n2"
  [(set_attr "conds" "set")
-   (set_attr "arch" "t2,t2,t2,t2,*,*,*")
-   (set_attr "length" "2,2,2,2,4,4,4")
-   (set_attr "type"
-    "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
+   (set_attr "arch" "t2,t2,t2,t2,*,*")
+   (set_attr "length" "2,2,2,2,4,4")
+   (set (attr "type")
+	(if_then_else (match_operand 2 "const_int_operand")
+		      (const_string "alu_imm")
+		      (const_string "alu_sreg")))]
)
;; Flags-only forms: CMN against the addend, CMP against its negation.
(define_insn "*compare_addsi2_op0"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
-	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
-		  (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
+	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
+		  (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
	 (match_dup 0)))]
  "TARGET_32BIT"
  "@
-   cmp%?\\t%0, #%n1
-   cmn%?\\t%0, %1
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1
-   cmn%?\\t%0, %1"
+   cmn%?\\t%0, %1
+   cmp%?\\t%0, #%n1"
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
-   (set_attr "arch" "t2,t2,*,*,*")
-   (set_attr "predicable_short_it" "yes,yes,no,no,no")
-   (set_attr "length" "2,2,4,4,4")
-   (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
+   (set_attr "arch" "t2,t2,*,*")
+   (set_attr "predicable_short_it" "yes,yes,no,no")
+   (set_attr "length" "2,2,4,4")
+   (set (attr "type")
+	(if_then_else (match_operand 1 "const_int_operand")
+		      (const_string "alu_imm")
+		      (const_string "alu_sreg")))]
)
;; As above, but comparing against operand 1.
(define_insn "*compare_addsi2_op1"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
-	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
-		  (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
+	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
+		  (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
	 (match_dup 1)))]
  "TARGET_32BIT"
  "@
-   cmp%?\\t%0, #%n1
-   cmn%?\\t%0, %1
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1
-   cmn%?\\t%0, %1"
+   cmn%?\\t%0, %1
+   cmp%?\\t%0, #%n1"
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
-   (set_attr "arch" "t2,t2,*,*,*")
-   (set_attr "predicable_short_it" "yes,yes,no,no,no")
-   (set_attr "length" "2,2,4,4,4")
-   (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
+   (set_attr "arch" "t2,t2,*,*")
+   (set_attr "predicable_short_it" "yes,yes,no,no")
+   (set_attr "length" "2,2,4,4")
+   (set (attr "type")
+	(if_then_else (match_operand 1 "const_int_operand")
+		      (const_string "alu_imm")
+		      (const_string "alu_sreg")))]
)
(define_insn "addsi3_carryin"
(set_attr "arch" "32,a")
(set_attr "shift" "3")
(set_attr "predicable" "yes")
- (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
- (const_string "alu_shift_imm")
- (const_string "alu_shift_reg")))]
+ (set_attr "type" "alu_shift_imm,alu_shift_reg")]
)
(define_insn "*addsi3_carryin_clobercc"
(set_attr "type" "adcs_reg")]
)
-(define_expand "subv<mode>4"
-  [(match_operand:SIDI 0 "register_operand")
-   (match_operand:SIDI 1 "register_operand")
-   (match_operand:SIDI 2 "register_operand")
+;; Signed subtract with overflow check, SImode.  Constant cases are
+;; folded or rewritten as V-setting adds; branch to operands[3] on V.
+(define_expand "subvsi4"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "arm_rhs_operand")
+   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
-  emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
+  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
+    {
+      /* If both operands are constants we can decide the result statically.  */
+      wi::overflow_type overflow;
+      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
+			      rtx_mode_t (operands[2], SImode),
+			      SIGNED, &overflow);
+      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
+      /* Overflow is known at compile time: jump unconditionally.  */
+      if (overflow != wi::OVF_NONE)
+	emit_jump_insn (gen_jump (operands[3]));
+      DONE;
+    }
+  else if (CONST_INT_P (operands[2]))
+    {
+      /* Rewrite x - C as x + (-C).  */
+      operands[2] = GEN_INT (-INTVAL (operands[2]));
+      /* Special case for INT_MIN.  */
+      if (INTVAL (operands[2]) == 0x80000000)
+	emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
+      else
+	emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
+					    operands[2]));
+    }
+  else if (CONST_INT_P (operands[1]))
+    emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
+  else
+    emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
+
+  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
+  DONE;
+})
+
+;; Signed subtract with overflow check, DImode: lowpart SUBS/RSBS then
+;; highpart SBC-style compare that sets V.  'mode' tracks whether the
+;; lowpart left a normal borrow (CCmode) or an RSB-style one (CC_RSBmode).
+(define_expand "subvdi4"
+  [(match_operand:DI 0 "s_register_operand")
+   (match_operand:DI 1 "reg_or_int_operand")
+   (match_operand:DI 2 "reg_or_int_operand")
+   (match_operand 3 "")]
+  "TARGET_32BIT"
+{
+  rtx lo_result, hi_result;
+  rtx lo_op1, hi_op1, lo_op2, hi_op2;
+  lo_result = gen_lowpart (SImode, operands[0]);
+  hi_result = gen_highpart (SImode, operands[0]);
+  machine_mode mode = CCmode;
+
+  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
+    {
+      /* If both operands are constants we can decide the result statically.  */
+      wi::overflow_type overflow;
+      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
+			      rtx_mode_t (operands[2], DImode),
+			      SIGNED, &overflow);
+      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
+      if (overflow != wi::OVF_NONE)
+	emit_jump_insn (gen_jump (operands[3]));
+      DONE;
+    }
+  else if (CONST_INT_P (operands[1]))
+    {
+      /* Constant minuend: use RSBS on the low word when the constant
+	 fits an ARM immediate.  */
+      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
+			      &lo_op1, &hi_op1);
+      if (const_ok_for_arm (INTVAL (lo_op1)))
+	{
+	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
+					  GEN_INT (~UINTVAL (lo_op1))));
+	  /* We could potentially use RSC here in Arm state, but not
+	     in Thumb, so it's probably not worth the effort of handling
+	     this.  */
+	  hi_op1 = force_reg (SImode, hi_op1);
+	  mode = CC_RSBmode;
+	  goto highpart;
+	}
+      operands[1] = force_reg (DImode, operands[1]);
+    }
+
+  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
+			  &lo_op2, &hi_op2);
+  if (lo_op2 == const0_rtx)
+    {
+      /* Low half is a plain move; delegate the high half.  */
+      emit_move_insn (lo_result, lo_op1);
+      if (!arm_add_operand (hi_op2, SImode))
+	hi_op2 = force_reg (SImode, hi_op2);
+      emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
+      DONE;
+    }
+
+  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
+    lo_op2 = force_reg (SImode, lo_op2);
+  if (CONST_INT_P (lo_op2))
+    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
+				  GEN_INT (-INTVAL (lo_op2))));
+  else
+    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
+
+ highpart:
+  if (!arm_not_operand (hi_op2, SImode))
+    hi_op2 = force_reg (SImode, hi_op2);
+  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
+  if (CONST_INT_P (hi_op2))
+    emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
+				       gen_rtx_LTU (SImode, ccreg, const0_rtx),
+				       gen_rtx_LTU (DImode, ccreg,
+						    const0_rtx)));
+  else
+    emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
+				   gen_rtx_LTU (SImode, ccreg, const0_rtx),
+				   gen_rtx_LTU (DImode, ccreg, const0_rtx)));
  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
  DONE;
})
-(define_expand "usubv<mode>4"
-  [(match_operand:SIDI 0 "register_operand")
-   (match_operand:SIDI 1 "register_operand")
-   (match_operand:SIDI 2 "register_operand")
+;; Unsigned subtract with overflow (borrow) check, SImode: branch on
+;; LTU in CCmode, or CC_RSBmode when the lowpart was an RSBS.
+(define_expand "usubvsi4"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "arm_rhs_operand")
+   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
-  emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
-  arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
+  machine_mode mode = CCmode;
+  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
+    {
+      /* If both operands are constants we can decide the result statically.  */
+      wi::overflow_type overflow;
+      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
+			      rtx_mode_t (operands[2], SImode),
+			      UNSIGNED, &overflow);
+      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
+      if (overflow != wi::OVF_NONE)
+	emit_jump_insn (gen_jump (operands[3]));
+      DONE;
+    }
+  else if (CONST_INT_P (operands[2]))
+    emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
+				  GEN_INT (-INTVAL (operands[2]))));
+  else if (CONST_INT_P (operands[1]))
+    {
+      /* RSBS leaves an inverted borrow; record that for the branch.  */
+      mode = CC_RSBmode;
+      emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
+				      GEN_INT (~UINTVAL (operands[1]))));
+    }
+  else
+    emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
+  arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
  DONE;
})
-(define_insn "subdi3_compare1"
-  [(set (reg:CC CC_REGNUM)
-	(compare:CC
-	  (match_operand:DI 1 "s_register_operand" "r")
-	  (match_operand:DI 2 "s_register_operand" "r")))
-   (set (match_operand:DI 0 "s_register_operand" "=&r")
-	(minus:DI (match_dup 1) (match_dup 2)))]
+;; Unsigned subtract with overflow (borrow) check, DImode: lowpart
+;; SUBS/RSBS then highpart SBCS; branch on the final borrow (CC_B).
+(define_expand "usubvdi4"
+  [(match_operand:DI 0 "s_register_operand")
+   (match_operand:DI 1 "reg_or_int_operand")
+   (match_operand:DI 2 "reg_or_int_operand")
+   (match_operand 3 "")]
  "TARGET_32BIT"
-  "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
-  [(set_attr "conds" "set")
-   (set_attr "length" "8")
-   (set_attr "type" "multiple")]
-)
+{
+  rtx lo_result, hi_result;
+  rtx lo_op1, hi_op1, lo_op2, hi_op2;
+  lo_result = gen_lowpart (SImode, operands[0]);
+  hi_result = gen_highpart (SImode, operands[0]);
+  machine_mode mode = CCmode;
+
+  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
+    {
+      /* If both operands are constants we can decide the result statically.  */
+      wi::overflow_type overflow;
+      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
+			      rtx_mode_t (operands[2], DImode),
+			      UNSIGNED, &overflow);
+      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
+      if (overflow != wi::OVF_NONE)
+	emit_jump_insn (gen_jump (operands[3]));
+      DONE;
+    }
+  else if (CONST_INT_P (operands[1]))
+    {
+      /* Constant minuend: RSBS on the low word when the constant fits.  */
+      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
+			      &lo_op1, &hi_op1);
+      if (const_ok_for_arm (INTVAL (lo_op1)))
+	{
+	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
+					  GEN_INT (~UINTVAL (lo_op1))));
+	  /* We could potentially use RSC here in Arm state, but not
+	     in Thumb, so it's probably not worth the effort of handling
+	     this.  */
+	  hi_op1 = force_reg (SImode, hi_op1);
+	  mode = CC_RSBmode;
+	  goto highpart;
+	}
+      operands[1] = force_reg (DImode, operands[1]);
+    }
+
+  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
+			  &lo_op2, &hi_op2);
+  if (lo_op2 == const0_rtx)
+    {
+      /* Low half is a plain move; delegate the high half.  */
+      emit_move_insn (lo_result, lo_op1);
+      if (!arm_add_operand (hi_op2, SImode))
+	hi_op2 = force_reg (SImode, hi_op2);
+      emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
+      DONE;
+    }
+
+  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
+    lo_op2 = force_reg (SImode, lo_op2);
+  if (CONST_INT_P (lo_op2))
+    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
+				  GEN_INT (-INTVAL (lo_op2))));
+  else
+    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
+
+ highpart:
+  if (!arm_not_operand (hi_op2, SImode))
+    hi_op2 = force_reg (SImode, hi_op2);
+  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
+  if (CONST_INT_P (hi_op2))
+    emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
+					GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
+					gen_rtx_LTU (SImode, ccreg, const0_rtx),
+					gen_rtx_LTU (DImode, ccreg,
+						     const0_rtx)));
+  else
+    emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
+				    gen_rtx_LTU (SImode, ccreg, const0_rtx),
+				    gen_rtx_LTU (DImode, ccreg, const0_rtx)));
+  arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
+
+  DONE;
+})
(define_insn "subsi3_compare1"
[(set (reg:CC CC_REGNUM)
(set_attr "type" "alus_sreg")]
)
+;; Subtract, also setting the V flag: the CC_V compare holds the widened
+;; (DImode) difference of the sign-extended operands against the
+;; sign-extension of the SImode difference; they differ exactly when
+;; signed overflow occurs.
+(define_insn "subvsi3"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (minus:DI
+	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
+	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
+	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
+   (set (match_operand:SI 0 "s_register_operand" "=l,r")
+	(minus:SI (match_dup 1) (match_dup 2)))]
+  "TARGET_32BIT"
+  "subs%?\\t%0, %1, %2"
+  [(set_attr "conds" "set")
+   (set_attr "arch" "t2,*")
+   (set_attr "length" "2,4")
+   (set_attr "type" "alus_sreg")]
+)
+
+;; As subvsi3, but with an immediate minuend: emitted as a reverse
+;; subtract (RSBS) so the constant can sit in the instruction's
+;; immediate field.
+(define_insn "subvsi3_imm1"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (minus:DI
+	  (match_operand 1 "arm_immediate_operand" "I")
+	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
+	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
+   (set (match_operand:SI 0 "s_register_operand" "=r")
+	(minus:SI (match_dup 1) (match_dup 2)))]
+  "TARGET_32BIT"
+  "rsbs%?\\t%0, %2, %1"
+  [(set_attr "conds" "set")
+   (set_attr "type" "alus_imm")]
+)
+
(define_insn "subsi3_carryin"
[(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
(minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
(set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
)
+;; Special canonicalization of the above when operand1 == (const_int 1):
+;; in this case the 'borrow' needs to be treated like subtracting from the
+;; carry.  Arm-only: RSC has no Thumb-2 encoding.
+(define_insn "rsbsi_carryin_reg"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+	(minus:SI (match_operand:SI 1 "arm_carry_operation" "")
+		  (match_operand:SI 2 "s_register_operand" "r")))]
+  "TARGET_ARM"
+  "rsc%?\\t%0, %2, #1"
+  [(set_attr "conds" "use")
+   (set_attr "predicable" "yes")
+   (set_attr "type" "adc_imm")]
+)
+
+;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
+;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
+;; Alternative 1 (immediate addend) needs RSC, hence the "arch" attribute
+;; restricts it to Arm state and the insn condition excludes Thumb-2
+;; immediates.
+(define_insn "*add_not_cin"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+	(plus:SI
+	 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
+		  (match_operand:SI 3 "arm_carry_operation" ""))
+	 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
+  "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
+  "@
+   sbc%?\\t%0, %2, %1
+   rsc%?\\t%0, %1, %2"
+  [(set_attr "conds" "use")
+   (set_attr "predicable" "yes")
+   (set_attr "arch" "*,a")
+   (set_attr "type" "adc_reg,adc_imm")]
+)
+
+;; On Arm we can also use the same trick when the non-inverted operand is
+;; shifted, using RSC.  Arm-only for the same reason as above: RSC has no
+;; Thumb-2 encoding.
+(define_insn "add_not_shift_cin"
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
+	(plus:SI
+	 (plus:SI (match_operator:SI 3 "shift_operator"
+		   [(match_operand:SI 1 "s_register_operand" "r,r")
+		    (match_operand:SI 2 "shift_amount_operand" "M,r")])
+		  (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
+	 (match_operand:SI 5 "arm_carry_operation" "")))]
+  "TARGET_ARM"
+  "rsc%?\\t%0, %4, %1%S3"
+  [(set_attr "conds" "use")
+   (set_attr "predicable" "yes")
+   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
+)
+
(define_insn "cmpsi3_carryin_<CC_EXTEND>out"
[(set (reg:<CC_EXTEND> CC_REGNUM)
(compare:<CC_EXTEND>
)
+;; SBC with a shifted subtrahend.  The new second alternative permits a
+;; register-specified shift amount, which only Arm state supports (see the
+;; "arch" attribute); Thumb-2 is limited to an immediate shift ("M").
(define_insn "*subsi3_carryin_shift"
-  [(set (match_operand:SI 0 "s_register_operand" "=r")
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
-		   (match_operand:SI 1 "s_register_operand" "r")
+		   (match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operator:SI 2 "shift_operator"
-		    [(match_operand:SI 3 "s_register_operand" "r")
-		     (match_operand:SI 4 "reg_or_int_operand" "rM")]))
+		    [(match_operand:SI 3 "s_register_operand" "r,r")
+		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
		  (match_operand:SI 5 "arm_borrow_operation" "")))]
  "TARGET_32BIT"
  "sbc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
+   (set_attr "arch" "32,a")
+   (set_attr "shift" "3")
   (set_attr "predicable" "yes")
-   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
-		      (const_string "alu_shift_imm")
-		      (const_string "alu_shift_reg")))]
+   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
)
+;; Same as *subsi3_carryin_shift but with the borrow subtracted before the
+;; shifted operand (an equivalent association the combiner can produce).
(define_insn "*subsi3_carryin_shift_alt"
-  [(set (match_operand:SI 0 "s_register_operand" "=r")
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
-		   (match_operand:SI 1 "s_register_operand" "r")
+		   (match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operand:SI 5 "arm_borrow_operation" ""))
		  (match_operator:SI 2 "shift_operator"
-		   [(match_operand:SI 3 "s_register_operand" "r")
-		    (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
+		   [(match_operand:SI 3 "s_register_operand" "r,r")
+		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
  "TARGET_32BIT"
  "sbc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
+   (set_attr "arch" "32,a")
+   (set_attr "shift" "3")
   (set_attr "predicable" "yes")
-   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
-		      (const_string "alu_shift_imm")
-		      (const_string "alu_shift_reg")))]
+   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
)
+;; No RSC in Thumb2, so this pattern (shifted minuend) stays TARGET_ARM;
+;; both alternatives may therefore use a register shift amount.
(define_insn "*rsbsi3_carryin_shift"
-  [(set (match_operand:SI 0 "s_register_operand" "=r")
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
		   (match_operator:SI 2 "shift_operator"
-		    [(match_operand:SI 3 "s_register_operand" "r")
-		     (match_operand:SI 4 "reg_or_int_operand" "rM")])
-		   (match_operand:SI 1 "s_register_operand" "r"))
+		    [(match_operand:SI 3 "s_register_operand" "r,r")
+		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
+		   (match_operand:SI 1 "s_register_operand" "r,r"))
		  (match_operand:SI 5 "arm_borrow_operation" "")))]
  "TARGET_ARM"
  "rsc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
-   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
-		      (const_string "alu_shift_imm")
-		      (const_string "alu_shift_reg")))]
+   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
)
+;; Alternate association of the RSC-with-shift pattern above (borrow
+;; subtracted from the shifted operand first); Arm-only like its sibling.
(define_insn "*rsbsi3_carryin_shift_alt"
-  [(set (match_operand:SI 0 "s_register_operand" "=r")
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
		   (match_operator:SI 2 "shift_operator"
-		    [(match_operand:SI 3 "s_register_operand" "r")
-		     (match_operand:SI 4 "reg_or_int_operand" "rM")])
+		    [(match_operand:SI 3 "s_register_operand" "r,r")
+		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
		   (match_operand:SI 5 "arm_borrow_operation" ""))
-		  (match_operand:SI 1 "s_register_operand" "r")))]
+		  (match_operand:SI 1 "s_register_operand" "r,r")))]
  "TARGET_ARM"
  "rsc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
-   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
-		      (const_string "alu_shift_imm")
-		      (const_string "alu_shift_reg")))]
+   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
)
; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
)
(define_insn "subsi3_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
(match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
(const_int 0)))
(set_attr "type" "alus_imm")]
)
+;; Subtract-with-borrow that also records unsigned underflow in CC_B by
+;; comparing the zero-extended minuend with borrow + zero-extended
+;; subtrahend.  Operands 3 and 4 are the same incoming borrow expressed in
+;; SImode and DImode respectively.
+(define_insn "usubvsi3_borrow"
+  [(set (reg:CC_B CC_REGNUM)
+	(compare:CC_B
+	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
+	 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
+		  (zero_extend:DI
+		   (match_operand:SI 2 "s_register_operand" "l,r")))))
+   (set (match_operand:SI 0 "s_register_operand" "=l,r")
+	(minus:SI (match_dup 1)
+		  (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
+			   (match_dup 2))))]
+  "TARGET_32BIT"
+  "sbcs%?\\t%0, %1, %2"
+  [(set_attr "conds" "set")
+   (set_attr "arch" "t2,*")
+   (set_attr "length" "2,4")]
+)
+
+;; Immediate form of usubvsi3_borrow.  Operand 3 is operand 2's value as a
+;; DImode constant (the insn condition enforces the match).  The second
+;; alternative handles a constant whose bitwise inverse is encodable ("K")
+;; by emitting ADCS with the inverted immediate (%B2).
+(define_insn "usubvsi3_borrow_imm"
+  [(set (reg:CC_B CC_REGNUM)
+	(compare:CC_B
+	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
+	 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
+		  (match_operand:DI 3 "const_int_operand" "n,n"))))
+   (set (match_operand:SI 0 "s_register_operand" "=r,r")
+	(minus:SI (match_dup 1)
+		  (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
+			   (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
+  "TARGET_32BIT
+   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
+  "@
+   sbcs%?\\t%0, %1, %2
+   adcs%?\\t%0, %1, #%B2"
+  [(set_attr "conds" "set")
+   (set_attr "type" "alus_imm")]
+)
+
+;; Signed counterpart of usubvsi3_borrow: SBCS setting the V flag, with the
+;; CC_V compare holding the widened subtract-with-borrow against the
+;; sign-extension of the SImode result.  Operands 3/4 are the incoming
+;; borrow in SImode/DImode form.
+(define_insn "subvsi3_borrow"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (minus:DI
+	  (minus:DI
+	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
+	   (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
+	  (match_operand:DI 4 "arm_borrow_operation" ""))
+	 (sign_extend:DI
+	  (minus:SI (minus:SI (match_dup 1) (match_dup 2))
+		    (match_operand:SI 3 "arm_borrow_operation" "")))))
+   (set (match_operand:SI 0 "s_register_operand" "=l,r")
+	(minus:SI (minus:SI (match_dup 1) (match_dup 2))
+		  (match_dup 3)))]
+  "TARGET_32BIT"
+  "sbcs%?\\t%0, %1, %2"
+  [(set_attr "conds" "set")
+   (set_attr "arch" "t2,*")
+   (set_attr "length" "2,4")]
+)
+
+;; Immediate form of subvsi3_borrow; the condition requires the constant to
+;; be its own 32->64-bit sign extension so the DImode arithmetic in the
+;; compare is exact.  Second alternative uses ADCS with the inverted
+;; immediate (%B2) as in usubvsi3_borrow_imm.
+(define_insn "subvsi3_borrow_imm"
+  [(set (reg:CC_V CC_REGNUM)
+	(compare:CC_V
+	 (minus:DI
+	  (minus:DI
+	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
+	   (match_operand 2 "arm_adcimm_operand" "I,K"))
+	  (match_operand:DI 4 "arm_borrow_operation" ""))
+	 (sign_extend:DI
+	  (minus:SI (minus:SI (match_dup 1) (match_dup 2))
+		    (match_operand:SI 3 "arm_borrow_operation" "")))))
+   (set (match_operand:SI 0 "s_register_operand" "=r,r")
+	(minus:SI (minus:SI (match_dup 1) (match_dup 2))
+		  (match_dup 3)))]
+  "TARGET_32BIT
+   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
+  "@
+   sbcs%?\\t%0, %1, %2
+   adcs%?\\t%0, %1, #%B2"
+  [(set_attr "conds" "set")
+   (set_attr "type" "alus_imm")]
+)
+
(define_expand "subsf3"
[(set (match_operand:SF 0 "s_register_operand")
(minus:SF (match_operand:SF 1 "s_register_operand")
)
(define_insn "*mulsi3_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (mult:SI
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r")
(match_operand:SI 1 "s_register_operand" "%0,r"))
(const_int 0)))
)
(define_insn "*mulsi3_compare0_v6"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (mult:SI
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (mult:SI
(match_operand:SI 2 "s_register_operand" "r")
(match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
)
(define_insn "*mulsi_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (mult:SI
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r")
(match_operand:SI 1 "s_register_operand" "%0,r"))
(const_int 0)))
)
(define_insn "*mulsi_compare0_scratch_v6"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (mult:SI
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (mult:SI
(match_operand:SI 2 "s_register_operand" "r")
(match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
)
(define_insn "*mulsi3addsi_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(plus:SI (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r,r,r")
(match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
)
(define_insn "*mulsi3addsi_compare0_v6"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(plus:SI (mult:SI
(match_operand:SI 2 "s_register_operand" "r")
(match_operand:SI 1 "s_register_operand" "r"))
)
(define_insn "*mulsi3addsi_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(plus:SI (mult:SI
(match_operand:SI 2 "s_register_operand" "r,r,r,r")
(match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
)
(define_insn "*mulsi3addsi_compare0_scratch_v6"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(plus:SI (mult:SI
(match_operand:SI 2 "s_register_operand" "r")
(match_operand:SI 1 "s_register_operand" "r"))
(set_attr "predicable" "yes")]
)
-(define_insn "maddhisi4"
+;; 16x16+32 multiply-accumulate.  When the Q flag is not live this expander
+;; emits nothing special and the *arm_maddhisi4 insn below matches the
+;; pattern directly (single SMLABB).
+(define_expand "maddhisi4"
+  [(set (match_operand:SI 0 "s_register_operand")
+	(plus:SI (mult:SI (sign_extend:SI
+			   (match_operand:HI 1 "s_register_operand"))
+			  (sign_extend:SI
+			   (match_operand:HI 2 "s_register_operand")))
+		 (match_operand:SI 3 "s_register_operand")))]
+  "TARGET_DSP_MULTIPLY"
+  {
+    /* If this function reads the Q bit from ACLE intrinsics break up the
+       multiplication and accumulation as an overflow during accumulation will
+       clobber the Q flag.  */
+    if (ARM_Q_BIT_READ)
+      {
+	rtx tmp = gen_reg_rtx (SImode);
+	emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
+	emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
+	DONE;
+      }
+  }
+)
+
+;; SMLABB insn; only usable when the Q bit is not observed, since the
+;; accumulate step can set Q without that being modelled here.
+(define_insn "*arm_maddhisi4"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (mult:SI (sign_extend:SI
			   (match_operand:HI 1 "s_register_operand" "r"))
			  (sign_extend:SI
			   (match_operand:HI 2 "s_register_operand" "r")))
		 (match_operand:SI 3 "s_register_operand" "r")))]
+  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
+  "smlabb%?\\t%0, %1, %2, %3"
+  [(set_attr "type" "smlaxy")
+   (set_attr "predicable" "yes")]
+)
+
+;; SMLABB variant that makes the Q-bit side effect explicit (second set of
+;; APSRQ_REGNUM), for use inside functions that read the Q flag.
+(define_insn "arm_smlabb_setq"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+	(plus:SI (mult:SI (sign_extend:SI
+			   (match_operand:HI 1 "s_register_operand" "r"))
+			  (sign_extend:SI
+			   (match_operand:HI 2 "s_register_operand" "r")))
+		 (match_operand:SI 3 "s_register_operand" "r")))
+   (set (reg:CC APSRQ_REGNUM)
+	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
  "TARGET_DSP_MULTIPLY"
  "smlabb%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)
+;; Expander for the __smlabb ACLE builtin: selects the Q-setting insn when
+;; the Q bit is read in this function, otherwise the plain maddhisi4 path.
+(define_expand "arm_smlabb"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "s_register_operand")
+   (match_operand:SI 2 "s_register_operand")
+   (match_operand:SI 3 "s_register_operand")]
+  "TARGET_DSP_MULTIPLY"
+  {
+    rtx mult1 = gen_lowpart (HImode, operands[1]);
+    rtx mult2 = gen_lowpart (HImode, operands[2]);
+    if (ARM_Q_BIT_READ)
+      emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
+    else
+      emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
+    DONE;
+  }
+)
+
;; Note: there is no maddhisi4ibt because this one is canonical form
-(define_insn "*maddhisi4tb"
+;; SMLATB (top half of operand 1 x bottom half of operand 2, plus
+;; accumulator).  Renamed from "*maddhisi4tb" so the expander below can
+;; generate it; disabled when the Q bit is read.
+(define_insn "maddhisi4tb"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (mult:SI (ashiftrt:SI
			   (match_operand:SI 1 "s_register_operand" "r")
			  (sign_extend:SI
			   (match_operand:HI 2 "s_register_operand" "r")))
		 (match_operand:SI 3 "s_register_operand" "r")))]
+  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
+  "smlatb%?\\t%0, %1, %2, %3"
+  [(set_attr "type" "smlaxy")
+   (set_attr "predicable" "yes")]
+)
+
+;; SMLATB variant with the Q-bit write modelled explicitly, as for
+;; arm_smlabb_setq above.
+(define_insn "arm_smlatb_setq"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+	(plus:SI (mult:SI (ashiftrt:SI
+			   (match_operand:SI 1 "s_register_operand" "r")
+			   (const_int 16))
+			  (sign_extend:SI
+			   (match_operand:HI 2 "s_register_operand" "r")))
+		 (match_operand:SI 3 "s_register_operand" "r")))
+   (set (reg:CC APSRQ_REGNUM)
+	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
  "TARGET_DSP_MULTIPLY"
  "smlatb%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)
-(define_insn "*maddhisi4tt"
+;; Expander for the __smlatb ACLE builtin: Q-setting insn when the Q bit is
+;; read in this function, otherwise the ordinary maddhisi4tb pattern.
+(define_expand "arm_smlatb"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "s_register_operand")
+   (match_operand:SI 2 "s_register_operand")
+   (match_operand:SI 3 "s_register_operand")]
+  "TARGET_DSP_MULTIPLY"
+  {
+    rtx mult2 = gen_lowpart (HImode, operands[2]);
+    if (ARM_Q_BIT_READ)
+      emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
+				      mult2, operands[3]));
+    else
+      emit_insn (gen_maddhisi4tb (operands[0], operands[1],
+				  mult2, operands[3]));
+    DONE;
+  }
+)
+
+;; SMLATT (top halves of both multiplicands, plus accumulator).  Renamed
+;; from "*maddhisi4tt" for use by the expander below; disabled when the Q
+;; bit is read.
+(define_insn "maddhisi4tt"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (mult:SI (ashiftrt:SI
			   (match_operand:SI 1 "s_register_operand" "r")
			   (match_operand:SI 2 "s_register_operand" "r")
			   (const_int 16)))
		 (match_operand:SI 3 "s_register_operand" "r")))]
+  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
+  "smlatt%?\\t%0, %1, %2, %3"
+  [(set_attr "type" "smlaxy")
+   (set_attr "predicable" "yes")]
+)
+
+;; SMLATT variant with the Q-bit write modelled explicitly.
+(define_insn "arm_smlatt_setq"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+	(plus:SI (mult:SI (ashiftrt:SI
+			   (match_operand:SI 1 "s_register_operand" "r")
+			   (const_int 16))
+			  (ashiftrt:SI
+			   (match_operand:SI 2 "s_register_operand" "r")
+			   (const_int 16)))
+		 (match_operand:SI 3 "s_register_operand" "r")))
+   (set (reg:CC APSRQ_REGNUM)
+	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
  "TARGET_DSP_MULTIPLY"
  "smlatt%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)
+;; Expander for the __smlatt ACLE builtin, dispatching on ARM_Q_BIT_READ
+;; as for arm_smlabb/arm_smlatb above.
+(define_expand "arm_smlatt"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "s_register_operand")
+   (match_operand:SI 2 "s_register_operand")
+   (match_operand:SI 3 "s_register_operand")]
+  "TARGET_DSP_MULTIPLY"
+  {
+    if (ARM_Q_BIT_READ)
+      emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
+				      operands[2], operands[3]));
+    else
+      emit_insn (gen_maddhisi4tt (operands[0], operands[1],
+				  operands[2], operands[3]));
+    DONE;
+  }
+)
+
(define_insn "maddhidi4"
[(set (match_operand:DI 0 "s_register_operand" "=r")
(plus:DI
[(set_attr "type" "smlalxy")
(set_attr "predicable" "yes")])
+;; SMLAWB/SMLAWT (via the SMLAWBT iterator).  The define_subst "add_setq"
+;; clones this pattern: the plain variant requires !ARM_Q_BIT_READ while
+;; the "_setq" variant adds the explicit Q-bit set (see add_clobber_q_name
+;; / add_clobber_q_pred at the top of the file).
+(define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+	(unspec:SI
+	 [(match_operand:SI 1 "s_register_operand" "r")
+	  (match_operand:SI 2 "s_register_operand" "r")
+	  (match_operand:SI 3 "s_register_operand" "r")]
+	 SMLAWBT))]
+  "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
+  "<smlaw_op>%?\\t%0, %1, %2, %3"
+  [(set_attr "type" "smlaxy")
+   (set_attr "predicable" "yes")]
+)
+
+;; Expander for the __smlawb/__smlawt ACLE builtins: choose the Q-setting
+;; clone when the Q bit is read in this function.
+(define_expand "arm_<smlaw_op>"
+  [(set (match_operand:SI 0 "s_register_operand")
+	(unspec:SI
+	 [(match_operand:SI 1 "s_register_operand")
+	  (match_operand:SI 2 "s_register_operand")
+	  (match_operand:SI 3 "s_register_operand")]
+	 SMLAWBT))]
+  "TARGET_DSP_MULTIPLY"
+  {
+    if (ARM_Q_BIT_READ)
+      emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
+					       operands[2], operands[3]));
+    else
+      emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
+					  operands[2], operands[3]));
+    DONE;
+  }
+)
+
(define_expand "mulsf3"
[(set (match_operand:SF 0 "s_register_operand")
(mult:SF (match_operand:SF 1 "s_register_operand")
)
(define_insn "*andsi3_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
(match_operand:SI 2 "arm_not_operand" "I,K,r"))
(const_int 0)))
)
(define_insn "*andsi3_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
(match_operand:SI 1 "arm_not_operand" "I,K,r"))
(const_int 0)))
)
(define_insn "*zeroextractsi_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (zero_extract:SI
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (zero_extract:SI
(match_operand:SI 0 "s_register_operand" "r")
(match_operand 1 "const_int_operand" "n")
(match_operand 2 "const_int_operand" "n"))
&& INTVAL (operands[2]) > 0
&& INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
&& INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
- [(parallel [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
+ [(parallel [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
(const_int 0)))
(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
(set (match_dup 0)
- (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
+ (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
(match_dup 0) (const_int 1)))]
"
operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
"TARGET_ARM"
"#"
"TARGET_ARM"
- [(parallel [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
+ [(parallel [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
(const_int 0)))
(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
(set (match_dup 0)
- (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
+ (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
(match_dup 0) (const_int 1)))]
"
operands[2] = GEN_INT (32 - INTVAL (operands[2]));
&& INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
&& INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
&& !reg_overlap_mentioned_p (operands[0], operands[4])"
- [(parallel [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
+ [(parallel [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
(const_int 0)))
(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
(set (match_dup 0)
- (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
+ (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
(match_dup 0) (match_dup 4)))]
"
operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
"TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
"#"
"TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
- [(parallel [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
+ [(parallel [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
(const_int 0)))
(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
(set (match_dup 0)
- (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
+ (if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
(match_dup 0) (match_dup 3)))]
"
operands[2] = GEN_INT (32 - INTVAL (operands[2]));
)
+;; BIC with a shifted second operand.  Condition widened to TARGET_32BIT;
+;; the register-shift alternative is Arm-only via the "arch" attribute
+;; (Thumb-2 BIC only takes an immediate shift).
(define_insn "andsi_not_shiftsi_si"
-  [(set (match_operand:SI 0 "s_register_operand" "=r")
+  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(and:SI (not:SI (match_operator:SI 4 "shift_operator"
-			 [(match_operand:SI 2 "s_register_operand" "r")
-			  (match_operand:SI 3 "arm_rhs_operand" "rM")]))
-		(match_operand:SI 1 "s_register_operand" "r")))]
-  "TARGET_ARM"
+			 [(match_operand:SI 2 "s_register_operand" "r,r")
+			  (match_operand:SI 3 "shift_amount_operand" "M,r")]))
+		(match_operand:SI 1 "s_register_operand" "r,r")))]
+  "TARGET_32BIT"
  "bic%?\\t%0, %1, %2%S4"
  [(set_attr "predicable" "yes")
   (set_attr "shift" "2")
-   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
-		      (const_string "logic_shift_imm")
-		      (const_string "logic_shift_reg")))]
+   (set_attr "arch" "32,a")
+   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
)
;; Shifted bics pattern used to set up CC status register and not reusing
;; bics output.  Pattern restricts Thumb2 shift operand as bics for Thumb2
;; does not support shift by register.
+;; (The old C-condition check for a constant shift is now expressed via the
+;; two-alternative constraints plus the "arch" attribute.)
(define_insn "andsi_not_shiftsi_si_scc_no_reuse"
-  [(set (reg:CC_NOOV CC_REGNUM)
-	(compare:CC_NOOV
+  [(set (reg:CC_NZ CC_REGNUM)
+	(compare:CC_NZ
		(and:SI (not:SI (match_operator:SI 0 "shift_operator"
-			[(match_operand:SI 1 "s_register_operand" "r")
-			 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
-			(match_operand:SI 3 "s_register_operand" "r"))
+			[(match_operand:SI 1 "s_register_operand" "r,r")
+			 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
+			(match_operand:SI 3 "s_register_operand" "r,r"))
		(const_int 0)))
-   (clobber (match_scratch:SI 4 "=r"))]
-  "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
+   (clobber (match_scratch:SI 4 "=r,r"))]
+  "TARGET_32BIT"
  "bics%?\\t%4, %3, %1%S0"
  [(set_attr "predicable" "yes")
+   (set_attr "arch" "32,a")
   (set_attr "conds" "set")
   (set_attr "shift" "1")
-   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
-		      (const_string "logic_shift_imm")
-		      (const_string "logic_shift_reg")))]
+   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
)
;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
;; getting reused later.
(define_insn "andsi_not_shiftsi_si_scc"
-  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
-	(compare:CC_NOOV
+  [(parallel [(set (reg:CC_NZ CC_REGNUM)
+	(compare:CC_NZ
		(and:SI (not:SI (match_operator:SI 0 "shift_operator"
-			[(match_operand:SI 1 "s_register_operand" "r")
-			 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
-			(match_operand:SI 3 "s_register_operand" "r"))
+			[(match_operand:SI 1 "s_register_operand" "r,r")
+			 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
+			(match_operand:SI 3 "s_register_operand" "r,r"))
		(const_int 0)))
-	(set (match_operand:SI 4 "s_register_operand" "=r")
+	(set (match_operand:SI 4 "s_register_operand" "=r,r")
	     (and:SI (not:SI (match_op_dup 0
		     [(match_dup 1)
		      (match_dup 2)]))
		     (match_dup 3)))])]
-  "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
+  "TARGET_32BIT"
  "bics%?\\t%4, %3, %1%S0"
  [(set_attr "predicable" "yes")
+   (set_attr "arch" "32,a")
   (set_attr "conds" "set")
   (set_attr "shift" "1")
-   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
-		      (const_string "logic_shift_imm")
-		      (const_string "logic_shift_reg")))]
+   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
)
(define_insn "*andsi_notsi_si_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
(match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
)
(define_insn "*andsi_notsi_si_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
(match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
)
(define_insn "*iorsi3_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
(match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
(const_int 0)))
)
(define_insn "*iorsi3_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
(match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
(const_int 0)))
)
(define_insn "*xorsi3_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operand:SI 2 "arm_rhs_operand" "I,r"))
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
)
(define_insn "*xorsi3_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
(match_operand:SI 1 "arm_rhs_operand" "I,r"))
(const_int 0)))]
"TARGET_32BIT"
(set_attr "type" "multiple")]
)
+
+;; Expander for the saturating add/subtract builtins (SSPLUSMINUS
+;; iterator, e.g. qadd/qsub): pick the Q-setting clone of the insn below
+;; when the Q bit is read in this function.
+(define_expand "arm_<ss_op>"
+  [(set (match_operand:SI 0 "s_register_operand")
+	(SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
+			(match_operand:SI 2 "s_register_operand")))]
+  "TARGET_DSP_MULTIPLY"
+  {
+    if (ARM_Q_BIT_READ)
+      emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
+					    operands[1], operands[2]));
+    else
+      emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
+    DONE;
+  }
+)
+
+;; Saturating add/subtract insn, cloned by the "add_setq" define_subst into
+;; a plain (!ARM_Q_BIT_READ) and a "_setq" (explicit Q write) variant.
+(define_insn "arm_<ss_op><add_clobber_q_name>_insn"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+	(SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
+			(match_operand:SI 2 "s_register_operand" "r")))]
+  "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
+  "<ss_op>%?\t%0, %1, %2"
+  [(set_attr "predicable" "yes")
+   (set_attr "type" "alu_dsp_reg")]
+)
+
+;; Iterator/attributes for the saturation patterns below: SAT ranges over
+;; the outer smin/smax, SATrev gives the matching inner code, and
+;; SATlo/SAThi select which operand holds the lower/upper bound.
(define_code_iterator SAT [smin smax])
(define_code_attr SATrev [(smin "smax") (smax "smin")])
(define_code_attr SATlo [(smin "1") (smax "2")])
(define_code_attr SAThi [(smin "2") (smax "1")])
-(define_insn "*satsi_<SAT:code>"
+;; Expander for the __ssat ACLE builtin: saturate operand 1 to a signed
+;; operand-2-bit value by computing the [-2^(n-1), 2^(n-1)-1] bounds and
+;; emitting the smin/smax saturation pattern (Q-setting clone when the Q
+;; bit is read).
+(define_expand "arm_ssat"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "s_register_operand")
+   (match_operand:SI 2 "const_int_operand")]
+  "TARGET_32BIT && arm_arch6"
+  {
+    HOST_WIDE_INT val = INTVAL (operands[2]);
+    /* The builtin checking code should have ensured the right
+       range for the immediate.  */
+    gcc_assert (IN_RANGE (val, 1, 32));
+    HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
+    HOST_WIDE_INT lower_bound = -upper_bound - 1;
+    rtx up_rtx = gen_int_mode (upper_bound, SImode);
+    rtx lo_rtx = gen_int_mode (lower_bound, SImode);
+    if (ARM_Q_BIT_READ)
+      emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
+				      up_rtx, operands[1]));
+    else
+      emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
+    DONE;
+  }
+)
+
+;; Expander for the __usat ACLE builtin: saturate operand 1 to the
+;; unsigned range [0, 2^n - 1].  It reuses the same smin/smax pattern as
+;; arm_ssat; arm_sat_operator_match in the insn decides SSAT vs USAT from
+;; the bounds.
+(define_expand "arm_usat"
+  [(match_operand:SI 0 "s_register_operand")
+   (match_operand:SI 1 "s_register_operand")
+   (match_operand:SI 2 "const_int_operand")]
+  "TARGET_32BIT && arm_arch6"
+  {
+    HOST_WIDE_INT val = INTVAL (operands[2]);
+    /* The builtin checking code should have ensured the right
+       range for the immediate.  */
+    gcc_assert (IN_RANGE (val, 0, 31));
+    HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
+    rtx up_rtx = gen_int_mode (upper_bound, SImode);
+    rtx lo_rtx = CONST0_RTX (SImode);
+    if (ARM_Q_BIT_READ)
+      emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
+				      operands[1]));
+    else
+      emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
+    DONE;
+  }
+)
+
+;; Read the APSR into a core register (MRS), modelled as a use of the Q-bit
+;; pseudo register so Q-setting insns are not moved past it.
+(define_insn "arm_get_apsr"
+  [(set (match_operand:SI 0 "s_register_operand" "=r")
+	(unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
+  "TARGET_ARM_QBIT"
+  "mrs%?\t%0, APSR"
+  [(set_attr "predicable" "yes")
+   (set_attr "conds" "use")]
+)
+
+;; Write the NZCVQ fields of the APSR from a core register (MSR); volatile
+;; since it changes the flags as an architectural side effect.
+(define_insn "arm_set_apsr"
+  [(set (reg:CC APSRQ_REGNUM)
+	(unspec_volatile:CC
+	 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
+  "TARGET_ARM_QBIT"
+  "msr%?\tAPSR_nzcvq, %0"
+  [(set_attr "predicable" "yes")
+   (set_attr "conds" "set")]
+)
+
+;; Read the APSR and extract the Q bit (bit 27)
+;; Implements the __saturation_occurred ACLE intrinsic: MRS then a
+;; single-bit zero_extract at position 27.
+(define_expand "arm_saturation_occurred"
+  [(match_operand:SI 0 "s_register_operand")]
+  "TARGET_ARM_QBIT"
+  {
+    rtx apsr = gen_reg_rtx (SImode);
+    emit_insn (gen_arm_get_apsr (apsr));
+    emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
+			  gen_int_mode (27, SImode)));
+    DONE;
+  }
+)
+
+;; Read the APSR and set the Q bit (bit position 27) according to operand 0
+;; Implements __set_saturation: normalise operand 0 to 0/1 (constants are
+;; folded directly; registers go through a NE-0 cstore), insert it at bit
+;; 27 of the read-back APSR value, then write the APSR back.
+(define_expand "arm_set_saturation"
+  [(match_operand:SI 0 "reg_or_int_operand")]
+  "TARGET_ARM_QBIT"
+  {
+    rtx apsr = gen_reg_rtx (SImode);
+    emit_insn (gen_arm_get_apsr (apsr));
+    rtx to_insert = gen_reg_rtx (SImode);
+    if (CONST_INT_P (operands[0]))
+      emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
+		      ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
+    else
+      {
+	rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
+	emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
+				  CONST0_RTX (SImode)));
+      }
+    emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
+			 gen_int_mode (27, SImode), to_insert));
+    emit_insn (gen_arm_set_apsr (apsr));
+    DONE;
+  }
+)
+
+(define_insn "satsi_<SAT:code><add_clobber_q_name>"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
(match_operand:SI 1 "const_int_operand" "i"))
(match_operand:SI 2 "const_int_operand" "i")))]
- "TARGET_32BIT && arm_arch6
+ "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
&& arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
{
int mask;
(match_operand:SI 5 "const_int_operand" "i")])
(match_operand:SI 1 "const_int_operand" "i"))
(match_operand:SI 2 "const_int_operand" "i")))]
- "TARGET_32BIT && arm_arch6
+ "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
&& arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
{
int mask;
)
(define_insn "*shiftsi3_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r,r")
(match_operand:SI 2 "arm_rhs_operand" "M,r")])
(const_int 0)))
)
(define_insn "*shiftsi3_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r,r")
(match_operand:SI 2 "arm_rhs_operand" "M,r")])
(const_int 0)))
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(not:SI (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r,r")
- (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
+ (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
"TARGET_32BIT"
"mvn%?\\t%0, %1%S3"
[(set_attr "predicable" "yes")
(set_attr "type" "mvn_shift,mvn_shift_reg")])
(define_insn "*not_shiftsi_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(not:SI (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r,r")
- (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
+ (match_operand:SI 2 "shift_amount_operand" "M,r")]))
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
(set_attr "type" "mvn_shift,mvn_shift_reg")])
(define_insn "*not_shiftsi_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(not:SI (match_operator:SI 3 "shift_operator"
[(match_operand:SI 1 "s_register_operand" "r,r")
- (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
+ (match_operand:SI 2 "shift_amount_operand" "M,r")]))
(const_int 0)))
(clobber (match_scratch:SI 0 "=r,r"))]
"TARGET_32BIT"
\f
;; Unary arithmetic insns
-(define_expand "negvsi3"
- [(match_operand:SI 0 "register_operand")
- (match_operand:SI 1 "register_operand")
+;; Negation with signed-overflow check, unified over SI and DI (replaces
+;; the separate negvsi3/negvdi3 expanders): compute 0 - operand 1 via
+;; subv<mode>4, which branches to the label in operand 2 on overflow.
+(define_expand "negv<SIDI:mode>3"
+ [(match_operand:SIDI 0 "s_register_operand")
+ (match_operand:SIDI 1 "s_register_operand")
 (match_operand 2 "")]
 "TARGET_32BIT"
 {
- emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
- arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
-
- DONE;
-})
-
-(define_expand "negvdi3"
- [(match_operand:DI 0 "s_register_operand")
- (match_operand:DI 1 "s_register_operand")
- (match_operand 2 "")]
- "TARGET_ARM"
-{
- emit_insn (gen_negdi2_compare (operands[0], operands[1]));
- arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
-
+ emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
+ operands[2]));
 DONE;
})
-
-(define_insn "negdi2_compare"
- [(set (reg:CC CC_REGNUM)
- (compare:CC
- (const_int 0)
- (match_operand:DI 1 "register_operand" "r,r")))
- (set (match_operand:DI 0 "register_operand" "=&r,&r")
- (minus:DI (const_int 0) (match_dup 1)))]
- "TARGET_ARM"
- "@
- rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
- rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
- [(set_attr "conds" "set")
- (set_attr "arch" "a,t2")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
(define_expand "negsi2"
[(set (match_operand:SI 0 "s_register_operand")
(neg:SI (match_operand:SI 1 "s_register_operand")))]
)
(define_insn "*notsi_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand" "=r")
(not:SI (match_dup 1)))]
)
(define_insn "*notsi_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"TARGET_32BIT"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(unspec:SI
[(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "s_register_operand" "r")
- (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
"TARGET_INT_SIMD"
"usada8%?\\t%0, %1, %2, %3"
[(set_attr "predicable" "yes")
[(set_attr "predicable" "yes")
(set_attr "type" "smlald")])
+;; SIMD32 operations (SIMD32_GE iterator) that set the GE bits as a side
+;; effect.  The write to the APSRGE pseudo register makes that side
+;; effect explicit, so consumers of the GE bits (e.g. arm_sel) stay
+;; ordered after these insns.
+(define_insn "arm_<simd32_op>"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
+ (set (reg:CC APSRGE_REGNUM)
+ (unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
+ "TARGET_INT_SIMD"
+ "<simd32_op>%?\\t%0, %1, %2"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_sreg")])
+
+;; Ternary SIMD32 operations that may saturate (SIMD32_TERNOP_Q).  The
+;; add_setq subst generates a second "_setq" variant whose pattern also
+;; sets the APSRQ pseudo register; this base variant is enabled only
+;; when nothing reads the Q bit (<add_clobber_q_pred> = !ARM_Q_BIT_READ).
+(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
+ "TARGET_INT_SIMD && <add_clobber_q_pred>"
+ "<simd32_op>%?\\t%0, %1, %2, %3"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_sreg")])
+
+;; Expander for the ternary SIMD32 Q-setting ops: emit the "_setq"
+;; variant when the Q bit is live (ARM_Q_BIT_READ), the plain variant
+;; otherwise.
+(define_expand "arm_<simd32_op>"
+ [(set (match_operand:SI 0 "s_register_operand")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "s_register_operand")
+ (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
+ "TARGET_INT_SIMD"
+ {
+ if (ARM_Q_BIT_READ)
+ emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
+ operands[2], operands[3]));
+ else
+ emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
+ operands[2], operands[3]));
+ DONE;
+ }
+)
+
+;; Binary SIMD32 operations that may saturate (SIMD32_BINOP_Q); the
+;; add_setq subst again provides a "_setq" twin that also sets APSRQ,
+;; while this form requires the Q bit not to be read.
+(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
+ "TARGET_INT_SIMD && <add_clobber_q_pred>"
+ "<simd32_op>%?\\t%0, %1, %2"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_sreg")])
+
+;; Expander for the binary SIMD32 Q-setting ops: choose the "_setq" or
+;; plain variant depending on whether the Q bit is read.
+(define_expand "arm_<simd32_op>"
+ [(set (match_operand:SI 0 "s_register_operand")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
+ "TARGET_INT_SIMD"
+ {
+ if (ARM_Q_BIT_READ)
+ emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
+ operands[2]));
+ else
+ emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
+ operands[2]));
+ DONE;
+ }
+)
+
+;; ssat16/usat16 (USSAT16 iterator; <sup> selects the signed/unsigned
+;; immediate range predicate).  Note the output template prints the
+;; saturation bound %2 before the source register %1, matching the
+;; {s,u}sat16 Rd, #sat, Rn assembly syntax.
+(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
+ "TARGET_INT_SIMD && <add_clobber_q_pred>"
+ "<simd32_op>%?\\t%0, %2, %1"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_sreg")])
+
+;; Expander for ssat16/usat16: emit the Q-bit-setting "_setq" form when
+;; the Q bit is live, the plain form otherwise.
+(define_expand "arm_<simd32_op>"
+ [(set (match_operand:SI 0 "s_register_operand")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
+ "TARGET_INT_SIMD"
+ {
+ if (ARM_Q_BIT_READ)
+ emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
+ operands[2]));
+ else
+ emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
+ operands[2]));
+ DONE;
+ }
+)
+
+;; sel: select each result byte from %1 or %2 according to the GE bits.
+;; The explicit (reg:CC APSRGE_REGNUM) use keeps this insn ordered after
+;; the GE-setting SIMD32 patterns.
+(define_insn "arm_sel"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")
+ (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
+ "TARGET_INT_SIMD"
+ "sel%?\\t%0, %1, %2"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_sreg")])
+
(define_expand "extendsfdf2"
[(set (match_operand:DF 0 "s_register_operand")
(float_extend:DF (match_operand:SF 1 "s_register_operand")))]
+;; Compare a register against a shifted register (CMP Rn, Rm, <shift>).
+;; Alternative 0: immediate shift amount (any 32-bit target);
+;; alternative 1: register-specified shift amount (ARM state only).
(define_insn "*cmpsi_shiftsi"
 [(set (reg:CC CC_REGNUM)
- (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
+ (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
 (match_operator:SI 3 "shift_operator"
- [(match_operand:SI 1 "s_register_operand" "r,r,r")
- (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
+ [(match_operand:SI 1 "s_register_operand" "r,r")
+ (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
 "TARGET_32BIT"
 "cmp\\t%0, %1%S3"
 [(set_attr "conds" "set")
 (set_attr "shift" "1")
- (set_attr "arch" "32,a,a")
- (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alus_shift_imm,alus_shift_reg")])
+;; As *cmpsi_shiftsi but with the comparison operands swapped, using
+;; CC_SWPmode to record the swapped operand order.
(define_insn "*cmpsi_shiftsi_swp"
 [(set (reg:CC_SWP CC_REGNUM)
 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
- [(match_operand:SI 1 "s_register_operand" "r,r,r")
- (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
- (match_operand:SI 0 "s_register_operand" "r,r,r")))]
+ [(match_operand:SI 1 "s_register_operand" "r,r")
+ (match_operand:SI 2 "shift_amount_operand" "M,r")])
+ (match_operand:SI 0 "s_register_operand" "r,r")))]
 "TARGET_32BIT"
 "cmp%?\\t%0, %1%S3"
 [(set_attr "conds" "set")
 (set_attr "shift" "1")
- (set_attr "arch" "32,a,a")
- (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alus_shift_imm,alus_shift_reg")])
+;; Compare a register with the negation of a shifted register using CMN;
+;; CC_Zmode records that only the Z flag of the result is valid.
+;; Alternative 0 takes an immediate shift amount, alternative 1 a
+;; register shift amount (ARM state only).
(define_insn "*arm_cmpsi_negshiftsi_si"
 [(set (reg:CC_Z CC_REGNUM)
 (compare:CC_Z
 (neg:SI (match_operator:SI 1 "shift_operator"
- [(match_operand:SI 2 "s_register_operand" "r")
- (match_operand:SI 3 "reg_or_int_operand" "rM")]))
- (match_operand:SI 0 "s_register_operand" "r")))]
- "TARGET_ARM"
+ [(match_operand:SI 2 "s_register_operand" "r,r")
+ (match_operand:SI 3 "shift_amount_operand" "M,r")]))
+ (match_operand:SI 0 "s_register_operand" "r,r")))]
+ "TARGET_32BIT"
 "cmn%?\\t%0, %2%S1"
 [(set_attr "conds" "set")
- (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
- (const_string "alus_shift_imm")
- (const_string "alus_shift_reg")))
+ (set_attr "arch" "32,a")
+ (set_attr "shift" "2")
+ (set_attr "type" "alus_shift_imm,alus_shift_reg")
 (set_attr "predicable" "yes")]
)
-;; DImode comparisons. The generic code generates branches that
-;; if-conversion cannot reduce to a conditional compare, so we do
-;; that directly.
-
-(define_insn "*arm_cmpdi_insn"
- [(set (reg:CC_NCV CC_REGNUM)
- (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
- (match_operand:DI 1 "arm_di_operand" "rDi")))
- (clobber (match_scratch:SI 2 "=r"))]
- "TARGET_32BIT"
- "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
- [(set_attr "conds" "set")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*arm_cmpdi_unsigned"
- [(set (reg:CC_CZ CC_REGNUM)
- (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
- (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
-
- "TARGET_32BIT"
- "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
- "&& reload_completed"
- [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 2) (match_dup 3)))
- (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
- (set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 0) (match_dup 1))))]
- {
- operands[2] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- if (CONST_INT_P (operands[1]))
- operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
- else
- operands[3] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }
- [(set_attr "conds" "set")
- (set_attr "enabled_for_short_it" "yes,yes,no,*")
- (set_attr "arch" "t2,t2,t2,a")
- (set_attr "length" "6,6,10,8")
- (set_attr "type" "multiple")]
-)
-
; This insn allows redundant compares to be removed by cse, nothing should
; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
; is deleted later on. The match_dup will match the mode here, so that
(define_expand "return_addr_mask"
[(set (match_dup 1)
- (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
+ (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
(const_int 0)))
(set (match_operand:SI 0 "s_register_operand")
(if_then_else:SI (eq (match_dup 1) (const_int 0))
(const_int 67108860)))] ; 0x03fffffc
"TARGET_ARM"
"
- operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
+ operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
")
(define_insn "*check_arch2"
- [(set (match_operand:CC_NOOV 0 "cc_register" "")
- (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
+ [(set (match_operand:CC_NZ 0 "cc_register" "")
+ (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
(const_int 0)))]
"TARGET_ARM"
"teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
"")
(define_insn "*arith_shiftsi_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(match_operator:SI 1 "shiftable_operator"
[(match_operator:SI 3 "shift_operator"
[(match_operand:SI 4 "s_register_operand" "r,r")
(set_attr "type" "alus_shift_imm,alus_shift_reg")])
(define_insn "*arith_shiftsi_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(match_operator:SI 1 "shiftable_operator"
[(match_operator:SI 3 "shift_operator"
[(match_operand:SI 4 "s_register_operand" "r,r")
(set_attr "type" "alus_shift_imm,alus_shift_reg")])
(define_insn "*sub_shiftsi_compare0"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
- (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
+ (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operator:SI 2 "shift_operator"
- [(match_operand:SI 3 "s_register_operand" "r,r,r")
- (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
+ [(match_operand:SI 3 "s_register_operand" "r,r")
+ (match_operand:SI 4 "shift_amount_operand" "M,r")]))
(const_int 0)))
- (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+ (set (match_operand:SI 0 "s_register_operand" "=r,r")
(minus:SI (match_dup 1)
(match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
"TARGET_32BIT"
"subs%?\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
- (set_attr "arch" "32,a,a")
- (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alus_shift_imm,alus_shift_reg")])
(define_insn "*sub_shiftsi_compare0_scratch"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
- (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
+ (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
(match_operator:SI 2 "shift_operator"
- [(match_operand:SI 3 "s_register_operand" "r,r,r")
- (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
+ [(match_operand:SI 3 "s_register_operand" "r,r")
+ (match_operand:SI 4 "shift_amount_operand" "M,r")]))
(const_int 0)))
- (clobber (match_scratch:SI 0 "=r,r,r"))]
+ (clobber (match_scratch:SI 0 "=r,r"))]
"TARGET_32BIT"
"subs%?\\t%0, %1, %3%S2"
[(set_attr "conds" "set")
(set_attr "shift" "3")
- (set_attr "arch" "32,a,a")
- (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
+ (set_attr "arch" "32,a")
+ (set_attr "type" "alus_shift_imm,alus_shift_reg")])
\f
(define_insn_and_split "*and_scc"
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT && reload_completed"
[(parallel
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
(const_int 0)))
(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
- (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
+ (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
(set (match_dup 0) (const_int 1)))])
(define_insn_and_split "*compare_scc"
)
(define_split
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (ior:SI
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (ior:SI
(and:SI (match_operand:SI 0 "s_register_operand" "")
(const_int 1))
(match_operator:SI 1 "arm_comparison_operator"
[(set (match_dup 4)
(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
(match_dup 0)))
- (set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
+ (set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
(const_int 0)))]
"")
(define_split
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (ior:SI
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (ior:SI
(match_operator:SI 1 "arm_comparison_operator"
[(match_operand:SI 2 "s_register_operand" "")
(match_operand:SI 3 "arm_add_operand" "")])
[(set (match_dup 4)
(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
(match_dup 0)))
- (set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
+ (set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
(const_int 0)))]
"")
;; ??? The conditional patterns above need checking for Thumb-2 usefulness
"TARGET_32BIT"
"#"
"&& reload_completed"
- [(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV
+ [(set (reg:CC_NZ CC_REGNUM)
+ (compare:CC_NZ
(plus:SI (match_dup 3)
(match_dup 4))
(const_int 0)))