;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
;; put the duplicated register first, and not try the commutative version.
(define_insn_and_split "*arm_addsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=r, k,r,r, k, r, k,k,r, k, r")
- (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,k,r,rk,k, rk,k,r,rk,k, rk")
- (match_operand:SI 2 "reg_or_int_operand" "rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
+ [(set (match_operand:SI 0 "s_register_operand" "=rk, r,k, r,r, k, r, k,k,r, k, r")
+ (plus:SI (match_operand:SI 1 "s_register_operand" "%0, rk,k, r,rk,k, rk,k,r,rk,k, rk")
+ (match_operand:SI 2 "reg_or_int_operand" "rk, rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
"TARGET_32BIT"
"@
+ add%?\\t%0, %0, %2
add%?\\t%0, %1, %2
add%?\\t%0, %1, %2
add%?\\t%0, %2, %1
operands[1], 0);
DONE;
"
- [(set_attr "length" "4,4,4,4,4,4,4,4,4,4,16")
+ [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,16")
(set_attr "predicable" "yes")
- (set_attr "arch" "*,*,*,t2,t2,*,*,a,t2,t2,*")]
+ (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")]
)
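
;; A minimal sketch of the case the comment above describes, assuming the
;; new first alternative ("=rk" / "0" / "rk", arch "t2", length "2"); the
;; register r4 is an arbitrary example, not taken from the patch.  When
;; reload rewrites the address (plus (reg r4) (reg sp)) into r4, it puts
;; the duplicated register first:
;;
;;   (set (reg:SI 4 r4)
;;        (plus:SI (reg:SI 4 r4) (reg:SI 13 sp)))
;;
;; Because operand 1 is tied to operand 0 here, this can match the new
;; first alternative and, under Thumb-2, be emitted via the template
;; "add%?\\t%0, %0, %2", i.e. the 2-byte encoding
;;
;;   add     r4, r4, sp
;;
;; instead of the 4-byte "add r4, sp, r4" produced by the old r/r/k
;; alternative.
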
(define_insn_and_split "*thumb1_addsi3"