;;- Machine description for ARM for GNU compiler
-;; Copyright (C) 1991-2016 Free Software Foundation, Inc.
+;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
;; and Martin Simmons (@harleqn.co.uk).
;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
(define_constants
[(R0_REGNUM 0) ; First CORE register
(R1_REGNUM 1) ; Second CORE register
+ (R4_REGNUM 4) ; Fifth CORE register
+ (FDPIC_REGNUM 9) ; FDPIC register
(IP_REGNUM 12) ; Scratch register
(SP_REGNUM 13) ; Stack pointer
(LR_REGNUM 14) ; Return address register
(const (if_then_else (symbol_ref "TARGET_THUMB1")
(const_string "yes") (const_string "no"))))
-; We use this attribute to disable alternatives that can produce 32-bit
-; instructions inside an IT-block in Thumb2 state. ARMv8 deprecates IT blocks
-; that contain 32-bit instructions.
-(define_attr "enabled_for_depr_it" "no,yes" (const_string "yes"))
+; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
+; The arm_restrict_it flag enables the "short IT" feature which
+; restricts IT blocks to a single 16-bit instruction.
+; This attribute should only be used on 16-bit Thumb-2 instructions
+; which may be predicated (the "predicable" attribute must be set).
+(define_attr "predicable_short_it" "no,yes" (const_string "no"))
-; This attribute is used to disable a predicated alternative when we have
-; arm_restrict_it.
-(define_attr "predicable_short_it" "no,yes" (const_string "yes"))
+; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
+; This attribute should only be used on instructions which may emit
+; an IT block in their expansion which is not a short IT.
+; (Alternatives with this attribute set to "no" are disabled whenever
+; arm_restrict_it is in effect -- see the "enabled" computation below.)
+(define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
;; Operand number of an input operand that is shifted. Zero if the
;; given instruction does not shift one of its input operands.
; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
; Baseline. This attribute is used to compute attribute "enabled",
; use type "any" to enable an alternative in all cases.
-(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
+(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
(const_string "any"))
(define_attr "arch_enabled" "no,yes"
(match_test "TARGET_THUMB1 && arm_arch8"))
(const_string "yes")
- (and (eq_attr "arch" "avoid_neon_for_64bits")
- (match_test "TARGET_NEON")
- (not (match_test "TARGET_PREFER_NEON_64BITS")))
- (const_string "yes")
-
- (and (eq_attr "arch" "neon_for_64bits")
- (match_test "TARGET_NEON")
- (match_test "TARGET_PREFER_NEON_64BITS"))
- (const_string "yes")
-
(and (eq_attr "arch" "iwmmxt2")
(match_test "TARGET_REALLY_IWMMXT2"))
(const_string "yes")
(match_test "arm_restrict_it")))
(const_string "no")
- (and (eq_attr "enabled_for_depr_it" "no")
+ (and (eq_attr "enabled_for_short_it" "no")
(match_test "arm_restrict_it"))
(const_string "no")
- (and (eq_attr "use_literal_pool" "yes")
- (match_test "arm_disable_literal_pool"))
- (const_string "no")
-
(eq_attr "arch_enabled" "no")
(const_string "no")]
(const_string "yes")))
; CLOB means that the condition codes are altered in an undefined manner, if
; they are altered at all
;
-; UNCONDITIONAL means the instruction can not be conditionally executed and
+; UNCONDITIONAL means the instruction cannot be conditionally executed and
; that the instruction does not use or alter the condition codes.
;
; NOCOND means that the instruction does not use or alter the condition
; to stall the processor. Used with model_wbuf above.
;; "yes" for the insn types "block", "call" and "load_4" (renamed here
;; from the old "load1" type name); only used by the model_wbuf
;; scheduling description mentioned above.
(define_attr "write_conflict" "no,yes"
(if_then_else (eq_attr "type"
- "block,call,load1")
+ "block,call,load_4")
(const_string "yes")
(const_string "no")))
(define_attr "generic_sched" "yes,no"
(const (if_then_else
(ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
- arm926ejs,arm1020e,arm1026ejs,arm1136js,\
+ arm926ejs,arm10e,arm1026ejs,arm1136js,\
arm1136jfs,cortexa5,cortexa7,cortexa8,\
cortexa9,cortexa12,cortexa15,cortexa17,\
cortexa53,cortexa57,cortexm4,cortexm7,\
(define_attr "generic_vfp" "yes,no"
(const (if_then_else
(and (eq_attr "fpu" "vfp")
- (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,\
+ (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
cortexa8,cortexa9,cortexa53,cortexm4,\
cortexm7,marvell_pj4,xgene1")
(eq_attr "tune_cortexr4" "no"))
(define_expand "adddi3"
[(parallel
- [(set (match_operand:DI 0 "s_register_operand" "")
- (plus:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:DI 2 "arm_adddi_operand" "")))
+ [(set (match_operand:DI 0 "s_register_operand")
+ (plus:DI (match_operand:DI 1 "s_register_operand")
+ (match_operand:DI 2 "reg_or_int_operand")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_EITHER"
"
if (TARGET_THUMB1)
{
- if (!REG_P (operands[1]))
- operands[1] = force_reg (DImode, operands[1]);
if (!REG_P (operands[2]))
- operands[2] = force_reg (DImode, operands[2]);
- }
- "
-)
-
-(define_insn_and_split "*arm_adddi3"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
- (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
- (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT && !TARGET_NEON"
- "#"
- "TARGET_32BIT && reload_completed
- && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
- [(parallel [(set (reg:CC_C CC_REGNUM)
- (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
- (match_dup 1)))
- (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
- (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*adddi_sesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (plus:DI (sign_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "#"
- "TARGET_32BIT && reload_completed"
- [(parallel [(set (reg:CC_C CC_REGNUM)
- (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
- (match_dup 1)))
- (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
- (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
- (const_int 31))
- (match_dup 4))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
+ operands[2] = force_reg (DImode, operands[2]);
+ }
+ else
+ {
+ rtx lo_result, hi_result, lo_dest, hi_dest;
+ rtx lo_op1, hi_op1, lo_op2, hi_op2;
+ arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
+ &lo_op2, &hi_op2);
+ lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
+ hi_result = hi_dest = gen_highpart (SImode, operands[0]);
+
+ if (lo_op2 == const0_rtx)
+ {
+ lo_dest = lo_op1;
+ if (!arm_add_operand (hi_op2, SImode))
+ hi_op2 = force_reg (SImode, hi_op2);
+ /* Assume hi_op2 won't also be zero. */
+ emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
+ }
+ else
+ {
+ if (!arm_add_operand (lo_op2, SImode))
+ lo_op2 = force_reg (SImode, lo_op2);
+ if (!arm_not_operand (hi_op2, SImode))
+ hi_op2 = force_reg (SImode, hi_op2);
+
+ emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
+ if (hi_op2 == const0_rtx)
+ emit_insn (gen_add0si3_carryin_ltu (hi_dest, hi_op1));
+ else
+ emit_insn (gen_addsi3_carryin_ltu (hi_dest, hi_op1, hi_op2));
+ }
-(define_insn_and_split "*adddi_zesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (plus:DI (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "#"
- "TARGET_32BIT && reload_completed"
- [(parallel [(set (reg:CC_C CC_REGNUM)
- (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
- (match_dup 1)))
- (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
- (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
+ if (lo_result != lo_dest)
+ emit_move_insn (lo_result, lo_dest);
+ if (hi_result != hi_dest)
+ emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
+ DONE;
+ }
"
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
)
(define_expand "addv<mode>4"
"TARGET_32BIT"
{
emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
- arm_gen_unlikely_cbranch (NE, CC_Cmode, operands[3]);
+ arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
DONE;
})
(define_expand "addsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (plus:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "reg_or_int_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (plus:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_EITHER"
"
if (TARGET_32BIT && CONST_INT_P (operands[2]))
]
)
-(define_insn_and_split "adddi3_compareV"
+(define_insn "adddi3_compareV"
[(set (reg:CC_V CC_REGNUM)
(ne:CC_V
(plus:TI
- (sign_extend:TI (match_operand:DI 1 "register_operand" "r"))
- (sign_extend:TI (match_operand:DI 2 "register_operand" "r")))
+ (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
+ (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
(sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
- (set (match_operand:DI 0 "register_operand" "=&r")
+ (set (match_operand:DI 0 "s_register_operand" "=&r")
(plus:DI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
- "#"
- "&& reload_completed"
- [(parallel [(set (reg:CC_C CC_REGNUM)
- (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
- (match_dup 1)))
- (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
- (parallel [(set (reg:CC_V CC_REGNUM)
- (ne:CC_V
- (plus:DI (plus:DI
- (sign_extend:DI (match_dup 4))
- (sign_extend:DI (match_dup 5)))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
- (plus:DI (sign_extend:DI
- (plus:SI (match_dup 4) (match_dup 5)))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
- (set (match_dup 3) (plus:SI (plus:SI
- (match_dup 4) (match_dup 5))
- (ltu:SI (reg:CC_C CC_REGNUM)
- (const_int 0))))])]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[5] = gen_highpart (SImode, operands[2]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
+ "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
[(set_attr "conds" "set")
(set_attr "length" "8")
(set_attr "type" "multiple")]
(set_attr "type" "alus_sreg")]
)
-(define_insn "*addsi3_compareV_upper"
- [(set (reg:CC_V CC_REGNUM)
- (ne:CC_V
- (plus:DI
- (plus:DI
- (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
- (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
- (plus:DI (sign_extend:DI
- (plus:SI (match_dup 1) (match_dup 2)))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
- (set (match_operand:SI 0 "register_operand" "=r")
- (plus:SI
- (plus:SI (match_dup 1) (match_dup 2))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- "TARGET_32BIT"
- "adcs%?\\t%0, %1, %2"
- [(set_attr "conds" "set")
- (set_attr "type" "adcs_reg")]
-)
-
-(define_insn_and_split "adddi3_compareC"
+;; 64-bit add that also sets the carry flag: CC_C is now set by the
+;; canonical carry-compare form (the DImode sum compared against
+;; operand 1), replacing the old TImode zero-extend formulation, and
+;; the insn is emitted directly as adds/adcs instead of being split
+;; after reload.
+(define_insn "adddi3_compareC"
[(set (reg:CC_C CC_REGNUM)
- (ne:CC_C
- (plus:TI
- (zero_extend:TI (match_operand:DI 1 "register_operand" "r"))
- (zero_extend:TI (match_operand:DI 2 "register_operand" "r")))
- (zero_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
+ (compare:CC_C
+ (plus:DI
+ (match_operand:DI 1 "register_operand" "r")
+ (match_operand:DI 2 "register_operand" "r"))
+ (match_dup 1)))
(set (match_operand:DI 0 "register_operand" "=&r")
(plus:DI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
- "#"
- "&& reload_completed"
- [(parallel [(set (reg:CC_C CC_REGNUM)
- (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
- (match_dup 1)))
- (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
- (parallel [(set (reg:CC_C CC_REGNUM)
- (ne:CC_C
- (plus:DI (plus:DI
- (zero_extend:DI (match_dup 4))
- (zero_extend:DI (match_dup 5)))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
- (plus:DI (zero_extend:DI
- (plus:SI (match_dup 4) (match_dup 5)))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
- (set (match_dup 3) (plus:SI
- (plus:SI (match_dup 4) (match_dup 5))
- (ltu:SI (reg:CC_C CC_REGNUM)
- (const_int 0))))])]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[5] = gen_highpart (SImode, operands[2]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
+ "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
[(set_attr "conds" "set")
(set_attr "length" "8")
(set_attr "type" "multiple")]
)
-(define_insn "*addsi3_compareC_upper"
- [(set (reg:CC_C CC_REGNUM)
- (ne:CC_C
- (plus:DI
- (plus:DI
- (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
- (zero_extend:DI (match_operand:SI 2 "register_operand" "r")))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
- (plus:DI (zero_extend:DI
- (plus:SI (match_dup 1) (match_dup 2)))
- (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
- (set (match_operand:SI 0 "register_operand" "=r")
- (plus:SI
- (plus:SI (match_dup 1) (match_dup 2))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- "TARGET_32BIT"
- "adcs%?\\t%0, %1, %2"
- [(set_attr "conds" "set")
- (set_attr "type" "adcs_reg")]
-)
-
(define_insn "addsi3_compareC"
[(set (reg:CC_C CC_REGNUM)
- (ne:CC_C
- (plus:DI
- (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
- (zero_extend:DI (match_operand:SI 2 "register_operand" "r")))
- (zero_extend:DI
- (plus:SI (match_dup 1) (match_dup 2)))))
+ (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r"))
+ (match_dup 1)))
(set (match_operand:SI 0 "register_operand" "=r")
(plus:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
(set_attr "type" "alus_sreg")]
)
-;; This is the canonicalization of addsi3_compare0_for_combiner when the
+;; This is the canonicalization of subsi3_compare when the
;; addend is a constant.
;; Operand 3 must be exactly the negation of operand 2; the insn
;; condition checks this with trunc_int_for_mode so that negating
;; INT_MIN wraps safely instead of invoking signed overflow.
(define_insn "cmpsi2_addneg"
[(set (reg:CC CC_REGNUM)
(compare:CC
(match_operand:SI 1 "s_register_operand" "r,r")
- (match_operand:SI 2 "arm_addimm_operand" "L,I")))
+ (match_operand:SI 2 "arm_addimm_operand" "I,L")))
(set (match_operand:SI 0 "s_register_operand" "=r,r")
(plus:SI (match_dup 1)
- (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
- "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
- "@
- adds%?\\t%0, %1, %3
- subs%?\\t%0, %1, #%n3"
+ (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
+ "TARGET_32BIT
+ && (INTVAL (operands[2])
+ == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
+{
+ /* For 0 and INT_MIN it is essential that we use subs, as adds will result
+ in different condition codes (like cmn rather than like cmp), so that
+ alternative comes first. Both alternatives can match for any 0x??000000
+ where except for 0 and INT_MIN it doesn't matter what we choose, and also
+ for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
+ as it is shorter. */
+ if (which_alternative == 0 && operands[3] != const1_rtx)
+ return "subs%?\\t%0, %1, #%n3";
+ else
+ return "adds%?\\t%0, %1, %3";
+}
[(set_attr "conds" "set")
(set_attr "type" "alus_sreg")]
)
;; Flag-setting SImode add, with the CC_C comparison taken against
;; operand 1.  The four new leading alternatives are 2-byte Thumb-2
;; encodings (arch "t2", length 2); the remaining three are the
;; original 4-byte forms.
(define_insn "*addsi3_compare_op1"
[(set (reg:CC_C CC_REGNUM)
(compare:CC_C
- (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
- (match_operand:SI 2 "arm_add_operand" "I,L,r"))
+ (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
+ (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
(match_dup 1)))
- (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+ (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
+ adds%?\\t%0, %1, %2
+ adds%?\\t%0, %0, %2
+ subs%?\\t%0, %1, #%n2
+ subs%?\\t%0, %0, #%n2
adds%?\\t%0, %1, %2
subs%?\\t%0, %1, #%n2
adds%?\\t%0, %1, %2"
[(set_attr "conds" "set")
- (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
+ (set_attr "arch" "t2,t2,t2,t2,*,*,*")
+ (set_attr "length" "2,2,2,2,4,4,4")
+ (set_attr "type"
+ "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
)
;; As *addsi3_compare_op1, but the CC_C comparison is taken against
;; operand 2 instead of operand 1.  The four new leading alternatives
;; are 2-byte Thumb-2 encodings (arch "t2", length 2).
(define_insn "*addsi3_compare_op2"
[(set (reg:CC_C CC_REGNUM)
(compare:CC_C
- (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
- (match_operand:SI 2 "arm_add_operand" "I,L,r"))
+ (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
+ (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
(match_dup 2)))
- (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+ (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"@
+ adds%?\\t%0, %1, %2
+ adds%?\\t%0, %0, %2
+ subs%?\\t%0, %1, #%n2
+ subs%?\\t%0, %0, #%n2
adds%?\\t%0, %1, %2
subs%?\\t%0, %1, #%n2
adds%?\\t%0, %1, %2"
[(set_attr "conds" "set")
- (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
+ (set_attr "arch" "t2,t2,t2,t2,*,*,*")
+ (set_attr "length" "2,2,2,2,4,4,4")
+ (set_attr "type"
+ "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
)
(define_insn "*compare_addsi2_op0"
(set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
)
-(define_insn "*addsi3_carryin_<optab>"
+(define_insn "addsi3_carryin_<optab>"
[(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
(match_operand:SI 2 "arm_not_operand" "0,rI,K"))
(set_attr "type" "adc_reg,adc_reg,adc_imm")]
)
+;; Canonicalization of the above when the immediate is zero.
+;; Adds only the flag-derived value (the LTUGEU code iterator covers
+;; both the ltu and geu forms) to operand 1, emitted as
+;; "adc rd, rn, #0".
+(define_insn "add0si3_carryin_<optab>"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
+ (match_operand:SI 1 "arm_not_operand" "r")))]
+ "TARGET_32BIT"
+ "adc%?\\t%0, %1, #0"
+ [(set_attr "conds" "use")
+ (set_attr "predicable" "yes")
+ (set_attr "length" "4")
+ (set_attr "type" "adc_imm")]
+)
+
(define_insn "*addsi3_carryin_alt2_<optab>"
[(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
(plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
(match_operand:SI 1 "s_register_operand" "%l,r,r"))
- (match_operand:SI 2 "arm_rhs_operand" "l,rI,K")))]
+ (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
"TARGET_32BIT"
"@
adc%?\\t%0, %1, %2
(match_operator:SI 2 "shift_operator"
[(match_operand:SI 3 "s_register_operand" "r")
(match_operand:SI 4 "reg_or_int_operand" "rM")])
- (match_operand:SI 1 "s_register_operand" "r"))
- (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
+ (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0)))
+ (match_operand:SI 1 "s_register_operand" "r")))]
"TARGET_32BIT"
"adc%?\\t%0, %1, %3%S2"
[(set_attr "conds" "use")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
(const_string "alu_shift_imm")
(const_string "alu_shift_reg")))]
DONE;
})
-(define_insn_and_split "subdi3_compare1"
+;; 64-bit subtract that sets CC from the full DImode comparison of the
+;; two inputs; now emitted directly as subs/sbcs instead of being
+;; split after reload.
+;; NOTE(review): this template separates the two instructions with ";"
+;; whereas *arm_subdi3 below uses "\;" -- confirm which separator
+;; convention is intended for multi-insn templates here.
+(define_insn "subdi3_compare1"
[(set (reg:CC CC_REGNUM)
(compare:CC
- (match_operand:DI 1 "register_operand" "r")
- (match_operand:DI 2 "register_operand" "r")))
- (set (match_operand:DI 0 "register_operand" "=&r")
+ (match_operand:DI 1 "s_register_operand" "r")
+ (match_operand:DI 2 "s_register_operand" "r")))
+ (set (match_operand:DI 0 "s_register_operand" "=&r")
(minus:DI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
- "#"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 1) (match_dup 2)))
- (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
- (parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 4) (match_dup 5)))
- (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[5] = gen_highpart (SImode, operands[2]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }
+ "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
[(set_attr "conds" "set")
(set_attr "length" "8")
(set_attr "type" "multiple")]
)
;; Subtract with borrow; the borrow term is now matched by the
;; arm_borrow_operation predicate rather than a hard-coded
;; (ltu (reg:CC_C ...)) expression.  The new third alternative takes a
;; "Pz" operand 1 (presumably the constant 0 -- confirm against the
;; constraint definition) and emits "sbc rd, rm, rm, lsl #1", i.e.
;; rm - 2*rm - borrow = -rm - borrow, avoiding the RSC form, which is
;; restricted to alternative 2 (arch "a").
(define_insn "*subsi3_carryin"
- [(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I")
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
+ [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
+ (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
+ (match_operand:SI 2 "s_register_operand" "r,r,r"))
+ (match_operand:SI 3 "arm_borrow_operation" "")))]
"TARGET_32BIT"
"@
sbc%?\\t%0, %1, %2
- rsc%?\\t%0, %2, %1"
+ rsc%?\\t%0, %2, %1
+ sbc%?\\t%0, %2, %2, lsl #1"
[(set_attr "conds" "use")
- (set_attr "arch" "*,a")
+ (set_attr "arch" "*,a,t2")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "type" "adc_reg,adc_imm")]
+ (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
)
;; reg + negative-immediate - borrow, emitted as SBC with the negated
;; immediate.  The predicate/constraint/modifier switch (K/#%B2 to
;; L/#%n2) moves from the bitwise-NOT form to the arithmetic-negation
;; form of the immediate.
(define_insn "*subsi3_carryin_const"
[(set (match_operand:SI 0 "s_register_operand" "=r")
- (minus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_not_immediate_operand" "K"))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
+ (minus:SI (plus:SI
+ (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
+ (match_operand:SI 3 "arm_borrow_operation" "")))]
"TARGET_32BIT"
- "sbc\\t%0, %1, #%B2"
+ "sbc\\t%0, %1, #%n2"
[(set_attr "conds" "use")
(set_attr "type" "adc_imm")]
)
-(define_insn "*subsi3_carryin_compare"
- [(set (reg:CC CC_REGNUM)
- (compare:CC (match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "s_register_operand" "r")))
- (set (match_operand:SI 0 "s_register_operand" "=r")
- (minus:SI (minus:SI (match_dup 1)
- (match_dup 2))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- "TARGET_32BIT"
- "sbcs\\t%0, %1, %2"
- [(set_attr "conds" "set")
- (set_attr "type" "adcs_reg")]
-)
-
-(define_insn "*subsi3_carryin_compare_const"
- [(set (reg:CC CC_REGNUM)
- (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
- (match_operand:SI 2 "arm_not_operand" "K")))
- (set (match_operand:SI 0 "s_register_operand" "=r")
- (minus:SI (plus:SI (match_dup 1)
- (match_dup 2))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
+;; Special case of *subsi3_carryin_const when the immediate is zero:
+;; just subtract the borrow, emitted as "sbc rd, rn, #0".  Unlike the
+;; removed flag-setting variant it replaces, this only uses the
+;; condition codes (conds "use") and does not set them.
+(define_insn "*subsi3_carryin_const0"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (minus:SI (match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "arm_borrow_operation" "")))]
"TARGET_32BIT"
- "sbcs\\t%0, %1, #%B2"
- [(set_attr "conds" "set")
- (set_attr "type" "adcs_imm")]
+ "sbc\\t%0, %1, #0"
+ [(set_attr "conds" "use")
+ (set_attr "type" "adc_imm")]
)
;; rd = rn - (rm SHIFT amount) - borrow.  The "type" attribute depends
;; on whether the shift amount (operand 4) is a constant.  Only
;; re-indentation plus the switch to the arm_borrow_operation
;; predicate for the borrow term changes here.
(define_insn "*subsi3_carryin_shift"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(minus:SI (minus:SI
- (match_operand:SI 1 "s_register_operand" "r")
- (match_operator:SI 2 "shift_operator"
- [(match_operand:SI 3 "s_register_operand" "r")
- (match_operand:SI 4 "reg_or_int_operand" "rM")]))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
+ (match_operand:SI 1 "s_register_operand" "r")
+ (match_operator:SI 2 "shift_operator"
+ [(match_operand:SI 3 "s_register_operand" "r")
+ (match_operand:SI 4 "reg_or_int_operand" "rM")]))
+ (match_operand:SI 5 "arm_borrow_operation" "")))]
"TARGET_32BIT"
"sbc%?\\t%0, %1, %3%S2"
[(set_attr "conds" "use")
(set_attr "predicable" "yes")
(set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
- (const_string "alu_shift_imm")
- (const_string "alu_shift_reg")))]
+ (const_string "alu_shift_imm")
+ (const_string "alu_shift_reg")))]
)
(define_insn "*rsbsi3_carryin_shift"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(minus:SI (minus:SI
- (match_operator:SI 2 "shift_operator"
- [(match_operand:SI 3 "s_register_operand" "r")
- (match_operand:SI 4 "reg_or_int_operand" "rM")])
+ (match_operator:SI 2 "shift_operator"
+ [(match_operand:SI 3 "s_register_operand" "r")
+ (match_operand:SI 4 "reg_or_int_operand" "rM")])
(match_operand:SI 1 "s_register_operand" "r"))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
+ (match_operand:SI 5 "arm_borrow_operation" "")))]
"TARGET_ARM"
"rsc%?\\t%0, %1, %3%S2"
[(set_attr "conds" "use")
")
;; Single-precision FP add (requires TARGET_HARD_FLOAT).  Only the
;; empty constraint strings are dropped here -- expanders take no
;; constraints.
(define_expand "addsf3"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (plus:SF (match_operand:SF 1 "s_register_operand" "")
- (match_operand:SF 2 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (plus:SF (match_operand:SF 1 "s_register_operand")
+ (match_operand:SF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"
")
;; Double-precision FP add; unavailable when the FPU is
;; single-precision only (!TARGET_VFP_SINGLE).  Only the empty
;; constraint strings are dropped here.
(define_expand "adddf3"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (plus:DF (match_operand:DF 1 "s_register_operand" "")
- (match_operand:DF 2 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (plus:DF (match_operand:DF 1 "s_register_operand")
+ (match_operand:DF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
")
(define_expand "subdi3"
[(parallel
- [(set (match_operand:DI 0 "s_register_operand" "")
- (minus:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:DI 2 "s_register_operand" "")))
+ [(set (match_operand:DI 0 "s_register_operand")
+ (minus:DI (match_operand:DI 1 "s_register_operand")
+ (match_operand:DI 2 "s_register_operand")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_EITHER"
"
- if (TARGET_THUMB1)
- {
- if (!REG_P (operands[1]))
- operands[1] = force_reg (DImode, operands[1]);
- if (!REG_P (operands[2]))
- operands[2] = force_reg (DImode, operands[2]);
- }
- "
-)
-
-(define_insn_and_split "*arm_subdi3"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
- (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
- (match_operand:DI 2 "s_register_operand" "r,0,0")))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT && !TARGET_NEON"
- "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 1) (match_dup 2)))
- (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
- (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[5] = gen_highpart (SImode, operands[2]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*subdi_di_zesidi"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
- (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 1) (match_dup 2)))
- (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
- (set (match_dup 3) (minus:SI (plus:SI (match_dup 4) (match_dup 5))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[5] = GEN_INT (~0);
- }
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*subdi_di_sesidi"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
- (sign_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 1) (match_dup 2)))
- (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
- (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
- (ashiftrt:SI (match_dup 2)
- (const_int 31)))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*subdi_zesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (minus:DI (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
- "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
- ; is equivalent to:
- ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 2) (match_dup 1)))
- (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
- (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*subdi_sesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (minus:DI (sign_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
- "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
- ; is equivalent to:
- ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 2) (match_dup 1)))
- (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
- (set (match_dup 3) (minus:SI (minus:SI
- (ashiftrt:SI (match_dup 2)
- (const_int 31))
- (match_dup 4))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
+")
-(define_insn_and_split "*subdi_zesidi_zesidi"
- [(set (match_operand:DI 0 "s_register_operand" "=r")
- (minus:DI (zero_extend:DI
- (match_operand:SI 1 "s_register_operand" "r"))
- (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r"))))
+;; 64-bit subtraction: SUBS on the low words sets the borrow in the carry
+;; flag for the SBC on the high words.  CC is clobbered and the output is
+;; earlyclobbered so it never aliases an input half (length 8, two insns).
+(define_insn "*arm_subdi3"
+ [(set (match_operand:DI 0 "arm_general_register_operand" "=&r,&r,&r")
+ (minus:DI (match_operand:DI 1 "arm_general_register_operand" "0,r,0")
+ (match_operand:DI 2 "arm_general_register_operand" "r,0,0")))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT"
- "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 1) (match_dup 2)))
- (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
- (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- }
+ "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
[(set_attr "conds" "clob")
(set_attr "length" "8")
(set_attr "type" "multiple")]
)
(define_expand "subsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
- (match_operand:SI 2 "s_register_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (minus:SI (match_operand:SI 1 "reg_or_int_operand")
+ (match_operand:SI 2 "s_register_operand")))]
"TARGET_EITHER"
"
if (CONST_INT_P (operands[1]))
)
(define_expand "subsf3"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (minus:SF (match_operand:SF 1 "s_register_operand" "")
- (match_operand:SF 2 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (minus:SF (match_operand:SF 1 "s_register_operand")
+ (match_operand:SF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"
")
(define_expand "subdf3"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (minus:DF (match_operand:DF 1 "s_register_operand" "")
- (match_operand:DF 2 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (minus:DF (match_operand:DF 1 "s_register_operand")
+ (match_operand:DF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
")
;; Multiplication insns
(define_expand "mulhi3"
- [(set (match_operand:HI 0 "s_register_operand" "")
- (mult:HI (match_operand:HI 1 "s_register_operand" "")
- (match_operand:HI 2 "s_register_operand" "")))]
+ [(set (match_operand:HI 0 "s_register_operand")
+ (mult:HI (match_operand:HI 1 "s_register_operand")
+ (match_operand:HI 2 "s_register_operand")))]
"TARGET_DSP_MULTIPLY"
"
{
)
(define_expand "mulsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (mult:SI (match_operand:SI 2 "s_register_operand" "")
- (match_operand:SI 1 "s_register_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (mult:SI (match_operand:SI 2 "s_register_operand")
+ (match_operand:SI 1 "s_register_operand")))]
"TARGET_EITHER"
""
)
-;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
-(define_insn "*arm_mulsi3"
- [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
- (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
- (match_operand:SI 1 "s_register_operand" "%0,r")))]
- "TARGET_32BIT && !arm_arch6"
+;; Use `&' and then `0' to prevent operands 0 and 2 being the same
+;; Alternative 0 is the 16-bit Thumb-2 encoding (arch "t2", usable inside a
+;; short IT block per predicable_short_it); alternative 1 is ARMv6+, where
+;; the destination may overlap the sources; the "nov6" alternatives use the
+;; `&'/`0' constraints so operand 0 stays distinct from operand 2.
+(define_insn "*mul"
+ [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
+ (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
+ (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
+ "TARGET_32BIT"
"mul%?\\t%0, %2, %1"
[(set_attr "type" "mul")
- (set_attr "predicable" "yes")]
+ (set_attr "predicable" "yes")
+ (set_attr "arch" "t2,v6,nov6,nov6")
+ (set_attr "length" "4")
+ (set_attr "predicable_short_it" "yes,no,*,*")]
)
-(define_insn "*arm_mulsi3_v6"
- [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
- (mult:SI (match_operand:SI 1 "s_register_operand" "0,l,r")
- (match_operand:SI 2 "s_register_operand" "l,0,r")))]
- "TARGET_32BIT && arm_arch6"
- "mul%?\\t%0, %1, %2"
- [(set_attr "type" "mul")
+;; MLA and MLS instructions.  Use operand 1 for the accumulator to prefer
+;; reusing the same register.
+
+;; Multiply-accumulate: operand 0 = operand 3 * operand 2 + operand 1.
+;; Alternative 0 is ARMv6+ (no register-overlap restriction); the "nov6"
+;; alternatives earlyclobber the output to keep it off the inputs.
+(define_insn "*mla"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
+ (plus:SI
+ (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
+ (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
+ (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
+ "TARGET_32BIT"
+ "mla%?\\t%0, %3, %2, %1"
+ [(set_attr "type" "mla")
(set_attr "predicable" "yes")
- (set_attr "arch" "t2,t2,*")
- (set_attr "length" "4")
- (set_attr "predicable_short_it" "yes,yes,no")]
+ (set_attr "arch" "v6,nov6,nov6,nov6")]
+)
+
+;; Multiply-subtract: operand 0 = operand 1 - operand 3 * operand 2.
+;; Only enabled when arm_arch_thumb2 (see the insn condition below).
+(define_insn "*mls"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (minus:SI
+ (match_operand:SI 1 "s_register_operand" "r")
+ (mult:SI (match_operand:SI 3 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r"))))]
+ "TARGET_32BIT && arm_arch_thumb2"
+ "mls%?\\t%0, %3, %2, %1"
+ [(set_attr "type" "mla")
+ (set_attr "predicable" "yes")]
)
(define_insn "*mulsi3_compare0"
(set_attr "type" "muls")]
)
-;; Unnamed templates to match MLA instruction.
-
-(define_insn "*mulsi3addsi"
- [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
- (plus:SI
- (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
- (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
- (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
- "TARGET_32BIT && !arm_arch6"
- "mla%?\\t%0, %2, %1, %3"
- [(set_attr "type" "mla")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*mulsi3addsi_v6"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (plus:SI
- (mult:SI (match_operand:SI 2 "s_register_operand" "r")
- (match_operand:SI 1 "s_register_operand" "r"))
- (match_operand:SI 3 "s_register_operand" "r")))]
- "TARGET_32BIT && arm_arch6"
- "mla%?\\t%0, %2, %1, %3"
- [(set_attr "type" "mla")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
-)
-
(define_insn "*mulsi3addsi_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
(compare:CC_NOOV
(set_attr "type" "mlas")]
)
-(define_insn "*mulsi3subsi"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (minus:SI
- (match_operand:SI 3 "s_register_operand" "r")
- (mult:SI (match_operand:SI 2 "s_register_operand" "r")
- (match_operand:SI 1 "s_register_operand" "r"))))]
- "TARGET_32BIT && arm_arch_thumb2"
- "mls%?\\t%0, %2, %1, %3"
- [(set_attr "type" "mla")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
-)
-
-(define_expand "maddsidi4"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (plus:DI
- (mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
- (match_operand:DI 3 "s_register_operand" "")))]
- "TARGET_32BIT && arm_arch3m"
- "")
-
-(define_insn "*mulsidi3adddi"
- [(set (match_operand:DI 0 "s_register_operand" "=&r")
- (plus:DI
- (mult:DI
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
- (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
- (match_operand:DI 1 "s_register_operand" "0")))]
- "TARGET_32BIT && arm_arch3m && !arm_arch6"
- "smlal%?\\t%Q0, %R0, %3, %2"
- [(set_attr "type" "smlal")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*mulsidi3adddi_v6"
- [(set (match_operand:DI 0 "s_register_operand" "=r")
- (plus:DI
- (mult:DI
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
- (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
- (match_operand:DI 1 "s_register_operand" "0")))]
- "TARGET_32BIT && arm_arch6"
- "smlal%?\\t%Q0, %R0, %3, %2"
- [(set_attr "type" "smlal")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
-)
-
;; 32x32->64 widening multiply.
-;; As with mulsi3, the only difference between the v3-5 and v6+
-;; versions of these patterns is the requirement that the output not
-;; overlap the inputs, but that still means we have to have a named
-;; expander and two different starred insns.
+;; The only difference between the v3-5 and v6+ versions is the requirement
+;; that the output does not overlap with either input.
-(define_expand "mulsidi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
+(define_expand "<Us>mulsidi3"
+ [(set (match_operand:DI 0 "s_register_operand")
(mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
- "TARGET_32BIT && arm_arch3m"
- ""
-)
-
-(define_insn "*mulsidi3_nov6"
- [(set (match_operand:DI 0 "s_register_operand" "=&r")
- (mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
- "TARGET_32BIT && arm_arch3m && !arm_arch6"
- "smull%?\\t%Q0, %R0, %1, %2"
- [(set_attr "type" "smull")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*mulsidi3_v6"
- [(set (match_operand:DI 0 "s_register_operand" "=r")
- (mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
- "TARGET_32BIT && arm_arch6"
- "smull%?\\t%Q0, %R0, %1, %2"
- [(set_attr "type" "smull")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
-)
-
-(define_expand "umulsidi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
- "TARGET_32BIT && arm_arch3m"
- ""
-)
-
-(define_insn "*umulsidi3_nov6"
- [(set (match_operand:DI 0 "s_register_operand" "=&r")
- (mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
- "TARGET_32BIT && arm_arch3m && !arm_arch6"
- "umull%?\\t%Q0, %R0, %1, %2"
- [(set_attr "type" "umull")
- (set_attr "predicable" "yes")]
+ (SE:DI (match_operand:SI 1 "s_register_operand"))
+ (SE:DI (match_operand:SI 2 "s_register_operand"))))]
+ "TARGET_32BIT"
+ {
+ emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
+ gen_highpart (SImode, operands[0]),
+ operands[1], operands[2]));
+ DONE;
+ }
)
-(define_insn "*umulsidi3_v6"
- [(set (match_operand:DI 0 "s_register_operand" "=r")
- (mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
- "TARGET_32BIT && arm_arch6"
- "umull%?\\t%Q0, %R0, %1, %2"
+;; 32x32->64 widening multiply as two SImode sets: operand 0 receives the
+;; low 32 bits of the product, operand 1 the high 32 bits.  The "nov6"
+;; alternative earlyclobbers both outputs so they cannot overlap the inputs.
+(define_insn "<US>mull"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
+ (mult:SI
+ (match_operand:SI 2 "s_register_operand" "%r,r")
+ (match_operand:SI 3 "s_register_operand" "r,r")))
+ (set (match_operand:SI 1 "s_register_operand" "=r,&r")
+ (truncate:SI
+ (lshiftrt:DI
+ (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
+ (const_int 32))))]
+ "TARGET_32BIT"
+ "<US>mull%?\\t%0, %1, %2, %3"
[(set_attr "type" "umull")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
+ (set_attr "arch" "v6,nov6")]
)
-(define_expand "umaddsidi4"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (plus:DI
- (mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
- (match_operand:DI 3 "s_register_operand" "")))]
- "TARGET_32BIT && arm_arch3m"
- "")
-
-(define_insn "*umulsidi3adddi"
- [(set (match_operand:DI 0 "s_register_operand" "=&r")
+;; Widening multiply-accumulate expander ([us]maddsidi4): splits the DImode
+;; destination and addend into SImode halves and emits the <US>mlal insn.
+(define_expand "<Us>maddsidi4"
+ [(set (match_operand:DI 0 "s_register_operand")
(plus:DI
(mult:DI
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
- (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
- (match_operand:DI 1 "s_register_operand" "0")))]
- "TARGET_32BIT && arm_arch3m && !arm_arch6"
- "umlal%?\\t%Q0, %R0, %3, %2"
- [(set_attr "type" "umlal")
- (set_attr "predicable" "yes")]
+ (SE:DI (match_operand:SI 1 "s_register_operand"))
+ (SE:DI (match_operand:SI 2 "s_register_operand")))
+ (match_operand:DI 3 "s_register_operand")))]
+ "TARGET_32BIT"
+ {
+ emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
+ gen_lowpart (SImode, operands[3]),
+ gen_highpart (SImode, operands[0]),
+ gen_highpart (SImode, operands[3]),
+ operands[1], operands[2]));
+ DONE;
+ }
)
-(define_insn "*umulsidi3adddi_v6"
- [(set (match_operand:DI 0 "s_register_operand" "=r")
- (plus:DI
- (mult:DI
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
- (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
- (match_operand:DI 1 "s_register_operand" "0")))]
- "TARGET_32BIT && arm_arch6"
- "umlal%?\\t%Q0, %R0, %3, %2"
+(define_insn "<US>mlal"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
+ (plus:SI
+ (mult:SI
+ (match_operand:SI 4 "s_register_operand" "%r,r")
+ (match_operand:SI 5 "s_register_operand" "r,r"))
+ (match_operand:SI 1 "s_register_operand" "0,0")))
+ (set (match_operand:SI 2 "s_register_operand" "=r,&r")
+ (plus:SI
+ (truncate:SI
+ (lshiftrt:DI
+ (plus:DI
+ (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
+ (zero_extend:DI (match_dup 1)))
+ (const_int 32)))
+ (match_operand:SI 3 "s_register_operand" "2,2")))]
+ "TARGET_32BIT"
+ "<US>mlal%?\\t%0, %2, %4, %5"
[(set_attr "type" "umlal")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
-)
-
-(define_expand "smulsi3_highpart"
- [(parallel
- [(set (match_operand:SI 0 "s_register_operand" "")
- (truncate:SI
- (lshiftrt:DI
- (mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
- (const_int 32))))
- (clobber (match_scratch:SI 3 ""))])]
- "TARGET_32BIT && arm_arch3m"
- ""
-)
-
-(define_insn "*smulsi3_highpart_nov6"
- [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
- (truncate:SI
- (lshiftrt:DI
- (mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
- (const_int 32))))
- (clobber (match_scratch:SI 3 "=&r,&r"))]
- "TARGET_32BIT && arm_arch3m && !arm_arch6"
- "smull%?\\t%3, %0, %2, %1"
- [(set_attr "type" "smull")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*smulsi3_highpart_v6"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (truncate:SI
- (lshiftrt:DI
- (mult:DI
- (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
- (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
- (const_int 32))))
- (clobber (match_scratch:SI 3 "=r"))]
- "TARGET_32BIT && arm_arch6"
- "smull%?\\t%3, %0, %2, %1"
- [(set_attr "type" "smull")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "arch" "v6,nov6")]
)
-(define_expand "umulsi3_highpart"
+(define_expand "<US>mulsi3_highpart"
[(parallel
- [(set (match_operand:SI 0 "s_register_operand" "")
+ [(set (match_operand:SI 0 "s_register_operand")
(truncate:SI
(lshiftrt:DI
(mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
+ (SE:DI (match_operand:SI 1 "s_register_operand"))
+ (SE:DI (match_operand:SI 2 "s_register_operand")))
(const_int 32))))
(clobber (match_scratch:SI 3 ""))])]
- "TARGET_32BIT && arm_arch3m"
+ "TARGET_32BIT"
""
)
-(define_insn "*umulsi3_highpart_nov6"
- [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
- (truncate:SI
- (lshiftrt:DI
- (mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
- (const_int 32))))
- (clobber (match_scratch:SI 3 "=&r,&r"))]
- "TARGET_32BIT && arm_arch3m && !arm_arch6"
- "umull%?\\t%3, %0, %2, %1"
- [(set_attr "type" "umull")
- (set_attr "predicable" "yes")]
-)
-
-(define_insn "*umulsi3_highpart_v6"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
+;; High-part-only widening multiply: the low half of the product is written
+;; to the scratch register (operand 3) and operand 0 gets the high 32 bits.
+;; The "nov6" alternatives earlyclobber output and scratch.
+(define_insn "*<US>mull_high"
+ [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
(truncate:SI
(lshiftrt:DI
(mult:DI
- (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
+ (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
+ (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
(const_int 32))))
- (clobber (match_scratch:SI 3 "=r"))]
- "TARGET_32BIT && arm_arch6"
- "umull%?\\t%3, %0, %2, %1"
+ (clobber (match_scratch:SI 3 "=r,&r,&r"))]
+ "TARGET_32BIT"
+ "<US>mull%?\\t%3, %0, %2, %1"
[(set_attr "type" "umull")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
+ (set_attr "arch" "v6,nov6,nov6")]
)
(define_insn "mulhisi3"
"TARGET_DSP_MULTIPLY"
"smultb%?\\t%0, %1, %2"
[(set_attr "type" "smulxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*mulhisi3bt"
"TARGET_DSP_MULTIPLY"
"smulbt%?\\t%0, %1, %2"
[(set_attr "type" "smulxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*mulhisi3tt"
"TARGET_DSP_MULTIPLY"
"smultt%?\\t%0, %1, %2"
[(set_attr "type" "smulxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_insn "maddhisi4"
"TARGET_DSP_MULTIPLY"
"smlabb%?\\t%0, %1, %2, %3"
[(set_attr "type" "smlaxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
;; Note: there is no maddhisi4ibt because this one is canonical form
"TARGET_DSP_MULTIPLY"
"smlatb%?\\t%0, %1, %2, %3"
[(set_attr "type" "smlaxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*maddhisi4tt"
"TARGET_DSP_MULTIPLY"
"smlatt%?\\t%0, %1, %2, %3"
[(set_attr "type" "smlaxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_insn "maddhidi4"
"TARGET_DSP_MULTIPLY"
"smlalbb%?\\t%Q0, %R0, %1, %2"
[(set_attr "type" "smlalxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ (set_attr "predicable" "yes")])
;; Note: there is no maddhidi4ibt because this one is canonical form
(define_insn "*maddhidi4tb"
"TARGET_DSP_MULTIPLY"
"smlaltb%?\\t%Q0, %R0, %1, %2"
[(set_attr "type" "smlalxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ (set_attr "predicable" "yes")])
(define_insn "*maddhidi4tt"
[(set (match_operand:DI 0 "s_register_operand" "=r")
"TARGET_DSP_MULTIPLY"
"smlaltt%?\\t%Q0, %R0, %1, %2"
[(set_attr "type" "smlalxy")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ (set_attr "predicable" "yes")])
(define_expand "mulsf3"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (mult:SF (match_operand:SF 1 "s_register_operand" "")
- (match_operand:SF 2 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (mult:SF (match_operand:SF 1 "s_register_operand")
+ (match_operand:SF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"
")
(define_expand "muldf3"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (mult:DF (match_operand:DF 1 "s_register_operand" "")
- (match_operand:DF 2 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (mult:DF (match_operand:DF 1 "s_register_operand")
+ (match_operand:DF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
")
;; Division insns
(define_expand "divsf3"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (div:SF (match_operand:SF 1 "s_register_operand" "")
- (match_operand:SF 2 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (div:SF (match_operand:SF 1 "s_register_operand")
+ (match_operand:SF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"")
(define_expand "divdf3"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (div:DF (match_operand:DF 1 "s_register_operand" "")
- (match_operand:DF 2 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (div:DF (match_operand:DF 1 "s_register_operand")
+ (match_operand:DF 2 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
"")
\f
-;; Boolean and,ior,xor insns
-;; Split up double word logical operations
+; Expand logical operations. The mid-end expander does not split off memory
+; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
+; So an explicit expander is needed to generate better code.
+
+;; Lower a 64-bit and/ior/xor into two independent SImode operations on the
+;; low and high halves.  simplify_gen_binary lets either half fold to a
+;; simpler form when operand 2 is a constant (gen_highpart_mode is used
+;; because operand 2 may not be a REG).
+(define_expand "<LOGICAL:optab>di3"
+ [(set (match_operand:DI 0 "s_register_operand")
+ (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
+ (match_operand:DI 2 "arm_<optab>di_operand")))]
+ "TARGET_32BIT"
+ {
+ rtx low = simplify_gen_binary (<CODE>, SImode,
+ gen_lowpart (SImode, operands[1]),
+ gen_lowpart (SImode, operands[2]));
+ rtx high = simplify_gen_binary (<CODE>, SImode,
+ gen_highpart (SImode, operands[1]),
+ gen_highpart_mode (SImode, DImode,
+ operands[2]));
+
+ emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
+ emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
+ DONE;
+ }
+)
-;; Split up simple DImode logical operations. Simply perform the logical
+;; Lower a 64-bit NOT into two SImode NOTs on the low and high halves.
+(define_expand "one_cmpldi2"
+ [(set (match_operand:DI 0 "s_register_operand")
+ (not:DI (match_operand:DI 1 "s_register_operand")))]
+ "TARGET_32BIT"
+ {
+ rtx low = simplify_gen_unary (NOT, SImode,
+ gen_lowpart (SImode, operands[1]),
+ SImode);
+ rtx high = simplify_gen_unary (NOT, SImode,
+ gen_highpart_mode (SImode, DImode,
+ operands[1]),
+ SImode);
+
+ emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
+ emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
+ DONE;
+ }
+)
+
+;; Split DImode and, ior, xor operations. Simply perform the logical
;; operation on the upper and lower halves of the registers.
+;; This is needed for atomic operations in arm_split_atomic_op.
+;; Avoid splitting IWMMXT instructions.
(define_split
[(set (match_operand:DI 0 "s_register_operand" "")
(match_operator:DI 6 "logical_binary_operator"
[(match_operand:DI 1 "s_register_operand" "")
(match_operand:DI 2 "s_register_operand" "")]))]
"TARGET_32BIT && reload_completed
- && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
&& ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
[(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
(set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
}"
)
+;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
+;; Unconditionally split since there is no SIMD DImode NOT pattern.
(define_split
- [(set (match_operand:DI 0 "s_register_operand" "")
- (match_operator:DI 6 "logical_binary_operator"
- [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
- (match_operand:DI 1 "s_register_operand" "")]))]
- "TARGET_32BIT && reload_completed"
- [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
- (set (match_dup 3) (match_op_dup:SI 6
- [(ashiftrt:SI (match_dup 2) (const_int 31))
- (match_dup 4)]))]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[5] = gen_highpart (SImode, operands[2]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
-)
-
-;; The zero extend of operand 2 means we can just copy the high part of
-;; operand1 into operand0.
-(define_split
- [(set (match_operand:DI 0 "s_register_operand" "")
- (ior:DI
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
- (match_operand:DI 1 "s_register_operand" "")))]
- "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
- [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
- (set (match_dup 3) (match_dup 4))]
- "
- {
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }"
-)
-
-;; The zero extend of operand 2 means we can just copy the high part of
-;; operand1 into operand0.
-(define_split
- [(set (match_operand:DI 0 "s_register_operand" "")
- (xor:DI
- (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
- (match_operand:DI 1 "s_register_operand" "")))]
- "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
- [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
- (set (match_dup 3) (match_dup 4))]
- "
- {
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }"
-)
-
-(define_expand "anddi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (and:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:DI 2 "neon_inv_logic_op2" "")))]
- "TARGET_32BIT"
- ""
-)
-
-(define_insn_and_split "*anddi3_insn"
- [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
- (and:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
- (match_operand:DI 2 "arm_anddi_operand_neon" "w ,DL,r ,r ,De,De,w ,DL")))]
- "TARGET_32BIT && !TARGET_IWMMXT"
-{
- switch (which_alternative)
- {
- case 0: /* fall through */
- case 6: return "vand\t%P0, %P1, %P2";
- case 1: /* fall through */
- case 7: return neon_output_logic_immediate ("vand", &operands[2],
- DImode, 1, VALID_NEON_QREG_MODE (DImode));
- case 2:
- case 3:
- case 4:
- case 5: /* fall through */
- return "#";
- default: gcc_unreachable ();
- }
-}
- "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
- && !(IS_VFP_REGNUM (REGNO (operands[0])))"
- [(set (match_dup 3) (match_dup 4))
- (set (match_dup 5) (match_dup 6))]
- "
- {
- operands[3] = gen_lowpart (SImode, operands[0]);
- operands[5] = gen_highpart (SImode, operands[0]);
-
- operands[4] = simplify_gen_binary (AND, SImode,
- gen_lowpart (SImode, operands[1]),
- gen_lowpart (SImode, operands[2]));
- operands[6] = simplify_gen_binary (AND, SImode,
- gen_highpart (SImode, operands[1]),
- gen_highpart_mode (SImode, DImode, operands[2]));
-
- }"
- [(set_attr "type" "neon_logic,neon_logic,multiple,multiple,\
- multiple,multiple,neon_logic,neon_logic")
- (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,
- avoid_neon_for_64bits,avoid_neon_for_64bits")
- (set_attr "length" "*,*,8,8,8,8,*,*")
- ]
-)
-
-(define_insn_and_split "*anddi_zesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (and:DI (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))]
+ [(set (match_operand:DI 0 "s_register_operand")
+ (not:DI (match_operand:DI 1 "s_register_operand")))]
"TARGET_32BIT"
- "#"
- "TARGET_32BIT && reload_completed"
- ; The zero extend of operand 2 clears the high word of the output
- ; operand.
- [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
- (set (match_dup 3) (const_int 0))]
+ [(set (match_dup 0) (not:SI (match_dup 1)))
+ (set (match_dup 2) (not:SI (match_dup 3)))]
"
{
- operands[3] = gen_highpart (SImode, operands[0]);
+ operands[2] = gen_highpart (SImode, operands[0]);
operands[0] = gen_lowpart (SImode, operands[0]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }"
- [(set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
-
-(define_insn "*anddi_sesdi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (and:DI (sign_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))]
- "TARGET_32BIT"
- "#"
- [(set_attr "length" "8")
- (set_attr "type" "multiple")]
+ operands[3] = gen_highpart (SImode, operands[1]);
+ operands[1] = gen_lowpart (SImode, operands[1]);
+ }"
)
(define_expand "andsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (and:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "reg_or_int_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (and:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_EITHER"
"
if (TARGET_32BIT)
"
[(set_attr "conds" "set")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "logics_imm")]
)
;;; this insv pattern, so this pattern needs to be reevaluated.
(define_expand "insv"
- [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
- (match_operand 1 "general_operand" "")
- (match_operand 2 "general_operand" ""))
- (match_operand 3 "reg_or_int_operand" ""))]
+ [(set (zero_extract (match_operand 0 "nonimmediate_operand")
+ (match_operand 1 "general_operand")
+ (match_operand 2 "general_operand"))
+ (match_operand 3 "reg_or_int_operand"))]
"TARGET_ARM || arm_arch_thumb2"
"
{
"bfc%?\t%0, %2, %1"
[(set_attr "length" "4")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "bfm")]
)
"bfi%?\t%0, %3, %2, %1"
[(set_attr "length" "4")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "bfm")]
)
-; constants for op 2 will never be given to these patterns.
-(define_insn_and_split "*anddi_notdi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
- (match_operand:DI 2 "s_register_operand" "r,0")))]
- "TARGET_32BIT"
- "#"
- "TARGET_32BIT && reload_completed
- && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
- && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
- [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
- (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[5] = gen_highpart (SImode, operands[2]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
- [(set_attr "length" "8")
- (set_attr "predicable" "yes")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*anddi_notzesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (and:DI (not:DI (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r")))
- (match_operand:DI 1 "s_register_operand" "0,?r")))]
- "TARGET_32BIT"
- "@
- bic%?\\t%Q0, %Q1, %2
- #"
- ; (not (zero_extend ...)) allows us to just copy the high word from
- ; operand1 to operand0.
- "TARGET_32BIT
- && reload_completed
- && operands[0] != operands[1]"
- [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
- (set (match_dup 3) (match_dup 4))]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }"
- [(set_attr "length" "4,8")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*anddi_notdi_zesidi"
- [(set (match_operand:DI 0 "s_register_operand" "=r")
- (and:DI (not:DI (match_operand:DI 2 "s_register_operand" "r"))
- (zero_extend:DI
- (match_operand:SI 1 "s_register_operand" "r"))))]
- "TARGET_32BIT"
- "#"
- "TARGET_32BIT && reload_completed"
- [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
- (set (match_dup 3) (const_int 0))]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }"
- [(set_attr "length" "8")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "type" "multiple")]
-)
-
-(define_insn_and_split "*anddi_notsesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (and:DI (not:DI (sign_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r")))
- (match_operand:DI 1 "s_register_operand" "0,r")))]
- "TARGET_32BIT"
- "#"
- "TARGET_32BIT && reload_completed"
- [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
- (set (match_dup 3) (and:SI (not:SI
- (ashiftrt:SI (match_dup 2) (const_int 31)))
- (match_dup 4)))]
- "
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }"
- [(set_attr "length" "8")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "type" "multiple")]
-)
-
(define_insn "andsi_notsi_si"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
"TARGET_32BIT"
"bic%?\\t%0, %1, %2"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "logic_reg")]
)
"TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
"bics%?\\t%4, %3, %1%S0"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "conds" "set")
(set_attr "shift" "1")
(set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
"TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
"bics%?\\t%4, %3, %1%S0"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "conds" "set")
(set_attr "shift" "1")
(set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
(set_attr "type" "logics_shift_reg")]
)
-(define_expand "iordi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (ior:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:DI 2 "neon_logic_op2" "")))]
- "TARGET_32BIT"
- ""
-)
-
-(define_insn_and_split "*iordi3_insn"
- [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
- (ior:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
- (match_operand:DI 2 "arm_iordi_operand_neon" "w ,Dl,r ,r ,Df,Df,w ,Dl")))]
- "TARGET_32BIT && !TARGET_IWMMXT"
- {
- switch (which_alternative)
- {
- case 0: /* fall through */
- case 6: return "vorr\t%P0, %P1, %P2";
- case 1: /* fall through */
- case 7: return neon_output_logic_immediate ("vorr", &operands[2],
- DImode, 0, VALID_NEON_QREG_MODE (DImode));
- case 2:
- case 3:
- case 4:
- case 5:
- return "#";
- default: gcc_unreachable ();
- }
- }
- "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
- && !(IS_VFP_REGNUM (REGNO (operands[0])))"
- [(set (match_dup 3) (match_dup 4))
- (set (match_dup 5) (match_dup 6))]
- "
- {
- operands[3] = gen_lowpart (SImode, operands[0]);
- operands[5] = gen_highpart (SImode, operands[0]);
-
- operands[4] = simplify_gen_binary (IOR, SImode,
- gen_lowpart (SImode, operands[1]),
- gen_lowpart (SImode, operands[2]));
- operands[6] = simplify_gen_binary (IOR, SImode,
- gen_highpart (SImode, operands[1]),
- gen_highpart_mode (SImode, DImode, operands[2]));
-
- }"
- [(set_attr "type" "neon_logic,neon_logic,multiple,multiple,multiple,\
- multiple,neon_logic,neon_logic")
- (set_attr "length" "*,*,8,8,8,8,*,*")
- (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,avoid_neon_for_64bits,avoid_neon_for_64bits")]
-)
-
-(define_insn "*iordi_zesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (ior:DI (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,?r")))]
- "TARGET_32BIT"
- "@
- orr%?\\t%Q0, %Q1, %2
- #"
- [(set_attr "length" "4,8")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "type" "logic_reg,multiple")]
-)
-
-(define_insn "*iordi_sesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (ior:DI (sign_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))]
- "TARGET_32BIT"
- "#"
- [(set_attr "length" "8")
- (set_attr "predicable" "yes")
- (set_attr "type" "multiple")]
-)
-
(define_expand "iorsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ior:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "reg_or_int_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (ior:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_EITHER"
"
if (CONST_INT_P (operands[2]))
(define_insn "*iorsi3_compare0"
[(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
- (match_operand:SI 2 "arm_rhs_operand" "I,r"))
- (const_int 0)))
- (set (match_operand:SI 0 "s_register_operand" "=r,r")
+ (compare:CC_NOOV
+ (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
+ (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
(ior:SI (match_dup 1) (match_dup 2)))]
"TARGET_32BIT"
"orrs%?\\t%0, %1, %2"
[(set_attr "conds" "set")
- (set_attr "type" "logics_imm,logics_reg")]
+ (set_attr "arch" "*,t2,*")
+ (set_attr "length" "4,2,4")
+ (set_attr "type" "logics_imm,logics_reg,logics_reg")]
)
(define_insn "*iorsi3_compare0_scratch"
[(set (reg:CC_NOOV CC_REGNUM)
- (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
- (match_operand:SI 2 "arm_rhs_operand" "I,r"))
- (const_int 0)))
- (clobber (match_scratch:SI 0 "=r,r"))]
+ (compare:CC_NOOV
+ (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
+ (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 0 "=r,l,r"))]
"TARGET_32BIT"
"orrs%?\\t%0, %1, %2"
[(set_attr "conds" "set")
- (set_attr "type" "logics_imm,logics_reg")]
-)
-
-(define_expand "xordi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (xor:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:DI 2 "arm_xordi_operand" "")))]
- "TARGET_32BIT"
- ""
-)
-
-(define_insn_and_split "*xordi3_insn"
- [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,&r,&r,?w")
- (xor:DI (match_operand:DI 1 "s_register_operand" "%w ,0,r ,0 ,r ,w")
- (match_operand:DI 2 "arm_xordi_operand" "w ,r ,r ,Dg,Dg,w")))]
- "TARGET_32BIT && !TARGET_IWMMXT"
-{
- switch (which_alternative)
- {
- case 1:
- case 2:
- case 3:
- case 4: /* fall through */
- return "#";
- case 0: /* fall through */
- case 5: return "veor\t%P0, %P1, %P2";
- default: gcc_unreachable ();
- }
-}
- "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
- && !(IS_VFP_REGNUM (REGNO (operands[0])))"
- [(set (match_dup 3) (match_dup 4))
- (set (match_dup 5) (match_dup 6))]
- "
- {
- operands[3] = gen_lowpart (SImode, operands[0]);
- operands[5] = gen_highpart (SImode, operands[0]);
-
- operands[4] = simplify_gen_binary (XOR, SImode,
- gen_lowpart (SImode, operands[1]),
- gen_lowpart (SImode, operands[2]));
- operands[6] = simplify_gen_binary (XOR, SImode,
- gen_highpart (SImode, operands[1]),
- gen_highpart_mode (SImode, DImode, operands[2]));
-
- }"
- [(set_attr "length" "*,8,8,8,8,*")
- (set_attr "type" "neon_logic,multiple,multiple,multiple,multiple,neon_logic")
- (set_attr "arch" "neon_for_64bits,*,*,*,*,avoid_neon_for_64bits")]
-)
-
-(define_insn "*xordi_zesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (xor:DI (zero_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,?r")))]
- "TARGET_32BIT"
- "@
- eor%?\\t%Q0, %Q1, %2
- #"
- [(set_attr "length" "4,8")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "type" "logic_reg")]
-)
-
-(define_insn "*xordi_sesidi_di"
- [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
- (xor:DI (sign_extend:DI
- (match_operand:SI 2 "s_register_operand" "r,r"))
- (match_operand:DI 1 "s_register_operand" "0,r")))]
- "TARGET_32BIT"
- "#"
- [(set_attr "length" "8")
- (set_attr "predicable" "yes")
- (set_attr "type" "multiple")]
+ (set_attr "arch" "*,t2,*")
+ (set_attr "length" "4,2,4")
+ (set_attr "type" "logics_imm,logics_reg,logics_reg")]
)
(define_expand "xorsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (xor:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "reg_or_int_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (xor:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_EITHER"
"if (CONST_INT_P (operands[2]))
{
[(set_attr "length" "8")
(set_attr "ce_count" "2")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "multiple")]
)
(define_expand "smaxsi3"
[(parallel [
- (set (match_operand:SI 0 "s_register_operand" "")
- (smax:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))
+ (set (match_operand:SI 0 "s_register_operand")
+ (smax:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_32BIT"
"
"TARGET_32BIT"
"bic%?\\t%0, %1, %1, asr #31"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "logic_shift_reg")]
)
"TARGET_32BIT"
"orr%?\\t%0, %1, %1, asr #31"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "logic_shift_reg")]
)
(define_expand "sminsi3"
[(parallel [
- (set (match_operand:SI 0 "s_register_operand" "")
- (smin:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))
+ (set (match_operand:SI 0 "s_register_operand")
+ (smin:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_32BIT"
"
"TARGET_32BIT"
"and%?\\t%0, %1, %1, asr #31"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "logic_shift_reg")]
)
(define_expand "umaxsi3"
[(parallel [
- (set (match_operand:SI 0 "s_register_operand" "")
- (umax:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))
+ (set (match_operand:SI 0 "s_register_operand")
+ (umax:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_32BIT"
""
""
[(set_attr "conds" "clob")
(set_attr "length" "8,8,12")
- (set_attr "type" "store1")]
+ (set_attr "type" "store_4")]
)
(define_expand "uminsi3"
[(parallel [
- (set (match_operand:SI 0 "s_register_operand" "")
- (umin:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))
+ (set (match_operand:SI 0 "s_register_operand")
+ (umin:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_32BIT"
""
""
[(set_attr "conds" "clob")
(set_attr "length" "8,8,12")
- (set_attr "type" "store1")]
+ (set_attr "type" "store_4")]
)
(define_insn "*store_minmaxsi"
(if_then_else (eq_attr "is_thumb" "yes")
(const_int 14)
(const_int 12)))
- (set_attr "type" "store1")]
+ (set_attr "type" "store_4")]
)
; Reject the frame pointer in operand[1], since reloading this after
)
(define_code_iterator SAT [smin smax])
-(define_code_iterator SATrev [smin smax])
+(define_code_attr SATrev [(smin "smax") (smax "smin")])
(define_code_attr SATlo [(smin "1") (smax "2")])
(define_code_attr SAThi [(smin "2") (smax "1")])
(define_insn "*satsi_<SAT:code>"
[(set (match_operand:SI 0 "s_register_operand" "=r")
- (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
+ (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
(match_operand:SI 1 "const_int_operand" "i"))
(match_operand:SI 2 "const_int_operand" "i")))]
- "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
+ "TARGET_32BIT && arm_arch6
&& arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
{
int mask;
return "usat%?\t%0, %1, %3";
}
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "alus_imm")]
)
(define_insn "*satsi_<SAT:code>_shift"
[(set (match_operand:SI 0 "s_register_operand" "=r")
- (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
+ (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
[(match_operand:SI 4 "s_register_operand" "r")
(match_operand:SI 5 "const_int_operand" "i")])
(match_operand:SI 1 "const_int_operand" "i"))
(match_operand:SI 2 "const_int_operand" "i")))]
- "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
+ "TARGET_32BIT && arm_arch6
&& arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
{
int mask;
return "usat%?\t%0, %1, %4%S3";
}
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "shift" "3")
(set_attr "type" "logic_shift_reg")])
\f
;; Shift and rotation insns
(define_expand "ashldi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (ashift:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:SI 2 "general_operand" "")))]
+ [(set (match_operand:DI 0 "s_register_operand")
+ (ashift:DI (match_operand:DI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_32BIT"
"
- if (TARGET_NEON)
- {
- /* Delay the decision whether to use NEON or core-regs until
- register allocation. */
- emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
- DONE;
- }
- else
- {
- /* Only the NEON case can handle in-memory shift counts. */
- if (!reg_or_int_operand (operands[2], SImode))
- operands[2] = force_reg (SImode, operands[2]);
- }
-
- if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
- ; /* No special preparation statements; expand pattern as above. */
- else
- {
- rtx scratch1, scratch2;
-
- if (operands[2] == CONST1_RTX (SImode))
- {
- emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
- DONE;
- }
-
- /* Ideally we should use iwmmxt here if we could know that operands[1]
- ends up already living in an iwmmxt register. Otherwise it's
- cheaper to have the alternate code being generated than moving
- values to iwmmxt regs and back. */
-
- /* Expand operation using core-registers.
- 'FAIL' would achieve the same thing, but this is a bit smarter. */
- scratch1 = gen_reg_rtx (SImode);
- scratch2 = gen_reg_rtx (SImode);
- arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
- operands[2], scratch1, scratch2);
- DONE;
- }
- "
-)
-
-(define_insn "arm_ashldi3_1bit"
- [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
- (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
- (const_int 1)))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
+ arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
+ operands[2], gen_reg_rtx (SImode),
+ gen_reg_rtx (SImode));
+ DONE;
+")
(define_expand "ashlsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ashift:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (ashift:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))]
"TARGET_EITHER"
"
if (CONST_INT_P (operands[2])
)
(define_expand "ashrdi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:SI 2 "reg_or_int_operand" "")))]
+ [(set (match_operand:DI 0 "s_register_operand")
+ (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_32BIT"
"
- if (TARGET_NEON)
- {
- /* Delay the decision whether to use NEON or core-regs until
- register allocation. */
- emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
- DONE;
- }
-
- if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
- ; /* No special preparation statements; expand pattern as above. */
- else
- {
- rtx scratch1, scratch2;
-
- if (operands[2] == CONST1_RTX (SImode))
- {
- emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
- DONE;
- }
-
- /* Ideally we should use iwmmxt here if we could know that operands[1]
- ends up already living in an iwmmxt register. Otherwise it's
- cheaper to have the alternate code being generated than moving
- values to iwmmxt regs and back. */
-
- /* Expand operation using core-registers.
- 'FAIL' would achieve the same thing, but this is a bit smarter. */
- scratch1 = gen_reg_rtx (SImode);
- scratch2 = gen_reg_rtx (SImode);
- arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
- operands[2], scratch1, scratch2);
- DONE;
- }
- "
-)
-
-(define_insn "arm_ashrdi3_1bit"
- [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
- (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
- (const_int 1)))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
+ arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
+ operands[2], gen_reg_rtx (SImode),
+ gen_reg_rtx (SImode));
+ DONE;
+")
(define_expand "ashrsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))]
"TARGET_EITHER"
"
if (CONST_INT_P (operands[2])
)
(define_expand "lshrdi3"
- [(set (match_operand:DI 0 "s_register_operand" "")
- (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
- (match_operand:SI 2 "reg_or_int_operand" "")))]
+ [(set (match_operand:DI 0 "s_register_operand")
+ (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_32BIT"
"
- if (TARGET_NEON)
- {
- /* Delay the decision whether to use NEON or core-regs until
- register allocation. */
- emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
- DONE;
- }
-
- if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
- ; /* No special preparation statements; expand pattern as above. */
- else
- {
- rtx scratch1, scratch2;
-
- if (operands[2] == CONST1_RTX (SImode))
- {
- emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
- DONE;
- }
-
- /* Ideally we should use iwmmxt here if we could know that operands[1]
- ends up already living in an iwmmxt register. Otherwise it's
- cheaper to have the alternate code being generated than moving
- values to iwmmxt regs and back. */
-
- /* Expand operation using core-registers.
- 'FAIL' would achieve the same thing, but this is a bit smarter. */
- scratch1 = gen_reg_rtx (SImode);
- scratch2 = gen_reg_rtx (SImode);
- arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
- operands[2], scratch1, scratch2);
- DONE;
- }
- "
-)
-
-(define_insn "arm_lshrdi3_1bit"
- [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
- (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
- (const_int 1)))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")]
-)
+ arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
+ operands[2], gen_reg_rtx (SImode),
+ gen_reg_rtx (SImode));
+ DONE;
+")
(define_expand "lshrsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))]
"TARGET_EITHER"
"
if (CONST_INT_P (operands[2])
)
(define_expand "rotlsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "reg_or_int_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (rotatert:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "reg_or_int_operand")))]
"TARGET_32BIT"
"
if (CONST_INT_P (operands[2]))
)
(define_expand "rotrsi3"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "arm_rhs_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (rotatert:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "arm_rhs_operand")))]
"TARGET_EITHER"
"
if (TARGET_32BIT)
"TARGET_32BIT"
"mvn%?\\t%0, %1%S3"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "shift" "1")
(set_attr "arch" "32,a")
(set_attr "type" "mvn_shift,mvn_shift_reg")])
;; to reduce register pressure later on.
(define_expand "extzv"
- [(set (match_operand 0 "s_register_operand" "")
- (zero_extract (match_operand 1 "nonimmediate_operand" "")
- (match_operand 2 "const_int_operand" "")
- (match_operand 3 "const_int_operand" "")))]
+ [(set (match_operand 0 "s_register_operand")
+ (zero_extract (match_operand 1 "nonimmediate_operand")
+ (match_operand 2 "const_int_operand")
+ (match_operand 3 "const_int_operand")))]
"TARGET_THUMB1 || arm_arch_thumb2"
"
{
;; Helper for extzv, for the Thumb-1 register-shifts case.
(define_expand "extzv_t1"
- [(set (match_operand:SI 4 "s_register_operand" "")
- (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
- (match_operand:SI 2 "const_int_operand" "")))
- (set (match_operand:SI 0 "s_register_operand" "")
+ [(set (match_operand:SI 4 "s_register_operand")
+ (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
+ (match_operand:SI 2 "const_int_operand")))
+ (set (match_operand:SI 0 "s_register_operand")
(lshiftrt:SI (match_dup 4)
- (match_operand:SI 3 "const_int_operand" "")))]
+ (match_operand:SI 3 "const_int_operand")))]
"TARGET_THUMB1"
"")
(define_expand "extv"
- [(set (match_operand 0 "s_register_operand" "")
- (sign_extract (match_operand 1 "nonimmediate_operand" "")
- (match_operand 2 "const_int_operand" "")
- (match_operand 3 "const_int_operand" "")))]
+ [(set (match_operand 0 "s_register_operand")
+ (sign_extract (match_operand 1 "nonimmediate_operand")
+ (match_operand 2 "const_int_operand")
+ (match_operand 3 "const_int_operand")))]
"arm_arch_thumb2"
{
HOST_WIDE_INT width = INTVAL (operands[2]);
; Helper to expand register forms of extv with the proper modes.
(define_expand "extv_regsi"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
- (match_operand 2 "const_int_operand" "")
- (match_operand 3 "const_int_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (sign_extract:SI (match_operand:SI 1 "s_register_operand")
+ (match_operand 2 "const_int_operand")
+ (match_operand 3 "const_int_operand")))]
""
{
})
; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
+(define_insn "unaligned_loaddi"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
+ UNSPEC_UNALIGNED_LOAD))]
+ "TARGET_32BIT && TARGET_LDRD"
+ "*
+ return output_move_double (operands, true, NULL);
+ "
+ [(set_attr "length" "8")
+ (set_attr "type" "load_8")])
+
(define_insn "unaligned_loadsi"
- [(set (match_operand:SI 0 "s_register_operand" "=l,r")
- (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
+ [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
+ (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
UNSPEC_UNALIGNED_LOAD))]
"unaligned_access"
- "ldr%?\t%0, %1\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
- (set_attr "type" "load1")])
+ "@
+ ldr\t%0, %1\t@ unaligned
+ ldr%?\t%0, %1\t@ unaligned
+ ldr%?\t%0, %1\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
+ (set_attr "type" "load_4")])
+;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
+;; address (there's no immediate format). That's tricky to support
+;; here and we don't really need this pattern for that case, so only
+;; enable for 32-bit ISAs.
(define_insn "unaligned_loadhis"
- [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
(sign_extend:SI
- (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,Uh")]
+ (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
UNSPEC_UNALIGNED_LOAD)))]
- "unaligned_access"
+ "unaligned_access && TARGET_32BIT"
"ldrsh%?\t%0, %1\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
+ [(set_attr "predicable" "yes")
(set_attr "type" "load_byte")])
(define_insn "unaligned_loadhiu"
- [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+ [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
(zero_extend:SI
- (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
+ (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
UNSPEC_UNALIGNED_LOAD)))]
"unaligned_access"
- "ldrh%?\t%0, %1\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
+ "@
+ ldrh\t%0, %1\t@ unaligned
+ ldrh%?\t%0, %1\t@ unaligned
+ ldrh%?\t%0, %1\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
(set_attr "type" "load_byte")])
+(define_insn "unaligned_storedi"
+ [(set (match_operand:DI 0 "memory_operand" "=m")
+ (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
+ UNSPEC_UNALIGNED_STORE))]
+ "TARGET_32BIT && TARGET_LDRD"
+ "*
+ return output_move_double (operands, true, NULL);
+ "
+ [(set_attr "length" "8")
+ (set_attr "type" "store_8")])
+
(define_insn "unaligned_storesi"
- [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
- (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
+ [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
+ (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
UNSPEC_UNALIGNED_STORE))]
"unaligned_access"
- "str%?\t%1, %0\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
- (set_attr "type" "store1")])
+ "@
+ str\t%1, %0\t@ unaligned
+ str%?\t%1, %0\t@ unaligned
+ str%?\t%1, %0\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
+ (set_attr "type" "store_4")])
(define_insn "unaligned_storehi"
- [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
- (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
+ [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
+ (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
UNSPEC_UNALIGNED_STORE))]
"unaligned_access"
- "strh%?\t%1, %0\t@ unaligned"
- [(set_attr "arch" "t2,any")
- (set_attr "length" "2,4")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "yes,no")
- (set_attr "type" "store1")])
+ "@
+ strh\t%1, %0\t@ unaligned
+ strh%?\t%1, %0\t@ unaligned
+ strh%?\t%1, %0\t@ unaligned"
+ [(set_attr "arch" "t1,t2,32")
+ (set_attr "length" "2,2,4")
+ (set_attr "predicable" "no,yes,yes")
+ (set_attr "predicable_short_it" "no,yes,no")
+ (set_attr "type" "store_4")])
(define_insn "*extv_reg"
"sbfx%?\t%0, %1, %3, %2"
[(set_attr "length" "4")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "bfm")]
)
"ubfx%?\t%0, %1, %3, %2"
[(set_attr "length" "4")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "bfm")]
)
sdiv\t%0, %1, %2"
[(set_attr "arch" "32,v8mb")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "sdiv")]
)
udiv\t%0, %1, %2"
[(set_attr "arch" "32,v8mb")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "udiv")]
)
})
(define_expand "negvdi3"
- [(match_operand:DI 0 "register_operand")
- (match_operand:DI 1 "register_operand")
+ [(match_operand:DI 0 "s_register_operand")
+ (match_operand:DI 1 "s_register_operand")
(match_operand 2 "")]
"TARGET_ARM"
{
})
-(define_insn_and_split "negdi2_compare"
+(define_insn "negdi2_compare"
[(set (reg:CC CC_REGNUM)
(compare:CC
(const_int 0)
- (match_operand:DI 1 "register_operand" "0,r")))
- (set (match_operand:DI 0 "register_operand" "=r,&r")
+ (match_operand:DI 1 "register_operand" "r,r")))
+ (set (match_operand:DI 0 "register_operand" "=&r,&r")
(minus:DI (const_int 0) (match_dup 1)))]
"TARGET_ARM"
- "#"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (const_int 0) (match_dup 1)))
- (set (match_dup 0) (minus:SI (const_int 0)
- (match_dup 1)))])
- (parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (const_int 0) (match_dup 3)))
- (set (match_dup 2)
- (minus:SI
- (minus:SI (const_int 0) (match_dup 3))
- (ltu:SI (reg:CC_C CC_REGNUM)
- (const_int 0))))])]
- {
- operands[2] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[3] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }
+ "@
+ rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
+ rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
[(set_attr "conds" "set")
+ (set_attr "arch" "a,t2")
(set_attr "length" "8")
(set_attr "type" "multiple")]
)
(define_expand "negdi2"
[(parallel
- [(set (match_operand:DI 0 "s_register_operand" "")
- (neg:DI (match_operand:DI 1 "s_register_operand" "")))
+ [(set (match_operand:DI 0 "s_register_operand")
+ (neg:DI (match_operand:DI 1 "s_register_operand")))
(clobber (reg:CC CC_REGNUM))])]
"TARGET_EITHER"
- {
- if (TARGET_NEON)
- {
- emit_insn (gen_negdi2_neon (operands[0], operands[1]));
- DONE;
- }
- }
)
;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
-;; The first alternative allows the common case of a *full* overlap.
-(define_insn_and_split "*arm_negdi2"
- [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
- (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
+(define_insn "*negdi2_insn"
+ [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
+ (neg:DI (match_operand:DI 1 "s_register_operand" "r,r")))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_ARM"
- "#" ; "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (const_int 0) (match_dup 1)))
- (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
- (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[2] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[3] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }
+ "TARGET_32BIT"
+ "@
+ rsbs\\t%Q0, %Q1, #0; rsc\\t%R0, %R1, #0
+ negs\\t%Q0, %Q1; sbc\\t%R0, %R1, %R1, lsl #1"
[(set_attr "conds" "clob")
+ (set_attr "arch" "a,t2")
(set_attr "length" "8")
(set_attr "type" "multiple")]
)
-(define_insn "*negsi2_carryin_compare"
- [(set (reg:CC CC_REGNUM)
- (compare:CC (const_int 0)
- (match_operand:SI 1 "s_register_operand" "r")))
- (set (match_operand:SI 0 "s_register_operand" "=r")
- (minus:SI (minus:SI (const_int 0)
- (match_dup 1))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- "TARGET_ARM"
- "rscs\\t%0, %1, #0"
- [(set_attr "conds" "set")
- (set_attr "type" "alus_imm")]
-)
-
(define_expand "negsi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (neg:SI (match_operand:SI 1 "s_register_operand")))]
"TARGET_EITHER"
""
)
)
(define_expand "negsf2"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (neg:SF (match_operand:SF 1 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
""
)
+;; negdf2: double-precision FP negate; additionally requires a VFP unit
+;; with double-precision support (TARGET_VFP_DOUBLE).
(define_expand "negdf2"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (neg:DF (match_operand:DF 1 "s_register_operand")))]
 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
 "")
asr Rhi, Rin, #31
rsbs Rlo, Rin, #0
rsc Rhi, Rhi, #0 (thumb2: sbc Rhi, Rhi, Rhi, lsl #1). */
- rtx cc_reg = gen_rtx_REG (CC_Cmode, CC_REGNUM);
+ rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
emit_insn (gen_rtx_SET (high,
gen_rtx_ASHIFTRT (SImode, operands[1],
(set_attr "type" "multiple")]
)
-(define_insn_and_split "*negdi_zero_extendsidi"
- [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
- (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
- (clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT"
- "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
- ;; Don't care what register is input to sbc,
- ;; since we just need to propagate the carry.
- "&& reload_completed"
- [(parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (const_int 0) (match_dup 1)))
- (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
- (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
- {
- operands[2] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- }
- [(set_attr "conds" "clob")
- (set_attr "length" "8")
- (set_attr "type" "multiple")] ;; length in thumb is 4
-)
-
;; abssi2 doesn't really clobber the condition codes if a different register
;; is being set. To keep things simple, assume during rtl manipulations that
;; it does, but tell the final scan operator the truth. Similarly for
(define_expand "abssi2"
[(parallel
- [(set (match_operand:SI 0 "s_register_operand" "")
- (abs:SI (match_operand:SI 1 "s_register_operand" "")))
+ [(set (match_operand:SI 0 "s_register_operand")
+ (abs:SI (match_operand:SI 1 "s_register_operand")))
(clobber (match_dup 2))])]
"TARGET_EITHER"
"
)
+;; abssf2: single-precision FP absolute value; hard-float only.
(define_expand "abssf2"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (abs:SF (match_operand:SF 1 "s_register_operand")))]
 "TARGET_32BIT && TARGET_HARD_FLOAT"
 "")
+;; absdf2: double-precision FP absolute value; excluded on
+;; single-precision-only FPUs (!TARGET_VFP_SINGLE).
(define_expand "absdf2"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (abs:DF (match_operand:DF 1 "s_register_operand")))]
 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
 "")
+;; sqrtsf2: single-precision FP square root; hard-float only.
(define_expand "sqrtsf2"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
 "TARGET_32BIT && TARGET_HARD_FLOAT"
 "")
+;; sqrtdf2: double-precision FP square root; requires a
+;; double-precision-capable VFP (TARGET_VFP_DOUBLE).
(define_expand "sqrtdf2"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
 "")
-(define_insn_and_split "one_cmpldi2"
- [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
- (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
- "TARGET_32BIT"
- "@
- vmvn\t%P0, %P1
- #
- #
- vmvn\t%P0, %P1"
- "TARGET_32BIT && reload_completed
- && arm_general_register_operand (operands[0], DImode)"
- [(set (match_dup 0) (not:SI (match_dup 1)))
- (set (match_dup 2) (not:SI (match_dup 3)))]
- "
- {
- operands[2] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- operands[3] = gen_highpart (SImode, operands[1]);
- operands[1] = gen_lowpart (SImode, operands[1]);
- }"
- [(set_attr "length" "*,8,8,*")
- (set_attr "predicable" "no,yes,yes,no")
- (set_attr "type" "neon_move,multiple,multiple,neon_move")
- (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")]
-)
-
+;; one_cmplsi2: bitwise NOT of an SImode value; ARM and Thumb.
(define_expand "one_cmplsi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (not:SI (match_operand:SI 1 "s_register_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (not:SI (match_operand:SI 1 "s_register_operand")))]
 "TARGET_EITHER"
 ""
)
;; Fixed <--> Floating conversion insns
(define_expand "floatsihf2"
- [(set (match_operand:HF 0 "general_operand" "")
- (float:HF (match_operand:SI 1 "general_operand" "")))]
+ [(set (match_operand:HF 0 "general_operand")
+ (float:HF (match_operand:SI 1 "general_operand")))]
"TARGET_EITHER"
"
{
)
(define_expand "floatdihf2"
- [(set (match_operand:HF 0 "general_operand" "")
- (float:HF (match_operand:DI 1 "general_operand" "")))]
+ [(set (match_operand:HF 0 "general_operand")
+ (float:HF (match_operand:DI 1 "general_operand")))]
"TARGET_EITHER"
"
{
)
(define_expand "floatsisf2"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (float:SF (match_operand:SI 1 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (float:SF (match_operand:SI 1 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"
")
(define_expand "floatsidf2"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (float:DF (match_operand:SI 1 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (float:DF (match_operand:SI 1 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
")
(define_expand "fix_trunchfsi2"
- [(set (match_operand:SI 0 "general_operand" "")
- (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
+ [(set (match_operand:SI 0 "general_operand")
+ (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
"TARGET_EITHER"
"
{
)
(define_expand "fix_trunchfdi2"
- [(set (match_operand:DI 0 "general_operand" "")
- (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
+ [(set (match_operand:DI 0 "general_operand")
+ (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
"TARGET_EITHER"
"
{
)
(define_expand "fix_truncsfsi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"
")
(define_expand "fix_truncdfsi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"
")
;; Truncation insns
(define_expand "truncdfsf2"
- [(set (match_operand:SF 0 "s_register_operand" "")
+ [(set (match_operand:SF 0 "s_register_operand")
(float_truncate:SF
- (match_operand:DF 1 "s_register_operand" "")))]
+ (match_operand:DF 1 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
""
)
-;; DFmode to HFmode conversions have to go through SFmode.
+;; DFmode to HFmode conversions on targets without a single-step hardware
+;; instruction for it would have to go through SFmode. This is dangerous
+;; as it introduces double rounding.
+;;
+;; Disable this pattern unless we are in an unsafe math mode, or we have
+;; a single-step instruction.
+
(define_expand "truncdfhf2"
- [(set (match_operand:HF 0 "general_operand" "")
+ [(set (match_operand:HF 0 "s_register_operand")
 (float_truncate:HF
- (match_operand:DF 1 "general_operand" "")))]
- "TARGET_EITHER"
- "
- {
- rtx op1;
- op1 = convert_to_mode (SFmode, operands[1], 0);
- op1 = convert_to_mode (HFmode, op1, 0);
- emit_move_insn (operands[0], op1);
- DONE;
- }"
+ (match_operand:DF 1 "s_register_operand")))]
+;; Enabled only when double rounding is acceptable
+;; (flag_unsafe_math_optimizations) or when the target has a
+;; single-step DF->HF instruction (TARGET_FP16_TO_DOUBLE).
+ "(TARGET_EITHER && flag_unsafe_math_optimizations)
+ || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
+{
+ /* We don't have a direct instruction for this, so we must be in
+ an unsafe math mode, and going via SFmode. */
+
+ if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
+ {
+ rtx op1;
+ op1 = convert_to_mode (SFmode, operands[1], 0);
+ op1 = convert_to_mode (HFmode, op1, 0);
+ emit_move_insn (operands[0], op1);
+ DONE;
+ }
+ /* Otherwise, we will pick this up as a single instruction with
+ no intermediary rounding. */
+}
)
\f
;; Zero and sign extension instructions.
-(define_insn "zero_extend<mode>di2"
- [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
- (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
- "<qhs_zextenddi_cstr>")))]
+;; Replace the late-split define_insn with a define_expand that emits
+;; the lowpart extension (or a plain move for SImode sources) and a
+;; highpart clear immediately.
+(define_expand "zero_extend<mode>di2"
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
 "TARGET_32BIT <qhs_zextenddi_cond>"
- "#"
- [(set_attr "length" "8,4,8,8")
- (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")
- (set_attr "ce_count" "2")
- (set_attr "predicable" "yes")
- (set_attr "type" "multiple,mov_reg,multiple,multiple")]
+ {
+ rtx res_lo, res_hi, op0_lo, op0_hi;
+ res_lo = gen_lowpart (SImode, operands[0]);
+ res_hi = gen_highpart (SImode, operands[0]);
+ /* Before register allocation, build the halves in fresh pseudos;
+ afterwards, work directly on the two halves of operand 0.  */
+ if (can_create_pseudo_p ())
+ {
+ op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
+ op0_hi = gen_reg_rtx (SImode);
+ }
+ else
+ {
+ op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
+ op0_hi = res_hi;
+ }
+ if (<MODE>mode != SImode)
+ emit_insn (gen_rtx_SET (op0_lo,
+ gen_rtx_ZERO_EXTEND (SImode, operands[1])));
+ emit_insn (gen_movsi (op0_hi, const0_rtx));
+ if (res_lo != op0_lo)
+ emit_move_insn (res_lo, op0_lo);
+ if (res_hi != op0_hi)
+ emit_move_insn (res_hi, op0_hi);
+ DONE;
+ }
)
-(define_insn "extend<mode>di2"
- [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
- (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
- "<qhs_extenddi_cstr>")))]
+;; Replace the late-split define_insn with a define_expand: the low word
+;; is the (possibly extended) source, and the high word is filled with
+;; copies of the sign bit via an arithmetic shift right by 31.
+(define_expand "extend<mode>di2"
+ [(set (match_operand:DI 0 "s_register_operand" "")
+ (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
 "TARGET_32BIT <qhs_sextenddi_cond>"
- "#"
- [(set_attr "length" "8,4,8,8,8")
- (set_attr "ce_count" "2")
- (set_attr "shift" "1")
- (set_attr "predicable" "yes")
- (set_attr "arch" "neon_for_64bits,*,a,t,avoid_neon_for_64bits")
- (set_attr "type" "multiple,mov_reg,multiple,multiple,multiple")]
+ {
+ rtx res_lo, res_hi, op0_lo, op0_hi;
+ res_lo = gen_lowpart (SImode, operands[0]);
+ res_hi = gen_highpart (SImode, operands[0]);
+ /* Before register allocation, build the halves in fresh pseudos;
+ afterwards, work directly on the two halves of operand 0.  */
+ if (can_create_pseudo_p ())
+ {
+ op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
+ op0_hi = gen_reg_rtx (SImode);
+ }
+ else
+ {
+ op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
+ op0_hi = res_hi;
+ }
+ if (<MODE>mode != SImode)
+ emit_insn (gen_rtx_SET (op0_lo,
+ gen_rtx_SIGN_EXTEND (SImode, operands[1])));
+ emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
+ if (res_lo != op0_lo)
+ emit_move_insn (res_lo, op0_lo);
+ if (res_hi != op0_hi)
+ emit_move_insn (res_hi, op0_hi);
+ DONE;
+ }
)
;; Splits for all extensions to DImode
+;; Split a DImode zero-extension into a lowpart move/extension plus a
+;; highpart clear (the rewritten pattern sets operand 0's high word to
+;; const0_rtx).  The reload_completed / !IS_VFP_REGNUM restrictions are
+;; dropped so the split can run early.
(define_split
[(set (match_operand:DI 0 "s_register_operand" "")
(zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
- "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
+ "TARGET_32BIT"
[(set (match_dup 0) (match_dup 1))]
{
rtx lo_part = gen_lowpart (SImode, operands[0]);
machine_mode src_mode = GET_MODE (operands[1]);
- if (REG_P (operands[0])
- && !reg_overlap_mentioned_p (operands[0], operands[1]))
- emit_clobber (operands[0]);
- if (!REG_P (lo_part) || src_mode != SImode
- || !rtx_equal_p (lo_part, operands[1]))
- {
- if (src_mode == SImode)
- emit_move_insn (lo_part, operands[1]);
- else
- emit_insn (gen_rtx_SET (lo_part,
- gen_rtx_ZERO_EXTEND (SImode, operands[1])));
- operands[1] = lo_part;
- }
+ /* SImode sources are just copied; narrower ones are zero-extended
+ into the low word.  */
+ if (src_mode == SImode)
+ emit_move_insn (lo_part, operands[1]);
+ else
+ emit_insn (gen_rtx_SET (lo_part,
+ gen_rtx_ZERO_EXTEND (SImode, operands[1])));
operands[0] = gen_highpart (SImode, operands[0]);
operands[1] = const0_rtx;
})
+;; Split a DImode sign-extension: fill the low word, then the rewritten
+;; pattern derives the high word from it with an arithmetic shift right
+;; by 31.  As above, the after-reload restriction is dropped.
(define_split
[(set (match_operand:DI 0 "s_register_operand" "")
(sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
- "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
+ "TARGET_32BIT"
[(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
{
rtx lo_part = gen_lowpart (SImode, operands[0]);
machine_mode src_mode = GET_MODE (operands[1]);
- if (REG_P (operands[0])
- && !reg_overlap_mentioned_p (operands[0], operands[1]))
- emit_clobber (operands[0]);
-
- if (!REG_P (lo_part) || src_mode != SImode
- || !rtx_equal_p (lo_part, operands[1]))
- {
- if (src_mode == SImode)
- emit_move_insn (lo_part, operands[1]);
- else
- emit_insn (gen_rtx_SET (lo_part,
- gen_rtx_SIGN_EXTEND (SImode, operands[1])));
- operands[1] = lo_part;
- }
+ if (src_mode == SImode)
+ emit_move_insn (lo_part, operands[1]);
+ else
+ emit_insn (gen_rtx_SET (lo_part,
+ gen_rtx_SIGN_EXTEND (SImode, operands[1])));
+ /* The shift in the new pattern reads the low word just written.  */
+ operands[1] = lo_part;
operands[0] = gen_highpart (SImode, operands[0]);
})
(define_expand "zero_extendhisi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
"TARGET_EITHER"
{
if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
"TARGET_INT_SIMD"
"uxtah%?\\t%0, %2, %1"
[(set_attr "type" "alu_shift_reg")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_expand "zero_extendqisi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
"TARGET_EITHER"
{
if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
"TARGET_INT_SIMD"
"uxtab%?\\t%0, %2, %1"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "alu_shift_reg")]
)
"tst%?\\t%0, #255"
[(set_attr "conds" "set")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "logic_imm")]
)
(define_expand "extendhisi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
"TARGET_EITHER"
{
if (TARGET_THUMB1)
sxth%?\\t%0, %1
ldrsh%?\\t%0, %1"
[(set_attr "type" "extend,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*arm_extendhisi2addsi"
(define_expand "extendqihi2"
[(set (match_dup 2)
- (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
+ (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
(const_int 24)))
- (set (match_operand:HI 0 "s_register_operand" "")
+ (set (match_operand:HI 0 "s_register_operand")
(ashiftrt:SI (match_dup 2)
(const_int 24)))]
"TARGET_ARM"
)
(define_expand "extendqisi2"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
"TARGET_EITHER"
{
if (!arm_arch4 && MEM_P (operands[1]))
"TARGET_INT_SIMD"
"sxtab%?\\t%0, %2, %1"
[(set_attr "type" "alu_shift_reg")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
+;; sxtb16/uxtb16 (the <sup> iterator selects signed/unsigned), modelled
+;; as an opaque unspec; DSP (TARGET_INT_SIMD) targets only.
+(define_insn "arm_<sup>xtb16"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
+ "TARGET_INT_SIMD"
+ "<sup>xtb16%?\\t%0, %1"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_dsp_reg")])
+
+;; Two-register SIMD32 DSP operations that do not set the GE flags
+;; (the SIMD32_NOGE_BINOP iterator); kept opaque via unspecs.
+(define_insn "arm_<simd32_op>"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
+ "TARGET_INT_SIMD"
+ "<simd32_op>%?\\t%0, %1, %2"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_dsp_reg")])
+
+;; usada8: three-register-input DSP instruction (unsigned sum of
+;; absolute differences with accumulate, per the Arm ARM), emitted as
+;; an opaque unspec.
+(define_insn "arm_usada8"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec:SI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
+ "TARGET_INT_SIMD"
+ "usada8%?\\t%0, %1, %2, %3"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "alu_dsp_reg")])
+
+;; SIMD32 operations with a DImode accumulator (SIMD32_DIMODE, smlald
+;; type).  Operand 3 uses the "0" matching constraint, so the 64-bit
+;; accumulator is read-modify-write in place.
+(define_insn "arm_<simd32_op>"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (unspec:DI
+ [(match_operand:SI 1 "s_register_operand" "r")
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
+ "TARGET_INT_SIMD"
+ "<simd32_op>%?\\t%Q0, %R0, %1, %2"
+ [(set_attr "predicable" "yes")
+ (set_attr "type" "smlald")])
+
+;; extendsfdf2: widen SFmode to DFmode; requires a double-precision
+;; capable FPU (!TARGET_VFP_SINGLE).
(define_expand "extendsfdf2"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
 ""
)
-;; HFmode -> DFmode conversions have to go through SFmode.
+;; HFmode -> DFmode conversions where we don't have an instruction for it
+;; must go through SFmode.
+;;
+;; This is always safe for an extend.
+
+;; Unlike truncdfhf2, widening via SFmode is exact, so no unsafe-math
+;; guard is needed on the fallback path.
(define_expand "extendhfdf2"
- [(set (match_operand:DF 0 "general_operand" "")
- (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
 "TARGET_EITHER"
- "
- {
- rtx op1;
- op1 = convert_to_mode (SFmode, operands[1], 0);
- op1 = convert_to_mode (DFmode, op1, 0);
- emit_insn (gen_movdf (operands[0], op1));
- DONE;
- }"
+{
+ /* We don't have a direct instruction for this, so go via SFmode. */
+ if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
+ {
+ rtx op1;
+ op1 = convert_to_mode (SFmode, operands[1], 0);
+ op1 = convert_to_mode (DFmode, op1, 0);
+ emit_insn (gen_movdf (operands[0], op1));
+ DONE;
+ }
+ /* Otherwise, we're done producing RTL and will pick up the correct
+ pattern to do this with one rounding-step in a single instruction. */
+}
)
\f
;; Move insns (including loads and stores)
;; I don't think these are a good idea on the arm, there just aren't enough
;; registers
;;(define_expand "loadti"
-;; [(set (match_operand:TI 0 "s_register_operand" "")
-;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
+;; [(set (match_operand:TI 0 "s_register_operand")
+;; (mem:TI (match_operand:SI 1 "address_operand")))]
;; "" "")
;;(define_expand "storeti"
-;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
-;; (match_operand:TI 1 "s_register_operand" ""))]
+;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
+;; (match_operand:TI 1 "s_register_operand"))]
;; "" "")
;;(define_expand "movti"
-;; [(set (match_operand:TI 0 "general_operand" "")
-;; (match_operand:TI 1 "general_operand" ""))]
+;; [(set (match_operand:TI 0 "general_operand")
+;; (match_operand:TI 1 "general_operand"))]
;; ""
;; "
;;{
;; }")
(define_expand "movdi"
- [(set (match_operand:DI 0 "general_operand" "")
- (match_operand:DI 1 "general_operand" ""))]
+ [(set (match_operand:DI 0 "general_operand")
+ (match_operand:DI 1 "general_operand"))]
"TARGET_EITHER"
"
+ gcc_checking_assert (aligned_operand (operands[0], DImode));
+ gcc_checking_assert (aligned_operand (operands[1], DImode));
if (can_create_pseudo_p ())
{
if (!REG_P (operands[0]))
operands[1] = force_reg (DImode, operands[1]);
}
if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
- && !HARD_REGNO_MODE_OK (REGNO (operands[0]), DImode))
+ && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
{
/* Avoid LDRD's into an odd-numbered register pair in ARM state
when expanding function calls. */
DONE;
}
else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
- && !HARD_REGNO_MODE_OK (REGNO (operands[1]), DImode))
+ && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
{
/* Avoid STRD's from an odd-numbered register pair in ARM state
when expanding function prologue. */
)
(define_insn "*arm_movdi"
- [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, q, m")
- (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,q"))]
+ [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
+ (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
"TARGET_32BIT
&& !(TARGET_HARD_FLOAT)
&& !TARGET_IWMMXT
case 1:
case 2:
return \"#\";
+ case 3:
+ /* Cannot load it directly, split to load it via MOV / MOVT. */
+ if (!MEM_P (operands[1]) && arm_disable_literal_pool)
+ return \"#\";
+ /* Fall through. */
default:
return output_move_double (operands, true, NULL);
}
"
[(set_attr "length" "8,12,16,8,8")
- (set_attr "type" "multiple,multiple,multiple,load2,store2")
+ (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
(set_attr "arm_pool_range" "*,*,*,1020,*")
(set_attr "arm_neg_pool_range" "*,*,*,1004,*")
(set_attr "thumb2_pool_range" "*,*,*,4094,*")
(match_operand:ANY64 1 "immediate_operand" ""))]
"TARGET_32BIT
&& reload_completed
- && (arm_const_double_inline_cost (operands[1])
- <= arm_max_const_double_inline_cost ())"
+ && (arm_disable_literal_pool
+ || (arm_const_double_inline_cost (operands[1])
+ <= arm_max_const_double_inline_cost ()))"
[(const_int 0)]
"
arm_split_constant (SET, SImode, curr_insn,
)
(define_expand "movsi"
- [(set (match_operand:SI 0 "general_operand" "")
- (match_operand:SI 1 "general_operand" ""))]
+ [(set (match_operand:SI 0 "general_operand")
+ (match_operand:SI 1 "general_operand"))]
"TARGET_EITHER"
"
{
rtx base, offset, tmp;
- if (TARGET_32BIT)
+ gcc_checking_assert (aligned_operand (operands[0], SImode));
+ gcc_checking_assert (aligned_operand (operands[1], SImode));
+ if (TARGET_32BIT || TARGET_HAVE_MOVT)
{
/* Everything except mem = const or mem = mem can be done easily. */
if (MEM_P (operands[0]))
}
}
}
- else /* TARGET_THUMB1... */
+ else /* Target doesn't have MOVT... */
{
if (can_create_pseudo_p ())
{
}
}
- if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
+ split_const (operands[1], &base, &offset);
+ if (INTVAL (offset) != 0
+ && targetm.cannot_force_const_mem (SImode, operands[1]))
{
- split_const (operands[1], &base, &offset);
- if (GET_CODE (base) == SYMBOL_REF
- && !offset_within_block_p (base, INTVAL (offset)))
- {
- tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
- emit_move_insn (tmp, base);
- emit_insn (gen_addsi3 (operands[0], tmp, offset));
- DONE;
- }
+ tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
+ emit_move_insn (tmp, base);
+ emit_insn (gen_addsi3 (operands[0], tmp, offset));
+ DONE;
}
+ tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
+
/* Recognize the case where operand[1] is a reference to thread-local
- data and load its address to a register. */
+ data and load its address to a register. Offsets have been split off
+ already. */
if (arm_tls_referenced_p (operands[1]))
- {
- rtx tmp = operands[1];
- rtx addend = NULL;
-
- if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
- {
- addend = XEXP (XEXP (tmp, 0), 1);
- tmp = XEXP (XEXP (tmp, 0), 0);
- }
-
- gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
- gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
-
- tmp = legitimize_tls_address (tmp,
- !can_create_pseudo_p () ? operands[0] : 0);
- if (addend)
- {
- tmp = gen_rtx_PLUS (SImode, tmp, addend);
- tmp = force_operand (tmp, operands[0]);
- }
- operands[1] = tmp;
- }
+ operands[1] = legitimize_tls_address (operands[1], tmp);
else if (flag_pic
&& (CONSTANT_P (operands[1])
|| symbol_mentioned_p (operands[1])
|| label_mentioned_p (operands[1])))
- operands[1] = legitimize_pic_address (operands[1], SImode,
- (!can_create_pseudo_p ()
- ? operands[0]
- : 0));
+ operands[1] =
+ legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
}
"
)
movt\t%0, #:upper16:%c2"
[(set_attr "arch" "32,v8mb")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "length" "4")
(set_attr "type" "alu_sreg")]
)
movw%?\\t%0, %1
ldr%?\\t%0, %1
str%?\\t%1, %0"
- [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load1,store1")
+ [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
(set_attr "predicable" "yes")
(set_attr "arch" "*,*,*,v6t2,*,*")
(set_attr "pool_range" "*,*,*,*,4096,*")
(define_split
[(set (match_operand:SI 0 "arm_general_register_operand" "")
(match_operand:SI 1 "const_int_operand" ""))]
- "TARGET_32BIT
+ "(TARGET_32BIT || TARGET_HAVE_MOVT)
&& (!(const_ok_for_arm (INTVAL (operands[1]))
|| const_ok_for_arm (~INTVAL (operands[1]))))"
[(clobber (const_int 0))]
[(set (match_operand:SI 0 "arm_general_register_operand" "")
(match_operand:SI 1 "general_operand" ""))]
"TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
- && !flag_pic && !target_word_relocations
+ && !target_word_relocations
&& !arm_tls_referenced_p (operands[1])"
[(clobber (const_int 0))]
{
;;
;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
(define_expand "calculate_pic_address"
- [(set (match_operand:SI 0 "register_operand" "")
- (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
+ [(set (match_operand:SI 0 "register_operand")
+ (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
(unspec:SI [(match_operand:SI 2 "" "")]
UNSPEC_PIC_SYM))))]
"flag_pic"
(set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
(match_dup 2)] UNSPEC_PIC_BASE))]
"operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
- [(set_attr "type" "load1,load1,load1")
+ [(set_attr "type" "load_4,load_4,load_4")
(set_attr "pool_range" "4096,4094,1022")
(set_attr "neg_pool_range" "4084,0,0")
(set_attr "arch" "a,t2,t1")
(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
"TARGET_32BIT && flag_pic"
"ldr%?\\t%0, %1"
- [(set_attr "type" "load1")
+ [(set_attr "type" "load_4")
(set (attr "pool_range")
(if_then_else (eq_attr "is_thumb" "no")
(const_int 4096)
(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
"TARGET_THUMB1 && flag_pic"
"ldr\\t%0, %1"
- [(set_attr "type" "load1")
+ [(set_attr "type" "load_4")
(set (attr "pool_range") (const_int 1018))]
)
return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
"
[(set_attr "predicable" "yes")
- (set_attr "type" "load1")]
+ (set_attr "type" "load_4")]
)
;; PIC references to local variables can generate pic_add_dot_plus_eight
UNSPEC_PIC_OFFSET))))]
"TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
"ldr%?\\t%0, [%1,%2]"
- [(set_attr "type" "load1")]
+ [(set_attr "type" "load_4")]
)
(define_expand "builtin_setjmp_receiver"
/* r3 is clobbered by set/longjmp, so we can use it as a scratch
register. */
if (arm_pic_register != INVALID_REGNUM)
- arm_load_pic_register (1UL << 3);
+ arm_load_pic_register (1UL << 3, NULL_RTX);
DONE;
}")
)
(define_expand "storehi_single_op"
- [(set (match_operand:HI 0 "memory_operand" "")
- (match_operand:HI 1 "general_operand" ""))]
+ [(set (match_operand:HI 0 "memory_operand")
+ (match_operand:HI 1 "general_operand"))]
"TARGET_32BIT && arm_arch4"
"
if (!s_register_operand (operands[1], HImode))
)
(define_expand "movhi"
- [(set (match_operand:HI 0 "general_operand" "")
- (match_operand:HI 1 "general_operand" ""))]
+ [(set (match_operand:HI 0 "general_operand")
+ (match_operand:HI 1 "general_operand"))]
"TARGET_EITHER"
"
+ gcc_checking_assert (aligned_operand (operands[0], HImode));
+ gcc_checking_assert (aligned_operand (operands[1], HImode));
if (TARGET_ARM)
{
if (can_create_pseudo_p ())
(define_expand "movhi_bigend"
[(set (match_dup 2)
- (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
+ (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
(const_int 16)))
(set (match_dup 3)
(ashiftrt:SI (match_dup 2) (const_int 16)))
- (set (match_operand:HI 0 "s_register_operand" "")
+ (set (match_operand:HI 0 "s_register_operand")
(match_dup 4))]
"TARGET_ARM"
"
(const_string "mov_reg"))
(const_string "mvn_imm")
(const_string "mov_imm")
- (const_string "store1")
- (const_string "load1")])]
+ (const_string "store_4")
+ (const_string "load_4")])]
)
(define_insn "*movhi_bytes"
;; We use a DImode scratch because we may occasionally need an additional
;; temporary if the address isn't offsettable -- push_reload doesn't seem
;; to take any notice of the "o" constraints on reload_memory_operand operand.
+;; The reload_in<m> and reload_out<m> patterns require special constraints
+;; to be correctly handled in default_secondary_reload function.
(define_expand "reload_outhi"
[(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
(match_operand:HI 1 "s_register_operand" "r")
")
(define_expand "movqi"
- [(set (match_operand:QI 0 "general_operand" "")
- (match_operand:QI 1 "general_operand" ""))]
+ [(set (match_operand:QI 0 "general_operand")
+ (match_operand:QI 1 "general_operand"))]
"TARGET_EITHER"
"
/* Everything except mem = const or mem = mem can be done easily */
strb%?\\t%1, %0
ldrb%?\\t%0, %1
strb%?\\t%1, %0"
- [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load1,store1,load1,store1")
+ [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
(set_attr "predicable" "yes")
(set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
(set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
;; HFmode moves
(define_expand "movhf"
- [(set (match_operand:HF 0 "general_operand" "")
- (match_operand:HF 1 "general_operand" ""))]
+ [(set (match_operand:HF 0 "general_operand")
+ (match_operand:HF 1 "general_operand"))]
"TARGET_EITHER"
"
+ gcc_checking_assert (aligned_operand (operands[0], HFmode));
+ gcc_checking_assert (aligned_operand (operands[1], HFmode));
if (TARGET_32BIT)
{
if (MEM_P (operands[0]))
}
"
[(set_attr "conds" "unconditional")
- (set_attr "type" "load1,store1,mov_reg,multiple")
+ (set_attr "type" "load_4,store_4,mov_reg,multiple")
(set_attr "length" "4,4,4,8")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")]
+ (set_attr "predicable" "yes")]
)
(define_expand "movsf"
- [(set (match_operand:SF 0 "general_operand" "")
- (match_operand:SF 1 "general_operand" ""))]
+ [(set (match_operand:SF 0 "general_operand")
+ (match_operand:SF 1 "general_operand"))]
"TARGET_EITHER"
"
+ gcc_checking_assert (aligned_operand (operands[0], SFmode));
+ gcc_checking_assert (aligned_operand (operands[1], SFmode));
if (TARGET_32BIT)
{
if (MEM_P (operands[0]))
operands[1] = force_reg (SFmode, operands[1]);
}
}
+
+ /* Cannot load it directly, generate a load with clobber so that it can be
+ loaded via GPR with MOV / MOVT. */
+ if (arm_disable_literal_pool
+ && (REG_P (operands[0]) || SUBREG_P (operands[0]))
+ && CONST_DOUBLE_P (operands[1])
+ && TARGET_HARD_FLOAT
+ && !vfp3_const_double_rtx (operands[1]))
+ {
+ rtx clobreg = gen_reg_rtx (SFmode);
+ emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
+ clobreg));
+ DONE;
+ }
"
)
&& TARGET_SOFT_FLOAT
&& (!MEM_P (operands[0])
|| register_operand (operands[1], SFmode))"
- "@
- mov%?\\t%0, %1
- ldr%?\\t%0, %1\\t%@ float
- str%?\\t%1, %0\\t%@ float"
+{
+ switch (which_alternative)
+ {
+ case 0: return \"mov%?\\t%0, %1\";
+ case 1:
+ /* Cannot load it directly, split to load it via MOV / MOVT. */
+ if (!MEM_P (operands[1]) && arm_disable_literal_pool)
+ return \"#\";
+ return \"ldr%?\\t%0, %1\\t%@ float\";
+ case 2: return \"str%?\\t%1, %0\\t%@ float\";
+ default: gcc_unreachable ();
+ }
+}
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "type" "mov_reg,load1,store1")
+ (set_attr "type" "mov_reg,load_4,store_4")
(set_attr "arm_pool_range" "*,4096,*")
(set_attr "thumb2_pool_range" "*,4094,*")
(set_attr "arm_neg_pool_range" "*,4084,*")
(set_attr "thumb2_neg_pool_range" "*,0,*")]
)
+;; Splitter for the above.
+;; With literal pools disabled under soft-float, materialise an SFmode
+;; constant without a memory load: take the constant's 32-bit target
+;; image (real_to_target) and move it as an SImode immediate into the
+;; SImode view of the destination register.
+(define_split
+ [(set (match_operand:SF 0 "s_register_operand")
+ (match_operand:SF 1 "const_double_operand"))]
+ "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
+ [(const_int 0)]
+{
+ long buf;
+ real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
+ rtx cst = gen_int_mode (buf, SImode);
+ emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
+ DONE;
+}
+)
+
(define_expand "movdf"
- [(set (match_operand:DF 0 "general_operand" "")
- (match_operand:DF 1 "general_operand" ""))]
+ [(set (match_operand:DF 0 "general_operand")
+ (match_operand:DF 1 "general_operand"))]
"TARGET_EITHER"
"
+ gcc_checking_assert (aligned_operand (operands[0], DFmode));
+ gcc_checking_assert (aligned_operand (operands[1], DFmode));
if (TARGET_32BIT)
{
if (MEM_P (operands[0]))
operands[1] = force_reg (DFmode, operands[1]);
}
}
+
+ /* Cannot load it directly, generate a load with clobber so that it can be
+ loaded via GPR with MOV / MOVT. */
+ if (arm_disable_literal_pool
+ && (REG_P (operands[0]) || SUBREG_P (operands[0]))
+ && CONSTANT_P (operands[1])
+ && TARGET_HARD_FLOAT
+ && !arm_const_double_rtx (operands[1])
+ && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
+ {
+ rtx clobreg = gen_reg_rtx (DFmode);
+ emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
+ clobreg));
+ DONE;
+ }
"
)
;; Reloading a df mode value stored in integer regs to memory can require a
;; scratch reg.
+;; Another reload_out<m> pattern that requires special constraints.
(define_expand "reload_outdf"
[(match_operand:DF 0 "arm_reload_memory_operand" "=o")
(match_operand:DF 1 "s_register_operand" "r")
)
(define_insn "*movdf_soft_insn"
- [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,q,m")
- (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,q"))]
+ [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
+ (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
"TARGET_32BIT && TARGET_SOFT_FLOAT
&& ( register_operand (operands[0], DFmode)
|| register_operand (operands[1], DFmode))"
case 1:
case 2:
return \"#\";
+ case 3:
+ /* Cannot load it directly, split to load it via MOV / MOVT. */
+ if (!MEM_P (operands[1]) && arm_disable_literal_pool)
+ return \"#\";
+ /* Fall through. */
default:
return output_move_double (operands, true, NULL);
}
"
[(set_attr "length" "8,12,16,8,8")
- (set_attr "type" "multiple,multiple,multiple,load2,store2")
+ (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
(set_attr "arm_pool_range" "*,*,*,1020,*")
(set_attr "thumb2_pool_range" "*,*,*,1018,*")
(set_attr "arm_neg_pool_range" "*,*,*,1004,*")
(set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
)
+
+;; Splitter for the above movdf pattern: with arm_disable_literal_pool a
+;; soft-float DF constant cannot be loaded from a literal pool, so build the
+;; 64-bit integer image of the double and move it through the destination's
+;; DImode subreg so it is materialised as integer immediates.
+(define_split
+ [(set (match_operand:DF 0 "s_register_operand")
+ (match_operand:DF 1 "const_double_operand"))]
+ "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
+ [(const_int 0)]
+{
+ long buf[2];
+ /* real_to_target fills BUF in target memory word order, so pick the
+    low/high words according to endianness.  */
+ int order = BYTES_BIG_ENDIAN ? 1 : 0;
+ real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
+ unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
+ ival |= (zext_hwi (buf[1 - order], 32) << 32);
+ rtx cst = gen_int_mode (ival, DImode);
+ emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
+ DONE;
+}
+)
\f
;; load- and store-multiple insns
(define_expand "setmemsi"
- [(match_operand:BLK 0 "general_operand" "")
- (match_operand:SI 1 "const_int_operand" "")
- (match_operand:SI 2 "const_int_operand" "")
- (match_operand:SI 3 "const_int_operand" "")]
+ [(match_operand:BLK 0 "general_operand")
+ (match_operand:SI 1 "const_int_operand")
+ (match_operand:SI 2 "const_int_operand")
+ (match_operand:SI 3 "const_int_operand")]
"TARGET_32BIT"
{
if (arm_gen_setmem (operands))
;; We could let this apply for blocks of less than this, but it clobbers so
;; many registers that there is then probably a better way.
-(define_expand "movmemqi"
- [(match_operand:BLK 0 "general_operand" "")
- (match_operand:BLK 1 "general_operand" "")
- (match_operand:SI 2 "const_int_operand" "")
- (match_operand:SI 3 "const_int_operand" "")]
+(define_expand "cpymemqi"
+ [(match_operand:BLK 0 "general_operand")
+ (match_operand:BLK 1 "general_operand")
+ (match_operand:SI 2 "const_int_operand")
+ (match_operand:SI 3 "const_int_operand")]
""
"
if (TARGET_32BIT)
if (TARGET_LDRD && current_tune->prefer_ldrd_strd
&& !optimize_function_for_size_p (cfun))
{
- if (gen_movmem_ldrd_strd (operands))
+ if (gen_cpymem_ldrd_strd (operands))
DONE;
FAIL;
}
- if (arm_gen_movmemqi (operands))
+ if (arm_gen_cpymemqi (operands))
DONE;
FAIL;
}
|| INTVAL (operands[2]) > 48)
FAIL;
- thumb_expand_movmemqi (operands);
+ thumb_expand_cpymemqi (operands);
DONE;
}
"
(define_expand "cbranchsi4"
[(set (pc) (if_then_else
(match_operator 0 "expandable_comparison_operator"
- [(match_operand:SI 1 "s_register_operand" "")
- (match_operand:SI 2 "nonmemory_operand" "")])
+ [(match_operand:SI 1 "s_register_operand")
+ (match_operand:SI 2 "nonmemory_operand")])
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_EITHER"
(define_expand "cbranchsf4"
[(set (pc) (if_then_else
(match_operator 0 "expandable_comparison_operator"
- [(match_operand:SF 1 "s_register_operand" "")
- (match_operand:SF 2 "vfp_compare_operand" "")])
+ [(match_operand:SF 1 "s_register_operand")
+ (match_operand:SF 2 "vfp_compare_operand")])
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
(define_expand "cbranchdf4"
[(set (pc) (if_then_else
(match_operator 0 "expandable_comparison_operator"
- [(match_operand:DF 1 "s_register_operand" "")
- (match_operand:DF 2 "vfp_compare_operand" "")])
+ [(match_operand:DF 1 "s_register_operand")
+ (match_operand:DF 2 "vfp_compare_operand")])
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
(define_expand "cbranchdi4"
[(set (pc) (if_then_else
(match_operator 0 "expandable_comparison_operator"
- [(match_operand:DI 1 "s_register_operand" "")
- (match_operand:DI 2 "cmpdi_operand" "")])
+ [(match_operand:DI 1 "s_register_operand")
+ (match_operand:DI 2 "cmpdi_operand")])
(label_ref (match_operand 3 "" ""))
(pc)))]
"TARGET_32BIT"
)
;; DImode comparisons. The generic code generates branches that
-;; if-conversion can not reduce to a conditional compare, so we do
+;; if-conversion cannot reduce to a conditional compare, so we do
;; that directly.
-(define_insn_and_split "*arm_cmpdi_insn"
+(define_insn "*arm_cmpdi_insn"
[(set (reg:CC_NCV CC_REGNUM)
(compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
(match_operand:DI 1 "arm_di_operand" "rDi")))
(clobber (match_scratch:SI 2 "=r"))]
"TARGET_32BIT"
- "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
- "&& reload_completed"
- [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 0) (match_dup 1)))
- (parallel [(set (reg:CC CC_REGNUM)
- (compare:CC (match_dup 3) (match_dup 4)))
- (set (match_dup 2)
- (minus:SI (match_dup 5)
- (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
- {
- operands[3] = gen_highpart (SImode, operands[0]);
- operands[0] = gen_lowpart (SImode, operands[0]);
- if (CONST_INT_P (operands[1]))
- {
- operands[4] = GEN_INT (~INTVAL (gen_highpart_mode (SImode,
- DImode,
- operands[1])));
- operands[5] = gen_rtx_PLUS (SImode, operands[3], operands[4]);
- }
- else
- {
- operands[4] = gen_highpart (SImode, operands[1]);
- operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
- }
- operands[1] = gen_lowpart (SImode, operands[1]);
- operands[2] = gen_lowpart (SImode, operands[2]);
- }
+ "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
[(set_attr "conds" "set")
(set_attr "length" "8")
(set_attr "type" "multiple")]
operands[1] = gen_lowpart (SImode, operands[1]);
}
[(set_attr "conds" "set")
- (set_attr "enabled_for_depr_it" "yes,yes,no,*")
+ (set_attr "enabled_for_short_it" "yes,yes,no,*")
(set_attr "arch" "t2,t2,t2,a")
(set_attr "length" "6,6,10,8")
(set_attr "type" "multiple")]
; scc insns
(define_expand "cstore_cc"
- [(set (match_operand:SI 0 "s_register_operand" "")
+ [(set (match_operand:SI 0 "s_register_operand")
(match_operator:SI 1 "" [(match_operand 2 "" "")
(match_operand 3 "" "")]))]
"TARGET_32BIT"
)
(define_expand "cstoresi4"
- [(set (match_operand:SI 0 "s_register_operand" "")
+ [(set (match_operand:SI 0 "s_register_operand")
(match_operator:SI 1 "expandable_comparison_operator"
- [(match_operand:SI 2 "s_register_operand" "")
- (match_operand:SI 3 "reg_or_int_operand" "")]))]
+ [(match_operand:SI 2 "s_register_operand")
+ (match_operand:SI 3 "reg_or_int_operand")]))]
"TARGET_32BIT || TARGET_THUMB1"
"{
rtx op3, scratch, scratch2;
)
(define_expand "cstoresf4"
- [(set (match_operand:SI 0 "s_register_operand" "")
+ [(set (match_operand:SI 0 "s_register_operand")
(match_operator:SI 1 "expandable_comparison_operator"
- [(match_operand:SF 2 "s_register_operand" "")
- (match_operand:SF 3 "vfp_compare_operand" "")]))]
+ [(match_operand:SF 2 "s_register_operand")
+ (match_operand:SF 3 "vfp_compare_operand")]))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"emit_insn (gen_cstore_cc (operands[0], operands[1],
operands[2], operands[3])); DONE;"
)
(define_expand "cstoredf4"
- [(set (match_operand:SI 0 "s_register_operand" "")
+ [(set (match_operand:SI 0 "s_register_operand")
(match_operator:SI 1 "expandable_comparison_operator"
- [(match_operand:DF 2 "s_register_operand" "")
- (match_operand:DF 3 "vfp_compare_operand" "")]))]
+ [(match_operand:DF 2 "s_register_operand")
+ (match_operand:DF 3 "vfp_compare_operand")]))]
"TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
"emit_insn (gen_cstore_cc (operands[0], operands[1],
operands[2], operands[3])); DONE;"
)
(define_expand "cstoredi4"
- [(set (match_operand:SI 0 "s_register_operand" "")
+ [(set (match_operand:SI 0 "s_register_operand")
(match_operator:SI 1 "expandable_comparison_operator"
- [(match_operand:DI 2 "s_register_operand" "")
- (match_operand:DI 3 "cmpdi_operand" "")]))]
+ [(match_operand:DI 2 "s_register_operand")
+ (match_operand:DI 3 "cmpdi_operand")]))]
"TARGET_32BIT"
"{
if (!arm_validize_comparison (&operands[1],
;; Conditional move insns
(define_expand "movsicc"
- [(set (match_operand:SI 0 "s_register_operand" "")
- (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
- (match_operand:SI 2 "arm_not_operand" "")
- (match_operand:SI 3 "arm_not_operand" "")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
+ (match_operand:SI 2 "arm_not_operand")
+ (match_operand:SI 3 "arm_not_operand")))]
"TARGET_32BIT"
"
{
)
(define_expand "movsfcc"
- [(set (match_operand:SF 0 "s_register_operand" "")
- (if_then_else:SF (match_operand 1 "arm_cond_move_operator" "")
- (match_operand:SF 2 "s_register_operand" "")
- (match_operand:SF 3 "s_register_operand" "")))]
+ [(set (match_operand:SF 0 "s_register_operand")
+ (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
+ (match_operand:SF 2 "s_register_operand")
+ (match_operand:SF 3 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT"
"
{
)
(define_expand "movdfcc"
- [(set (match_operand:DF 0 "s_register_operand" "")
- (if_then_else:DF (match_operand 1 "arm_cond_move_operator" "")
- (match_operand:DF 2 "s_register_operand" "")
- (match_operand:DF 3 "s_register_operand" "")))]
+ [(set (match_operand:DF 0 "s_register_operand")
+ (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
+ (match_operand:DF 2 "s_register_operand")
+ (match_operand:DF 3 "s_register_operand")))]
"TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
"
{
"<F_constraint>")
(match_operand:SDF 4 "s_register_operand"
"<F_constraint>")))]
- "TARGET_HARD_FLOAT && TARGET_FPU_ARMV8 <vfp_double_cond>"
+ "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
"*
{
enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
)
(define_expand "call"
- [(parallel [(call (match_operand 0 "memory_operand" "")
- (match_operand 1 "general_operand" ""))
+ [(parallel [(call (match_operand 0 "memory_operand")
+ (match_operand 1 "general_operand"))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))])]
"TARGET_EITHER"
"
{
rtx callee, pat;
+ tree addr = MEM_EXPR (operands[0]);
/* In an untyped call, we can get NULL for operand 2. */
if (operands[2] == NULL_RTX)
: !REG_P (callee))
XEXP (operands[0], 0) = force_reg (Pmode, callee);
- pat = gen_call_internal (operands[0], operands[1], operands[2]);
- arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
+ if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
+ /* Indirect call: set r9 with FDPIC value of callee. */
+ XEXP (operands[0], 0)
+ = arm_load_function_descriptor (XEXP (operands[0], 0));
+
+ if (detect_cmse_nonsecure_call (addr))
+ {
+ pat = gen_nonsecure_call_internal (operands[0], operands[1],
+ operands[2]);
+ emit_call_insn (pat);
+ }
+ else
+ {
+ pat = gen_call_internal (operands[0], operands[1], operands[2]);
+ arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
+ }
+
+ /* Restore FDPIC register (r9) after call. */
+ if (TARGET_FDPIC)
+ {
+ rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
+ rtx initial_fdpic_reg
+ = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
+
+ emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
+ initial_fdpic_reg));
+ }
+
DONE;
}"
)
+;; Reload the FDPIC/PIC register (r9) from operand 1 after a call; emitted
+;; by the "call" and "call_value" expanders when TARGET_FDPIC.  The unspec
+;; also uses the old value of operand 0 so the restore cannot be deleted or
+;; moved across the call.  Alternative 0 copies from a register,
+;; alternative 1 reloads from memory.
+(define_insn "restore_pic_register_after_call"
+ [(set (match_operand:SI 0 "s_register_operand" "+r,r")
+ (unspec:SI [(match_dup 0)
+ (match_operand:SI 1 "nonimmediate_operand" "r,m")]
+ UNSPEC_PIC_RESTORE))]
+ ""
+ "@
+ mov\t%0, %1
+ ldr\t%0, %1"
+)
+
(define_expand "call_internal"
- [(parallel [(call (match_operand 0 "memory_operand" "")
- (match_operand 1 "general_operand" ""))
+ [(parallel [(call (match_operand 0 "memory_operand")
+ (match_operand 1 "general_operand"))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))])])
+;; CMSE (Armv8-M Security Extensions) call from secure to non-secure code.
+;; The callee address is wrapped in UNSPEC_NONSECURE_MEM and forced into r4
+;; before the call.  NOTE(review): r4 is presumably the register the CMSE
+;; call veneer/library sequence expects the target address in -- confirm
+;; against the matching *nonsecure_call insns.
+(define_expand "nonsecure_call_internal"
+ [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
+ UNSPEC_NONSECURE_MEM)
+ (match_operand 1 "general_operand"))
+ (use (match_operand 2 "" ""))
+ (clobber (reg:SI LR_REGNUM))])]
+ "use_cmse"
+ "
+ {
+ rtx tmp;
+ tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
+ gen_rtx_REG (SImode, R4_REGNUM),
+ SImode);
+
+ operands[0] = replace_equiv_address (operands[0], tmp);
+ }")
+
(define_insn "*call_reg_armv5"
[(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
(match_operand 1 "" ""))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
+ "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
"blx%?\\t%0"
[(set_attr "type" "call")]
)
(match_operand 1 "" ""))
(use (match_operand 2 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
+ "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
"*
return output_call (operands);
"
(define_expand "call_value"
[(parallel [(set (match_operand 0 "" "")
- (call (match_operand 1 "memory_operand" "")
- (match_operand 2 "general_operand" "")))
+ (call (match_operand 1 "memory_operand")
+ (match_operand 2 "general_operand")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))])]
"TARGET_EITHER"
"
{
rtx pat, callee;
+ tree addr = MEM_EXPR (operands[1]);
/* In an untyped call, we can get NULL for operand 2. */
if (operands[3] == 0)
: !REG_P (callee))
XEXP (operands[1], 0) = force_reg (Pmode, callee);
- pat = gen_call_value_internal (operands[0], operands[1],
- operands[2], operands[3]);
- arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
+ if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
+ /* Indirect call: set r9 with FDPIC value of callee. */
+ XEXP (operands[1], 0)
+ = arm_load_function_descriptor (XEXP (operands[1], 0));
+
+ if (detect_cmse_nonsecure_call (addr))
+ {
+ pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
+ operands[2], operands[3]);
+ emit_call_insn (pat);
+ }
+ else
+ {
+ pat = gen_call_value_internal (operands[0], operands[1],
+ operands[2], operands[3]);
+ arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
+ }
+
+ /* Restore FDPIC register (r9) after call. */
+ if (TARGET_FDPIC)
+ {
+ rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
+ rtx initial_fdpic_reg
+ = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
+
+ emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
+ initial_fdpic_reg));
+ }
+
DONE;
}"
)
(define_expand "call_value_internal"
[(parallel [(set (match_operand 0 "" "")
- (call (match_operand 1 "memory_operand" "")
- (match_operand 2 "general_operand" "")))
+ (call (match_operand 1 "memory_operand")
+ (match_operand 2 "general_operand")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))])])
+;; Value-returning variant of nonsecure_call_internal: a CMSE call from
+;; secure to non-secure code.  The callee address is wrapped in
+;; UNSPEC_NONSECURE_MEM and forced into r4 before the call, mirroring the
+;; non-value pattern above.
+(define_expand "nonsecure_call_value_internal"
+ [(parallel [(set (match_operand 0 "" "")
+ (call (unspec:SI [(match_operand 1 "memory_operand")]
+ UNSPEC_NONSECURE_MEM)
+ (match_operand 2 "general_operand")))
+ (use (match_operand 3 "" ""))
+ (clobber (reg:SI LR_REGNUM))])]
+ "use_cmse"
+ "
+ {
+ rtx tmp;
+ tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
+ gen_rtx_REG (SImode, R4_REGNUM),
+ SImode);
+
+ operands[1] = replace_equiv_address (operands[1], tmp);
+ }")
+
(define_insn "*call_value_reg_armv5"
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
(match_operand 2 "" "")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
+ "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
"blx%?\\t%1"
[(set_attr "type" "call")]
)
(match_operand 2 "" "")))
(use (match_operand 3 "" ""))
(clobber (reg:SI LR_REGNUM))]
- "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
+ "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
"*
return output_call (&operands[1]);
"
/* Switch mode now when possible. */
if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
- && arm_arch5 && arm_change_mode_p (SYMBOL_REF_DECL (op)))
+ && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
/* Switch mode now when possible. */
if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
- && arm_arch5 && arm_change_mode_p (SYMBOL_REF_DECL (op)))
+ && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
)
(define_expand "sibcall_internal"
- [(parallel [(call (match_operand 0 "memory_operand" "")
- (match_operand 1 "general_operand" ""))
+ [(parallel [(call (match_operand 0 "memory_operand")
+ (match_operand 1 "general_operand"))
(return)
(use (match_operand 2 "" ""))])])
;; We may also be able to do sibcalls for Thumb, but it's much harder...
(define_expand "sibcall"
- [(parallel [(call (match_operand 0 "memory_operand" "")
- (match_operand 1 "general_operand" ""))
+ [(parallel [(call (match_operand 0 "memory_operand")
+ (match_operand 1 "general_operand"))
(return)
(use (match_operand 2 "" ""))])]
"TARGET_32BIT"
(define_expand "sibcall_value_internal"
[(parallel [(set (match_operand 0 "" "")
- (call (match_operand 1 "memory_operand" "")
- (match_operand 2 "general_operand" "")))
+ (call (match_operand 1 "memory_operand")
+ (match_operand 2 "general_operand")))
(return)
(use (match_operand 3 "" ""))])])
(define_expand "sibcall_value"
[(parallel [(set (match_operand 0 "" "")
- (call (match_operand 1 "memory_operand" "")
- (match_operand 2 "general_operand" "")))
+ (call (match_operand 1 "memory_operand")
+ (match_operand 2 "general_operand")))
(return)
(use (match_operand 3 "" ""))])]
"TARGET_32BIT"
return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
else
{
- if (arm_arch5 || arm_arch4t)
+ if (arm_arch5t || arm_arch4t)
return \"bx%?\\t%0\\t%@ indirect register sibling call\";
else
return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
else
{
- if (arm_arch5 || arm_arch4t)
+ if (arm_arch5t || arm_arch4t)
return \"bx%?\\t%1\";
else
return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
}
return output_return_instruction (const_true_rtx, true, false, false);
}"
- [(set_attr "type" "load1")
+ [(set_attr "type" "load_4")
(set_attr "length" "12")
(set_attr "predicable" "yes")]
)
}"
[(set_attr "conds" "use")
(set_attr "length" "12")
- (set_attr "type" "load1")]
+ (set_attr "type" "load_4")]
)
(define_insn "*cond_<return_str>return_inverted"
}"
[(set_attr "conds" "use")
(set_attr "length" "12")
- (set_attr "type" "load1")]
+ (set_attr "type" "load_4")]
)
(define_insn "*arm_simple_return"
[(set (match_dup 1)
(compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
(const_int 0)))
- (set (match_operand:SI 0 "s_register_operand" "")
+ (set (match_operand:SI 0 "s_register_operand")
(if_then_else:SI (eq (match_dup 1) (const_int 0))
(const_int -1)
(const_int 67108860)))] ; 0x03fffffc
(const_int 0))
(match_operand 1 "" "")
(match_operand 2 "" "")])]
- "TARGET_EITHER"
+ "TARGET_EITHER && !TARGET_FDPIC"
"
{
int i;
)
(define_expand "untyped_return"
- [(match_operand:BLK 0 "memory_operand" "")
+ [(match_operand:BLK 0 "memory_operand")
(match_operand 1 "" "")]
- "TARGET_EITHER"
+ "TARGET_EITHER && !TARGET_FDPIC"
"
{
int i;
(set_attr "type" "block")]
)
+;; Since we hard-code r0 here, use the 'o' constraint to avoid provoking
+;; undefined behaviour in the hardware by emitting auto-increment
+;; operations that could use r0 as the base register.
(define_insn "probe_stack"
- [(set (match_operand:SI 0 "memory_operand" "=m")
+ [(set (match_operand:SI 0 "memory_operand" "=o")
(unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
"TARGET_32BIT"
"str%?\\tr0, %0"
- [(set_attr "type" "store1")
+ [(set_attr "type" "store_4")
(set_attr "predicable" "yes")]
)
(set_attr "conds" "clob")]
)
+;; Named patterns for stack smashing protection.
+;; stack_protect_combined_set stores the guard value into the canary slot
+;; (operand 0).  Operand 1 is the guard (symbol or address); the two
+;; scratches are for the guard address computation and, under PIC, the PIC
+;; register.  The expand body is empty: all work is done by the matching
+;; *stack_protect_combined_set_insn below after register allocation.
+(define_expand "stack_protect_combined_set"
+ [(parallel
+ [(set (match_operand:SI 0 "memory_operand")
+ (unspec:SI [(match_operand:SI 1 "guard_operand")]
+ UNSPEC_SP_SET))
+ (clobber (match_scratch:SI 2 ""))
+ (clobber (match_scratch:SI 3 ""))])]
+ ""
+ ""
+)
+
+;; Use a separate insn from the above expand so that the mem can stay
+;; outside operand #1 when register allocation happens. This is needed to
+;; stop LRA from trying to reload the guard, since we need to control how
+;; PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW
+;; parameter when calling legitimize_pic_address ()).
+;; Split after reload: compute the guard's address into scratch operand 2
+;; (forcing the GOT base to be recomputed now under -fpic/-fPIC), then hand
+;; off to *stack_protect_set_insn below which performs the actual canary
+;; copy.  Scratch operand 3 is the PIC register in the non-FDPIC PIC case.
+(define_insn_and_split "*stack_protect_combined_set_insn"
+ [(set (match_operand:SI 0 "memory_operand" "=m,m")
+ (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
+ UNSPEC_SP_SET))
+ (clobber (match_scratch:SI 2 "=&l,&r"))
+ (clobber (match_scratch:SI 3 "=&l,&r"))]
+ ""
+ "#"
+ "reload_completed"
+ [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
+ UNSPEC_SP_SET))
+ (clobber (match_dup 2))])]
+ "
+{
+ if (flag_pic)
+ {
+ rtx pic_reg;
+
+ /* Under FDPIC the PIC base lives in the fixed r9; otherwise use the
+    scratch allocated for it.  */
+ if (TARGET_FDPIC)
+ pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
+ else
+ pic_reg = operands[3];
+
+ /* Forces recomputing of GOT base now. */
+ legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
+ true /*compute_now*/);
+ }
+ else
+ {
+ if (address_operand (operands[1], SImode))
+ operands[2] = operands[1];
+ else
+ {
+ /* Not directly addressable: load the guard address from the
+    constant pool.  */
+ rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
+ emit_move_insn (operands[2], mem);
+ }
+ }
+}"
+ [(set_attr "arch" "t1,32")]
+)
+
+;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
+;; canary value does not live beyond the life of this sequence.
+;; Loads the guard value through the address in operand 1, stores it into
+;; the canary slot (operand 0), then immediately zeroes operand 1 so the
+;; guard value does not survive in a register.  Alternative 0 is Thumb-1
+;; (movs clobbers the flags, hence "conds" "clob"); alternative 1 is the
+;; 32-bit form.
+(define_insn "*stack_protect_set_insn"
+ [(set (match_operand:SI 0 "memory_operand" "=m,m")
+ (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
+ UNSPEC_SP_SET))
+ (clobber (match_dup 1))]
+ ""
+ "@
+ ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
+ ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
+ [(set_attr "length" "8,12")
+ (set_attr "conds" "clob,nocond")
+ (set_attr "type" "multiple")
+ (set_attr "arch" "t1,32")]
+)
+
+;; Compare the canary slot (operand 0) against the guard (operand 1) and
+;; branch to operand 2 on equality.  Like stack_protect_combined_set, the
+;; expand body is empty; the real work happens after reload in the matching
+;; *stack_protect_combined_test_insn.  Scratches 3 and 4 are for the guard
+;; address and the comparison temporary / PIC register.
+(define_expand "stack_protect_combined_test"
+ [(parallel
+ [(set (pc)
+ (if_then_else
+ (eq (match_operand:SI 0 "memory_operand")
+ (unspec:SI [(match_operand:SI 1 "guard_operand")]
+ UNSPEC_SP_TEST))
+ (label_ref (match_operand 2))
+ (pc)))
+ (clobber (match_scratch:SI 3 ""))
+ (clobber (match_scratch:SI 4 ""))
+ (clobber (reg:CC CC_REGNUM))])]
+ ""
+ ""
+)
+
+;; Use a separate insn from the above expand so that the mem can stay
+;; outside operand #1 when register allocation happens. This is needed to
+;; stop LRA from trying to reload the guard, since we need to control how
+;; PIC access is done in the -fpic/-fPIC case (see the COMPUTE_NOW
+;; parameter when calling legitimize_pic_address ()).
+;; Split after reload: compute the guard's address into scratch operand 3
+;; (forcing GOT base recomputation under -fpic/-fPIC), then emit the
+;; target-specific compare insn (ARM/Thumb-2 sets the Z flag, Thumb-1
+;; leaves the result in a register) followed by the conditional branch to
+;; operand 2.
+(define_insn_and_split "*stack_protect_combined_test_insn"
+ [(set (pc)
+ (if_then_else
+ (eq (match_operand:SI 0 "memory_operand" "m,m")
+ (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
+ UNSPEC_SP_TEST))
+ (label_ref (match_operand 2))
+ (pc)))
+ (clobber (match_scratch:SI 3 "=&l,&r"))
+ (clobber (match_scratch:SI 4 "=&l,&r"))
+ (clobber (reg:CC CC_REGNUM))]
+ ""
+ "#"
+ "reload_completed"
+ [(const_int 0)]
+{
+ rtx eq;
+
+ if (flag_pic)
+ {
+ rtx pic_reg;
+
+ /* Under FDPIC the PIC base lives in the fixed r9; otherwise use the
+    scratch allocated for it.  */
+ if (TARGET_FDPIC)
+ pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
+ else
+ pic_reg = operands[4];
+
+ /* Forces recomputing of GOT base now. */
+ legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
+ true /*compute_now*/);
+ }
+ else
+ {
+ if (address_operand (operands[1], SImode))
+ operands[3] = operands[1];
+ else
+ {
+ /* Not directly addressable: load the guard address from the
+    constant pool.  */
+ rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
+ emit_move_insn (operands[3], mem);
+ }
+ }
+ if (TARGET_32BIT)
+ {
+ /* The 32-bit test insn sets the Z flag; branch on EQ.  */
+ emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
+ operands[3]));
+ rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
+ eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
+ emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
+ }
+ else
+ {
+ /* Thumb-1 leaves the XOR result in operands[4]; compare it with 0.  */
+ emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
+ operands[3]));
+ eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
+ emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
+ operands[2]));
+ }
+ DONE;
+}
+ [(set_attr "arch" "t1,32")]
+)
+
+;; 32-bit canary comparison: load the guard value through the address in
+;; operand 2 into scratch operand 0, reload operand 2 with the canary from
+;; operand 1 (clobbering the address), then EORS the two so the Z flag is
+;; set exactly when they match.  Both registers are clobbered so the guard
+;; value does not linger.
+(define_insn "arm_stack_protect_test_insn"
+ [(set (reg:CC_Z CC_REGNUM)
+ (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
+ (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
+ UNSPEC_SP_TEST)
+ (const_int 0)))
+ (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
+ (clobber (match_dup 2))]
+ "TARGET_32BIT"
+ "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
+ [(set_attr "length" "8,12")
+ (set_attr "conds" "set")
+ (set_attr "type" "multiple")
+ (set_attr "arch" "t,32")]
+)
+
(define_expand "casesi"
- [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
- (match_operand:SI 1 "const_int_operand" "") ; lower bound
- (match_operand:SI 2 "const_int_operand" "") ; total range
+ [(match_operand:SI 0 "s_register_operand") ; index to jump on
+ (match_operand:SI 1 "const_int_operand") ; lower bound
+ (match_operand:SI 2 "const_int_operand") ; total range
(match_operand:SI 3 "" "") ; table label
(match_operand:SI 4 "" "")] ; Out of range label
"(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
;; The USE in this pattern is needed to tell flow analysis that this is
;; a CASESI insn. It has no other purpose.
-(define_insn "arm_casesi_internal"
+;; Expander for the ARM casesi dispatch.  Operand 4 is built here as the
+;; table load: a MEM at (index * 4 + table label), i.e. the branch target
+;; fetched from the jump table at label operand 2 indexed by operand 0.
+;; Marking the MEM READONLY and NOTRAP lets the optimizers treat the table
+;; load as safe and immutable.  Out-of-range indices branch to operand 3.
+(define_expand "arm_casesi_internal"
+ [(parallel [(set (pc)
+ (if_then_else
+ (leu (match_operand:SI 0 "s_register_operand")
+ (match_operand:SI 1 "arm_rhs_operand"))
+ (match_dup 4)
+ (label_ref:SI (match_operand 3 ""))))
+ (clobber (reg:CC CC_REGNUM))
+ (use (label_ref:SI (match_operand 2 "")))])]
+ "TARGET_ARM"
+{
+ operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
+ operands[4] = gen_rtx_PLUS (SImode, operands[4],
+ gen_rtx_LABEL_REF (SImode, operands[2]));
+ operands[4] = gen_rtx_MEM (SImode, operands[4]);
+ MEM_READONLY_P (operands[4]) = 1;
+ MEM_NOTRAP_P (operands[4]) = 1;
+})
+
+(define_insn "*arm_casesi_internal"
[(parallel [(set (pc)
(if_then_else
(leu (match_operand:SI 0 "s_register_operand" "r")
(match_operand:SI 1 "arm_rhs_operand" "rI"))
(mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
- (label_ref (match_operand 2 "" ""))))
- (label_ref (match_operand 3 "" ""))))
+ (label_ref:SI (match_operand 2 "" ""))))
+ (label_ref:SI (match_operand 3 "" ""))))
(clobber (reg:CC CC_REGNUM))
- (use (label_ref (match_dup 2)))])]
+ (use (label_ref:SI (match_dup 2)))])]
"TARGET_ARM"
"*
if (flag_pic)
(define_expand "indirect_jump"
[(set (pc)
- (match_operand:SI 0 "s_register_operand" ""))]
+ (match_operand:SI 0 "s_register_operand"))]
"TARGET_EITHER"
"
/* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
(match_operand:SI 0 "memory_operand" "m"))]
"TARGET_ARM"
"ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
- [(set_attr "type" "load1")
+ [(set_attr "type" "load_4")
(set_attr "pool_range" "4096")
(set_attr "neg_pool_range" "4084")
(set_attr "predicable" "yes")]
"TARGET_32BIT"
"<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "shift" "2")
(set_attr "arch" "a,t2")
(set_attr "type" "alu_shift_imm")])
"TARGET_32BIT && GET_CODE (operands[2]) != MULT"
"<arith_shift_insn>%?\\t%0, %1, %3%S2"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "shift" "3")
(set_attr "arch" "a,t2,a")
(set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
"TARGET_32BIT"
"sub%?\\t%0, %1, %3%S2"
[(set_attr "predicable" "yes")
+ (set_attr "predicable_short_it" "no")
(set_attr "shift" "3")
(set_attr "arch" "32,a")
(set_attr "type" "alus_shift_imm,alus_shift_reg")])
(eq:SI (match_operand:SI 1 "s_register_operand" "")
(const_int 0)))
(clobber (reg:CC CC_REGNUM))]
- "arm_arch5 && TARGET_32BIT"
+ "arm_arch5t && TARGET_32BIT"
[(set (match_dup 0) (clz:SI (match_dup 1)))
(set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
)
(cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
(set (match_dup 0) (const_int 1)))]
{
- operands[3] = GEN_INT (-INTVAL (operands[2]));
+ operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
})
(define_split
(set (match_operand:SI 0 "register_operand" "") (const_int 0)))
(cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
(set (match_dup 0) (const_int 1)))]
- "arm_arch5 && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
+ "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
[(set (match_dup 0) (clz:SI (match_dup 1)))
(set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
)
(set (match_operand:SI 0 "register_operand" "") (const_int 0)))
(cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
(set (match_dup 0) (const_int 1)))]
- "arm_arch5 && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
+ "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
&& !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
[(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
(set (match_dup 0) (clz:SI (match_dup 0)))
}"
[(set_attr "conds" "set")
(set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
(set_attr "type" "multiple")
(set_attr_alternative "length"
[(const_int 6)
}"
[(set_attr "conds" "set")
(set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
(set_attr_alternative "length"
[(const_int 6)
(const_int 8)
(compare
(and:SI
(match_operator 4 "arm_comparison_operator"
- [(match_operand:SI 0 "s_register_operand"
- "l,l,l,r,r,r,r,r,r")
- (match_operand:SI 1 "arm_add_operand"
- "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
+ [(match_operand:SI 0 "s_register_operand"
+ "l,l,l,r,r,r,r,r,r,r")
+ (match_operand:SI 1 "arm_add_operand"
+ "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
(match_operator:SI 5 "arm_comparison_operator"
- [(match_operand:SI 2 "s_register_operand"
- "l,r,r,l,l,r,r,r,r")
- (match_operand:SI 3 "arm_add_operand"
- "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
+ [(match_operand:SI 2 "s_register_operand"
+ "l,r,r,l,l,r,r,r,r,r")
+ (match_operand:SI 3 "arm_add_operand"
+ "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
(const_int 0)))]
"TARGET_32BIT"
"*
\"it\\t%d5\",
\"it\\t%d4\"
};
- static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
- CMP_CMP, CMN_CMP, CMP_CMP,
- CMN_CMP, CMP_CMN, CMN_CMN};
+ static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
+ CMP_CMP, CMN_CMP, CMP_CMP,
+ CMP_CMP, CMN_CMP, CMP_CMN,
+ CMN_CMN};
int swap =
comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
}"
[(set_attr "conds" "set")
(set_attr "predicable" "no")
- (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
(set_attr_alternative "length"
[(const_int 6)
(const_int 8)
(const_int 8)
(const_int 8)
(const_int 8)
+ (const_int 6)
(if_then_else (eq_attr "is_thumb" "no")
(const_int 8)
(const_int 10))
(ior:SI
(match_operator 4 "arm_comparison_operator"
[(match_operand:SI 0 "s_register_operand"
- "l,l,l,r,r,r,r,r,r")
+ "l,l,l,r,r,r,r,r,r,r")
(match_operand:SI 1 "arm_add_operand"
- "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
+ "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
(match_operator:SI 5 "arm_comparison_operator"
[(match_operand:SI 2 "s_register_operand"
- "l,r,r,l,l,r,r,r,r")
+ "l,r,r,l,l,r,r,r,r,r")
(match_operand:SI 3 "arm_add_operand"
- "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
+ "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
(const_int 0)))]
"TARGET_32BIT"
"*
\"it\\t%D4\",
\"it\\t%D5\"
};
- static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
- CMP_CMP, CMN_CMP, CMP_CMP,
- CMN_CMP, CMP_CMN, CMN_CMN};
+ static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
+ CMP_CMP, CMN_CMP, CMP_CMP,
+ CMP_CMP, CMN_CMP, CMP_CMN,
+ CMN_CMN};
int swap =
comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
}
"
[(set_attr "conds" "set")
- (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
+ (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
(set_attr_alternative "length"
[(const_int 6)
(const_int 8)
(const_int 8)
(const_int 8)
(const_int 8)
+ (const_int 6)
(if_then_else (eq_attr "is_thumb" "no")
(const_int 8)
(const_int 10))
)
(define_insn_and_split "*ior_scc_scc"
- [(set (match_operand:SI 0 "s_register_operand" "=Ts")
+ [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
(ior:SI (match_operator:SI 3 "arm_comparison_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_add_operand" "rIL")])
+ [(match_operand:SI 1 "s_register_operand" "l,r")
+ (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
(match_operator:SI 6 "arm_comparison_operator"
- [(match_operand:SI 4 "s_register_operand" "r")
- (match_operand:SI 5 "arm_add_operand" "rIL")])))
+ [(match_operand:SI 4 "s_register_operand" "l,r")
+ (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT
&& (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
DOM_CC_X_OR_Y),
CC_REGNUM);"
[(set_attr "conds" "clob")
+ (set_attr "enabled_for_short_it" "yes,no")
(set_attr "length" "16")
(set_attr "type" "multiple")]
)
(define_insn_and_split "*ior_scc_scc_cmp"
[(set (match_operand 0 "dominant_cc_register" "")
(compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_add_operand" "rIL")])
+ [(match_operand:SI 1 "s_register_operand" "l,r")
+ (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
(match_operator:SI 6 "arm_comparison_operator"
- [(match_operand:SI 4 "s_register_operand" "r")
- (match_operand:SI 5 "arm_add_operand" "rIL")]))
+ [(match_operand:SI 4 "s_register_operand" "l,r")
+ (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
(const_int 0)))
- (set (match_operand:SI 7 "s_register_operand" "=Ts")
+ (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
(ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
"TARGET_32BIT"
(set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
""
[(set_attr "conds" "set")
+ (set_attr "enabled_for_short_it" "yes,no")
(set_attr "length" "16")
(set_attr "type" "multiple")]
)
(define_insn_and_split "*and_scc_scc"
- [(set (match_operand:SI 0 "s_register_operand" "=Ts")
+ [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
(and:SI (match_operator:SI 3 "arm_comparison_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_add_operand" "rIL")])
+ [(match_operand:SI 1 "s_register_operand" "l,r")
+ (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
(match_operator:SI 6 "arm_comparison_operator"
- [(match_operand:SI 4 "s_register_operand" "r")
- (match_operand:SI 5 "arm_add_operand" "rIL")])))
+ [(match_operand:SI 4 "s_register_operand" "l,r")
+ (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
(clobber (reg:CC CC_REGNUM))]
"TARGET_32BIT
&& (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
DOM_CC_X_AND_Y),
CC_REGNUM);"
[(set_attr "conds" "clob")
+ (set_attr "enabled_for_short_it" "yes,no")
(set_attr "length" "16")
(set_attr "type" "multiple")]
)
(define_insn_and_split "*and_scc_scc_cmp"
[(set (match_operand 0 "dominant_cc_register" "")
(compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
- [(match_operand:SI 1 "s_register_operand" "r")
- (match_operand:SI 2 "arm_add_operand" "rIL")])
+ [(match_operand:SI 1 "s_register_operand" "l,r")
+ (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
(match_operator:SI 6 "arm_comparison_operator"
- [(match_operand:SI 4 "s_register_operand" "r")
- (match_operand:SI 5 "arm_add_operand" "rIL")]))
+ [(match_operand:SI 4 "s_register_operand" "l,r")
+ (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
(const_int 0)))
- (set (match_operand:SI 7 "s_register_operand" "=Ts")
+ (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
"TARGET_32BIT"
(set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
""
[(set_attr "conds" "set")
+ (set_attr "enabled_for_short_it" "yes,no")
(set_attr "length" "16")
(set_attr "type" "multiple")]
)
/* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
if (CONST_INT_P (operands[2]))
emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
- GEN_INT (- INTVAL (operands[2]))));
+ gen_int_mode (-INTVAL (operands[2]),
+ SImode)));
else
emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
}
"
[(set_attr "conds" "clob")
- (set_attr "enabled_for_depr_it" "no,yes,yes")
+ (set_attr "enabled_for_short_it" "no,yes,yes")
(set_attr "type" "multiple")]
)
[(set_attr "conds" "use")
(set_attr "length" "4")
(set_attr "arch" "t2,32")
- (set_attr "enabled_for_depr_it" "yes,no")
+ (set_attr "enabled_for_short_it" "yes,no")
(set_attr "type" "logic_shift_imm")]
)
[(set_attr "conds" "use")
(set_attr "length" "4")
(set_attr "arch" "t2,32")
- (set_attr "enabled_for_depr_it" "yes,no")
+ (set_attr "enabled_for_short_it" "yes,no")
(set_attr "type" "logic_shift_imm")]
)
}"
[(set_attr "length" "12")
(set_attr "predicable" "yes")
- (set_attr "type" "load1")]
+ (set_attr "type" "load_4")]
)
; This pattern is never tried by combine, so do it as a peephole
)
(define_expand "eh_epilogue"
- [(use (match_operand:SI 0 "register_operand" ""))
- (use (match_operand:SI 1 "register_operand" ""))
- (use (match_operand:SI 2 "register_operand" ""))]
+ [(use (match_operand:SI 0 "register_operand"))
+ (use (match_operand:SI 1 "register_operand"))
+ (use (match_operand:SI 2 "register_operand"))]
"TARGET_EITHER"
"
{
return \"\";
}"
- [(set_attr "type" "store4")
+ [(set_attr "type" "store_16")
(set (attr "length")
(symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
)
return \"\";
}
"
- [(set_attr "type" "load4")
+ [(set_attr "type" "load_16")
(set_attr "predicable" "yes")
(set (attr "length")
(symbol_ref "arm_attr_length_pop_multi (operands,
return \"\";
}
"
- [(set_attr "type" "load4")
+ [(set_attr "type" "load_16")
(set_attr "predicable" "yes")
(set (attr "length")
(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
return \"\";
}
"
- [(set_attr "type" "load4")
+ [(set_attr "type" "load_16")
(set_attr "predicable" "yes")
(set (attr "length")
(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
(mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
"TARGET_32BIT && (reload_in_progress || reload_completed)"
"ldr%?\t%|pc, [%0], #4"
- [(set_attr "type" "load1")
+ [(set_attr "type" "load_4")
(set_attr "predicable" "yes")]
)
;; Pop for floating point registers (as used in epilogue RTL)
return \"\";
}
"
- [(set_attr "type" "load4")
+ [(set_attr "type" "load_16")
(set_attr "conds" "unconditional")
(set_attr "predicable" "no")]
)
{
rtx x = operands[0];
making_const_table = TRUE;
- switch (GET_MODE_CLASS (GET_MODE (x)))
+ scalar_float_mode float_mode;
+ if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
+ assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
+ else
{
- case MODE_FLOAT:
- assemble_real (*CONST_DOUBLE_REAL_VALUE (x), GET_MODE (x),
- BITS_PER_WORD);
- break;
- default:
/* XXX: Sometimes gcc does something really dumb and ends up with
a HIGH in a constant pool entry, usually because it's trying to
load into a VFP register. We know this will always be used in
x = XEXP (x, 0);
assemble_integer (x, 4, BITS_PER_WORD, 1);
mark_symbol_refs_as_used (x);
- break;
}
return \"\";
}"
"*
{
making_const_table = TRUE;
- switch (GET_MODE_CLASS (GET_MODE (operands[0])))
- {
- case MODE_FLOAT:
- assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
- GET_MODE (operands[0]), BITS_PER_WORD);
- break;
- default:
- assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
- break;
- }
+ scalar_float_mode float_mode;
+ if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
+ assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
+ float_mode, BITS_PER_WORD);
+ else
+ assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
return \"\";
}"
[(set_attr "length" "8")
"*
{
making_const_table = TRUE;
- switch (GET_MODE_CLASS (GET_MODE (operands[0])))
- {
- case MODE_FLOAT:
- assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
- GET_MODE (operands[0]), BITS_PER_WORD);
- break;
- default:
- assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
- break;
- }
+ scalar_float_mode float_mode;
+ if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
+ assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
+ float_mode, BITS_PER_WORD);
+ else
+ assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
return \"\";
}"
[(set_attr "length" "16")
(define_insn "clzsi2"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
- "TARGET_32BIT && arm_arch5"
+ "TARGET_32BIT && arm_arch5t"
"clz%?\\t%0, %1"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "clz")])
(define_insn "rbitsi2"
"TARGET_32BIT && arm_arch_thumb2"
"rbit%?\\t%0, %1"
[(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "clz")])
;; Keep this as a CTZ expression until after reload and then split
[(prefetch (match_operand:SI 0 "address_operand" "p")
(match_operand:SI 1 "" "")
(match_operand:SI 2 "" ""))]
- "TARGET_32BIT && arm_arch5e"
+ "TARGET_32BIT && arm_arch5te"
"pld\\t%a0"
- [(set_attr "type" "load1")]
+ [(set_attr "type" "load_4")]
)
;; General predication pattern
;; Patterns for exception handling
(define_expand "eh_return"
- [(use (match_operand 0 "general_operand" ""))]
+ [(use (match_operand 0 "general_operand"))]
"TARGET_EITHER"
"
{
(set_attr "type" "mrs")]
)
+;; Doesn't clobber R1-R3. Must use r0 for the first operand.
+;; FDPIC variant of load_tp_soft: identical call to __aeabi_read_tp but
+;; additionally clobbers FDPIC_REGNUM (r9).  NOTE(review): presumably the
+;; FDPIC register is not preserved across this library call under
+;; TARGET_FDPIC -- confirm against the FDPIC ABI.
+(define_insn "load_tp_soft_fdpic"
+ [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
+ (clobber (reg:SI FDPIC_REGNUM))
+ (clobber (reg:SI LR_REGNUM))
+ (clobber (reg:SI IP_REGNUM))
+ (clobber (reg:CC CC_REGNUM))]
+ "TARGET_SOFT_TP && TARGET_FDPIC"
+ "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
+ [(set_attr "conds" "clob")
+ (set_attr "type" "branch")]
+)
+
;; Doesn't clobber R1-R3. Must use r0 for the first operand.
(define_insn "load_tp_soft"
[(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
(clobber (reg:SI LR_REGNUM))
(clobber (reg:SI IP_REGNUM))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_SOFT_TP"
+ "TARGET_SOFT_TP && !TARGET_FDPIC"
"bl\\t__aeabi_read_tp\\t@ load_tp_soft"
[(set_attr "conds" "clob")
(set_attr "type" "branch")]
;; For thread pointer builtin
(define_expand "get_thread_pointersi"
- [(match_operand:SI 0 "s_register_operand" "=r")]
+ [(match_operand:SI 0 "s_register_operand")]
""
"
{
movt\t%0, %L1"
[(set_attr "arch" "32,v8mb")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
(set_attr "length" "4")
(set_attr "type" "alu_sreg")]
)
[(set_attr "arch" "t1,t2,32")
(set_attr "length" "2,2,4")
(set_attr "predicable" "no,yes,yes")
- (set_attr "predicable_short_it" "no")
(set_attr "type" "rev")]
)
(define_expand "arm_legacy_rev"
- [(set (match_operand:SI 2 "s_register_operand" "")
- (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
+ [(set (match_operand:SI 2 "s_register_operand")
+ (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
(const_int 16))
(match_dup 1)))
(set (match_dup 2)
(lshiftrt:SI (match_dup 2)
(const_int 8)))
- (set (match_operand:SI 3 "s_register_operand" "")
+ (set (match_operand:SI 3 "s_register_operand")
(rotatert:SI (match_dup 1)
(const_int 8)))
(set (match_dup 2)
(and:SI (match_dup 2)
(const_int -65281)))
- (set (match_operand:SI 0 "s_register_operand" "")
+ (set (match_operand:SI 0 "s_register_operand")
(xor:SI (match_dup 3)
(match_dup 2)))]
"TARGET_32BIT"
;; Reuse temporaries to keep register pressure down.
(define_expand "thumb_legacy_rev"
- [(set (match_operand:SI 2 "s_register_operand" "")
- (ashift:SI (match_operand:SI 1 "s_register_operand" "")
+ [(set (match_operand:SI 2 "s_register_operand")
+ (ashift:SI (match_operand:SI 1 "s_register_operand")
(const_int 24)))
- (set (match_operand:SI 3 "s_register_operand" "")
+ (set (match_operand:SI 3 "s_register_operand")
(lshiftrt:SI (match_dup 1)
(const_int 24)))
(set (match_dup 3)
(ior:SI (match_dup 3)
(match_dup 2)))
- (set (match_operand:SI 4 "s_register_operand" "")
+ (set (match_operand:SI 4 "s_register_operand")
(const_int 16))
- (set (match_operand:SI 5 "s_register_operand" "")
+ (set (match_operand:SI 5 "s_register_operand")
(rotatert:SI (match_dup 1)
(match_dup 4)))
(set (match_dup 2)
(set (match_dup 5)
(rotatert:SI (match_dup 5)
(match_dup 4)))
- (set (match_operand:SI 0 "s_register_operand" "")
+ (set (match_operand:SI 0 "s_register_operand")
(ior:SI (match_dup 5)
(match_dup 3)))]
"TARGET_THUMB"
;; rsbpl r0, r1, #0
(define_expand "modsi3"
- [(match_operand:SI 0 "register_operand" "")
- (match_operand:SI 1 "register_operand" "")
- (match_operand:SI 2 "const_int_operand" "")]
+ [(match_operand:SI 0 "register_operand")
+ (match_operand:SI 1 "register_operand")
+ (match_operand:SI 2 "const_int_operand")]
"TARGET_32BIT"
{
HOST_WIDE_INT val = INTVAL (operands[2]);
)
(define_expand "bswapsi2"
- [(set (match_operand:SI 0 "s_register_operand" "=r")
- (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
+ [(set (match_operand:SI 0 "s_register_operand")
+ (bswap:SI (match_operand:SI 1 "s_register_operand")))]
"TARGET_EITHER && (arm_arch6 || !optimize_size)"
"
if (!arm_arch6)
)
(define_expand "bswaphi2"
- [(set (match_operand:HI 0 "s_register_operand" "=r")
- (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
+ [(set (match_operand:HI 0 "s_register_operand")
+ (bswap:HI (match_operand:HI 1 "s_register_operand")))]
"arm_arch6"
""
)
(mem:SI (plus:SI (match_dup 1)
(match_operand:SI 4 "const_int_operand" ""))))]
"TARGET_LDRD && TARGET_THUMB2 && reload_completed
- && current_tune->prefer_ldrd_strd
&& ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
&& (operands_ok_ldrd_strd (operands[0], operands[3],
operands[1], INTVAL (operands[2]),
false, true))"
"ldrd%?\t%0, %3, [%1, %2]"
- [(set_attr "type" "load2")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ [(set_attr "type" "load_8")
+ (set_attr "predicable" "yes")])
(define_insn "*thumb2_ldrd_base"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(mem:SI (plus:SI (match_dup 1)
(const_int 4))))]
"TARGET_LDRD && TARGET_THUMB2 && reload_completed
- && current_tune->prefer_ldrd_strd
&& (operands_ok_ldrd_strd (operands[0], operands[2],
operands[1], 0, false, true))"
"ldrd%?\t%0, %2, [%1]"
- [(set_attr "type" "load2")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ [(set_attr "type" "load_8")
+ (set_attr "predicable" "yes")])
(define_insn "*thumb2_ldrd_base_neg"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(set (match_operand:SI 2 "s_register_operand" "=r")
(mem:SI (match_dup 1)))]
"TARGET_LDRD && TARGET_THUMB2 && reload_completed
- && current_tune->prefer_ldrd_strd
&& (operands_ok_ldrd_strd (operands[0], operands[2],
operands[1], -4, false, true))"
"ldrd%?\t%0, %2, [%1, #-4]"
- [(set_attr "type" "load2")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ [(set_attr "type" "load_8")
+ (set_attr "predicable" "yes")])
(define_insn "*thumb2_strd"
[(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
(match_operand:SI 3 "const_int_operand" "")))
(match_operand:SI 4 "s_register_operand" "r"))]
"TARGET_LDRD && TARGET_THUMB2 && reload_completed
- && current_tune->prefer_ldrd_strd
&& ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
&& (operands_ok_ldrd_strd (operands[2], operands[4],
operands[0], INTVAL (operands[1]),
false, false))"
"strd%?\t%2, %4, [%0, %1]"
- [(set_attr "type" "store2")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ [(set_attr "type" "store_8")
+ (set_attr "predicable" "yes")])
(define_insn "*thumb2_strd_base"
[(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
(const_int 4)))
(match_operand:SI 2 "s_register_operand" "r"))]
"TARGET_LDRD && TARGET_THUMB2 && reload_completed
- && current_tune->prefer_ldrd_strd
&& (operands_ok_ldrd_strd (operands[1], operands[2],
operands[0], 0, false, false))"
"strd%?\t%1, %2, [%0]"
- [(set_attr "type" "store2")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ [(set_attr "type" "store_8")
+ (set_attr "predicable" "yes")])
(define_insn "*thumb2_strd_base_neg"
[(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
(set (mem:SI (match_dup 0))
(match_operand:SI 2 "s_register_operand" "r"))]
"TARGET_LDRD && TARGET_THUMB2 && reload_completed
- && current_tune->prefer_ldrd_strd
&& (operands_ok_ldrd_strd (operands[1], operands[2],
operands[0], -4, false, false))"
"strd%?\t%1, %2, [%0, #-4]"
- [(set_attr "type" "store2")
- (set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")])
+ [(set_attr "type" "store_8")
+ (set_attr "predicable" "yes")])
;; ARMv8 CRC32 instructions.
-(define_insn "<crc_variant>"
+(define_insn "arm_<crc_variant>"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
(match_operand:<crc_mode> 2 "s_register_operand" "r")]
;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
;; large lists without explicit writeback generated for APCS_FRAME epilogue.
+;; The operands are validated through the load_multiple_operation
+;; match_parallel predicate rather than through constraints, so enable it
+;; only after reload.
(define_insn "*load_multiple"
[(match_parallel 0 "load_multiple_operation"
[(set (match_operand:SI 2 "s_register_operand" "=rk")
(mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
])]
- "TARGET_32BIT"
+ "TARGET_32BIT && reload_completed"
"*
{
arm_output_multireg_pop (operands, /*return_pc=*/false,
DONE;
})
+;; Coprocessor data-processing instruction (CDP family, one insn per
+;; member of the CDPI iterator).  Emits
+;;   <cdp> pN, opc1, CRd, CRn, CRm, opc2
+;; Operand ranges are enforced at output time by arm_const_bounds:
+;; operands 0 (coprocessor) and 1 (opc1) in [0,16), the three CR numbers
+;; in [0,32), and opc2 (operand 5) in [0,8).
+(define_insn "arm_<cdp>"
+ [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
+ (match_operand:SI 1 "immediate_operand" "n")
+ (match_operand:SI 2 "immediate_operand" "n")
+ (match_operand:SI 3 "immediate_operand" "n")
+ (match_operand:SI 4 "immediate_operand" "n")
+ (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
+ "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
+{
+ arm_const_bounds (operands[0], 0, 16);
+ arm_const_bounds (operands[1], 0, 16);
+ arm_const_bounds (operands[2], 0, (1 << 5));
+ arm_const_bounds (operands[3], 0, (1 << 5));
+ arm_const_bounds (operands[4], 0, (1 << 5));
+ arm_const_bounds (operands[5], 0, 8);
+ return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
+}
+ [(set_attr "length" "4")
+ (set_attr "type" "coproc")])
+
+;; Coprocessor load (LDC family, via the LDCI iterator).  Operand 2 is
+;; the memory source, restricted by the Uz constraint.  arm_const_bounds
+;; checks operand 0 (coprocessor number) in [0,16) and operand 1
+;; (CR destination number) in [0,32) at output time.
+(define_insn "*ldc"
+ [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
+ (match_operand:SI 1 "immediate_operand" "n")
+ (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
+ "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
+{
+ arm_const_bounds (operands[0], 0, 16);
+ arm_const_bounds (operands[1], 0, (1 << 5));
+ return "<ldc>\\tp%c0, CR%c1, %2";
+}
+ [(set_attr "length" "4")
+ (set_attr "type" "coproc")])
+
+;; Coprocessor store (STC family, via the STCI iterator).  Operand 2 is
+;; the memory destination ("=Uz").  arm_const_bounds checks operand 0
+;; (coprocessor number) in [0,16) and operand 1 (CR source number) in
+;; [0,32) at output time.
+(define_insn "*stc"
+ [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
+ (match_operand:SI 1 "immediate_operand" "n")
+ (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
+ "arm_coproc_builtin_available (VUNSPEC_<STC>)"
+{
+ arm_const_bounds (operands[0], 0, 16);
+ arm_const_bounds (operands[1], 0, (1 << 5));
+ return "<stc>\\tp%c0, CR%c1, %2";
+}
+ [(set_attr "length" "4")
+ (set_attr "type" "coproc")])
+
+;; Builtin expander for the LDC forms; wraps the base register
+;; (operand 2) in a (mem:SI ...) so the *ldc insn's Uz memory constraint
+;; can match it.
+(define_expand "arm_<ldc>"
+ [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
+ (match_operand:SI 1 "immediate_operand")
+ (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
+ "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
+
+;; Builtin expander for the STC forms; wraps the base register
+;; (operand 2) in a (mem:SI ...) so the *stc insn's memory constraint
+;; can match it.
+(define_expand "arm_<stc>"
+ [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
+ (match_operand:SI 1 "immediate_operand")
+ (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
+ "arm_coproc_builtin_available (VUNSPEC_<STC>)")
+
+;; Move from core register to coprocessor (MCR family, via the MCRI
+;; iterator).  The (use (match_dup 2)) keeps the transferred register
+;; live.  arm_const_bounds checks: coprocessor (operand 0) in [0,16),
+;; opc1 (operand 1) in [0,8), CRn/CRm (operands 3-4) in [0,32), opc2
+;; (operand 5) in [0,8).
+(define_insn "arm_<mcr>"
+ [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
+ (match_operand:SI 1 "immediate_operand" "n")
+ (match_operand:SI 2 "s_register_operand" "r")
+ (match_operand:SI 3 "immediate_operand" "n")
+ (match_operand:SI 4 "immediate_operand" "n")
+ (match_operand:SI 5 "immediate_operand" "n")] MCRI)
+ (use (match_dup 2))]
+ "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
+{
+ arm_const_bounds (operands[0], 0, 16);
+ arm_const_bounds (operands[1], 0, 8);
+ arm_const_bounds (operands[3], 0, (1 << 5));
+ arm_const_bounds (operands[4], 0, (1 << 5));
+ arm_const_bounds (operands[5], 0, 8);
+ return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
+}
+ [(set_attr "length" "4")
+ (set_attr "type" "coproc")])
+
+;; Move from coprocessor to core register (MRC family, via the MRCI
+;; iterator).  Operand 0 receives the value.  arm_const_bounds checks:
+;; coprocessor (operand 1) in [0,16), opc1 (operand 2) in [0,8),
+;; CRn/CRm (operands 3-4) in [0,32), opc2 (operand 5) in [0,8).
+(define_insn "arm_<mrc>"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+ (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
+ (match_operand:SI 2 "immediate_operand" "n")
+ (match_operand:SI 3 "immediate_operand" "n")
+ (match_operand:SI 4 "immediate_operand" "n")
+ (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
+ "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
+{
+ arm_const_bounds (operands[1], 0, 16);
+ arm_const_bounds (operands[2], 0, 8);
+ arm_const_bounds (operands[3], 0, (1 << 5));
+ arm_const_bounds (operands[4], 0, (1 << 5));
+ arm_const_bounds (operands[5], 0, 8);
+ return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
+}
+ [(set_attr "length" "4")
+ (set_attr "type" "coproc")])
+
+;; Two-register move to coprocessor (MCRR family, via the MCRRI
+;; iterator).  The DI-mode operand 2 is emitted as its low/high halves
+;; with the %Q2/%R2 modifiers; (use (match_dup 2)) keeps it live.
+;; arm_const_bounds checks: coprocessor (operand 0) in [0,16), opc1
+;; (operand 1) in [0,8), CRm (operand 3) in [0,32).
+(define_insn "arm_<mcrr>"
+ [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
+ (match_operand:SI 1 "immediate_operand" "n")
+ (match_operand:DI 2 "s_register_operand" "r")
+ (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
+ (use (match_dup 2))]
+ "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
+{
+ arm_const_bounds (operands[0], 0, 16);
+ arm_const_bounds (operands[1], 0, 8);
+ arm_const_bounds (operands[3], 0, (1 << 5));
+ return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
+}
+ [(set_attr "length" "4")
+ (set_attr "type" "coproc")])
+
+;; Two-register move from coprocessor (MRRC family, via the MRRCI
+;; iterator).  The DI-mode result (operand 0) is written as its low/high
+;; halves with the %Q0/%R0 modifiers.  arm_const_bounds checks:
+;; coprocessor (operand 1) in [0,16), opc1 (operand 2) in [0,8), CRm
+;; (operand 3) in [0,32).
+(define_insn "arm_<mrrc>"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
+ (match_operand:SI 2 "immediate_operand" "n")
+ (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
+ "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
+{
+ arm_const_bounds (operands[1], 0, 16);
+ arm_const_bounds (operands[2], 0, 8);
+ arm_const_bounds (operands[3], 0, (1 << 5));
+ return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
+}
+ [(set_attr "length" "4")
+ (set_attr "type" "coproc")])
+
+;; Expand a speculation barrier.  When the target has a usable inline
+;; barrier (arm_arch7 || arm_arch8) fall through to the
+;; *speculation_barrier_insn pattern; otherwise emit a call to the
+;; libgcc helper and finish the expansion here.
+(define_expand "speculation_barrier"
+ [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
+ "TARGET_EITHER"
+ "
+ /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
+ have a usable barrier (and probably don't need one in practice).
+ But to be safe if such code is run on later architectures, call a
+ helper function in libgcc that will do the thing for the active
+ system. */
+ if (!(arm_arch7 || arm_arch8))
+ {
+ arm_emit_speculation_barrier_function ();
+ DONE;
+ }
+ "
+)
+
+;; Generate a hard speculation barrier when we have not enabled speculation
+;; tracking.  Emits an ISB followed by DSB SY (two 4-byte instructions,
+;; hence length 8); requires Armv7 or later.
+(define_insn "*speculation_barrier_insn"
+ [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
+ "arm_arch7 || arm_arch8"
+ "isb\;dsb\\tsy"
+ [(set_attr "type" "block")
+ (set_attr "length" "8")]
+)
+
;; Vector bits common to IWMMXT and Neon
(include "vec-common.md")
;; Load the Intel Wireless Multimedia Extension patterns