1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
71 ; GLOBAL_OFFSET_TABLE. The operation is fully
72 ; described by the RTL but must be wrapped to
73 ; prevent combine from trying to rip it apart.
74 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
75 ; being scheduled before the stack adjustment insn.
76 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
77 ; this unspec is used to prevent the deletion of
78 ; instructions setting registers for EH handling
79 ; and stack frame generation. Operand 0 is the
81 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
82 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
83 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
84 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
85 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
86 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
87 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
88 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
89 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
90 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
91 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
92 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
93 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
94 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
95 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
97 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
98 ; generate correct unwind information.
99 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
100 ; correctly for PIC usage.
104 ;; UNSPEC_VOLATILE Usage:
107 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
109 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
110 ; instruction epilogue sequence that isn't expanded
111 ; into normal RTL. Used for both normal and sibcall
113 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
114 ; for inlined constants.
115 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
117 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
119 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
121 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
123 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
125 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
127 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
128 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
129 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
130 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
131 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
132 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
133 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
138 ;;---------------------------------------------------------------------------
141 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
142 ; generating ARM code. This is used to control the length of some insn
143 ; patterns that share the same RTL in both ARM and Thumb code.
144 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
146 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
147 ; scheduling decisions for the load unit and the multiplier.
148 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
150 ; IS_XSCALE is set to 'yes' when compiling for XScale.
151 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
153 ;; Operand number of an input operand that is shifted. Zero if the
154 ;; given instruction does not shift one of its input operands.
155 (define_attr "shift" "" (const_int 0))
157 ; Floating Point Unit. If we only have floating point emulation, then there
158 ; is no point in scheduling the floating point insns. (Well, for best
159 ; performance we should try and group them together).
160 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
161 (const (symbol_ref "arm_fpu_attr")))
163 ; LENGTH of an instruction (in bytes)
164 (define_attr "length" "" (const_int 4))
166 ; POOL_RANGE is how far away from a constant pool entry that this insn
167 ; can be placed. If the distance is zero, then this insn will never
168 ; reference the pool.
169 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
170 ; before its address.
171 (define_attr "pool_range" "" (const_int 0))
172 (define_attr "neg_pool_range" "" (const_int 0))
174 ; An assembler sequence may clobber the condition codes without us knowing.
175 ; If such an insn references the pool, then we have no way of knowing how,
176 ; so use the most conservative value for pool_range.
177 (define_asm_attributes
178 [(set_attr "conds" "clob")
179 (set_attr "length" "4")
180 (set_attr "pool_range" "250")])
182 ;; The instruction used to implement a particular pattern. This
183 ;; information is used by pipeline descriptions to provide accurate
184 ;; scheduling information.
187 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
188 (const_string "other"))
190 ; TYPE attribute is used to detect floating point instructions which, if
191 ; running on a co-processor can run in parallel with other, basic instructions
192 ; If write-buffer scheduling is enabled then it can also be used in the
193 ; scheduling of writes.
195 ; Classification of each insn
196 ; alu any alu instruction that doesn't hit memory or fp
197 ; regs or have a shifted source operand
198 ; alu_shift any data instruction that doesn't hit memory or fp
199 ; regs, but has a source operand shifted by a constant
200 ; alu_shift_reg any data instruction that doesn't hit memory or fp
201 ; regs, but has a source operand shifted by a register value
202 ; mult a multiply instruction
203 ; block blockage insn, this blocks all functional units
204 ; float a floating point arithmetic operation (subject to expansion)
205 ; fdivd DFmode floating point division
206 ; fdivs SFmode floating point division
207 ; fmul Floating point multiply
208 ; ffmul Fast floating point multiply
209 ; farith Floating point arithmetic (4 cycle)
210 ; ffarith Fast floating point arithmetic (2 cycle)
211 ; float_em a floating point arithmetic operation that is normally emulated
212 ; even on a machine with an fpa.
213 ; f_load a floating point load from memory
214 ; f_store a floating point store to memory
215 ; f_load[sd] single/double load from memory
216 ; f_store[sd] single/double store to memory
217 ; f_flag a transfer of co-processor flags to the CPSR
218 ; f_mem_r a transfer of a floating point register to a real reg via mem
219 ; r_mem_f the reverse of f_mem_r
220 ; f_2_r fast transfer float to arm (no memory needed)
221 ; r_2_f fast transfer arm to float
222 ; f_cvt convert floating<->integral
224 ; call a subroutine call
225 ; load_byte load byte(s) from memory to arm registers
226 ; load1 load 1 word from memory to arm registers
227 ; load2 load 2 words from memory to arm registers
228 ; load3 load 3 words from memory to arm registers
229 ; load4 load 4 words from memory to arm registers
230 ; store store 1 word to memory from arm registers
231 ; store2 store 2 words
232 ; store3 store 3 words
233 ; store4 store 4 (or more) words
234 ; Additions for Cirrus Maverick co-processor:
235 ; mav_farith Floating point arithmetic (4 cycle)
236 ; mav_dmult Double multiplies (7 cycle)
240 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
242 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
243 (const_string "mult")
244 (const_string "alu")))
246 ; Load scheduling, set from the arm_ld_sched variable
247 ; initialized by arm_override_options()
248 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
250 ; condition codes: this one is used by final_prescan_insn to speed up
251 ; conditionalizing instructions. It saves having to scan the rtl to see if
252 ; it uses or alters the condition codes.
254 ; USE means that the condition codes are used by the insn in the process of
255 ; outputting code, this means (at present) that we can't use the insn in
258 ; SET means that the purpose of the insn is to set the condition codes in a
259 ; well defined manner.
261 ; CLOB means that the condition codes are altered in an undefined manner, if
262 ; they are altered at all
264 ; JUMP_CLOB is used when the condition cannot be represented by a single
265 ; instruction (UNEQ and LTGT). These cannot be predicated.
267 ; NOCOND means that the condition codes are neither altered nor affect the
268 ; output of this insn
270 (define_attr "conds" "use,set,clob,jump_clob,nocond"
271 (if_then_else (eq_attr "type" "call")
272 (const_string "clob")
273 (const_string "nocond")))
275 ; Predicable means that the insn can be conditionally executed based on
276 ; an automatically added predicate (additional patterns are generated by
277 ; gen...). We default to 'no' because no Thumb patterns match this rule
278 ; and not all ARM patterns do.
279 (define_attr "predicable" "no,yes" (const_string "no"))
281 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
282 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
283 ; suffer blockages enough to warrant modelling this (and it can adversely
284 ; affect the schedule).
285 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
287 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
288 ; to stall the processor. Used with model_wbuf above.
289 (define_attr "write_conflict" "no,yes"
290 (if_then_else (eq_attr "type"
291 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
293 (const_string "no")))
295 ; Classify the insns into those that take one cycle and those that take more
296 ; than one on the main cpu execution unit.
297 (define_attr "core_cycles" "single,multi"
298 (if_then_else (eq_attr "type"
299 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
300 (const_string "single")
301 (const_string "multi")))
303 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
304 ;; distant label. Only applicable to Thumb code.
305 (define_attr "far_jump" "yes,no" (const_string "no"))
308 ;; The number of machine instructions this pattern expands to.
309 ;; Used for Thumb-2 conditional execution.
310 (define_attr "ce_count" "" (const_int 1))
312 ;;---------------------------------------------------------------------------
315 ; A list of modes that are exactly 64 bits in size. We use this to expand
316 ; some splits that are the same for all modes when operating on ARM
318 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
320 ;;---------------------------------------------------------------------------
323 (include "predicates.md")
324 (include "constraints.md")
326 ;;---------------------------------------------------------------------------
327 ;; Pipeline descriptions
329 ;; Processor type. This is created automatically from arm-cores.def.
330 (include "arm-tune.md")
332 ;; True if the generic scheduling description should be used.
334 (define_attr "generic_sched" "yes,no"
336 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexr4")
338 (const_string "yes"))))
340 (define_attr "generic_vfp" "yes,no"
342 (and (eq_attr "fpu" "vfp")
343 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8"))
345 (const_string "no"))))
347 (include "arm-generic.md")
348 (include "arm926ejs.md")
349 (include "arm1020e.md")
350 (include "arm1026ejs.md")
351 (include "arm1136jfs.md")
352 (include "cortex-a8.md")
353 (include "cortex-r4.md")
356 ;;---------------------------------------------------------------------------
361 ;; Note: For DImode insns, there is normally no reason why operands should
362 ;; not be in the same register, what we don't want is for something being
363 ;; written to partially overlap something that is an input.
364 ;; Cirrus 64bit additions should not be split because we have a native
365 ;; 64bit addition instructions.
367 (define_expand "adddi3"
369 [(set (match_operand:DI 0 "s_register_operand" "")
370 (plus:DI (match_operand:DI 1 "s_register_operand" "")
371 (match_operand:DI 2 "s_register_operand" "")))
372 (clobber (reg:CC CC_REGNUM))])]
375 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
377 if (!cirrus_fp_register (operands[0], DImode))
378 operands[0] = force_reg (DImode, operands[0]);
379 if (!cirrus_fp_register (operands[1], DImode))
380 operands[1] = force_reg (DImode, operands[1]);
381 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
387 if (GET_CODE (operands[1]) != REG)
388 operands[1] = force_reg (SImode, operands[1]);
389 if (GET_CODE (operands[2]) != REG)
390 operands[2] = force_reg (SImode, operands[2]);
395 (define_insn "*thumb1_adddi3"
396 [(set (match_operand:DI 0 "register_operand" "=l")
397 (plus:DI (match_operand:DI 1 "register_operand" "%0")
398 (match_operand:DI 2 "register_operand" "l")))
399 (clobber (reg:CC CC_REGNUM))
402 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
403 [(set_attr "length" "4")]
406 (define_insn_and_split "*arm_adddi3"
407 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
408 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
409 (match_operand:DI 2 "s_register_operand" "r, 0")))
410 (clobber (reg:CC CC_REGNUM))]
411 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
413 "TARGET_32BIT && reload_completed"
414 [(parallel [(set (reg:CC_C CC_REGNUM)
415 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
417 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
418 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
419 (plus:SI (match_dup 4) (match_dup 5))))]
422 operands[3] = gen_highpart (SImode, operands[0]);
423 operands[0] = gen_lowpart (SImode, operands[0]);
424 operands[4] = gen_highpart (SImode, operands[1]);
425 operands[1] = gen_lowpart (SImode, operands[1]);
426 operands[5] = gen_highpart (SImode, operands[2]);
427 operands[2] = gen_lowpart (SImode, operands[2]);
429 [(set_attr "conds" "clob")
430 (set_attr "length" "8")]
433 (define_insn_and_split "*adddi_sesidi_di"
434 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
435 (plus:DI (sign_extend:DI
436 (match_operand:SI 2 "s_register_operand" "r,r"))
437 (match_operand:DI 1 "s_register_operand" "r,0")))
438 (clobber (reg:CC CC_REGNUM))]
439 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
441 "TARGET_32BIT && reload_completed"
442 [(parallel [(set (reg:CC_C CC_REGNUM)
443 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
445 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
446 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
447 (plus:SI (ashiftrt:SI (match_dup 2)
452 operands[3] = gen_highpart (SImode, operands[0]);
453 operands[0] = gen_lowpart (SImode, operands[0]);
454 operands[4] = gen_highpart (SImode, operands[1]);
455 operands[1] = gen_lowpart (SImode, operands[1]);
456 operands[2] = gen_lowpart (SImode, operands[2]);
458 [(set_attr "conds" "clob")
459 (set_attr "length" "8")]
462 (define_insn_and_split "*adddi_zesidi_di"
463 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
464 (plus:DI (zero_extend:DI
465 (match_operand:SI 2 "s_register_operand" "r,r"))
466 (match_operand:DI 1 "s_register_operand" "r,0")))
467 (clobber (reg:CC CC_REGNUM))]
468 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
470 "TARGET_32BIT && reload_completed"
471 [(parallel [(set (reg:CC_C CC_REGNUM)
472 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
474 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
475 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
476 (plus:SI (match_dup 4) (const_int 0))))]
479 operands[3] = gen_highpart (SImode, operands[0]);
480 operands[0] = gen_lowpart (SImode, operands[0]);
481 operands[4] = gen_highpart (SImode, operands[1]);
482 operands[1] = gen_lowpart (SImode, operands[1]);
483 operands[2] = gen_lowpart (SImode, operands[2]);
485 [(set_attr "conds" "clob")
486 (set_attr "length" "8")]
489 (define_expand "addsi3"
490 [(set (match_operand:SI 0 "s_register_operand" "")
491 (plus:SI (match_operand:SI 1 "s_register_operand" "")
492 (match_operand:SI 2 "reg_or_int_operand" "")))]
495 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
497 arm_split_constant (PLUS, SImode, NULL_RTX,
498 INTVAL (operands[2]), operands[0], operands[1],
499 optimize && can_create_pseudo_p ());
505 ; If there is a scratch available, this will be faster than synthesizing the
508 [(match_scratch:SI 3 "r")
509 (set (match_operand:SI 0 "arm_general_register_operand" "")
510 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
511 (match_operand:SI 2 "const_int_operand" "")))]
513 !(const_ok_for_arm (INTVAL (operands[2]))
514 || const_ok_for_arm (-INTVAL (operands[2])))
515 && const_ok_for_arm (~INTVAL (operands[2]))"
516 [(set (match_dup 3) (match_dup 2))
517 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
521 (define_insn_and_split "*arm_addsi3"
522 [(set (match_operand:SI 0 "s_register_operand" "=r, !k,r, !k,r")
523 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k,rk,!k,rk")
524 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,L, L,?n")))]
533 GET_CODE (operands[2]) == CONST_INT
534 && !(const_ok_for_arm (INTVAL (operands[2]))
535 || const_ok_for_arm (-INTVAL (operands[2])))"
536 [(clobber (const_int 0))]
538 arm_split_constant (PLUS, SImode, curr_insn,
539 INTVAL (operands[2]), operands[0],
543 [(set_attr "length" "4,4,4,4,16")
544 (set_attr "predicable" "yes")]
547 ;; Register group 'k' is a single register group containing only the stack
548 ;; register. Trying to reload it will always fail catastrophically,
549 ;; so never allow those alternatives to match if reloading is needed.
551 (define_insn "*thumb1_addsi3"
552 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
553 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
554 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
557 static const char * const asms[] =
559 \"add\\t%0, %0, %2\",
560 \"sub\\t%0, %0, #%n2\",
561 \"add\\t%0, %1, %2\",
562 \"add\\t%0, %0, %2\",
563 \"add\\t%0, %0, %2\",
564 \"add\\t%0, %1, %2\",
567 if ((which_alternative == 2 || which_alternative == 6)
568 && GET_CODE (operands[2]) == CONST_INT
569 && INTVAL (operands[2]) < 0)
570 return \"sub\\t%0, %1, #%n2\";
571 return asms[which_alternative];
573 [(set_attr "length" "2")]
576 ;; Reloading and elimination of the frame pointer can
577 ;; sometimes cause this optimization to be missed.
579 [(set (match_operand:SI 0 "arm_general_register_operand" "")
580 (match_operand:SI 1 "const_int_operand" ""))
582 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
584 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
585 && (INTVAL (operands[1]) & 3) == 0"
586 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
590 ;; ??? Make Thumb-2 variants which prefer low regs
591 (define_insn "*addsi3_compare0"
592 [(set (reg:CC_NOOV CC_REGNUM)
594 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
595 (match_operand:SI 2 "arm_add_operand" "rI,L"))
597 (set (match_operand:SI 0 "s_register_operand" "=r,r")
598 (plus:SI (match_dup 1) (match_dup 2)))]
602 sub%.\\t%0, %1, #%n2"
603 [(set_attr "conds" "set")]
606 (define_insn "*addsi3_compare0_scratch"
607 [(set (reg:CC_NOOV CC_REGNUM)
609 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
610 (match_operand:SI 1 "arm_add_operand" "rI,L"))
616 [(set_attr "conds" "set")]
619 (define_insn "*compare_negsi_si"
620 [(set (reg:CC_Z CC_REGNUM)
622 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
623 (match_operand:SI 1 "s_register_operand" "r")))]
626 [(set_attr "conds" "set")]
629 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
630 ;; addend is a constant.
631 (define_insn "*cmpsi2_addneg"
632 [(set (reg:CC CC_REGNUM)
634 (match_operand:SI 1 "s_register_operand" "r,r")
635 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
636 (set (match_operand:SI 0 "s_register_operand" "=r,r")
637 (plus:SI (match_dup 1)
638 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
639 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
642 add%.\\t%0, %1, #%n2"
643 [(set_attr "conds" "set")]
646 ;; Convert the sequence
648 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
652 ;; bcs dest ((unsigned)rn >= 1)
653 ;; similarly for the beq variant using bcc.
654 ;; This is a common looping idiom (while (n--))
656 [(set (match_operand:SI 0 "arm_general_register_operand" "")
657 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
659 (set (match_operand 2 "cc_register" "")
660 (compare (match_dup 0) (const_int -1)))
662 (if_then_else (match_operator 3 "equality_operator"
663 [(match_dup 2) (const_int 0)])
664 (match_operand 4 "" "")
665 (match_operand 5 "" "")))]
666 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
670 (match_dup 1) (const_int 1)))
671 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
673 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
676 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
677 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
680 operands[2], const0_rtx);"
683 ;; The next four insns work because they compare the result with one of
684 ;; the operands, and we know that the use of the condition code is
685 ;; either GEU or LTU, so we can use the carry flag from the addition
686 ;; instead of doing the compare a second time.
687 (define_insn "*addsi3_compare_op1"
688 [(set (reg:CC_C CC_REGNUM)
690 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
691 (match_operand:SI 2 "arm_add_operand" "rI,L"))
693 (set (match_operand:SI 0 "s_register_operand" "=r,r")
694 (plus:SI (match_dup 1) (match_dup 2)))]
698 sub%.\\t%0, %1, #%n2"
699 [(set_attr "conds" "set")]
702 (define_insn "*addsi3_compare_op2"
703 [(set (reg:CC_C CC_REGNUM)
705 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
706 (match_operand:SI 2 "arm_add_operand" "rI,L"))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1) (match_dup 2)))]
713 sub%.\\t%0, %1, #%n2"
714 [(set_attr "conds" "set")]
717 (define_insn "*compare_addsi2_op0"
718 [(set (reg:CC_C CC_REGNUM)
720 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
721 (match_operand:SI 1 "arm_add_operand" "rI,L"))
727 [(set_attr "conds" "set")]
730 (define_insn "*compare_addsi2_op1"
731 [(set (reg:CC_C CC_REGNUM)
733 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
734 (match_operand:SI 1 "arm_add_operand" "rI,L"))
740 [(set_attr "conds" "set")]
743 (define_insn "*addsi3_carryin"
744 [(set (match_operand:SI 0 "s_register_operand" "=r")
745 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
746 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
747 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
750 [(set_attr "conds" "use")]
753 (define_insn "*addsi3_carryin_shift"
754 [(set (match_operand:SI 0 "s_register_operand" "=r")
755 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
757 (match_operator:SI 2 "shift_operator"
758 [(match_operand:SI 3 "s_register_operand" "r")
759 (match_operand:SI 4 "reg_or_int_operand" "rM")])
760 (match_operand:SI 1 "s_register_operand" "r"))))]
762 "adc%?\\t%0, %1, %3%S2"
763 [(set_attr "conds" "use")
764 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
765 (const_string "alu_shift")
766 (const_string "alu_shift_reg")))]
769 (define_insn "*addsi3_carryin_alt1"
770 [(set (match_operand:SI 0 "s_register_operand" "=r")
771 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
772 (match_operand:SI 2 "arm_rhs_operand" "rI"))
773 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
776 [(set_attr "conds" "use")]
779 (define_insn "*addsi3_carryin_alt2"
780 [(set (match_operand:SI 0 "s_register_operand" "=r")
781 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
782 (match_operand:SI 1 "s_register_operand" "r"))
783 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
786 [(set_attr "conds" "use")]
789 (define_insn "*addsi3_carryin_alt3"
790 [(set (match_operand:SI 0 "s_register_operand" "=r")
791 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
792 (match_operand:SI 2 "arm_rhs_operand" "rI"))
793 (match_operand:SI 1 "s_register_operand" "r")))]
796 [(set_attr "conds" "use")]
799 (define_expand "incscc"
800 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
801 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
802 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
803 (match_operand:SI 1 "s_register_operand" "0,?r")))]
808 (define_insn "*arm_incscc"
809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
810 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
811 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
812 (match_operand:SI 1 "s_register_operand" "0,?r")))]
816 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
817 [(set_attr "conds" "use")
818 (set_attr "length" "4,8")]
821 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
823 [(set (match_operand:SI 0 "s_register_operand" "")
824 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
825 (match_operand:SI 2 "s_register_operand" ""))
827 (clobber (match_operand:SI 3 "s_register_operand" ""))]
829 [(set (match_dup 3) (match_dup 1))
830 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
832 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
835 (define_expand "addsf3"
836 [(set (match_operand:SF 0 "s_register_operand" "")
837 (plus:SF (match_operand:SF 1 "s_register_operand" "")
838 (match_operand:SF 2 "arm_float_add_operand" "")))]
839 "TARGET_32BIT && TARGET_HARD_FLOAT"
842 && !cirrus_fp_register (operands[2], SFmode))
843 operands[2] = force_reg (SFmode, operands[2]);
846 (define_expand "adddf3"
847 [(set (match_operand:DF 0 "s_register_operand" "")
848 (plus:DF (match_operand:DF 1 "s_register_operand" "")
849 (match_operand:DF 2 "arm_float_add_operand" "")))]
850 "TARGET_32BIT && TARGET_HARD_FLOAT"
853 && !cirrus_fp_register (operands[2], DFmode))
854 operands[2] = force_reg (DFmode, operands[2]);
857 (define_expand "subdi3"
859 [(set (match_operand:DI 0 "s_register_operand" "")
860 (minus:DI (match_operand:DI 1 "s_register_operand" "")
861 (match_operand:DI 2 "s_register_operand" "")))
862 (clobber (reg:CC CC_REGNUM))])]
865 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
867 && cirrus_fp_register (operands[0], DImode)
868 && cirrus_fp_register (operands[1], DImode))
870 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
876 if (GET_CODE (operands[1]) != REG)
877 operands[1] = force_reg (SImode, operands[1]);
878 if (GET_CODE (operands[2]) != REG)
879 operands[2] = force_reg (SImode, operands[2]);
884 (define_insn "*arm_subdi3"
885 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
886 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
887 (match_operand:DI 2 "s_register_operand" "r,0,0")))
888 (clobber (reg:CC CC_REGNUM))]
890 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
891 [(set_attr "conds" "clob")
892 (set_attr "length" "8")]
895 (define_insn "*thumb_subdi3"
896 [(set (match_operand:DI 0 "register_operand" "=l")
897 (minus:DI (match_operand:DI 1 "register_operand" "0")
898 (match_operand:DI 2 "register_operand" "l")))
899 (clobber (reg:CC CC_REGNUM))]
901 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
902 [(set_attr "length" "4")]
905 (define_insn "*subdi_di_zesidi"
906 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
907 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
909 (match_operand:SI 2 "s_register_operand" "r,r"))))
910 (clobber (reg:CC CC_REGNUM))]
912 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
913 [(set_attr "conds" "clob")
914 (set_attr "length" "8")]
917 (define_insn "*subdi_di_sesidi"
918 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
919 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
921 (match_operand:SI 2 "s_register_operand" "r,r"))))
922 (clobber (reg:CC CC_REGNUM))]
924 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
925 [(set_attr "conds" "clob")
926 (set_attr "length" "8")]
929 (define_insn "*subdi_zesidi_di"
930 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
931 (minus:DI (zero_extend:DI
932 (match_operand:SI 2 "s_register_operand" "r,r"))
933 (match_operand:DI 1 "s_register_operand" "?r,0")))
934 (clobber (reg:CC CC_REGNUM))]
936 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
937 [(set_attr "conds" "clob")
938 (set_attr "length" "8")]
941 (define_insn "*subdi_sesidi_di"
942 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
943 (minus:DI (sign_extend:DI
944 (match_operand:SI 2 "s_register_operand" "r,r"))
945 (match_operand:DI 1 "s_register_operand" "?r,0")))
946 (clobber (reg:CC CC_REGNUM))]
948 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
949 [(set_attr "conds" "clob")
950 (set_attr "length" "8")]
953 (define_insn "*subdi_zesidi_zesidi"
954 [(set (match_operand:DI 0 "s_register_operand" "=r")
955 (minus:DI (zero_extend:DI
956 (match_operand:SI 1 "s_register_operand" "r"))
958 (match_operand:SI 2 "s_register_operand" "r"))))
959 (clobber (reg:CC CC_REGNUM))]
961 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
962 [(set_attr "conds" "clob")
963 (set_attr "length" "8")]
966 (define_expand "subsi3"
967 [(set (match_operand:SI 0 "s_register_operand" "")
968 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
969 (match_operand:SI 2 "s_register_operand" "")))]
972 if (GET_CODE (operands[1]) == CONST_INT)
976 arm_split_constant (MINUS, SImode, NULL_RTX,
977 INTVAL (operands[1]), operands[0],
978 operands[2], optimize && can_create_pseudo_p ());
981 else /* TARGET_THUMB1 */
982 operands[1] = force_reg (SImode, operands[1]);
987 (define_insn "*thumb1_subsi3_insn"
988 [(set (match_operand:SI 0 "register_operand" "=l")
989 (minus:SI (match_operand:SI 1 "register_operand" "l")
990 (match_operand:SI 2 "register_operand" "l")))]
993 [(set_attr "length" "2")]
996 ; ??? Check Thumb-2 split length
997 (define_insn_and_split "*arm_subsi3_insn"
998 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
999 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1000 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1007 && GET_CODE (operands[1]) == CONST_INT
1008 && !const_ok_for_arm (INTVAL (operands[1]))"
1009 [(clobber (const_int 0))]
1011 arm_split_constant (MINUS, SImode, curr_insn,
1012 INTVAL (operands[1]), operands[0], operands[2], 0);
1015 [(set_attr "length" "4,4,16")
1016 (set_attr "predicable" "yes")]
1020 [(match_scratch:SI 3 "r")
1021 (set (match_operand:SI 0 "arm_general_register_operand" "")
1022 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1023 (match_operand:SI 2 "arm_general_register_operand" "")))]
1025 && !const_ok_for_arm (INTVAL (operands[1]))
1026 && const_ok_for_arm (~INTVAL (operands[1]))"
1027 [(set (match_dup 3) (match_dup 1))
1028 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1032 (define_insn "*subsi3_compare0"
1033 [(set (reg:CC_NOOV CC_REGNUM)
1035 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1036 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1038 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1039 (minus:SI (match_dup 1) (match_dup 2)))]
1044 [(set_attr "conds" "set")]
1047 (define_expand "decscc"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1049 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1050 (match_operator:SI 2 "arm_comparison_operator"
1051 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1056 (define_insn "*arm_decscc"
1057 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1058 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1059 (match_operator:SI 2 "arm_comparison_operator"
1060 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1064 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1065 [(set_attr "conds" "use")
1066 (set_attr "length" "*,8")]
1069 (define_expand "subsf3"
1070 [(set (match_operand:SF 0 "s_register_operand" "")
1071 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1072 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1073 "TARGET_32BIT && TARGET_HARD_FLOAT"
1075 if (TARGET_MAVERICK)
1077 if (!cirrus_fp_register (operands[1], SFmode))
1078 operands[1] = force_reg (SFmode, operands[1]);
1079 if (!cirrus_fp_register (operands[2], SFmode))
1080 operands[2] = force_reg (SFmode, operands[2]);
1084 (define_expand "subdf3"
1085 [(set (match_operand:DF 0 "s_register_operand" "")
1086 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1087 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1088 "TARGET_32BIT && TARGET_HARD_FLOAT"
1090 if (TARGET_MAVERICK)
1092 if (!cirrus_fp_register (operands[1], DFmode))
1093 operands[1] = force_reg (DFmode, operands[1]);
1094 if (!cirrus_fp_register (operands[2], DFmode))
1095 operands[2] = force_reg (DFmode, operands[2]);
1100 ;; Multiplication insns
1102 (define_expand "mulsi3"
1103 [(set (match_operand:SI 0 "s_register_operand" "")
1104 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1105 (match_operand:SI 1 "s_register_operand" "")))]
1110 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1111 (define_insn "*arm_mulsi3"
1112 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1113 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1114 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1115 "TARGET_32BIT && !arm_arch6"
1116 "mul%?\\t%0, %2, %1"
1117 [(set_attr "insn" "mul")
1118 (set_attr "predicable" "yes")]
1121 (define_insn "*arm_mulsi3_v6"
1122 [(set (match_operand:SI 0 "s_register_operand" "=r")
1123 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1124 (match_operand:SI 2 "s_register_operand" "r")))]
1125 "TARGET_32BIT && arm_arch6"
1126 "mul%?\\t%0, %1, %2"
1127 [(set_attr "insn" "mul")
1128 (set_attr "predicable" "yes")]
1131 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1132 ; 1 and 2; are the same, because reload will make operand 0 match
1133 ; operand 1 without realizing that this conflicts with operand 2. We fix
1134 ; this by adding another alternative to match this case, and then `reload'
1135 ; it ourselves. This alternative must come first.
1136 (define_insn "*thumb_mulsi3"
1137 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1138 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1139 (match_operand:SI 2 "register_operand" "l,l,l")))]
1140 "TARGET_THUMB1 && !arm_arch6"
1142 if (which_alternative < 2)
1143 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1145 return \"mul\\t%0, %2\";
1147 [(set_attr "length" "4,4,2")
1148 (set_attr "insn" "mul")]
1151 (define_insn "*thumb_mulsi3_v6"
1152 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1153 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1154 (match_operand:SI 2 "register_operand" "l,0,0")))]
1155 "TARGET_THUMB1 && arm_arch6"
1160 [(set_attr "length" "2")
1161 (set_attr "insn" "mul")]
1164 (define_insn "*mulsi3_compare0"
1165 [(set (reg:CC_NOOV CC_REGNUM)
1166 (compare:CC_NOOV (mult:SI
1167 (match_operand:SI 2 "s_register_operand" "r,r")
1168 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1170 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1171 (mult:SI (match_dup 2) (match_dup 1)))]
1172 "TARGET_ARM && !arm_arch6"
1173 "mul%.\\t%0, %2, %1"
1174 [(set_attr "conds" "set")
1175 (set_attr "insn" "muls")]
1178 (define_insn "*mulsi3_compare0_v6"
1179 [(set (reg:CC_NOOV CC_REGNUM)
1180 (compare:CC_NOOV (mult:SI
1181 (match_operand:SI 2 "s_register_operand" "r")
1182 (match_operand:SI 1 "s_register_operand" "r"))
1184 (set (match_operand:SI 0 "s_register_operand" "=r")
1185 (mult:SI (match_dup 2) (match_dup 1)))]
1186 "TARGET_ARM && arm_arch6 && optimize_size"
1187 "mul%.\\t%0, %2, %1"
1188 [(set_attr "conds" "set")
1189 (set_attr "insn" "muls")]
1192 (define_insn "*mulsi_compare0_scratch"
1193 [(set (reg:CC_NOOV CC_REGNUM)
1194 (compare:CC_NOOV (mult:SI
1195 (match_operand:SI 2 "s_register_operand" "r,r")
1196 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1198 (clobber (match_scratch:SI 0 "=&r,&r"))]
1199 "TARGET_ARM && !arm_arch6"
1200 "mul%.\\t%0, %2, %1"
1201 [(set_attr "conds" "set")
1202 (set_attr "insn" "muls")]
1205 (define_insn "*mulsi_compare0_scratch_v6"
1206 [(set (reg:CC_NOOV CC_REGNUM)
1207 (compare:CC_NOOV (mult:SI
1208 (match_operand:SI 2 "s_register_operand" "r")
1209 (match_operand:SI 1 "s_register_operand" "r"))
1211 (clobber (match_scratch:SI 0 "=r"))]
1212 "TARGET_ARM && arm_arch6 && optimize_size"
1213 "mul%.\\t%0, %2, %1"
1214 [(set_attr "conds" "set")
1215 (set_attr "insn" "muls")]
1218 ;; Unnamed templates to match MLA instruction.
1220 (define_insn "*mulsi3addsi"
1221 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1223 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1224 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1225 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1226 "TARGET_32BIT && !arm_arch6"
1227 "mla%?\\t%0, %2, %1, %3"
1228 [(set_attr "insn" "mla")
1229 (set_attr "predicable" "yes")]
1232 (define_insn "*mulsi3addsi_v6"
1233 [(set (match_operand:SI 0 "s_register_operand" "=r")
1235 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1236 (match_operand:SI 1 "s_register_operand" "r"))
1237 (match_operand:SI 3 "s_register_operand" "r")))]
1238 "TARGET_32BIT && arm_arch6"
1239 "mla%?\\t%0, %2, %1, %3"
1240 [(set_attr "insn" "mla")
1241 (set_attr "predicable" "yes")]
1244 (define_insn "*mulsi3addsi_compare0"
1245 [(set (reg:CC_NOOV CC_REGNUM)
1248 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1249 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1250 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1252 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1253 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1255 "TARGET_ARM && arm_arch6"
1256 "mla%.\\t%0, %2, %1, %3"
1257 [(set_attr "conds" "set")
1258 (set_attr "insn" "mlas")]
1261 (define_insn "*mulsi3addsi_compare0_v6"
1262 [(set (reg:CC_NOOV CC_REGNUM)
1265 (match_operand:SI 2 "s_register_operand" "r")
1266 (match_operand:SI 1 "s_register_operand" "r"))
1267 (match_operand:SI 3 "s_register_operand" "r"))
1269 (set (match_operand:SI 0 "s_register_operand" "=r")
1270 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1272 "TARGET_ARM && arm_arch6 && optimize_size"
1273 "mla%.\\t%0, %2, %1, %3"
1274 [(set_attr "conds" "set")
1275 (set_attr "insn" "mlas")]
1278 (define_insn "*mulsi3addsi_compare0_scratch"
1279 [(set (reg:CC_NOOV CC_REGNUM)
1282 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1283 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1284 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1286 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1287 "TARGET_ARM && !arm_arch6"
1288 "mla%.\\t%0, %2, %1, %3"
1289 [(set_attr "conds" "set")
1290 (set_attr "insn" "mlas")]
1293 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1294 [(set (reg:CC_NOOV CC_REGNUM)
1297 (match_operand:SI 2 "s_register_operand" "r")
1298 (match_operand:SI 1 "s_register_operand" "r"))
1299 (match_operand:SI 3 "s_register_operand" "r"))
1301 (clobber (match_scratch:SI 0 "=r"))]
1302 "TARGET_ARM && arm_arch6 && optimize_size"
1303 "mla%.\\t%0, %2, %1, %3"
1304 [(set_attr "conds" "set")
1305 (set_attr "insn" "mlas")]
1308 (define_insn "*mulsi3subsi"
1309 [(set (match_operand:SI 0 "s_register_operand" "=r")
1311 (match_operand:SI 3 "s_register_operand" "r")
1312 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1313 (match_operand:SI 1 "s_register_operand" "r"))))]
1314 "TARGET_32BIT && arm_arch_thumb2"
1315 "mls%?\\t%0, %2, %1, %3"
1316 [(set_attr "insn" "mla")
1317 (set_attr "predicable" "yes")]
1320 ;; Unnamed template to match long long multiply-accumulate (smlal)
1322 (define_insn "*mulsidi3adddi"
1323 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1326 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1327 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1328 (match_operand:DI 1 "s_register_operand" "0")))]
1329 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1330 "smlal%?\\t%Q0, %R0, %3, %2"
1331 [(set_attr "insn" "smlal")
1332 (set_attr "predicable" "yes")]
1335 (define_insn "*mulsidi3adddi_v6"
1336 [(set (match_operand:DI 0 "s_register_operand" "=r")
1339 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1340 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1341 (match_operand:DI 1 "s_register_operand" "0")))]
1342 "TARGET_32BIT && arm_arch6"
1343 "smlal%?\\t%Q0, %R0, %3, %2"
1344 [(set_attr "insn" "smlal")
1345 (set_attr "predicable" "yes")]
1348 ;; 32x32->64 widening multiply.
1349 ;; As with mulsi3, the only difference between the v3-5 and v6+
1350 ;; versions of these patterns is the requirement that the output not
1351 ;; overlap the inputs, but that still means we have to have a named
1352 ;; expander and two different starred insns.
1354 (define_expand "mulsidi3"
1355 [(set (match_operand:DI 0 "s_register_operand" "")
1357 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1358 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1359 "TARGET_32BIT && arm_arch3m"
1363 (define_insn "*mulsidi3_nov6"
1364 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1366 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1367 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1368 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1369 "smull%?\\t%Q0, %R0, %1, %2"
1370 [(set_attr "insn" "smull")
1371 (set_attr "predicable" "yes")]
1374 (define_insn "*mulsidi3_v6"
1375 [(set (match_operand:DI 0 "s_register_operand" "=r")
1377 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1378 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1379 "TARGET_32BIT && arm_arch6"
1380 "smull%?\\t%Q0, %R0, %1, %2"
1381 [(set_attr "insn" "smull")
1382 (set_attr "predicable" "yes")]
1385 (define_expand "umulsidi3"
1386 [(set (match_operand:DI 0 "s_register_operand" "")
1388 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1389 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1390 "TARGET_32BIT && arm_arch3m"
1394 (define_insn "*umulsidi3_nov6"
1395 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1397 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1398 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1399 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1400 "umull%?\\t%Q0, %R0, %1, %2"
1401 [(set_attr "insn" "umull")
1402 (set_attr "predicable" "yes")]
1405 (define_insn "*umulsidi3_v6"
1406 [(set (match_operand:DI 0 "s_register_operand" "=r")
1408 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1409 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1410 "TARGET_32BIT && arm_arch6"
1411 "umull%?\\t%Q0, %R0, %1, %2"
1412 [(set_attr "insn" "umull")
1413 (set_attr "predicable" "yes")]
1416 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1418 (define_insn "*umulsidi3adddi"
1419 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1422 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1423 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1424 (match_operand:DI 1 "s_register_operand" "0")))]
1425 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1426 "umlal%?\\t%Q0, %R0, %3, %2"
1427 [(set_attr "insn" "umlal")
1428 (set_attr "predicable" "yes")]
1431 (define_insn "*umulsidi3adddi_v6"
1432 [(set (match_operand:DI 0 "s_register_operand" "=r")
1435 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1436 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1437 (match_operand:DI 1 "s_register_operand" "0")))]
1438 "TARGET_32BIT && arm_arch6"
1439 "umlal%?\\t%Q0, %R0, %3, %2"
1440 [(set_attr "insn" "umlal")
1441 (set_attr "predicable" "yes")]
1444 (define_expand "smulsi3_highpart"
1446 [(set (match_operand:SI 0 "s_register_operand" "")
1450 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1451 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1453 (clobber (match_scratch:SI 3 ""))])]
1454 "TARGET_32BIT && arm_arch3m"
1458 (define_insn "*smulsi3_highpart_nov6"
1459 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1463 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1464 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1466 (clobber (match_scratch:SI 3 "=&r,&r"))]
1467 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1468 "smull%?\\t%3, %0, %2, %1"
1469 [(set_attr "insn" "smull")
1470 (set_attr "predicable" "yes")]
1473 (define_insn "*smulsi3_highpart_v6"
1474 [(set (match_operand:SI 0 "s_register_operand" "=r")
1478 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1479 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1481 (clobber (match_scratch:SI 3 "=r"))]
1482 "TARGET_32BIT && arm_arch6"
1483 "smull%?\\t%3, %0, %2, %1"
1484 [(set_attr "insn" "smull")
1485 (set_attr "predicable" "yes")]
1488 (define_expand "umulsi3_highpart"
1490 [(set (match_operand:SI 0 "s_register_operand" "")
1494 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1495 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1497 (clobber (match_scratch:SI 3 ""))])]
1498 "TARGET_32BIT && arm_arch3m"
1502 (define_insn "*umulsi3_highpart_nov6"
1503 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1507 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1508 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1510 (clobber (match_scratch:SI 3 "=&r,&r"))]
1511 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1512 "umull%?\\t%3, %0, %2, %1"
1513 [(set_attr "insn" "umull")
1514 (set_attr "predicable" "yes")]
1517 (define_insn "*umulsi3_highpart_v6"
1518 [(set (match_operand:SI 0 "s_register_operand" "=r")
1522 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1523 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1525 (clobber (match_scratch:SI 3 "=r"))]
1526 "TARGET_32BIT && arm_arch6"
1527 "umull%?\\t%3, %0, %2, %1"
1528 [(set_attr "insn" "umull")
1529 (set_attr "predicable" "yes")]
1532 (define_insn "mulhisi3"
1533 [(set (match_operand:SI 0 "s_register_operand" "=r")
1534 (mult:SI (sign_extend:SI
1535 (match_operand:HI 1 "s_register_operand" "%r"))
1537 (match_operand:HI 2 "s_register_operand" "r"))))]
1538 "TARGET_DSP_MULTIPLY"
1539 "smulbb%?\\t%0, %1, %2"
1540 [(set_attr "insn" "smulxy")
1541 (set_attr "predicable" "yes")]
1544 (define_insn "*mulhisi3tb"
1545 [(set (match_operand:SI 0 "s_register_operand" "=r")
1546 (mult:SI (ashiftrt:SI
1547 (match_operand:SI 1 "s_register_operand" "r")
1550 (match_operand:HI 2 "s_register_operand" "r"))))]
1551 "TARGET_DSP_MULTIPLY"
1552 "smultb%?\\t%0, %1, %2"
1553 [(set_attr "insn" "smulxy")
1554 (set_attr "predicable" "yes")]
1557 (define_insn "*mulhisi3bt"
1558 [(set (match_operand:SI 0 "s_register_operand" "=r")
1559 (mult:SI (sign_extend:SI
1560 (match_operand:HI 1 "s_register_operand" "r"))
1562 (match_operand:SI 2 "s_register_operand" "r")
1564 "TARGET_DSP_MULTIPLY"
1565 "smulbt%?\\t%0, %1, %2"
1566 [(set_attr "insn" "smulxy")
1567 (set_attr "predicable" "yes")]
1570 (define_insn "*mulhisi3tt"
1571 [(set (match_operand:SI 0 "s_register_operand" "=r")
1572 (mult:SI (ashiftrt:SI
1573 (match_operand:SI 1 "s_register_operand" "r")
1576 (match_operand:SI 2 "s_register_operand" "r")
1578 "TARGET_DSP_MULTIPLY"
1579 "smultt%?\\t%0, %1, %2"
1580 [(set_attr "insn" "smulxy")
1581 (set_attr "predicable" "yes")]
1584 (define_insn "*mulhisi3addsi"
1585 [(set (match_operand:SI 0 "s_register_operand" "=r")
1586 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1587 (mult:SI (sign_extend:SI
1588 (match_operand:HI 2 "s_register_operand" "%r"))
1590 (match_operand:HI 3 "s_register_operand" "r")))))]
1591 "TARGET_DSP_MULTIPLY"
1592 "smlabb%?\\t%0, %2, %3, %1"
1593 [(set_attr "insn" "smlaxy")
1594 (set_attr "predicable" "yes")]
1597 (define_insn "*mulhidi3adddi"
1598 [(set (match_operand:DI 0 "s_register_operand" "=r")
1600 (match_operand:DI 1 "s_register_operand" "0")
1601 (mult:DI (sign_extend:DI
1602 (match_operand:HI 2 "s_register_operand" "%r"))
1604 (match_operand:HI 3 "s_register_operand" "r")))))]
1605 "TARGET_DSP_MULTIPLY"
1606 "smlalbb%?\\t%Q0, %R0, %2, %3"
1607 [(set_attr "insn" "smlalxy")
1608 (set_attr "predicable" "yes")])
1610 (define_expand "mulsf3"
1611 [(set (match_operand:SF 0 "s_register_operand" "")
1612 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1613 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1614 "TARGET_32BIT && TARGET_HARD_FLOAT"
1617 && !cirrus_fp_register (operands[2], SFmode))
1618 operands[2] = force_reg (SFmode, operands[2]);
1621 (define_expand "muldf3"
1622 [(set (match_operand:DF 0 "s_register_operand" "")
1623 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1624 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1625 "TARGET_32BIT && TARGET_HARD_FLOAT"
1628 && !cirrus_fp_register (operands[2], DFmode))
1629 operands[2] = force_reg (DFmode, operands[2]);
1634 (define_expand "divsf3"
1635 [(set (match_operand:SF 0 "s_register_operand" "")
1636 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1637 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1638 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1641 (define_expand "divdf3"
1642 [(set (match_operand:DF 0 "s_register_operand" "")
1643 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1644 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1645 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1650 (define_expand "modsf3"
1651 [(set (match_operand:SF 0 "s_register_operand" "")
1652 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1653 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1654 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1657 (define_expand "moddf3"
1658 [(set (match_operand:DF 0 "s_register_operand" "")
1659 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1660 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1661 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1664 ;; Boolean and,ior,xor insns
1666 ;; Split up double word logical operations
1668 ;; Split up simple DImode logical operations. Simply perform the logical
1669 ;; operation on the upper and lower halves of the registers.
1671 [(set (match_operand:DI 0 "s_register_operand" "")
1672 (match_operator:DI 6 "logical_binary_operator"
1673 [(match_operand:DI 1 "s_register_operand" "")
1674 (match_operand:DI 2 "s_register_operand" "")]))]
1675 "TARGET_32BIT && reload_completed
1676 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1677 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1678 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1681 operands[3] = gen_highpart (SImode, operands[0]);
1682 operands[0] = gen_lowpart (SImode, operands[0]);
1683 operands[4] = gen_highpart (SImode, operands[1]);
1684 operands[1] = gen_lowpart (SImode, operands[1]);
1685 operands[5] = gen_highpart (SImode, operands[2]);
1686 operands[2] = gen_lowpart (SImode, operands[2]);
1691 [(set (match_operand:DI 0 "s_register_operand" "")
1692 (match_operator:DI 6 "logical_binary_operator"
1693 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1694 (match_operand:DI 1 "s_register_operand" "")]))]
1695 "TARGET_32BIT && reload_completed"
1696 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1697 (set (match_dup 3) (match_op_dup:SI 6
1698 [(ashiftrt:SI (match_dup 2) (const_int 31))
1702 operands[3] = gen_highpart (SImode, operands[0]);
1703 operands[0] = gen_lowpart (SImode, operands[0]);
1704 operands[4] = gen_highpart (SImode, operands[1]);
1705 operands[1] = gen_lowpart (SImode, operands[1]);
1706 operands[5] = gen_highpart (SImode, operands[2]);
1707 operands[2] = gen_lowpart (SImode, operands[2]);
1711 ;; The zero extend of operand 2 means we can just copy the high part of
1712 ;; operand1 into operand0.
1714 [(set (match_operand:DI 0 "s_register_operand" "")
1716 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1717 (match_operand:DI 1 "s_register_operand" "")))]
1718 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1719 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1720 (set (match_dup 3) (match_dup 4))]
1723 operands[4] = gen_highpart (SImode, operands[1]);
1724 operands[3] = gen_highpart (SImode, operands[0]);
1725 operands[0] = gen_lowpart (SImode, operands[0]);
1726 operands[1] = gen_lowpart (SImode, operands[1]);
1730 ;; The zero extend of operand 2 means we can just copy the high part of
1731 ;; operand1 into operand0.
1733 [(set (match_operand:DI 0 "s_register_operand" "")
1735 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1736 (match_operand:DI 1 "s_register_operand" "")))]
1737 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1738 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1739 (set (match_dup 3) (match_dup 4))]
1742 operands[4] = gen_highpart (SImode, operands[1]);
1743 operands[3] = gen_highpart (SImode, operands[0]);
1744 operands[0] = gen_lowpart (SImode, operands[0]);
1745 operands[1] = gen_lowpart (SImode, operands[1]);
1749 (define_insn "anddi3"
1750 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1751 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1752 (match_operand:DI 2 "s_register_operand" "r,r")))]
1753 "TARGET_32BIT && ! TARGET_IWMMXT"
1755 [(set_attr "length" "8")]
1758 (define_insn_and_split "*anddi_zesidi_di"
1759 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1760 (and:DI (zero_extend:DI
1761 (match_operand:SI 2 "s_register_operand" "r,r"))
1762 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1765 "TARGET_32BIT && reload_completed"
1766 ; The zero extend of operand 2 clears the high word of the output
1768 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1769 (set (match_dup 3) (const_int 0))]
1772 operands[3] = gen_highpart (SImode, operands[0]);
1773 operands[0] = gen_lowpart (SImode, operands[0]);
1774 operands[1] = gen_lowpart (SImode, operands[1]);
1776 [(set_attr "length" "8")]
1779 (define_insn "*anddi_sesdi_di"
1780 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1781 (and:DI (sign_extend:DI
1782 (match_operand:SI 2 "s_register_operand" "r,r"))
1783 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1786 [(set_attr "length" "8")]
1789 (define_expand "andsi3"
1790 [(set (match_operand:SI 0 "s_register_operand" "")
1791 (and:SI (match_operand:SI 1 "s_register_operand" "")
1792 (match_operand:SI 2 "reg_or_int_operand" "")))]
1797 if (GET_CODE (operands[2]) == CONST_INT)
1799 arm_split_constant (AND, SImode, NULL_RTX,
1800 INTVAL (operands[2]), operands[0],
1801 operands[1], optimize && can_create_pseudo_p ());
1806 else /* TARGET_THUMB1 */
1808 if (GET_CODE (operands[2]) != CONST_INT)
1809 operands[2] = force_reg (SImode, operands[2]);
1814 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1816 operands[2] = force_reg (SImode,
1817 GEN_INT (~INTVAL (operands[2])));
1819 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1824 for (i = 9; i <= 31; i++)
1826 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1828 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1832 else if ((((HOST_WIDE_INT) 1) << i) - 1
1833 == ~INTVAL (operands[2]))
1835 rtx shift = GEN_INT (i);
1836 rtx reg = gen_reg_rtx (SImode);
1838 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1839 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1845 operands[2] = force_reg (SImode, operands[2]);
1851 ; ??? Check split length for Thumb-2
1852 (define_insn_and_split "*arm_andsi3_insn"
1853 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1854 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1855 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1859 bic%?\\t%0, %1, #%B2
1862 && GET_CODE (operands[2]) == CONST_INT
1863 && !(const_ok_for_arm (INTVAL (operands[2]))
1864 || const_ok_for_arm (~INTVAL (operands[2])))"
1865 [(clobber (const_int 0))]
1867 arm_split_constant (AND, SImode, curr_insn,
1868 INTVAL (operands[2]), operands[0], operands[1], 0);
1871 [(set_attr "length" "4,4,16")
1872 (set_attr "predicable" "yes")]
1875 (define_insn "*thumb1_andsi3_insn"
1876 [(set (match_operand:SI 0 "register_operand" "=l")
1877 (and:SI (match_operand:SI 1 "register_operand" "%0")
1878 (match_operand:SI 2 "register_operand" "l")))]
1881 [(set_attr "length" "2")]
1884 (define_insn "*andsi3_compare0"
1885 [(set (reg:CC_NOOV CC_REGNUM)
1887 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1888 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1890 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1891 (and:SI (match_dup 1) (match_dup 2)))]
1895 bic%.\\t%0, %1, #%B2"
1896 [(set_attr "conds" "set")]
1899 (define_insn "*andsi3_compare0_scratch"
1900 [(set (reg:CC_NOOV CC_REGNUM)
1902 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1903 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1905 (clobber (match_scratch:SI 2 "=X,r"))]
1909 bic%.\\t%2, %0, #%B1"
1910 [(set_attr "conds" "set")]
1913 (define_insn "*zeroextractsi_compare0_scratch"
1914 [(set (reg:CC_NOOV CC_REGNUM)
1915 (compare:CC_NOOV (zero_extract:SI
1916 (match_operand:SI 0 "s_register_operand" "r")
1917 (match_operand 1 "const_int_operand" "n")
1918 (match_operand 2 "const_int_operand" "n"))
1921 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1922 && INTVAL (operands[1]) > 0
1923 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1924 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1926 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1927 << INTVAL (operands[2]));
1928 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1931 [(set_attr "conds" "set")]
1934 (define_insn_and_split "*ne_zeroextractsi"
1935 [(set (match_operand:SI 0 "s_register_operand" "=r")
1936 (ne:SI (zero_extract:SI
1937 (match_operand:SI 1 "s_register_operand" "r")
1938 (match_operand:SI 2 "const_int_operand" "n")
1939 (match_operand:SI 3 "const_int_operand" "n"))
1941 (clobber (reg:CC CC_REGNUM))]
1943 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1944 && INTVAL (operands[2]) > 0
1945 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1946 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1949 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1950 && INTVAL (operands[2]) > 0
1951 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1952 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1953 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1954 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1956 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1958 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1959 (match_dup 0) (const_int 1)))]
1961 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1962 << INTVAL (operands[3]));
1964 [(set_attr "conds" "clob")
1965 (set (attr "length")
1966 (if_then_else (eq_attr "is_thumb" "yes")
1971 (define_insn_and_split "*ne_zeroextractsi_shifted"
1972 [(set (match_operand:SI 0 "s_register_operand" "=r")
1973 (ne:SI (zero_extract:SI
1974 (match_operand:SI 1 "s_register_operand" "r")
1975 (match_operand:SI 2 "const_int_operand" "n")
1978 (clobber (reg:CC CC_REGNUM))]
1982 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1983 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1985 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1987 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1988 (match_dup 0) (const_int 1)))]
1990 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1992 [(set_attr "conds" "clob")
1993 (set_attr "length" "8")]
1996 (define_insn_and_split "*ite_ne_zeroextractsi"
1997 [(set (match_operand:SI 0 "s_register_operand" "=r")
1998 (if_then_else:SI (ne (zero_extract:SI
1999 (match_operand:SI 1 "s_register_operand" "r")
2000 (match_operand:SI 2 "const_int_operand" "n")
2001 (match_operand:SI 3 "const_int_operand" "n"))
2003 (match_operand:SI 4 "arm_not_operand" "rIK")
2005 (clobber (reg:CC CC_REGNUM))]
2007 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2008 && INTVAL (operands[2]) > 0
2009 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2010 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2011 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2014 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2015 && INTVAL (operands[2]) > 0
2016 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2017 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2018 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2019 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2020 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2022 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2024 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2025 (match_dup 0) (match_dup 4)))]
2027 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2028 << INTVAL (operands[3]));
2030 [(set_attr "conds" "clob")
2031 (set_attr "length" "8")]
2034 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2035 [(set (match_operand:SI 0 "s_register_operand" "=r")
2036 (if_then_else:SI (ne (zero_extract:SI
2037 (match_operand:SI 1 "s_register_operand" "r")
2038 (match_operand:SI 2 "const_int_operand" "n")
2041 (match_operand:SI 3 "arm_not_operand" "rIK")
2043 (clobber (reg:CC CC_REGNUM))]
2044 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2046 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2047 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2048 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2050 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2052 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2053 (match_dup 0) (match_dup 3)))]
2055 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2057 [(set_attr "conds" "clob")
2058 (set_attr "length" "8")]
2062 [(set (match_operand:SI 0 "s_register_operand" "")
2063 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2064 (match_operand:SI 2 "const_int_operand" "")
2065 (match_operand:SI 3 "const_int_operand" "")))
2066 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2068 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2069 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2071 HOST_WIDE_INT temp = INTVAL (operands[2]);
2073 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2074 operands[3] = GEN_INT (32 - temp);
2078 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2080 [(set (match_operand:SI 0 "s_register_operand" "")
2081 (match_operator:SI 1 "shiftable_operator"
2082 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2083 (match_operand:SI 3 "const_int_operand" "")
2084 (match_operand:SI 4 "const_int_operand" ""))
2085 (match_operand:SI 5 "s_register_operand" "")]))
2086 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2088 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2091 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2094 HOST_WIDE_INT temp = INTVAL (operands[3]);
2096 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2097 operands[4] = GEN_INT (32 - temp);
2102 [(set (match_operand:SI 0 "s_register_operand" "")
2103 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2104 (match_operand:SI 2 "const_int_operand" "")
2105 (match_operand:SI 3 "const_int_operand" "")))]
2107 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2108 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2110 HOST_WIDE_INT temp = INTVAL (operands[2]);
2112 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2113 operands[3] = GEN_INT (32 - temp);
2118 [(set (match_operand:SI 0 "s_register_operand" "")
2119 (match_operator:SI 1 "shiftable_operator"
2120 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2121 (match_operand:SI 3 "const_int_operand" "")
2122 (match_operand:SI 4 "const_int_operand" ""))
2123 (match_operand:SI 5 "s_register_operand" "")]))
2124 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2126 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2129 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2132 HOST_WIDE_INT temp = INTVAL (operands[3]);
2134 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2135 operands[4] = GEN_INT (32 - temp);
2139 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2140 ;;; represented by the bitfield, then this will produce incorrect results.
2141 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2142 ;;; which have a real bit-field insert instruction, the truncation happens
2143 ;;; in the bit-field insert instruction itself. Since arm does not have a
2144 ;;; bit-field insert instruction, we would have to emit code here to truncate
2145 ;;; the value before we insert. This loses some of the advantage of having
2146 ;;; this insv pattern, so this pattern needs to be reevalutated.
2148 (define_expand "insv"
2149 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2150 (match_operand:SI 1 "general_operand" "")
2151 (match_operand:SI 2 "general_operand" ""))
2152 (match_operand:SI 3 "reg_or_int_operand" ""))]
2153 "TARGET_ARM || arm_arch_thumb2"
2156 int start_bit = INTVAL (operands[2]);
2157 int width = INTVAL (operands[1]);
2158 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2159 rtx target, subtarget;
2161 if (arm_arch_thumb2)
2163 bool use_bfi = TRUE;
2165 if (GET_CODE (operands[3]) == CONST_INT)
2167 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2171 emit_insn (gen_insv_zero (operands[0], operands[1],
2176 /* See if the set can be done with a single orr instruction. */
2177 if (val == mask && const_ok_for_arm (val << start_bit))
2183 if (GET_CODE (operands[3]) != REG)
2184 operands[3] = force_reg (SImode, operands[3]);
2186 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2192 target = operands[0];
2193 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2194 subreg as the final target. */
2195 if (GET_CODE (target) == SUBREG)
2197 subtarget = gen_reg_rtx (SImode);
2198 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2199 < GET_MODE_SIZE (SImode))
2200 target = SUBREG_REG (target);
2205 if (GET_CODE (operands[3]) == CONST_INT)
2207 /* Since we are inserting a known constant, we may be able to
2208 reduce the number of bits that we have to clear so that
2209 the mask becomes simple. */
2210 /* ??? This code does not check to see if the new mask is actually
2211 simpler. It may not be. */
2212 rtx op1 = gen_reg_rtx (SImode);
2213 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2214 start of this pattern. */
2215 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2216 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2218 emit_insn (gen_andsi3 (op1, operands[0],
2219 gen_int_mode (~mask2, SImode)));
2220 emit_insn (gen_iorsi3 (subtarget, op1,
2221 gen_int_mode (op3_value << start_bit, SImode)));
2223 else if (start_bit == 0
2224 && !(const_ok_for_arm (mask)
2225 || const_ok_for_arm (~mask)))
2227 /* A Trick, since we are setting the bottom bits in the word,
2228 we can shift operand[3] up, operand[0] down, OR them together
2229 and rotate the result back again. This takes 3 insns, and
2230 the third might be mergeable into another op. */
2231 /* The shift up copes with the possibility that operand[3] is
2232 wider than the bitfield. */
2233 rtx op0 = gen_reg_rtx (SImode);
2234 rtx op1 = gen_reg_rtx (SImode);
2236 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2237 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2238 emit_insn (gen_iorsi3 (op1, op1, op0));
2239 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2241 else if ((width + start_bit == 32)
2242 && !(const_ok_for_arm (mask)
2243 || const_ok_for_arm (~mask)))
2245 /* Similar trick, but slightly less efficient. */
2247 rtx op0 = gen_reg_rtx (SImode);
2248 rtx op1 = gen_reg_rtx (SImode);
2250 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2251 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2252 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2253 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2257 rtx op0 = gen_int_mode (mask, SImode);
2258 rtx op1 = gen_reg_rtx (SImode);
2259 rtx op2 = gen_reg_rtx (SImode);
2261 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2263 rtx tmp = gen_reg_rtx (SImode);
2265 emit_insn (gen_movsi (tmp, op0));
2269 /* Mask out any bits in operand[3] that are not needed. */
2270 emit_insn (gen_andsi3 (op1, operands[3], op0));
2272 if (GET_CODE (op0) == CONST_INT
2273 && (const_ok_for_arm (mask << start_bit)
2274 || const_ok_for_arm (~(mask << start_bit))))
2276 op0 = gen_int_mode (~(mask << start_bit), SImode);
2277 emit_insn (gen_andsi3 (op2, operands[0], op0));
2281 if (GET_CODE (op0) == CONST_INT)
2283 rtx tmp = gen_reg_rtx (SImode);
2285 emit_insn (gen_movsi (tmp, op0));
2290 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2292 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2296 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2298 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2301 if (subtarget != target)
2303 /* If TARGET is still a SUBREG, then it must be wider than a word,
2304 so we must be careful only to set the subword we were asked to. */
2305 if (GET_CODE (target) == SUBREG)
2306 emit_move_insn (target, subtarget);
2308 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2315 (define_insn "insv_zero"
2316 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2317 (match_operand:SI 1 "const_int_operand" "M")
2318 (match_operand:SI 2 "const_int_operand" "M"))
2322 [(set_attr "length" "4")
2323 (set_attr "predicable" "yes")]
2326 (define_insn "insv_t2"
2327 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2328 (match_operand:SI 1 "const_int_operand" "M")
2329 (match_operand:SI 2 "const_int_operand" "M"))
2330 (match_operand:SI 3 "s_register_operand" "r"))]
2332 "bfi%?\t%0, %3, %2, %1"
2333 [(set_attr "length" "4")
2334 (set_attr "predicable" "yes")]
2337 ; constants for op 2 will never be given to these patterns.
2338 (define_insn_and_split "*anddi_notdi_di"
2339 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2340 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2341 (match_operand:DI 2 "s_register_operand" "0,r")))]
2344 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2345 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2346 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2349 operands[3] = gen_highpart (SImode, operands[0]);
2350 operands[0] = gen_lowpart (SImode, operands[0]);
2351 operands[4] = gen_highpart (SImode, operands[1]);
2352 operands[1] = gen_lowpart (SImode, operands[1]);
2353 operands[5] = gen_highpart (SImode, operands[2]);
2354 operands[2] = gen_lowpart (SImode, operands[2]);
2356 [(set_attr "length" "8")
2357 (set_attr "predicable" "yes")]
2360 (define_insn_and_split "*anddi_notzesidi_di"
2361 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2362 (and:DI (not:DI (zero_extend:DI
2363 (match_operand:SI 2 "s_register_operand" "r,r")))
2364 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2367 bic%?\\t%Q0, %Q1, %2
2369 ; (not (zero_extend ...)) allows us to just copy the high word from
2370 ; operand1 to operand0.
2373 && operands[0] != operands[1]"
2374 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2375 (set (match_dup 3) (match_dup 4))]
2378 operands[3] = gen_highpart (SImode, operands[0]);
2379 operands[0] = gen_lowpart (SImode, operands[0]);
2380 operands[4] = gen_highpart (SImode, operands[1]);
2381 operands[1] = gen_lowpart (SImode, operands[1]);
2383 [(set_attr "length" "4,8")
2384 (set_attr "predicable" "yes")]
2387 (define_insn_and_split "*anddi_notsesidi_di"
2388 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2389 (and:DI (not:DI (sign_extend:DI
2390 (match_operand:SI 2 "s_register_operand" "r,r")))
2391 (match_operand:DI 1 "s_register_operand" "0,r")))]
2394 "TARGET_32BIT && reload_completed"
2395 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2396 (set (match_dup 3) (and:SI (not:SI
2397 (ashiftrt:SI (match_dup 2) (const_int 31)))
2401 operands[3] = gen_highpart (SImode, operands[0]);
2402 operands[0] = gen_lowpart (SImode, operands[0]);
2403 operands[4] = gen_highpart (SImode, operands[1]);
2404 operands[1] = gen_lowpart (SImode, operands[1]);
2406 [(set_attr "length" "8")
2407 (set_attr "predicable" "yes")]
2410 (define_insn "andsi_notsi_si"
2411 [(set (match_operand:SI 0 "s_register_operand" "=r")
2412 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2413 (match_operand:SI 1 "s_register_operand" "r")))]
2415 "bic%?\\t%0, %1, %2"
2416 [(set_attr "predicable" "yes")]
2419 (define_insn "bicsi3"
2420 [(set (match_operand:SI 0 "register_operand" "=l")
2421 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2422 (match_operand:SI 2 "register_operand" "0")))]
2425 [(set_attr "length" "2")]
2428 (define_insn "andsi_not_shiftsi_si"
2429 [(set (match_operand:SI 0 "s_register_operand" "=r")
2430 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2431 [(match_operand:SI 2 "s_register_operand" "r")
2432 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2433 (match_operand:SI 1 "s_register_operand" "r")))]
2435 "bic%?\\t%0, %1, %2%S4"
2436 [(set_attr "predicable" "yes")
2437 (set_attr "shift" "2")
2438 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2439 (const_string "alu_shift")
2440 (const_string "alu_shift_reg")))]
2443 (define_insn "*andsi_notsi_si_compare0"
2444 [(set (reg:CC_NOOV CC_REGNUM)
2446 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2447 (match_operand:SI 1 "s_register_operand" "r"))
2449 (set (match_operand:SI 0 "s_register_operand" "=r")
2450 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2452 "bic%.\\t%0, %1, %2"
2453 [(set_attr "conds" "set")]
2456 (define_insn "*andsi_notsi_si_compare0_scratch"
2457 [(set (reg:CC_NOOV CC_REGNUM)
2459 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2460 (match_operand:SI 1 "s_register_operand" "r"))
2462 (clobber (match_scratch:SI 0 "=r"))]
2464 "bic%.\\t%0, %1, %2"
2465 [(set_attr "conds" "set")]
2468 (define_insn "iordi3"
2469 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2470 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2471 (match_operand:DI 2 "s_register_operand" "r,r")))]
2472 "TARGET_32BIT && ! TARGET_IWMMXT"
2474 [(set_attr "length" "8")
2475 (set_attr "predicable" "yes")]
2478 (define_insn "*iordi_zesidi_di"
2479 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2480 (ior:DI (zero_extend:DI
2481 (match_operand:SI 2 "s_register_operand" "r,r"))
2482 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2485 orr%?\\t%Q0, %Q1, %2
2487 [(set_attr "length" "4,8")
2488 (set_attr "predicable" "yes")]
2491 (define_insn "*iordi_sesidi_di"
2492 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2493 (ior:DI (sign_extend:DI
2494 (match_operand:SI 2 "s_register_operand" "r,r"))
2495 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2498 [(set_attr "length" "8")
2499 (set_attr "predicable" "yes")]
2502 (define_expand "iorsi3"
2503 [(set (match_operand:SI 0 "s_register_operand" "")
2504 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2505 (match_operand:SI 2 "reg_or_int_operand" "")))]
2508 if (GET_CODE (operands[2]) == CONST_INT)
2512 arm_split_constant (IOR, SImode, NULL_RTX,
2513 INTVAL (operands[2]), operands[0], operands[1],
2514 optimize && can_create_pseudo_p ());
2517 else /* TARGET_THUMB1 */
2518 operands [2] = force_reg (SImode, operands [2]);
2523 (define_insn_and_split "*arm_iorsi3"
2524 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2525 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2526 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2532 && GET_CODE (operands[2]) == CONST_INT
2533 && !const_ok_for_arm (INTVAL (operands[2]))"
2534 [(clobber (const_int 0))]
2536 arm_split_constant (IOR, SImode, curr_insn,
2537 INTVAL (operands[2]), operands[0], operands[1], 0);
2540 [(set_attr "length" "4,16")
2541 (set_attr "predicable" "yes")]
2544 (define_insn "*thumb1_iorsi3"
2545 [(set (match_operand:SI 0 "register_operand" "=l")
2546 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2547 (match_operand:SI 2 "register_operand" "l")))]
2550 [(set_attr "length" "2")]
2554 [(match_scratch:SI 3 "r")
2555 (set (match_operand:SI 0 "arm_general_register_operand" "")
2556 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2557 (match_operand:SI 2 "const_int_operand" "")))]
2559 && !const_ok_for_arm (INTVAL (operands[2]))
2560 && const_ok_for_arm (~INTVAL (operands[2]))"
2561 [(set (match_dup 3) (match_dup 2))
2562 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2566 (define_insn "*iorsi3_compare0"
2567 [(set (reg:CC_NOOV CC_REGNUM)
2568 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2569 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2571 (set (match_operand:SI 0 "s_register_operand" "=r")
2572 (ior:SI (match_dup 1) (match_dup 2)))]
2574 "orr%.\\t%0, %1, %2"
2575 [(set_attr "conds" "set")]
2578 (define_insn "*iorsi3_compare0_scratch"
2579 [(set (reg:CC_NOOV CC_REGNUM)
2580 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2581 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2583 (clobber (match_scratch:SI 0 "=r"))]
2585 "orr%.\\t%0, %1, %2"
2586 [(set_attr "conds" "set")]
2589 (define_insn "xordi3"
2590 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2591 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2592 (match_operand:DI 2 "s_register_operand" "r,r")))]
2593 "TARGET_32BIT && !TARGET_IWMMXT"
2595 [(set_attr "length" "8")
2596 (set_attr "predicable" "yes")]
2599 (define_insn "*xordi_zesidi_di"
2600 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2601 (xor:DI (zero_extend:DI
2602 (match_operand:SI 2 "s_register_operand" "r,r"))
2603 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2606 eor%?\\t%Q0, %Q1, %2
2608 [(set_attr "length" "4,8")
2609 (set_attr "predicable" "yes")]
2612 (define_insn "*xordi_sesidi_di"
2613 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2614 (xor:DI (sign_extend:DI
2615 (match_operand:SI 2 "s_register_operand" "r,r"))
2616 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2619 [(set_attr "length" "8")
2620 (set_attr "predicable" "yes")]
2623 (define_expand "xorsi3"
2624 [(set (match_operand:SI 0 "s_register_operand" "")
2625 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2626 (match_operand:SI 2 "arm_rhs_operand" "")))]
2629 if (GET_CODE (operands[2]) == CONST_INT)
2630 operands[2] = force_reg (SImode, operands[2]);
2634 (define_insn "*arm_xorsi3"
2635 [(set (match_operand:SI 0 "s_register_operand" "=r")
2636 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2637 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2639 "eor%?\\t%0, %1, %2"
2640 [(set_attr "predicable" "yes")]
2643 (define_insn "*thumb1_xorsi3"
2644 [(set (match_operand:SI 0 "register_operand" "=l")
2645 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2646 (match_operand:SI 2 "register_operand" "l")))]
2649 [(set_attr "length" "2")]
2652 (define_insn "*xorsi3_compare0"
2653 [(set (reg:CC_NOOV CC_REGNUM)
2654 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2655 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2657 (set (match_operand:SI 0 "s_register_operand" "=r")
2658 (xor:SI (match_dup 1) (match_dup 2)))]
2660 "eor%.\\t%0, %1, %2"
2661 [(set_attr "conds" "set")]
2664 (define_insn "*xorsi3_compare0_scratch"
2665 [(set (reg:CC_NOOV CC_REGNUM)
2666 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2667 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2671 [(set_attr "conds" "set")]
2674 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2675 ; (NOT D) we can sometimes merge the final NOT into one of the following
2679 [(set (match_operand:SI 0 "s_register_operand" "")
2680 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2681 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2682 (match_operand:SI 3 "arm_rhs_operand" "")))
2683 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2685 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2686 (not:SI (match_dup 3))))
2687 (set (match_dup 0) (not:SI (match_dup 4)))]
2691 (define_insn "*andsi_iorsi3_notsi"
2692 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2693 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2694 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2695 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2697 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2698 [(set_attr "length" "8")
2699 (set_attr "ce_count" "2")
2700 (set_attr "predicable" "yes")]
2703 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2704 ; insns are available?
2706 [(set (match_operand:SI 0 "s_register_operand" "")
2707 (match_operator:SI 1 "logical_binary_operator"
2708 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2709 (match_operand:SI 3 "const_int_operand" "")
2710 (match_operand:SI 4 "const_int_operand" ""))
2711 (match_operator:SI 9 "logical_binary_operator"
2712 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2713 (match_operand:SI 6 "const_int_operand" ""))
2714 (match_operand:SI 7 "s_register_operand" "")])]))
2715 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2717 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2718 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2721 [(ashift:SI (match_dup 2) (match_dup 4))
2725 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2728 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2732 [(set (match_operand:SI 0 "s_register_operand" "")
2733 (match_operator:SI 1 "logical_binary_operator"
2734 [(match_operator:SI 9 "logical_binary_operator"
2735 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2736 (match_operand:SI 6 "const_int_operand" ""))
2737 (match_operand:SI 7 "s_register_operand" "")])
2738 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2739 (match_operand:SI 3 "const_int_operand" "")
2740 (match_operand:SI 4 "const_int_operand" ""))]))
2741 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2743 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2744 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2747 [(ashift:SI (match_dup 2) (match_dup 4))
2751 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2754 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2758 [(set (match_operand:SI 0 "s_register_operand" "")
2759 (match_operator:SI 1 "logical_binary_operator"
2760 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2761 (match_operand:SI 3 "const_int_operand" "")
2762 (match_operand:SI 4 "const_int_operand" ""))
2763 (match_operator:SI 9 "logical_binary_operator"
2764 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2765 (match_operand:SI 6 "const_int_operand" ""))
2766 (match_operand:SI 7 "s_register_operand" "")])]))
2767 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2769 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2770 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2773 [(ashift:SI (match_dup 2) (match_dup 4))
2777 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2780 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2784 [(set (match_operand:SI 0 "s_register_operand" "")
2785 (match_operator:SI 1 "logical_binary_operator"
2786 [(match_operator:SI 9 "logical_binary_operator"
2787 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2788 (match_operand:SI 6 "const_int_operand" ""))
2789 (match_operand:SI 7 "s_register_operand" "")])
2790 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2791 (match_operand:SI 3 "const_int_operand" "")
2792 (match_operand:SI 4 "const_int_operand" ""))]))
2793 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2795 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2796 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2799 [(ashift:SI (match_dup 2) (match_dup 4))
2803 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2806 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2810 ;; Minimum and maximum insns
2812 (define_expand "smaxsi3"
2814 (set (match_operand:SI 0 "s_register_operand" "")
2815 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2816 (match_operand:SI 2 "arm_rhs_operand" "")))
2817 (clobber (reg:CC CC_REGNUM))])]
2820 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2822 /* No need for a clobber of the condition code register here. */
2823 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2824 gen_rtx_SMAX (SImode, operands[1],
2830 (define_insn "*smax_0"
2831 [(set (match_operand:SI 0 "s_register_operand" "=r")
2832 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2835 "bic%?\\t%0, %1, %1, asr #31"
2836 [(set_attr "predicable" "yes")]
2839 (define_insn "*smax_m1"
2840 [(set (match_operand:SI 0 "s_register_operand" "=r")
2841 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2844 "orr%?\\t%0, %1, %1, asr #31"
2845 [(set_attr "predicable" "yes")]
2848 (define_insn "*arm_smax_insn"
2849 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2850 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2851 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2852 (clobber (reg:CC CC_REGNUM))]
2855 cmp\\t%1, %2\;movlt\\t%0, %2
2856 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2857 [(set_attr "conds" "clob")
2858 (set_attr "length" "8,12")]
2861 (define_expand "sminsi3"
2863 (set (match_operand:SI 0 "s_register_operand" "")
2864 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2865 (match_operand:SI 2 "arm_rhs_operand" "")))
2866 (clobber (reg:CC CC_REGNUM))])]
2869 if (operands[2] == const0_rtx)
2871 /* No need for a clobber of the condition code register here. */
2872 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2873 gen_rtx_SMIN (SImode, operands[1],
2879 (define_insn "*smin_0"
2880 [(set (match_operand:SI 0 "s_register_operand" "=r")
2881 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2884 "and%?\\t%0, %1, %1, asr #31"
2885 [(set_attr "predicable" "yes")]
2888 (define_insn "*arm_smin_insn"
2889 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2890 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2891 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2892 (clobber (reg:CC CC_REGNUM))]
2895 cmp\\t%1, %2\;movge\\t%0, %2
2896 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2897 [(set_attr "conds" "clob")
2898 (set_attr "length" "8,12")]
2901 (define_expand "umaxsi3"
2903 (set (match_operand:SI 0 "s_register_operand" "")
2904 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2905 (match_operand:SI 2 "arm_rhs_operand" "")))
2906 (clobber (reg:CC CC_REGNUM))])]
2911 (define_insn "*arm_umaxsi3"
2912 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2913 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2914 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2915 (clobber (reg:CC CC_REGNUM))]
2918 cmp\\t%1, %2\;movcc\\t%0, %2
2919 cmp\\t%1, %2\;movcs\\t%0, %1
2920 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2921 [(set_attr "conds" "clob")
2922 (set_attr "length" "8,8,12")]
2925 (define_expand "uminsi3"
2927 (set (match_operand:SI 0 "s_register_operand" "")
2928 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2929 (match_operand:SI 2 "arm_rhs_operand" "")))
2930 (clobber (reg:CC CC_REGNUM))])]
2935 (define_insn "*arm_uminsi3"
2936 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2937 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2938 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2939 (clobber (reg:CC CC_REGNUM))]
2942 cmp\\t%1, %2\;movcs\\t%0, %2
2943 cmp\\t%1, %2\;movcc\\t%0, %1
2944 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2945 [(set_attr "conds" "clob")
2946 (set_attr "length" "8,8,12")]
2949 (define_insn "*store_minmaxsi"
2950 [(set (match_operand:SI 0 "memory_operand" "=m")
2951 (match_operator:SI 3 "minmax_operator"
2952 [(match_operand:SI 1 "s_register_operand" "r")
2953 (match_operand:SI 2 "s_register_operand" "r")]))
2954 (clobber (reg:CC CC_REGNUM))]
2957 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2958 operands[1], operands[2]);
2959 output_asm_insn (\"cmp\\t%1, %2\", operands);
2961 output_asm_insn (\"ite\t%d3\", operands);
2962 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2963 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2966 [(set_attr "conds" "clob")
2967 (set (attr "length")
2968 (if_then_else (eq_attr "is_thumb" "yes")
2971 (set_attr "type" "store1")]
2974 ; Reject the frame pointer in operand[1], since reloading this after
2975 ; it has been eliminated can cause carnage.
2976 (define_insn "*minmax_arithsi"
2977 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2978 (match_operator:SI 4 "shiftable_operator"
2979 [(match_operator:SI 5 "minmax_operator"
2980 [(match_operand:SI 2 "s_register_operand" "r,r")
2981 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2982 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2983 (clobber (reg:CC CC_REGNUM))]
2984 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
2987 enum rtx_code code = GET_CODE (operands[4]);
2990 if (which_alternative != 0 || operands[3] != const0_rtx
2991 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2996 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2997 operands[2], operands[3]);
2998 output_asm_insn (\"cmp\\t%2, %3\", operands);
3002 output_asm_insn (\"ite\\t%d5\", operands);
3004 output_asm_insn (\"it\\t%d5\", operands);
3006 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3008 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3011 [(set_attr "conds" "clob")
3012 (set (attr "length")
3013 (if_then_else (eq_attr "is_thumb" "yes")
3019 ;; Shift and rotation insns
3021 (define_expand "ashldi3"
3022 [(set (match_operand:DI 0 "s_register_operand" "")
3023 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3024 (match_operand:SI 2 "reg_or_int_operand" "")))]
3027 if (GET_CODE (operands[2]) == CONST_INT)
3029 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3031 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3034 /* Ideally we shouldn't fail here if we could know that operands[1]
3035 ends up already living in an iwmmxt register. Otherwise it's
3036 cheaper to have the alternate code being generated than moving
3037 values to iwmmxt regs and back. */
3040 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3045 (define_insn "arm_ashldi3_1bit"
3046 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3047 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3049 (clobber (reg:CC CC_REGNUM))]
3051 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3052 [(set_attr "conds" "clob")
3053 (set_attr "length" "8")]
3056 (define_expand "ashlsi3"
3057 [(set (match_operand:SI 0 "s_register_operand" "")
3058 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3059 (match_operand:SI 2 "arm_rhs_operand" "")))]
3062 if (GET_CODE (operands[2]) == CONST_INT
3063 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3065 emit_insn (gen_movsi (operands[0], const0_rtx));
3071 (define_insn "*thumb1_ashlsi3"
3072 [(set (match_operand:SI 0 "register_operand" "=l,l")
3073 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3074 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3077 [(set_attr "length" "2")]
3080 (define_expand "ashrdi3"
3081 [(set (match_operand:DI 0 "s_register_operand" "")
3082 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3083 (match_operand:SI 2 "reg_or_int_operand" "")))]
3086 if (GET_CODE (operands[2]) == CONST_INT)
3088 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3090 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3093 /* Ideally we shouldn't fail here if we could know that operands[1]
3094 ends up already living in an iwmmxt register. Otherwise it's
3095 cheaper to have the alternate code being generated than moving
3096 values to iwmmxt regs and back. */
3099 else if (!TARGET_REALLY_IWMMXT)
3104 (define_insn "arm_ashrdi3_1bit"
3105 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3106 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3108 (clobber (reg:CC CC_REGNUM))]
3110 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3111 [(set_attr "conds" "clob")
3112 (set_attr "length" "8")]
3115 (define_expand "ashrsi3"
3116 [(set (match_operand:SI 0 "s_register_operand" "")
3117 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3118 (match_operand:SI 2 "arm_rhs_operand" "")))]
3121 if (GET_CODE (operands[2]) == CONST_INT
3122 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3123 operands[2] = GEN_INT (31);
3127 (define_insn "*thumb1_ashrsi3"
3128 [(set (match_operand:SI 0 "register_operand" "=l,l")
3129 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3130 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3133 [(set_attr "length" "2")]
3136 (define_expand "lshrdi3"
3137 [(set (match_operand:DI 0 "s_register_operand" "")
3138 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3139 (match_operand:SI 2 "reg_or_int_operand" "")))]
3142 if (GET_CODE (operands[2]) == CONST_INT)
3144 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3146 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3149 /* Ideally we shouldn't fail here if we could know that operands[1]
3150 ends up already living in an iwmmxt register. Otherwise it's
3151 cheaper to have the alternate code being generated than moving
3152 values to iwmmxt regs and back. */
3155 else if (!TARGET_REALLY_IWMMXT)
3160 (define_insn "arm_lshrdi3_1bit"
3161 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3162 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3164 (clobber (reg:CC CC_REGNUM))]
3166 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3167 [(set_attr "conds" "clob")
3168 (set_attr "length" "8")]
3171 (define_expand "lshrsi3"
3172 [(set (match_operand:SI 0 "s_register_operand" "")
3173 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3174 (match_operand:SI 2 "arm_rhs_operand" "")))]
3177 if (GET_CODE (operands[2]) == CONST_INT
3178 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3180 emit_insn (gen_movsi (operands[0], const0_rtx));
3186 (define_insn "*thumb1_lshrsi3"
3187 [(set (match_operand:SI 0 "register_operand" "=l,l")
3188 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3189 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3192 [(set_attr "length" "2")]
3195 (define_expand "rotlsi3"
3196 [(set (match_operand:SI 0 "s_register_operand" "")
3197 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3198 (match_operand:SI 2 "reg_or_int_operand" "")))]
3201 if (GET_CODE (operands[2]) == CONST_INT)
3202 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3205 rtx reg = gen_reg_rtx (SImode);
3206 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3212 (define_expand "rotrsi3"
3213 [(set (match_operand:SI 0 "s_register_operand" "")
3214 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3215 (match_operand:SI 2 "arm_rhs_operand" "")))]
3220 if (GET_CODE (operands[2]) == CONST_INT
3221 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3222 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3224 else /* TARGET_THUMB1 */
3226 if (GET_CODE (operands [2]) == CONST_INT)
3227 operands [2] = force_reg (SImode, operands[2]);
3232 (define_insn "*thumb1_rotrsi3"
3233 [(set (match_operand:SI 0 "register_operand" "=l")
3234 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3235 (match_operand:SI 2 "register_operand" "l")))]
3238 [(set_attr "length" "2")]
3241 (define_insn "*arm_shiftsi3"
3242 [(set (match_operand:SI 0 "s_register_operand" "=r")
3243 (match_operator:SI 3 "shift_operator"
3244 [(match_operand:SI 1 "s_register_operand" "r")
3245 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3247 "* return arm_output_shift(operands, 0);"
3248 [(set_attr "predicable" "yes")
3249 (set_attr "shift" "1")
3250 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3251 (const_string "alu_shift")
3252 (const_string "alu_shift_reg")))]
3255 (define_insn "*shiftsi3_compare0"
3256 [(set (reg:CC_NOOV CC_REGNUM)
3257 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3258 [(match_operand:SI 1 "s_register_operand" "r")
3259 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3261 (set (match_operand:SI 0 "s_register_operand" "=r")
3262 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3264 "* return arm_output_shift(operands, 1);"
3265 [(set_attr "conds" "set")
3266 (set_attr "shift" "1")
3267 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3268 (const_string "alu_shift")
3269 (const_string "alu_shift_reg")))]
3272 (define_insn "*shiftsi3_compare0_scratch"
3273 [(set (reg:CC_NOOV CC_REGNUM)
3274 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3275 [(match_operand:SI 1 "s_register_operand" "r")
3276 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3278 (clobber (match_scratch:SI 0 "=r"))]
3280 "* return arm_output_shift(operands, 1);"
3281 [(set_attr "conds" "set")
3282 (set_attr "shift" "1")]
3285 (define_insn "*arm_notsi_shiftsi"
3286 [(set (match_operand:SI 0 "s_register_operand" "=r")
3287 (not:SI (match_operator:SI 3 "shift_operator"
3288 [(match_operand:SI 1 "s_register_operand" "r")
3289 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3292 [(set_attr "predicable" "yes")
3293 (set_attr "shift" "1")
3294 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3295 (const_string "alu_shift")
3296 (const_string "alu_shift_reg")))]
3299 (define_insn "*arm_notsi_shiftsi_compare0"
3300 [(set (reg:CC_NOOV CC_REGNUM)
3301 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3302 [(match_operand:SI 1 "s_register_operand" "r")
3303 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3305 (set (match_operand:SI 0 "s_register_operand" "=r")
3306 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3309 [(set_attr "conds" "set")
3310 (set_attr "shift" "1")
3311 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3312 (const_string "alu_shift")
3313 (const_string "alu_shift_reg")))]
3316 (define_insn "*arm_not_shiftsi_compare0_scratch"
3317 [(set (reg:CC_NOOV CC_REGNUM)
3318 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3319 [(match_operand:SI 1 "s_register_operand" "r")
3320 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3322 (clobber (match_scratch:SI 0 "=r"))]
3325 [(set_attr "conds" "set")
3326 (set_attr "shift" "1")
3327 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3328 (const_string "alu_shift")
3329 (const_string "alu_shift_reg")))]
3332 ;; We don't really have extzv, but defining this using shifts helps
3333 ;; to reduce register pressure later on.
3335 (define_expand "extzv"
3337 (ashift:SI (match_operand:SI 1 "register_operand" "")
3338 (match_operand:SI 2 "const_int_operand" "")))
3339 (set (match_operand:SI 0 "register_operand" "")
3340 (lshiftrt:SI (match_dup 4)
3341 (match_operand:SI 3 "const_int_operand" "")))]
3342 "TARGET_THUMB1 || arm_arch_thumb2"
3345 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3346 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3348 if (arm_arch_thumb2)
3350 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3355 operands[3] = GEN_INT (rshift);
3359 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3363 operands[2] = GEN_INT (lshift);
3364 operands[4] = gen_reg_rtx (SImode);
3369 [(set (match_operand:SI 0 "s_register_operand" "=r")
3370 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3371 (match_operand:SI 2 "const_int_operand" "M")
3372 (match_operand:SI 3 "const_int_operand" "M")))]
3374 "sbfx%?\t%0, %1, %3, %2"
3375 [(set_attr "length" "4")
3376 (set_attr "predicable" "yes")]
3379 (define_insn "extzv_t2"
3380 [(set (match_operand:SI 0 "s_register_operand" "=r")
3381 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3382 (match_operand:SI 2 "const_int_operand" "M")
3383 (match_operand:SI 3 "const_int_operand" "M")))]
3385 "ubfx%?\t%0, %1, %3, %2"
3386 [(set_attr "length" "4")
3387 (set_attr "predicable" "yes")]
3391 ;; Unary arithmetic insns
3393 (define_expand "negdi2"
3395 [(set (match_operand:DI 0 "s_register_operand" "")
3396 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3397 (clobber (reg:CC CC_REGNUM))])]
3402 if (GET_CODE (operands[1]) != REG)
3403 operands[1] = force_reg (SImode, operands[1]);
3408 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3409 ;; The second alternative is to allow the common case of a *full* overlap.
3410 (define_insn "*arm_negdi2"
3411 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3412 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3413 (clobber (reg:CC CC_REGNUM))]
3415 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3416 [(set_attr "conds" "clob")
3417 (set_attr "length" "8")]
3420 (define_insn "*thumb1_negdi2"
3421 [(set (match_operand:DI 0 "register_operand" "=&l")
3422 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3423 (clobber (reg:CC CC_REGNUM))]
3425 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3426 [(set_attr "length" "6")]
3429 (define_expand "negsi2"
3430 [(set (match_operand:SI 0 "s_register_operand" "")
3431 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3436 (define_insn "*arm_negsi2"
3437 [(set (match_operand:SI 0 "s_register_operand" "=r")
3438 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3440 "rsb%?\\t%0, %1, #0"
3441 [(set_attr "predicable" "yes")]
3444 (define_insn "*thumb1_negsi2"
3445 [(set (match_operand:SI 0 "register_operand" "=l")
3446 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3449 [(set_attr "length" "2")]
3452 (define_expand "negsf2"
3453 [(set (match_operand:SF 0 "s_register_operand" "")
3454 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3455 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3459 (define_expand "negdf2"
3460 [(set (match_operand:DF 0 "s_register_operand" "")
3461 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3462 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3465 ;; abssi2 doesn't really clobber the condition codes if a different register
3466 ;; is being set. To keep things simple, assume during rtl manipulations that
3467 ;; it does, but tell the final scan operator the truth. Similarly for
3470 (define_expand "abssi2"
3472 [(set (match_operand:SI 0 "s_register_operand" "")
3473 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3474 (clobber (match_dup 2))])]
3478 operands[2] = gen_rtx_SCRATCH (SImode);
3480 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3483 (define_insn "*arm_abssi2"
3484 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3485 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3486 (clobber (reg:CC CC_REGNUM))]
3489 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3490 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3491 [(set_attr "conds" "clob,*")
3492 (set_attr "shift" "1")
3493 ;; predicable can't be set based on the variant, so left as no
3494 (set_attr "length" "8")]
3497 (define_insn_and_split "*thumb1_abssi2"
3498 [(set (match_operand:SI 0 "s_register_operand" "=l")
3499 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3500 (clobber (match_scratch:SI 2 "=&l"))]
3503 "TARGET_THUMB1 && reload_completed"
3504 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3505 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3506 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3508 [(set_attr "length" "6")]
3511 (define_insn "*arm_neg_abssi2"
3512 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3513 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3514 (clobber (reg:CC CC_REGNUM))]
3517 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3518 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3519 [(set_attr "conds" "clob,*")
3520 (set_attr "shift" "1")
3521 ;; predicable can't be set based on the variant, so left as no
3522 (set_attr "length" "8")]
3525 (define_insn_and_split "*thumb1_neg_abssi2"
3526 [(set (match_operand:SI 0 "s_register_operand" "=l")
3527 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3528 (clobber (match_scratch:SI 2 "=&l"))]
3531 "TARGET_THUMB1 && reload_completed"
3532 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3533 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3534 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3536 [(set_attr "length" "6")]
3539 (define_expand "abssf2"
3540 [(set (match_operand:SF 0 "s_register_operand" "")
3541 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3542 "TARGET_32BIT && TARGET_HARD_FLOAT"
3545 (define_expand "absdf2"
3546 [(set (match_operand:DF 0 "s_register_operand" "")
3547 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3548 "TARGET_32BIT && TARGET_HARD_FLOAT"
3551 (define_expand "sqrtsf2"
3552 [(set (match_operand:SF 0 "s_register_operand" "")
3553 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3554 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3557 (define_expand "sqrtdf2"
3558 [(set (match_operand:DF 0 "s_register_operand" "")
3559 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3560 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3563 (define_insn_and_split "one_cmpldi2"
3564 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3565 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3568 "TARGET_32BIT && reload_completed"
3569 [(set (match_dup 0) (not:SI (match_dup 1)))
3570 (set (match_dup 2) (not:SI (match_dup 3)))]
3573 operands[2] = gen_highpart (SImode, operands[0]);
3574 operands[0] = gen_lowpart (SImode, operands[0]);
3575 operands[3] = gen_highpart (SImode, operands[1]);
3576 operands[1] = gen_lowpart (SImode, operands[1]);
3578 [(set_attr "length" "8")
3579 (set_attr "predicable" "yes")]
3582 (define_expand "one_cmplsi2"
3583 [(set (match_operand:SI 0 "s_register_operand" "")
3584 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3589 (define_insn "*arm_one_cmplsi2"
3590 [(set (match_operand:SI 0 "s_register_operand" "=r")
3591 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3594 [(set_attr "predicable" "yes")]
3597 (define_insn "*thumb1_one_cmplsi2"
3598 [(set (match_operand:SI 0 "register_operand" "=l")
3599 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3602 [(set_attr "length" "2")]
3605 (define_insn "*notsi_compare0"
3606 [(set (reg:CC_NOOV CC_REGNUM)
3607 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3609 (set (match_operand:SI 0 "s_register_operand" "=r")
3610 (not:SI (match_dup 1)))]
3613 [(set_attr "conds" "set")]
3616 (define_insn "*notsi_compare0_scratch"
3617 [(set (reg:CC_NOOV CC_REGNUM)
3618 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3620 (clobber (match_scratch:SI 0 "=r"))]
3623 [(set_attr "conds" "set")]
3626 ;; Fixed <--> Floating conversion insns
3628 (define_expand "floatsisf2"
3629 [(set (match_operand:SF 0 "s_register_operand" "")
3630 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3631 "TARGET_32BIT && TARGET_HARD_FLOAT"
3633 if (TARGET_MAVERICK)
3635 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3640 (define_expand "floatsidf2"
3641 [(set (match_operand:DF 0 "s_register_operand" "")
3642 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3643 "TARGET_32BIT && TARGET_HARD_FLOAT"
3645 if (TARGET_MAVERICK)
3647 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3652 (define_expand "fix_truncsfsi2"
3653 [(set (match_operand:SI 0 "s_register_operand" "")
3654 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3655 "TARGET_32BIT && TARGET_HARD_FLOAT"
3657 if (TARGET_MAVERICK)
3659 if (!cirrus_fp_register (operands[0], SImode))
3660 operands[0] = force_reg (SImode, operands[0]);
3661 if (!cirrus_fp_register (operands[1], SFmode))
3662 operands[1] = force_reg (SFmode, operands[0]);
3663 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3668 (define_expand "fix_truncdfsi2"
3669 [(set (match_operand:SI 0 "s_register_operand" "")
3670 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3671 "TARGET_32BIT && TARGET_HARD_FLOAT"
3673 if (TARGET_MAVERICK)
3675 if (!cirrus_fp_register (operands[1], DFmode))
3676 operands[1] = force_reg (DFmode, operands[0]);
3677 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3684 (define_expand "truncdfsf2"
3685 [(set (match_operand:SF 0 "s_register_operand" "")
3687 (match_operand:DF 1 "s_register_operand" "")))]
3688 "TARGET_32BIT && TARGET_HARD_FLOAT"
3692 ;; Zero and sign extension instructions.
3694 (define_expand "zero_extendsidi2"
3695 [(set (match_operand:DI 0 "s_register_operand" "")
3696 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3701 (define_insn "*arm_zero_extendsidi2"
3702 [(set (match_operand:DI 0 "s_register_operand" "=r")
3703 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3706 if (REGNO (operands[1])
3707 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3708 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3709 return \"mov%?\\t%R0, #0\";
3711 [(set_attr "length" "8")
3712 (set_attr "predicable" "yes")]
3715 (define_expand "zero_extendqidi2"
3716 [(set (match_operand:DI 0 "s_register_operand" "")
3717 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3722 (define_insn "*arm_zero_extendqidi2"
3723 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3724 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3727 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3728 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3729 [(set_attr "length" "8")
3730 (set_attr "predicable" "yes")
3731 (set_attr "type" "*,load_byte")
3732 (set_attr "pool_range" "*,4092")
3733 (set_attr "neg_pool_range" "*,4084")]
3736 (define_expand "extendsidi2"
3737 [(set (match_operand:DI 0 "s_register_operand" "")
3738 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3743 (define_insn "*arm_extendsidi2"
3744 [(set (match_operand:DI 0 "s_register_operand" "=r")
3745 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3748 if (REGNO (operands[1])
3749 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3750 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3751 return \"mov%?\\t%R0, %Q0, asr #31\";
3753 [(set_attr "length" "8")
3754 (set_attr "shift" "1")
3755 (set_attr "predicable" "yes")]
3758 (define_expand "zero_extendhisi2"
3760 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3762 (set (match_operand:SI 0 "s_register_operand" "")
3763 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3767 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3769 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3770 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3774 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3776 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3780 if (!s_register_operand (operands[1], HImode))
3781 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3785 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3786 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3790 operands[1] = gen_lowpart (SImode, operands[1]);
3791 operands[2] = gen_reg_rtx (SImode);
3795 (define_insn "*thumb1_zero_extendhisi2"
3796 [(set (match_operand:SI 0 "register_operand" "=l")
3797 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3798 "TARGET_THUMB1 && !arm_arch6"
3800 rtx mem = XEXP (operands[1], 0);
3802 if (GET_CODE (mem) == CONST)
3803 mem = XEXP (mem, 0);
3805 if (GET_CODE (mem) == LABEL_REF)
3806 return \"ldr\\t%0, %1\";
3808 if (GET_CODE (mem) == PLUS)
3810 rtx a = XEXP (mem, 0);
3811 rtx b = XEXP (mem, 1);
3813 /* This can happen due to bugs in reload. */
3814 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3817 ops[0] = operands[0];
3820 output_asm_insn (\"mov %0, %1\", ops);
3822 XEXP (mem, 0) = operands[0];
3825 else if ( GET_CODE (a) == LABEL_REF
3826 && GET_CODE (b) == CONST_INT)
3827 return \"ldr\\t%0, %1\";
3830 return \"ldrh\\t%0, %1\";
3832 [(set_attr "length" "4")
3833 (set_attr "type" "load_byte")
3834 (set_attr "pool_range" "60")]
3837 (define_insn "*thumb1_zero_extendhisi2_v6"
3838 [(set (match_operand:SI 0 "register_operand" "=l,l")
3839 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3840 "TARGET_THUMB1 && arm_arch6"
3844 if (which_alternative == 0)
3845 return \"uxth\\t%0, %1\";
3847 mem = XEXP (operands[1], 0);
3849 if (GET_CODE (mem) == CONST)
3850 mem = XEXP (mem, 0);
3852 if (GET_CODE (mem) == LABEL_REF)
3853 return \"ldr\\t%0, %1\";
3855 if (GET_CODE (mem) == PLUS)
3857 rtx a = XEXP (mem, 0);
3858 rtx b = XEXP (mem, 1);
3860 /* This can happen due to bugs in reload. */
3861 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3864 ops[0] = operands[0];
3867 output_asm_insn (\"mov %0, %1\", ops);
3869 XEXP (mem, 0) = operands[0];
3872 else if ( GET_CODE (a) == LABEL_REF
3873 && GET_CODE (b) == CONST_INT)
3874 return \"ldr\\t%0, %1\";
3877 return \"ldrh\\t%0, %1\";
3879 [(set_attr "length" "2,4")
3880 (set_attr "type" "alu_shift,load_byte")
3881 (set_attr "pool_range" "*,60")]
3884 (define_insn "*arm_zero_extendhisi2"
3885 [(set (match_operand:SI 0 "s_register_operand" "=r")
3886 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3887 "TARGET_ARM && arm_arch4 && !arm_arch6"
3889 [(set_attr "type" "load_byte")
3890 (set_attr "predicable" "yes")
3891 (set_attr "pool_range" "256")
3892 (set_attr "neg_pool_range" "244")]
3895 (define_insn "*arm_zero_extendhisi2_v6"
3896 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3897 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3898 "TARGET_ARM && arm_arch6"
3902 [(set_attr "type" "alu_shift,load_byte")
3903 (set_attr "predicable" "yes")
3904 (set_attr "pool_range" "*,256")
3905 (set_attr "neg_pool_range" "*,244")]
3908 (define_insn "*arm_zero_extendhisi2addsi"
3909 [(set (match_operand:SI 0 "s_register_operand" "=r")
3910 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3911 (match_operand:SI 2 "s_register_operand" "r")))]
3913 "uxtah%?\\t%0, %2, %1"
3914 [(set_attr "type" "alu_shift")
3915 (set_attr "predicable" "yes")]
3918 (define_expand "zero_extendqisi2"
3919 [(set (match_operand:SI 0 "s_register_operand" "")
3920 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3923 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3927 emit_insn (gen_andsi3 (operands[0],
3928 gen_lowpart (SImode, operands[1]),
3931 else /* TARGET_THUMB */
3933 rtx temp = gen_reg_rtx (SImode);
3936 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3937 operands[1] = gen_lowpart (SImode, operands[1]);
3940 ops[1] = operands[1];
3941 ops[2] = GEN_INT (24);
3943 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3944 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3946 ops[0] = operands[0];
3948 ops[2] = GEN_INT (24);
3950 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3951 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3958 (define_insn "*thumb1_zero_extendqisi2"
3959 [(set (match_operand:SI 0 "register_operand" "=l")
3960 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3961 "TARGET_THUMB1 && !arm_arch6"
3963 [(set_attr "length" "2")
3964 (set_attr "type" "load_byte")
3965 (set_attr "pool_range" "32")]
3968 (define_insn "*thumb1_zero_extendqisi2_v6"
3969 [(set (match_operand:SI 0 "register_operand" "=l,l")
3970 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3971 "TARGET_THUMB1 && arm_arch6"
3975 [(set_attr "length" "2,2")
3976 (set_attr "type" "alu_shift,load_byte")
3977 (set_attr "pool_range" "*,32")]
3980 (define_insn "*arm_zero_extendqisi2"
3981 [(set (match_operand:SI 0 "s_register_operand" "=r")
3982 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3983 "TARGET_ARM && !arm_arch6"
3984 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3985 [(set_attr "type" "load_byte")
3986 (set_attr "predicable" "yes")
3987 (set_attr "pool_range" "4096")
3988 (set_attr "neg_pool_range" "4084")]
3991 (define_insn "*arm_zero_extendqisi2_v6"
3992 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3993 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3994 "TARGET_ARM && arm_arch6"
3997 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3998 [(set_attr "type" "alu_shift,load_byte")
3999 (set_attr "predicable" "yes")
4000 (set_attr "pool_range" "*,4096")
4001 (set_attr "neg_pool_range" "*,4084")]
4004 (define_insn "*arm_zero_extendqisi2addsi"
4005 [(set (match_operand:SI 0 "s_register_operand" "=r")
4006 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4007 (match_operand:SI 2 "s_register_operand" "r")))]
4009 "uxtab%?\\t%0, %2, %1"
4010 [(set_attr "predicable" "yes")
4011 (set_attr "insn" "xtab")
4012 (set_attr "type" "alu_shift")]
4016 [(set (match_operand:SI 0 "s_register_operand" "")
4017 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4018 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4019 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4020 [(set (match_dup 2) (match_dup 1))
4021 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4026 [(set (match_operand:SI 0 "s_register_operand" "")
4027 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4028 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4029 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4030 [(set (match_dup 2) (match_dup 1))
4031 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4035 (define_insn "*compareqi_eq0"
4036 [(set (reg:CC_Z CC_REGNUM)
4037 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4041 [(set_attr "conds" "set")]
4044 (define_expand "extendhisi2"
4046 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4048 (set (match_operand:SI 0 "s_register_operand" "")
4049 (ashiftrt:SI (match_dup 2)
4054 if (GET_CODE (operands[1]) == MEM)
4058 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4063 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4064 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4069 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4071 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4075 if (!s_register_operand (operands[1], HImode))
4076 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4081 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4083 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4084 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4089 operands[1] = gen_lowpart (SImode, operands[1]);
4090 operands[2] = gen_reg_rtx (SImode);
4094 (define_insn "thumb1_extendhisi2"
4095 [(set (match_operand:SI 0 "register_operand" "=l")
4096 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4097 (clobber (match_scratch:SI 2 "=&l"))]
4098 "TARGET_THUMB1 && !arm_arch6"
4102 rtx mem = XEXP (operands[1], 0);
4104 /* This code used to try to use 'V', and fix the address only if it was
4105 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4106 range of QImode offsets, and offsettable_address_p does a QImode
4109 if (GET_CODE (mem) == CONST)
4110 mem = XEXP (mem, 0);
4112 if (GET_CODE (mem) == LABEL_REF)
4113 return \"ldr\\t%0, %1\";
4115 if (GET_CODE (mem) == PLUS)
4117 rtx a = XEXP (mem, 0);
4118 rtx b = XEXP (mem, 1);
4120 if (GET_CODE (a) == LABEL_REF
4121 && GET_CODE (b) == CONST_INT)
4122 return \"ldr\\t%0, %1\";
4124 if (GET_CODE (b) == REG)
4125 return \"ldrsh\\t%0, %1\";
4133 ops[2] = const0_rtx;
4136 gcc_assert (GET_CODE (ops[1]) == REG);
4138 ops[0] = operands[0];
4139 ops[3] = operands[2];
4140 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4143 [(set_attr "length" "4")
4144 (set_attr "type" "load_byte")
4145 (set_attr "pool_range" "1020")]
4148 ;; We used to have an early-clobber on the scratch register here.
4149 ;; However, there's a bug somewhere in reload which means that this
4150 ;; can be partially ignored during spill allocation if the memory
4151 ;; address also needs reloading; this causes us to die later on when
4152 ;; we try to verify the operands. Fortunately, we don't really need
4153 ;; the early-clobber: we can always use operand 0 if operand 2
4154 ;; overlaps the address.
4155 (define_insn "*thumb1_extendhisi2_insn_v6"
4156 [(set (match_operand:SI 0 "register_operand" "=l,l")
4157 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4158 (clobber (match_scratch:SI 2 "=X,l"))]
4159 "TARGET_THUMB1 && arm_arch6"
4165 if (which_alternative == 0)
4166 return \"sxth\\t%0, %1\";
4168 mem = XEXP (operands[1], 0);
4170 /* This code used to try to use 'V', and fix the address only if it was
4171 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4172 range of QImode offsets, and offsettable_address_p does a QImode
4175 if (GET_CODE (mem) == CONST)
4176 mem = XEXP (mem, 0);
4178 if (GET_CODE (mem) == LABEL_REF)
4179 return \"ldr\\t%0, %1\";
4181 if (GET_CODE (mem) == PLUS)
4183 rtx a = XEXP (mem, 0);
4184 rtx b = XEXP (mem, 1);
4186 if (GET_CODE (a) == LABEL_REF
4187 && GET_CODE (b) == CONST_INT)
4188 return \"ldr\\t%0, %1\";
4190 if (GET_CODE (b) == REG)
4191 return \"ldrsh\\t%0, %1\";
4199 ops[2] = const0_rtx;
4202 gcc_assert (GET_CODE (ops[1]) == REG);
4204 ops[0] = operands[0];
4205 if (reg_mentioned_p (operands[2], ops[1]))
4208 ops[3] = operands[2];
4209 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4212 [(set_attr "length" "2,4")
4213 (set_attr "type" "alu_shift,load_byte")
4214 (set_attr "pool_range" "*,1020")]
4217 ;; This pattern will only be used when ldsh is not available
4218 (define_expand "extendhisi2_mem"
4219 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4221 (zero_extend:SI (match_dup 7)))
4222 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4223 (set (match_operand:SI 0 "" "")
4224 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4229 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4231 mem1 = change_address (operands[1], QImode, addr);
4232 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4233 operands[0] = gen_lowpart (SImode, operands[0]);
4235 operands[2] = gen_reg_rtx (SImode);
4236 operands[3] = gen_reg_rtx (SImode);
4237 operands[6] = gen_reg_rtx (SImode);
4240 if (BYTES_BIG_ENDIAN)
4242 operands[4] = operands[2];
4243 operands[5] = operands[3];
4247 operands[4] = operands[3];
4248 operands[5] = operands[2];
4253 (define_insn "*arm_extendhisi2"
4254 [(set (match_operand:SI 0 "s_register_operand" "=r")
4255 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4256 "TARGET_ARM && arm_arch4 && !arm_arch6"
4257 "ldr%(sh%)\\t%0, %1"
4258 [(set_attr "type" "load_byte")
4259 (set_attr "predicable" "yes")
4260 (set_attr "pool_range" "256")
4261 (set_attr "neg_pool_range" "244")]
4264 ;; ??? Check Thumb-2 pool range
4265 (define_insn "*arm_extendhisi2_v6"
4266 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4267 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4268 "TARGET_32BIT && arm_arch6"
4272 [(set_attr "type" "alu_shift,load_byte")
4273 (set_attr "predicable" "yes")
4274 (set_attr "pool_range" "*,256")
4275 (set_attr "neg_pool_range" "*,244")]
4278 (define_insn "*arm_extendhisi2addsi"
4279 [(set (match_operand:SI 0 "s_register_operand" "=r")
4280 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4281 (match_operand:SI 2 "s_register_operand" "r")))]
4283 "sxtah%?\\t%0, %2, %1"
4286 (define_expand "extendqihi2"
4288 (ashift:SI (match_operand:QI 1 "general_operand" "")
4290 (set (match_operand:HI 0 "s_register_operand" "")
4291 (ashiftrt:SI (match_dup 2)
4296 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4298 emit_insn (gen_rtx_SET (VOIDmode,
4300 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4303 if (!s_register_operand (operands[1], QImode))
4304 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4305 operands[0] = gen_lowpart (SImode, operands[0]);
4306 operands[1] = gen_lowpart (SImode, operands[1]);
4307 operands[2] = gen_reg_rtx (SImode);
4311 (define_insn "*arm_extendqihi_insn"
4312 [(set (match_operand:HI 0 "s_register_operand" "=r")
4313 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4314 "TARGET_ARM && arm_arch4"
4315 "ldr%(sb%)\\t%0, %1"
4316 [(set_attr "type" "load_byte")
4317 (set_attr "predicable" "yes")
4318 (set_attr "pool_range" "256")
4319 (set_attr "neg_pool_range" "244")]
4322 (define_expand "extendqisi2"
4324 (ashift:SI (match_operand:QI 1 "general_operand" "")
4326 (set (match_operand:SI 0 "s_register_operand" "")
4327 (ashiftrt:SI (match_dup 2)
4332 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4334 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4335 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4339 if (!s_register_operand (operands[1], QImode))
4340 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4344 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4345 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4349 operands[1] = gen_lowpart (SImode, operands[1]);
4350 operands[2] = gen_reg_rtx (SImode);
4354 (define_insn "*arm_extendqisi"
4355 [(set (match_operand:SI 0 "s_register_operand" "=r")
4356 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4357 "TARGET_ARM && arm_arch4 && !arm_arch6"
4358 "ldr%(sb%)\\t%0, %1"
4359 [(set_attr "type" "load_byte")
4360 (set_attr "predicable" "yes")
4361 (set_attr "pool_range" "256")
4362 (set_attr "neg_pool_range" "244")]
4365 (define_insn "*arm_extendqisi_v6"
4366 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4367 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4368 "TARGET_ARM && arm_arch6"
4372 [(set_attr "type" "alu_shift,load_byte")
4373 (set_attr "predicable" "yes")
4374 (set_attr "pool_range" "*,256")
4375 (set_attr "neg_pool_range" "*,244")]
4378 (define_insn "*arm_extendqisi2addsi"
4379 [(set (match_operand:SI 0 "s_register_operand" "=r")
4380 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4381 (match_operand:SI 2 "s_register_operand" "r")))]
4383 "sxtab%?\\t%0, %2, %1"
4384 [(set_attr "type" "alu_shift")
4385 (set_attr "insn" "xtab")
4386 (set_attr "predicable" "yes")]
4389 (define_insn "*thumb1_extendqisi2"
4390 [(set (match_operand:SI 0 "register_operand" "=l,l")
4391 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4392 "TARGET_THUMB1 && !arm_arch6"
4396 rtx mem = XEXP (operands[1], 0);
4398 if (GET_CODE (mem) == CONST)
4399 mem = XEXP (mem, 0);
4401 if (GET_CODE (mem) == LABEL_REF)
4402 return \"ldr\\t%0, %1\";
4404 if (GET_CODE (mem) == PLUS
4405 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4406 return \"ldr\\t%0, %1\";
4408 if (which_alternative == 0)
4409 return \"ldrsb\\t%0, %1\";
4411 ops[0] = operands[0];
4413 if (GET_CODE (mem) == PLUS)
4415 rtx a = XEXP (mem, 0);
4416 rtx b = XEXP (mem, 1);
4421 if (GET_CODE (a) == REG)
4423 if (GET_CODE (b) == REG)
4424 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4425 else if (REGNO (a) == REGNO (ops[0]))
4427 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4428 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4429 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4432 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4436 gcc_assert (GET_CODE (b) == REG);
4437 if (REGNO (b) == REGNO (ops[0]))
4439 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4440 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4441 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4444 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4447 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4449 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4450 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4451 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4456 ops[2] = const0_rtx;
4458 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4462 [(set_attr "length" "2,6")
4463 (set_attr "type" "load_byte,load_byte")
4464 (set_attr "pool_range" "32,32")]
4467 (define_insn "*thumb1_extendqisi2_v6"
4468 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4469 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4470 "TARGET_THUMB1 && arm_arch6"
4476 if (which_alternative == 0)
4477 return \"sxtb\\t%0, %1\";
4479 mem = XEXP (operands[1], 0);
4481 if (GET_CODE (mem) == CONST)
4482 mem = XEXP (mem, 0);
4484 if (GET_CODE (mem) == LABEL_REF)
4485 return \"ldr\\t%0, %1\";
4487 if (GET_CODE (mem) == PLUS
4488 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4489 return \"ldr\\t%0, %1\";
4491 if (which_alternative == 0)
4492 return \"ldrsb\\t%0, %1\";
4494 ops[0] = operands[0];
4496 if (GET_CODE (mem) == PLUS)
4498 rtx a = XEXP (mem, 0);
4499 rtx b = XEXP (mem, 1);
4504 if (GET_CODE (a) == REG)
4506 if (GET_CODE (b) == REG)
4507 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4508 else if (REGNO (a) == REGNO (ops[0]))
4510 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4511 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4514 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4518 gcc_assert (GET_CODE (b) == REG);
4519 if (REGNO (b) == REGNO (ops[0]))
4521 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4522 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4525 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4528 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4530 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4531 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4536 ops[2] = const0_rtx;
4538 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4542 [(set_attr "length" "2,2,4")
4543 (set_attr "type" "alu_shift,load_byte,load_byte")
4544 (set_attr "pool_range" "*,32,32")]
4547 (define_expand "extendsfdf2"
4548 [(set (match_operand:DF 0 "s_register_operand" "")
4549 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4550 "TARGET_32BIT && TARGET_HARD_FLOAT"
4554 ;; Move insns (including loads and stores)
4556 ;; XXX Just some ideas about movti.
4557 ;; I don't think these are a good idea on the arm, there just aren't enough
4559 ;;(define_expand "loadti"
4560 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4561 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4564 ;;(define_expand "storeti"
4565 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4566 ;; (match_operand:TI 1 "s_register_operand" ""))]
4569 ;;(define_expand "movti"
4570 ;; [(set (match_operand:TI 0 "general_operand" "")
4571 ;; (match_operand:TI 1 "general_operand" ""))]
4577 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4578 ;; operands[1] = copy_to_reg (operands[1]);
4579 ;; if (GET_CODE (operands[0]) == MEM)
4580 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4581 ;; else if (GET_CODE (operands[1]) == MEM)
4582 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4586 ;; emit_insn (insn);
4590 ;; Recognize garbage generated above.
4593 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4594 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4598 ;; register mem = (which_alternative < 3);
4599 ;; register const char *template;
4601 ;; operands[mem] = XEXP (operands[mem], 0);
4602 ;; switch (which_alternative)
4604 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4605 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4606 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4607 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4608 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4609 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4611 ;; output_asm_insn (template, operands);
4615 (define_expand "movdi"
4616 [(set (match_operand:DI 0 "general_operand" "")
4617 (match_operand:DI 1 "general_operand" ""))]
4620 if (can_create_pseudo_p ())
4622 if (GET_CODE (operands[0]) != REG)
4623 operands[1] = force_reg (DImode, operands[1]);
4628 (define_insn "*arm_movdi"
4629 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4630 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4632 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4634 && ( register_operand (operands[0], DImode)
4635 || register_operand (operands[1], DImode))"
4637 switch (which_alternative)
4644 return output_move_double (operands);
4647 [(set_attr "length" "8,12,16,8,8")
4648 (set_attr "type" "*,*,*,load2,store2")
4649 (set_attr "pool_range" "*,*,*,1020,*")
4650 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4654 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4655 (match_operand:ANY64 1 "const_double_operand" ""))]
4658 && (arm_const_double_inline_cost (operands[1])
4659 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4662 arm_split_constant (SET, SImode, curr_insn,
4663 INTVAL (gen_lowpart (SImode, operands[1])),
4664 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4665 arm_split_constant (SET, SImode, curr_insn,
4666 INTVAL (gen_highpart_mode (SImode,
4667 GET_MODE (operands[0]),
4669 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4674 ; If optimizing for size, or if we have load delay slots, then
4675 ; we want to split the constant into two separate operations.
4676 ; In both cases this may split a trivial part into a single data op
4677 ; leaving a single complex constant to load. We can also get longer
4678 ; offsets in a LDR which means we get better chances of sharing the pool
4679 ; entries. Finally, we can normally do a better job of scheduling
4680 ; LDR instructions than we can with LDM.
4681 ; This pattern will only match if the one above did not.
4683 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4684 (match_operand:ANY64 1 "const_double_operand" ""))]
4685 "TARGET_ARM && reload_completed
4686 && arm_const_double_by_parts (operands[1])"
4687 [(set (match_dup 0) (match_dup 1))
4688 (set (match_dup 2) (match_dup 3))]
4690 operands[2] = gen_highpart (SImode, operands[0]);
4691 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4693 operands[0] = gen_lowpart (SImode, operands[0]);
4694 operands[1] = gen_lowpart (SImode, operands[1]);
4699 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4700 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4701 "TARGET_EITHER && reload_completed"
4702 [(set (match_dup 0) (match_dup 1))
4703 (set (match_dup 2) (match_dup 3))]
4705 operands[2] = gen_highpart (SImode, operands[0]);
4706 operands[3] = gen_highpart (SImode, operands[1]);
4707 operands[0] = gen_lowpart (SImode, operands[0]);
4708 operands[1] = gen_lowpart (SImode, operands[1]);
4710 /* Handle a partial overlap. */
4711 if (rtx_equal_p (operands[0], operands[3]))
4713 rtx tmp0 = operands[0];
4714 rtx tmp1 = operands[1];
4716 operands[0] = operands[2];
4717 operands[1] = operands[3];
4724 ;; We can't actually do base+index doubleword loads if the index and
4725 ;; destination overlap. Split here so that we at least have chance to
4728 [(set (match_operand:DI 0 "s_register_operand" "")
4729 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4730 (match_operand:SI 2 "s_register_operand" ""))))]
4732 && reg_overlap_mentioned_p (operands[0], operands[1])
4733 && reg_overlap_mentioned_p (operands[0], operands[2])"
4735 (plus:SI (match_dup 1)
4738 (mem:DI (match_dup 4)))]
4740 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4744 ;;; ??? This should have alternatives for constants.
4745 ;;; ??? This was originally identical to the movdf_insn pattern.
4746 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4747 ;;; thumb_reorg with a memory reference.
4748 (define_insn "*thumb1_movdi_insn"
4749 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4750 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4752 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4753 && ( register_operand (operands[0], DImode)
4754 || register_operand (operands[1], DImode))"
4757 switch (which_alternative)
4761 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4762 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4763 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4765 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4767 operands[1] = GEN_INT (- INTVAL (operands[1]));
4768 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4770 return \"ldmia\\t%1, {%0, %H0}\";
4772 return \"stmia\\t%0, {%1, %H1}\";
4774 return thumb_load_double_from_address (operands);
4776 operands[2] = gen_rtx_MEM (SImode,
4777 plus_constant (XEXP (operands[0], 0), 4));
4778 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4781 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4782 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4783 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4786 [(set_attr "length" "4,4,6,2,2,6,4,4")
4787 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4788 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4791 (define_expand "movsi"
4792 [(set (match_operand:SI 0 "general_operand" "")
4793 (match_operand:SI 1 "general_operand" ""))]
4797 rtx base, offset, tmp;
4801 /* Everything except mem = const or mem = mem can be done easily. */
4802 if (GET_CODE (operands[0]) == MEM)
4803 operands[1] = force_reg (SImode, operands[1]);
4804 if (arm_general_register_operand (operands[0], SImode)
4805 && GET_CODE (operands[1]) == CONST_INT
4806 && !(const_ok_for_arm (INTVAL (operands[1]))
4807 || const_ok_for_arm (~INTVAL (operands[1]))))
4809 arm_split_constant (SET, SImode, NULL_RTX,
4810 INTVAL (operands[1]), operands[0], NULL_RTX,
4811 optimize && can_create_pseudo_p ());
4815 else /* TARGET_THUMB1... */
4817 if (can_create_pseudo_p ())
4819 if (GET_CODE (operands[0]) != REG)
4820 operands[1] = force_reg (SImode, operands[1]);
4824 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4826 split_const (operands[1], &base, &offset);
4827 if (GET_CODE (base) == SYMBOL_REF
4828 && !offset_within_block_p (base, INTVAL (offset)))
4830 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4831 emit_move_insn (tmp, base);
4832 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4837 /* Recognize the case where operand[1] is a reference to thread-local
4838 data and load its address to a register. */
4839 if (arm_tls_referenced_p (operands[1]))
4841 rtx tmp = operands[1];
4844 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4846 addend = XEXP (XEXP (tmp, 0), 1);
4847 tmp = XEXP (XEXP (tmp, 0), 0);
4850 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4851 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4853 tmp = legitimize_tls_address (tmp,
4854 !can_create_pseudo_p () ? operands[0] : 0);
4857 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4858 tmp = force_operand (tmp, operands[0]);
4863 && (CONSTANT_P (operands[1])
4864 || symbol_mentioned_p (operands[1])
4865 || label_mentioned_p (operands[1])))
4866 operands[1] = legitimize_pic_address (operands[1], SImode,
4867 (!can_create_pseudo_p ()
4874 (define_insn "*arm_movsi_insn"
4875 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4876 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4877 "TARGET_ARM && ! TARGET_IWMMXT
4878 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4879 && ( register_operand (operands[0], SImode)
4880 || register_operand (operands[1], SImode))"
4888 [(set_attr "type" "*,*,*,*,load1,store1")
4889 (set_attr "predicable" "yes")
4890 (set_attr "pool_range" "*,*,*,*,4096,*")
4891 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
4895 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4896 (match_operand:SI 1 "const_int_operand" ""))]
4898 && (!(const_ok_for_arm (INTVAL (operands[1]))
4899 || const_ok_for_arm (~INTVAL (operands[1]))))"
4900 [(clobber (const_int 0))]
4902 arm_split_constant (SET, SImode, NULL_RTX,
4903 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4908 (define_insn "*thumb1_movsi_insn"
4909 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
4910 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
4912 && ( register_operand (operands[0], SImode)
4913 || register_operand (operands[1], SImode))"
4924 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4925 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4926 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
4930 [(set (match_operand:SI 0 "register_operand" "")
4931 (match_operand:SI 1 "const_int_operand" ""))]
4932 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4933 [(set (match_dup 0) (match_dup 1))
4934 (set (match_dup 0) (neg:SI (match_dup 0)))]
4935 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4939 [(set (match_operand:SI 0 "register_operand" "")
4940 (match_operand:SI 1 "const_int_operand" ""))]
4941 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4942 [(set (match_dup 0) (match_dup 1))
4943 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4946 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4947 unsigned HOST_WIDE_INT mask = 0xff;
4950 for (i = 0; i < 25; i++)
4951 if ((val & (mask << i)) == val)
4954 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4958 operands[1] = GEN_INT (val >> i);
4959 operands[2] = GEN_INT (i);
4963 ;; When generating pic, we need to load the symbol offset into a register.
4964 ;; So that the optimizer does not confuse this with a normal symbol load
4965 ;; we use an unspec. The offset will be loaded from a constant pool entry,
4966 ;; since that is the only type of relocation we can use.
4968 ;; The rather odd constraints on the following are to force reload to leave
4969 ;; the insn alone, and to force the minipool generation pass to then move
4970 ;; the GOT symbol to memory.
4972 (define_insn "pic_load_addr_arm"
4973 [(set (match_operand:SI 0 "s_register_operand" "=r")
4974 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4975 "TARGET_ARM && flag_pic"
4977 [(set_attr "type" "load1")
4978 (set (attr "pool_range") (const_int 4096))
4979 (set (attr "neg_pool_range") (const_int 4084))]
4982 (define_insn "pic_load_addr_thumb1"
4983 [(set (match_operand:SI 0 "s_register_operand" "=l")
4984 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4985 "TARGET_THUMB1 && flag_pic"
4987 [(set_attr "type" "load1")
4988 (set (attr "pool_range") (const_int 1024))]
4991 (define_insn "pic_add_dot_plus_four"
4992 [(set (match_operand:SI 0 "register_operand" "=r")
4993 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
4994 (const (plus:SI (pc) (const_int 4))))
4995 (match_operand 2 "" "")]
4999 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5000 INTVAL (operands[2]));
5001 return \"add\\t%0, %|pc\";
5003 [(set_attr "length" "2")]
5006 (define_insn "pic_add_dot_plus_eight"
5007 [(set (match_operand:SI 0 "register_operand" "=r")
5008 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5009 (const (plus:SI (pc) (const_int 8))))
5010 (match_operand 2 "" "")]
5014 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5015 INTVAL (operands[2]));
5016 return \"add%?\\t%0, %|pc, %1\";
5018 [(set_attr "predicable" "yes")]
5021 (define_insn "tls_load_dot_plus_eight"
5022 [(set (match_operand:SI 0 "register_operand" "+r")
5023 (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5024 (const (plus:SI (pc) (const_int 8))))
5025 (match_operand 2 "" "")]
5029 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5030 INTVAL (operands[2]));
5031 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5033 [(set_attr "predicable" "yes")]
5036 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5037 ;; followed by a load. These sequences can be crunched down to
5038 ;; tls_load_dot_plus_eight by a peephole.
5041 [(parallel [(set (match_operand:SI 0 "register_operand" "")
5042 (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
5043 (const (plus:SI (pc) (const_int 8))))]
5045 (use (label_ref (match_operand 1 "" "")))])
5046 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5047 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5048 [(parallel [(set (match_dup 2)
5049 (mem:SI (unspec:SI [(plus:SI (match_dup 3)
5050 (const (plus:SI (pc) (const_int 8))))]
5052 (use (label_ref (match_dup 1)))])]
5056 (define_insn "pic_offset_arm"
5057 [(set (match_operand:SI 0 "register_operand" "=r")
5058 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5059 (unspec:SI [(match_operand:SI 2 "" "X")]
5060 UNSPEC_PIC_OFFSET))))]
5061 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5062 "ldr%?\\t%0, [%1,%2]"
5063 [(set_attr "type" "load1")]
5066 (define_expand "builtin_setjmp_receiver"
5067 [(label_ref (match_operand 0 "" ""))]
5071 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5073 if (arm_pic_register != INVALID_REGNUM)
5074 arm_load_pic_register (1UL << 3);
5078 ;; If copying one reg to another we can set the condition codes according to
5079 ;; its value. Such a move is common after a return from subroutine and the
5080 ;; result is being tested against zero.
5082 (define_insn "*movsi_compare0"
5083 [(set (reg:CC CC_REGNUM)
5084 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5086 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5092 [(set_attr "conds" "set")]
5095 ;; Subroutine to store a half word from a register into memory.
5096 ;; Operand 0 is the source register (HImode)
5097 ;; Operand 1 is the destination address in a register (SImode)
5099 ;; In both this routine and the next, we must be careful not to spill
5100 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5101 ;; can generate unrecognizable rtl.
5103 (define_expand "storehi"
5104 [;; store the low byte
5105 (set (match_operand 1 "" "") (match_dup 3))
5106 ;; extract the high byte
5108 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5109 ;; store the high byte
5110 (set (match_dup 4) (match_dup 5))]
5114 rtx op1 = operands[1];
5115 rtx addr = XEXP (op1, 0);
5116 enum rtx_code code = GET_CODE (addr);
5118 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5120 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5122 operands[4] = adjust_address (op1, QImode, 1);
5123 operands[1] = adjust_address (operands[1], QImode, 0);
5124 operands[3] = gen_lowpart (QImode, operands[0]);
5125 operands[0] = gen_lowpart (SImode, operands[0]);
5126 operands[2] = gen_reg_rtx (SImode);
5127 operands[5] = gen_lowpart (QImode, operands[2]);
5131 (define_expand "storehi_bigend"
5132 [(set (match_dup 4) (match_dup 3))
5134 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5135 (set (match_operand 1 "" "") (match_dup 5))]
5139 rtx op1 = operands[1];
5140 rtx addr = XEXP (op1, 0);
5141 enum rtx_code code = GET_CODE (addr);
5143 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5145 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5147 operands[4] = adjust_address (op1, QImode, 1);
5148 operands[1] = adjust_address (operands[1], QImode, 0);
5149 operands[3] = gen_lowpart (QImode, operands[0]);
5150 operands[0] = gen_lowpart (SImode, operands[0]);
5151 operands[2] = gen_reg_rtx (SImode);
5152 operands[5] = gen_lowpart (QImode, operands[2]);
5156 ;; Subroutine to store a half word integer constant into memory.
5157 (define_expand "storeinthi"
5158 [(set (match_operand 0 "" "")
5159 (match_operand 1 "" ""))
5160 (set (match_dup 3) (match_dup 2))]
5164 HOST_WIDE_INT value = INTVAL (operands[1]);
5165 rtx addr = XEXP (operands[0], 0);
5166 rtx op0 = operands[0];
5167 enum rtx_code code = GET_CODE (addr);
5169 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5171 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5173 operands[1] = gen_reg_rtx (SImode);
5174 if (BYTES_BIG_ENDIAN)
5176 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5177 if ((value & 255) == ((value >> 8) & 255))
5178 operands[2] = operands[1];
5181 operands[2] = gen_reg_rtx (SImode);
5182 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5187 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5188 if ((value & 255) == ((value >> 8) & 255))
5189 operands[2] = operands[1];
5192 operands[2] = gen_reg_rtx (SImode);
5193 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5197 operands[3] = adjust_address (op0, QImode, 1);
5198 operands[0] = adjust_address (operands[0], QImode, 0);
5199 operands[2] = gen_lowpart (QImode, operands[2]);
5200 operands[1] = gen_lowpart (QImode, operands[1]);
5204 (define_expand "storehi_single_op"
5205 [(set (match_operand:HI 0 "memory_operand" "")
5206 (match_operand:HI 1 "general_operand" ""))]
5207 "TARGET_32BIT && arm_arch4"
5209 if (!s_register_operand (operands[1], HImode))
5210 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5214 (define_expand "movhi"
5215 [(set (match_operand:HI 0 "general_operand" "")
5216 (match_operand:HI 1 "general_operand" ""))]
5221 if (can_create_pseudo_p ())
5223 if (GET_CODE (operands[0]) == MEM)
5227 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5230 if (GET_CODE (operands[1]) == CONST_INT)
5231 emit_insn (gen_storeinthi (operands[0], operands[1]));
5234 if (GET_CODE (operands[1]) == MEM)
5235 operands[1] = force_reg (HImode, operands[1]);
5236 if (BYTES_BIG_ENDIAN)
5237 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5239 emit_insn (gen_storehi (operands[1], operands[0]));
5243 /* Sign extend a constant, and keep it in an SImode reg. */
5244 else if (GET_CODE (operands[1]) == CONST_INT)
5246 rtx reg = gen_reg_rtx (SImode);
5247 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5249 /* If the constant is already valid, leave it alone. */
5250 if (!const_ok_for_arm (val))
5252 /* If setting all the top bits will make the constant
5253 loadable in a single instruction, then set them.
5254 Otherwise, sign extend the number. */
5256 if (const_ok_for_arm (~(val | ~0xffff)))
5258 else if (val & 0x8000)
5262 emit_insn (gen_movsi (reg, GEN_INT (val)));
5263 operands[1] = gen_lowpart (HImode, reg);
5265 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5266 && GET_CODE (operands[1]) == MEM)
5268 rtx reg = gen_reg_rtx (SImode);
5270 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5271 operands[1] = gen_lowpart (HImode, reg);
5273 else if (!arm_arch4)
5275 if (GET_CODE (operands[1]) == MEM)
5278 rtx offset = const0_rtx;
5279 rtx reg = gen_reg_rtx (SImode);
5281 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5282 || (GET_CODE (base) == PLUS
5283 && (GET_CODE (offset = XEXP (base, 1))
5285 && ((INTVAL(offset) & 1) != 1)
5286 && GET_CODE (base = XEXP (base, 0)) == REG))
5287 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5291 new = widen_memory_access (operands[1], SImode,
5292 ((INTVAL (offset) & ~3)
5293 - INTVAL (offset)));
5294 emit_insn (gen_movsi (reg, new));
5295 if (((INTVAL (offset) & 2) != 0)
5296 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5298 rtx reg2 = gen_reg_rtx (SImode);
5300 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5305 emit_insn (gen_movhi_bytes (reg, operands[1]));
5307 operands[1] = gen_lowpart (HImode, reg);
5311 /* Handle loading a large integer during reload. */
5312 else if (GET_CODE (operands[1]) == CONST_INT
5313 && !const_ok_for_arm (INTVAL (operands[1]))
5314 && !const_ok_for_arm (~INTVAL (operands[1])))
5316 /* Writing a constant to memory needs a scratch, which should
5317 be handled with SECONDARY_RELOADs. */
5318 gcc_assert (GET_CODE (operands[0]) == REG);
5320 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5321 emit_insn (gen_movsi (operands[0], operands[1]));
5325 else if (TARGET_THUMB2)
5327 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5328 if (can_create_pseudo_p ())
5330 if (GET_CODE (operands[0]) != REG)
5331 operands[1] = force_reg (HImode, operands[1]);
5332 /* Zero extend a constant, and keep it in an SImode reg. */
5333 else if (GET_CODE (operands[1]) == CONST_INT)
5335 rtx reg = gen_reg_rtx (SImode);
5336 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5338 emit_insn (gen_movsi (reg, GEN_INT (val)));
5339 operands[1] = gen_lowpart (HImode, reg);
5343 else /* TARGET_THUMB1 */
5345 if (can_create_pseudo_p ())
5347 if (GET_CODE (operands[1]) == CONST_INT)
5349 rtx reg = gen_reg_rtx (SImode);
5351 emit_insn (gen_movsi (reg, operands[1]));
5352 operands[1] = gen_lowpart (HImode, reg);
5355 /* ??? We shouldn't really get invalid addresses here, but this can
5356 happen if we are passed a SP (never OK for HImode/QImode) or
5357 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5358 HImode/QImode) relative address. */
5359 /* ??? This should perhaps be fixed elsewhere, for instance, in
5360 fixup_stack_1, by checking for other kinds of invalid addresses,
5361 e.g. a bare reference to a virtual register. This may confuse the
5362 alpha though, which must handle this case differently. */
5363 if (GET_CODE (operands[0]) == MEM
5364 && !memory_address_p (GET_MODE (operands[0]),
5365 XEXP (operands[0], 0)))
5367 = replace_equiv_address (operands[0],
5368 copy_to_reg (XEXP (operands[0], 0)));
5370 if (GET_CODE (operands[1]) == MEM
5371 && !memory_address_p (GET_MODE (operands[1]),
5372 XEXP (operands[1], 0)))
5374 = replace_equiv_address (operands[1],
5375 copy_to_reg (XEXP (operands[1], 0)));
5377 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5379 rtx reg = gen_reg_rtx (SImode);
5381 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5382 operands[1] = gen_lowpart (HImode, reg);
5385 if (GET_CODE (operands[0]) == MEM)
5386 operands[1] = force_reg (HImode, operands[1]);
5388 else if (GET_CODE (operands[1]) == CONST_INT
5389 && !satisfies_constraint_I (operands[1]))
5391 /* Handle loading a large integer during reload. */
5393 /* Writing a constant to memory needs a scratch, which should
5394 be handled with SECONDARY_RELOADs. */
5395 gcc_assert (GET_CODE (operands[0]) == REG);
5397 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5398 emit_insn (gen_movsi (operands[0], operands[1]));
5405 (define_insn "*thumb1_movhi_insn"
5406 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5407 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5409 && ( register_operand (operands[0], HImode)
5410 || register_operand (operands[1], HImode))"
5412 switch (which_alternative)
5414 case 0: return \"add %0, %1, #0\";
5415 case 2: return \"strh %1, %0\";
5416 case 3: return \"mov %0, %1\";
5417 case 4: return \"mov %0, %1\";
5418 case 5: return \"mov %0, %1\";
5419 default: gcc_unreachable ();
5421 /* The stack pointer can end up being taken as an index register.
5422 Catch this case here and deal with it. */
5423 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5424 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5425 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5428 ops[0] = operands[0];
5429 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5431 output_asm_insn (\"mov %0, %1\", ops);
5433 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5436 return \"ldrh %0, %1\";
5438 [(set_attr "length" "2,4,2,2,2,2")
5439 (set_attr "type" "*,load1,store1,*,*,*")]
5443 (define_expand "movhi_bytes"
5444 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5446 (zero_extend:SI (match_dup 6)))
5447 (set (match_operand:SI 0 "" "")
5448 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5453 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5455 mem1 = change_address (operands[1], QImode, addr);
5456 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5457 operands[0] = gen_lowpart (SImode, operands[0]);
5459 operands[2] = gen_reg_rtx (SImode);
5460 operands[3] = gen_reg_rtx (SImode);
5463 if (BYTES_BIG_ENDIAN)
5465 operands[4] = operands[2];
5466 operands[5] = operands[3];
5470 operands[4] = operands[3];
5471 operands[5] = operands[2];
5476 (define_expand "movhi_bigend"
5478 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5481 (ashiftrt:SI (match_dup 2) (const_int 16)))
5482 (set (match_operand:HI 0 "s_register_operand" "")
5486 operands[2] = gen_reg_rtx (SImode);
5487 operands[3] = gen_reg_rtx (SImode);
5488 operands[4] = gen_lowpart (HImode, operands[3]);
5492 ;; Pattern to recognize insn generated default case above
5493 (define_insn "*movhi_insn_arch4"
5494 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5495 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5498 && (GET_CODE (operands[1]) != CONST_INT
5499 || const_ok_for_arm (INTVAL (operands[1]))
5500 || const_ok_for_arm (~INTVAL (operands[1])))"
5502 mov%?\\t%0, %1\\t%@ movhi
5503 mvn%?\\t%0, #%B1\\t%@ movhi
5504 str%(h%)\\t%1, %0\\t%@ movhi
5505 ldr%(h%)\\t%0, %1\\t%@ movhi"
5506 [(set_attr "type" "*,*,store1,load1")
5507 (set_attr "predicable" "yes")
5508 (set_attr "pool_range" "*,*,*,256")
5509 (set_attr "neg_pool_range" "*,*,*,244")]
5512 (define_insn "*movhi_bytes"
5513 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5514 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5517 mov%?\\t%0, %1\\t%@ movhi
5518 mvn%?\\t%0, #%B1\\t%@ movhi"
5519 [(set_attr "predicable" "yes")]
5522 (define_expand "thumb_movhi_clobber"
5523 [(set (match_operand:HI 0 "memory_operand" "")
5524 (match_operand:HI 1 "register_operand" ""))
5525 (clobber (match_operand:DI 2 "register_operand" ""))]
5528 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5529 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5531 emit_insn (gen_movhi (operands[0], operands[1]));
5534 /* XXX Fixme, need to handle other cases here as well. */
5539 ;; We use a DImode scratch because we may occasionally need an additional
5540 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5541 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5542 (define_expand "reload_outhi"
5543 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5544 (match_operand:HI 1 "s_register_operand" "r")
5545 (match_operand:DI 2 "s_register_operand" "=&l")])]
5548 arm_reload_out_hi (operands);
5550 thumb_reload_out_hi (operands);
5555 (define_expand "reload_inhi"
5556 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5557 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5558 (match_operand:DI 2 "s_register_operand" "=&r")])]
5562 arm_reload_in_hi (operands);
5564 thumb_reload_out_hi (operands);
5568 (define_expand "movqi"
5569 [(set (match_operand:QI 0 "general_operand" "")
5570 (match_operand:QI 1 "general_operand" ""))]
5573 /* Everything except mem = const or mem = mem can be done easily */
5575 if (can_create_pseudo_p ())
5577 if (GET_CODE (operands[1]) == CONST_INT)
5579 rtx reg = gen_reg_rtx (SImode);
5581 emit_insn (gen_movsi (reg, operands[1]));
5582 operands[1] = gen_lowpart (QImode, reg);
5587 /* ??? We shouldn't really get invalid addresses here, but this can
5588 happen if we are passed a SP (never OK for HImode/QImode) or
5589 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5590 HImode/QImode) relative address. */
5591 /* ??? This should perhaps be fixed elsewhere, for instance, in
5592 fixup_stack_1, by checking for other kinds of invalid addresses,
5593 e.g. a bare reference to a virtual register. This may confuse the
5594 alpha though, which must handle this case differently. */
5595 if (GET_CODE (operands[0]) == MEM
5596 && !memory_address_p (GET_MODE (operands[0]),
5597 XEXP (operands[0], 0)))
5599 = replace_equiv_address (operands[0],
5600 copy_to_reg (XEXP (operands[0], 0)));
5601 if (GET_CODE (operands[1]) == MEM
5602 && !memory_address_p (GET_MODE (operands[1]),
5603 XEXP (operands[1], 0)))
5605 = replace_equiv_address (operands[1],
5606 copy_to_reg (XEXP (operands[1], 0)));
5609 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5611 rtx reg = gen_reg_rtx (SImode);
5613 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5614 operands[1] = gen_lowpart (QImode, reg);
5617 if (GET_CODE (operands[0]) == MEM)
5618 operands[1] = force_reg (QImode, operands[1]);
5620 else if (TARGET_THUMB
5621 && GET_CODE (operands[1]) == CONST_INT
5622 && !satisfies_constraint_I (operands[1]))
5624 /* Handle loading a large integer during reload. */
5626 /* Writing a constant to memory needs a scratch, which should
5627 be handled with SECONDARY_RELOADs. */
5628 gcc_assert (GET_CODE (operands[0]) == REG);
5630 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5631 emit_insn (gen_movsi (operands[0], operands[1]));
5638 (define_insn "*arm_movqi_insn"
5639 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5640 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5642 && ( register_operand (operands[0], QImode)
5643 || register_operand (operands[1], QImode))"
5649 [(set_attr "type" "*,*,load1,store1")
5650 (set_attr "predicable" "yes")]
5653 (define_insn "*thumb1_movqi_insn"
5654 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5655 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5657 && ( register_operand (operands[0], QImode)
5658 || register_operand (operands[1], QImode))"
5666 [(set_attr "length" "2")
5667 (set_attr "type" "*,load1,store1,*,*,*")
5668 (set_attr "pool_range" "*,32,*,*,*,*")]
5671 (define_expand "movsf"
5672 [(set (match_operand:SF 0 "general_operand" "")
5673 (match_operand:SF 1 "general_operand" ""))]
5678 if (GET_CODE (operands[0]) == MEM)
5679 operands[1] = force_reg (SFmode, operands[1]);
5681 else /* TARGET_THUMB1 */
5683 if (can_create_pseudo_p ())
5685 if (GET_CODE (operands[0]) != REG)
5686 operands[1] = force_reg (SFmode, operands[1]);
5692 ;; Transform a floating-point move of a constant into a core register into
5693 ;; an SImode operation.
5695 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5696 (match_operand:SF 1 "immediate_operand" ""))]
5699 && GET_CODE (operands[1]) == CONST_DOUBLE"
5700 [(set (match_dup 2) (match_dup 3))]
5702 operands[2] = gen_lowpart (SImode, operands[0]);
5703 operands[3] = gen_lowpart (SImode, operands[1]);
5704 if (operands[2] == 0 || operands[3] == 0)
5709 (define_insn "*arm_movsf_soft_insn"
5710 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5711 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5713 && TARGET_SOFT_FLOAT
5714 && (GET_CODE (operands[0]) != MEM
5715 || register_operand (operands[1], SFmode))"
5718 ldr%?\\t%0, %1\\t%@ float
5719 str%?\\t%1, %0\\t%@ float"
5720 [(set_attr "length" "4,4,4")
5721 (set_attr "predicable" "yes")
5722 (set_attr "type" "*,load1,store1")
5723 (set_attr "pool_range" "*,4096,*")
5724 (set_attr "neg_pool_range" "*,4084,*")]
5727 ;;; ??? This should have alternatives for constants.
5728 (define_insn "*thumb1_movsf_insn"
5729 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5730 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5732 && ( register_operand (operands[0], SFmode)
5733 || register_operand (operands[1], SFmode))"
5742 [(set_attr "length" "2")
5743 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5744 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5747 (define_expand "movdf"
5748 [(set (match_operand:DF 0 "general_operand" "")
5749 (match_operand:DF 1 "general_operand" ""))]
5754 if (GET_CODE (operands[0]) == MEM)
5755 operands[1] = force_reg (DFmode, operands[1]);
5757 else /* TARGET_THUMB */
5759 if (can_create_pseudo_p ())
5761 if (GET_CODE (operands[0]) != REG)
5762 operands[1] = force_reg (DFmode, operands[1]);
5768 ;; Reloading a df mode value stored in integer regs to memory can require a
5770 (define_expand "reload_outdf"
5771 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5772 (match_operand:DF 1 "s_register_operand" "r")
5773 (match_operand:SI 2 "s_register_operand" "=&r")]
5777 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5780 operands[2] = XEXP (operands[0], 0);
5781 else if (code == POST_INC || code == PRE_DEC)
5783 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5784 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5785 emit_insn (gen_movdi (operands[0], operands[1]));
5788 else if (code == PRE_INC)
5790 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5792 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5795 else if (code == POST_DEC)
5796 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5798 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5799 XEXP (XEXP (operands[0], 0), 1)));
5801 emit_insn (gen_rtx_SET (VOIDmode,
5802 replace_equiv_address (operands[0], operands[2]),
5805 if (code == POST_DEC)
5806 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5812 (define_insn "*movdf_soft_insn"
5813 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5814 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5815 "TARGET_ARM && TARGET_SOFT_FLOAT
5816 && ( register_operand (operands[0], DFmode)
5817 || register_operand (operands[1], DFmode))"
5819 switch (which_alternative)
5826 return output_move_double (operands);
5829 [(set_attr "length" "8,12,16,8,8")
5830 (set_attr "type" "*,*,*,load2,store2")
5831 (set_attr "pool_range" "1020")
5832 (set_attr "neg_pool_range" "1008")]
5835 ;;; ??? This should have alternatives for constants.
5836 ;;; ??? This was originally identical to the movdi_insn pattern.
5837 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5838 ;;; thumb_reorg with a memory reference.
5839 (define_insn "*thumb_movdf_insn"
5840 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5841 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5843 && ( register_operand (operands[0], DFmode)
5844 || register_operand (operands[1], DFmode))"
5846 switch (which_alternative)
5850 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5851 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5852 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5854 return \"ldmia\\t%1, {%0, %H0}\";
5856 return \"stmia\\t%0, {%1, %H1}\";
5858 return thumb_load_double_from_address (operands);
5860 operands[2] = gen_rtx_MEM (SImode,
5861 plus_constant (XEXP (operands[0], 0), 4));
5862 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5865 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5866 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5867 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5870 [(set_attr "length" "4,2,2,6,4,4")
5871 (set_attr "type" "*,load2,store2,load2,store2,*")
5872 (set_attr "pool_range" "*,*,*,1020,*,*")]
5875 (define_expand "movxf"
5876 [(set (match_operand:XF 0 "general_operand" "")
5877 (match_operand:XF 1 "general_operand" ""))]
5878 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5880 if (GET_CODE (operands[0]) == MEM)
5881 operands[1] = force_reg (XFmode, operands[1]);
5887 ;; load- and store-multiple insns
5888 ;; The arm can load/store any set of registers, provided that they are in
5889 ;; ascending order; but that is beyond GCC so stick with what it knows.
5891 (define_expand "load_multiple"
5892 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5893 (match_operand:SI 1 "" ""))
5894 (use (match_operand:SI 2 "" ""))])]
5897 HOST_WIDE_INT offset = 0;
5899 /* Support only fixed point registers. */
5900 if (GET_CODE (operands[2]) != CONST_INT
5901 || INTVAL (operands[2]) > 14
5902 || INTVAL (operands[2]) < 2
5903 || GET_CODE (operands[1]) != MEM
5904 || GET_CODE (operands[0]) != REG
5905 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5906 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5910 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5911 force_reg (SImode, XEXP (operands[1], 0)),
5912 TRUE, FALSE, operands[1], &offset);
5915 ;; Load multiple with write-back
5917 (define_insn "*ldmsi_postinc4"
5918 [(match_parallel 0 "load_multiple_operation"
5919 [(set (match_operand:SI 1 "s_register_operand" "=r")
5920 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5922 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5923 (mem:SI (match_dup 2)))
5924 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5925 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5926 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5927 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5928 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5929 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5930 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5931 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5932 [(set_attr "type" "load4")
5933 (set_attr "predicable" "yes")]
5936 (define_insn "*ldmsi_postinc4_thumb1"
5937 [(match_parallel 0 "load_multiple_operation"
5938 [(set (match_operand:SI 1 "s_register_operand" "=l")
5939 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5941 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5942 (mem:SI (match_dup 2)))
5943 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5944 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5945 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5946 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5947 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5948 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5949 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5950 "ldmia\\t%1!, {%3, %4, %5, %6}"
5951 [(set_attr "type" "load4")]
5954 (define_insn "*ldmsi_postinc3"
5955 [(match_parallel 0 "load_multiple_operation"
5956 [(set (match_operand:SI 1 "s_register_operand" "=r")
5957 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5959 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5960 (mem:SI (match_dup 2)))
5961 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5962 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5963 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5964 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5965 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5966 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
5967 [(set_attr "type" "load3")
5968 (set_attr "predicable" "yes")]
5971 (define_insn "*ldmsi_postinc2"
5972 [(match_parallel 0 "load_multiple_operation"
5973 [(set (match_operand:SI 1 "s_register_operand" "=r")
5974 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5976 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5977 (mem:SI (match_dup 2)))
5978 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5979 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5980 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5981 "ldm%(ia%)\\t%1!, {%3, %4}"
5982 [(set_attr "type" "load2")
5983 (set_attr "predicable" "yes")]
5986 ;; Ordinary load multiple
5988 (define_insn "*ldmsi4"
5989 [(match_parallel 0 "load_multiple_operation"
5990 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5991 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5992 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5993 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5994 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5995 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5996 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5997 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5998 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5999 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6000 [(set_attr "type" "load4")
6001 (set_attr "predicable" "yes")]
6004 (define_insn "*ldmsi3"
6005 [(match_parallel 0 "load_multiple_operation"
6006 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6007 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6008 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6009 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6010 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6011 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6012 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6013 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6014 [(set_attr "type" "load3")
6015 (set_attr "predicable" "yes")]
6018 (define_insn "*ldmsi2"
6019 [(match_parallel 0 "load_multiple_operation"
6020 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6021 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6022 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6023 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6024 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6025 "ldm%(ia%)\\t%1, {%2, %3}"
6026 [(set_attr "type" "load2")
6027 (set_attr "predicable" "yes")]
6030 (define_expand "store_multiple"
6031 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6032 (match_operand:SI 1 "" ""))
6033 (use (match_operand:SI 2 "" ""))])]
6036 HOST_WIDE_INT offset = 0;
6038 /* Support only fixed point registers. */
6039 if (GET_CODE (operands[2]) != CONST_INT
6040 || INTVAL (operands[2]) > 14
6041 || INTVAL (operands[2]) < 2
6042 || GET_CODE (operands[1]) != REG
6043 || GET_CODE (operands[0]) != MEM
6044 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6045 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6049 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6050 force_reg (SImode, XEXP (operands[0], 0)),
6051 TRUE, FALSE, operands[0], &offset);
6054 ;; Store multiple with write-back
6056 (define_insn "*stmsi_postinc4"
6057 [(match_parallel 0 "store_multiple_operation"
6058 [(set (match_operand:SI 1 "s_register_operand" "=r")
6059 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6061 (set (mem:SI (match_dup 2))
6062 (match_operand:SI 3 "arm_hard_register_operand" ""))
6063 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6064 (match_operand:SI 4 "arm_hard_register_operand" ""))
6065 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6066 (match_operand:SI 5 "arm_hard_register_operand" ""))
6067 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6068 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6069 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6070 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6071 [(set_attr "predicable" "yes")
6072 (set_attr "type" "store4")]
6075 (define_insn "*stmsi_postinc4_thumb1"
6076 [(match_parallel 0 "store_multiple_operation"
6077 [(set (match_operand:SI 1 "s_register_operand" "=l")
6078 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6080 (set (mem:SI (match_dup 2))
6081 (match_operand:SI 3 "arm_hard_register_operand" ""))
6082 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6083 (match_operand:SI 4 "arm_hard_register_operand" ""))
6084 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6085 (match_operand:SI 5 "arm_hard_register_operand" ""))
6086 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6087 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6088 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6089 "stmia\\t%1!, {%3, %4, %5, %6}"
6090 [(set_attr "type" "store4")]
6093 (define_insn "*stmsi_postinc3"
6094 [(match_parallel 0 "store_multiple_operation"
6095 [(set (match_operand:SI 1 "s_register_operand" "=r")
6096 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6098 (set (mem:SI (match_dup 2))
6099 (match_operand:SI 3 "arm_hard_register_operand" ""))
6100 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6101 (match_operand:SI 4 "arm_hard_register_operand" ""))
6102 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6103 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6104 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6105 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6106 [(set_attr "predicable" "yes")
6107 (set_attr "type" "store3")]
6110 (define_insn "*stmsi_postinc2"
6111 [(match_parallel 0 "store_multiple_operation"
6112 [(set (match_operand:SI 1 "s_register_operand" "=r")
6113 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6115 (set (mem:SI (match_dup 2))
6116 (match_operand:SI 3 "arm_hard_register_operand" ""))
6117 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6118 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6119 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6120 "stm%(ia%)\\t%1!, {%3, %4}"
6121 [(set_attr "predicable" "yes")
6122 (set_attr "type" "store2")]
6125 ;; Ordinary store multiple
6127 (define_insn "*stmsi4"
6128 [(match_parallel 0 "store_multiple_operation"
6129 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6130 (match_operand:SI 2 "arm_hard_register_operand" ""))
6131 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6132 (match_operand:SI 3 "arm_hard_register_operand" ""))
6133 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6134 (match_operand:SI 4 "arm_hard_register_operand" ""))
6135 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6136 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6137 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6138 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6139 [(set_attr "predicable" "yes")
6140 (set_attr "type" "store4")]
6143 (define_insn "*stmsi3"
6144 [(match_parallel 0 "store_multiple_operation"
6145 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6146 (match_operand:SI 2 "arm_hard_register_operand" ""))
6147 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6148 (match_operand:SI 3 "arm_hard_register_operand" ""))
6149 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6150 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6151 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6152 "stm%(ia%)\\t%1, {%2, %3, %4}"
6153 [(set_attr "predicable" "yes")
6154 (set_attr "type" "store3")]
6157 (define_insn "*stmsi2"
6158 [(match_parallel 0 "store_multiple_operation"
6159 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6160 (match_operand:SI 2 "arm_hard_register_operand" ""))
6161 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6162 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6163 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6164 "stm%(ia%)\\t%1, {%2, %3}"
6165 [(set_attr "predicable" "yes")
6166 (set_attr "type" "store2")]
6169 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6170 ;; We could let this apply for blocks of less than this, but it clobbers so
6171 ;; many registers that there is then probably a better way.
6173 (define_expand "movmemqi"
6174 [(match_operand:BLK 0 "general_operand" "")
6175 (match_operand:BLK 1 "general_operand" "")
6176 (match_operand:SI 2 "const_int_operand" "")
6177 (match_operand:SI 3 "const_int_operand" "")]
6182 if (arm_gen_movmemqi (operands))
6186 else /* TARGET_THUMB1 */
6188 if ( INTVAL (operands[3]) != 4
6189 || INTVAL (operands[2]) > 48)
6192 thumb_expand_movmemqi (operands);
6198 ;; Thumb block-move insns
6200 (define_insn "movmem12b"
6201 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6202 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6203 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6204 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6205 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6206 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6207 (set (match_operand:SI 0 "register_operand" "=l")
6208 (plus:SI (match_dup 2) (const_int 12)))
6209 (set (match_operand:SI 1 "register_operand" "=l")
6210 (plus:SI (match_dup 3) (const_int 12)))
6211 (clobber (match_scratch:SI 4 "=&l"))
6212 (clobber (match_scratch:SI 5 "=&l"))
6213 (clobber (match_scratch:SI 6 "=&l"))]
6215 "* return thumb_output_move_mem_multiple (3, operands);"
6216 [(set_attr "length" "4")
6217 ; This isn't entirely accurate... It loads as well, but in terms of
6218 ; scheduling the following insn it is better to consider it as a store
6219 (set_attr "type" "store3")]
6222 (define_insn "movmem8b"
6223 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6224 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6225 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6226 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6227 (set (match_operand:SI 0 "register_operand" "=l")
6228 (plus:SI (match_dup 2) (const_int 8)))
6229 (set (match_operand:SI 1 "register_operand" "=l")
6230 (plus:SI (match_dup 3) (const_int 8)))
6231 (clobber (match_scratch:SI 4 "=&l"))
6232 (clobber (match_scratch:SI 5 "=&l"))]
6234 "* return thumb_output_move_mem_multiple (2, operands);"
6235 [(set_attr "length" "4")
6236 ; This isn't entirely accurate... It loads as well, but in terms of
6237 ; scheduling the following insn it is better to consider it as a store
6238 (set_attr "type" "store2")]
6243 ;; Compare & branch insns
6244 ;; The range calculations are based as follows:
6245 ;; For forward branches, the address calculation returns the address of
6246 ;; the next instruction. This is 2 beyond the branch instruction.
6247 ;; For backward branches, the address calculation returns the address of
6248 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6249 ;; instruction for the shortest sequence, and 4 before the branch instruction
6250 ;; if we have to jump around an unconditional branch.
6251 ;; To the basic branch range the PC offset must be added (this is +4).
6252 ;; So for forward branches we have
6253 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6254 ;; And for backward branches we have
6255 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6257 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6258 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6260 (define_expand "cbranchsi4"
6261 [(set (pc) (if_then_else
6262 (match_operator 0 "arm_comparison_operator"
6263 [(match_operand:SI 1 "s_register_operand" "")
6264 (match_operand:SI 2 "nonmemory_operand" "")])
6265 (label_ref (match_operand 3 "" ""))
6269 if (thumb1_cmpneg_operand (operands[2], SImode))
6271 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6272 operands[3], operands[0]));
6275 if (!thumb1_cmp_operand (operands[2], SImode))
6276 operands[2] = force_reg (SImode, operands[2]);
6279 (define_insn "*cbranchsi4_insn"
6280 [(set (pc) (if_then_else
6281 (match_operator 0 "arm_comparison_operator"
6282 [(match_operand:SI 1 "s_register_operand" "l,*h")
6283 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6284 (label_ref (match_operand 3 "" ""))
6288 output_asm_insn (\"cmp\\t%1, %2\", operands);
6290 switch (get_attr_length (insn))
6292 case 4: return \"b%d0\\t%l3\";
6293 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6294 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6297 [(set (attr "far_jump")
6299 (eq_attr "length" "8")
6300 (const_string "yes")
6301 (const_string "no")))
6302 (set (attr "length")
6304 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6305 (le (minus (match_dup 3) (pc)) (const_int 256)))
6308 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6309 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6314 (define_insn "cbranchsi4_scratch"
6315 [(set (pc) (if_then_else
6316 (match_operator 4 "arm_comparison_operator"
6317 [(match_operand:SI 1 "s_register_operand" "l,0")
6318 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6319 (label_ref (match_operand 3 "" ""))
6321 (clobber (match_scratch:SI 0 "=l,l"))]
6324 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6326 switch (get_attr_length (insn))
6328 case 4: return \"b%d4\\t%l3\";
6329 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6330 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6333 [(set (attr "far_jump")
6335 (eq_attr "length" "8")
6336 (const_string "yes")
6337 (const_string "no")))
6338 (set (attr "length")
6340 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6341 (le (minus (match_dup 3) (pc)) (const_int 256)))
6344 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6345 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6349 (define_insn "*movsi_cbranchsi4"
6352 (match_operator 3 "arm_comparison_operator"
6353 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6355 (label_ref (match_operand 2 "" ""))
6357 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6361 if (which_alternative == 0)
6362 output_asm_insn (\"cmp\t%0, #0\", operands);
6363 else if (which_alternative == 1)
6364 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6367 output_asm_insn (\"cmp\t%1, #0\", operands);
6368 if (which_alternative == 2)
6369 output_asm_insn (\"mov\t%0, %1\", operands);
6371 output_asm_insn (\"str\t%1, %0\", operands);
6373 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6375 case 4: return \"b%d3\\t%l2\";
6376 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6377 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6380 [(set (attr "far_jump")
6382 (ior (and (gt (symbol_ref ("which_alternative"))
6384 (eq_attr "length" "8"))
6385 (eq_attr "length" "10"))
6386 (const_string "yes")
6387 (const_string "no")))
6388 (set (attr "length")
6390 (le (symbol_ref ("which_alternative"))
6393 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6394 (le (minus (match_dup 2) (pc)) (const_int 256)))
6397 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6398 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6402 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6403 (le (minus (match_dup 2) (pc)) (const_int 256)))
6406 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6407 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6412 (define_insn "*negated_cbranchsi4"
6415 (match_operator 0 "equality_operator"
6416 [(match_operand:SI 1 "s_register_operand" "l")
6417 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6418 (label_ref (match_operand 3 "" ""))
6422 output_asm_insn (\"cmn\\t%1, %2\", operands);
6423 switch (get_attr_length (insn))
6425 case 4: return \"b%d0\\t%l3\";
6426 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6427 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6430 [(set (attr "far_jump")
6432 (eq_attr "length" "8")
6433 (const_string "yes")
6434 (const_string "no")))
6435 (set (attr "length")
6437 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6438 (le (minus (match_dup 3) (pc)) (const_int 256)))
6441 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6442 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6447 (define_insn "*tbit_cbranch"
6450 (match_operator 0 "equality_operator"
6451 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6453 (match_operand:SI 2 "const_int_operand" "i"))
6455 (label_ref (match_operand 3 "" ""))
6457 (clobber (match_scratch:SI 4 "=l"))]
6462 op[0] = operands[4];
6463 op[1] = operands[1];
6464 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6466 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6467 switch (get_attr_length (insn))
6469 case 4: return \"b%d0\\t%l3\";
6470 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6471 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6474 [(set (attr "far_jump")
6476 (eq_attr "length" "8")
6477 (const_string "yes")
6478 (const_string "no")))
6479 (set (attr "length")
6481 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6482 (le (minus (match_dup 3) (pc)) (const_int 256)))
6485 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6486 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6491 (define_insn "*tlobits_cbranch"
6494 (match_operator 0 "equality_operator"
6495 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6496 (match_operand:SI 2 "const_int_operand" "i")
6499 (label_ref (match_operand 3 "" ""))
6501 (clobber (match_scratch:SI 4 "=l"))]
6506 op[0] = operands[4];
6507 op[1] = operands[1];
6508 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6510 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6511 switch (get_attr_length (insn))
6513 case 4: return \"b%d0\\t%l3\";
6514 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6515 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6518 [(set (attr "far_jump")
6520 (eq_attr "length" "8")
6521 (const_string "yes")
6522 (const_string "no")))
6523 (set (attr "length")
6525 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6526 (le (minus (match_dup 3) (pc)) (const_int 256)))
6529 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6530 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6535 (define_insn "*tstsi3_cbranch"
6538 (match_operator 3 "equality_operator"
6539 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6540 (match_operand:SI 1 "s_register_operand" "l"))
6542 (label_ref (match_operand 2 "" ""))
6547 output_asm_insn (\"tst\\t%0, %1\", operands);
6548 switch (get_attr_length (insn))
6550 case 4: return \"b%d3\\t%l2\";
6551 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6552 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6555 [(set (attr "far_jump")
6557 (eq_attr "length" "8")
6558 (const_string "yes")
6559 (const_string "no")))
6560 (set (attr "length")
6562 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6563 (le (minus (match_dup 2) (pc)) (const_int 256)))
6566 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6567 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6572 (define_insn "*andsi3_cbranch"
6575 (match_operator 5 "equality_operator"
6576 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6577 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6579 (label_ref (match_operand 4 "" ""))
6581 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6582 (and:SI (match_dup 2) (match_dup 3)))
6583 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6587 if (which_alternative == 0)
6588 output_asm_insn (\"and\\t%0, %3\", operands);
6589 else if (which_alternative == 1)
6591 output_asm_insn (\"and\\t%1, %3\", operands);
6592 output_asm_insn (\"mov\\t%0, %1\", operands);
6596 output_asm_insn (\"and\\t%1, %3\", operands);
6597 output_asm_insn (\"str\\t%1, %0\", operands);
6600 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6602 case 4: return \"b%d5\\t%l4\";
6603 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6604 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6607 [(set (attr "far_jump")
6609 (ior (and (eq (symbol_ref ("which_alternative"))
6611 (eq_attr "length" "8"))
6612 (eq_attr "length" "10"))
6613 (const_string "yes")
6614 (const_string "no")))
6615 (set (attr "length")
6617 (eq (symbol_ref ("which_alternative"))
6620 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6621 (le (minus (match_dup 4) (pc)) (const_int 256)))
6624 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6625 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6629 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6630 (le (minus (match_dup 4) (pc)) (const_int 256)))
6633 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6634 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6639 (define_insn "*orrsi3_cbranch_scratch"
6642 (match_operator 4 "equality_operator"
6643 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6644 (match_operand:SI 2 "s_register_operand" "l"))
6646 (label_ref (match_operand 3 "" ""))
6648 (clobber (match_scratch:SI 0 "=l"))]
6652 output_asm_insn (\"orr\\t%0, %2\", operands);
6653 switch (get_attr_length (insn))
6655 case 4: return \"b%d4\\t%l3\";
6656 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6657 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6660 [(set (attr "far_jump")
6662 (eq_attr "length" "8")
6663 (const_string "yes")
6664 (const_string "no")))
6665 (set (attr "length")
6667 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6668 (le (minus (match_dup 3) (pc)) (const_int 256)))
6671 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6672 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6677 (define_insn "*orrsi3_cbranch"
6680 (match_operator 5 "equality_operator"
6681 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6682 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6684 (label_ref (match_operand 4 "" ""))
6686 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6687 (ior:SI (match_dup 2) (match_dup 3)))
6688 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6692 if (which_alternative == 0)
6693 output_asm_insn (\"orr\\t%0, %3\", operands);
6694 else if (which_alternative == 1)
6696 output_asm_insn (\"orr\\t%1, %3\", operands);
6697 output_asm_insn (\"mov\\t%0, %1\", operands);
6701 output_asm_insn (\"orr\\t%1, %3\", operands);
6702 output_asm_insn (\"str\\t%1, %0\", operands);
6705 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6707 case 4: return \"b%d5\\t%l4\";
6708 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6709 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6712 [(set (attr "far_jump")
6714 (ior (and (eq (symbol_ref ("which_alternative"))
6716 (eq_attr "length" "8"))
6717 (eq_attr "length" "10"))
6718 (const_string "yes")
6719 (const_string "no")))
6720 (set (attr "length")
6722 (eq (symbol_ref ("which_alternative"))
6725 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6726 (le (minus (match_dup 4) (pc)) (const_int 256)))
6729 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6730 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6734 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6735 (le (minus (match_dup 4) (pc)) (const_int 256)))
6738 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6739 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6744 (define_insn "*xorsi3_cbranch_scratch"
6747 (match_operator 4 "equality_operator"
6748 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6749 (match_operand:SI 2 "s_register_operand" "l"))
6751 (label_ref (match_operand 3 "" ""))
6753 (clobber (match_scratch:SI 0 "=l"))]
6757 output_asm_insn (\"eor\\t%0, %2\", operands);
6758 switch (get_attr_length (insn))
6760 case 4: return \"b%d4\\t%l3\";
6761 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6762 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6765 [(set (attr "far_jump")
6767 (eq_attr "length" "8")
6768 (const_string "yes")
6769 (const_string "no")))
6770 (set (attr "length")
6772 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6773 (le (minus (match_dup 3) (pc)) (const_int 256)))
6776 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6777 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6782 (define_insn "*xorsi3_cbranch"
6785 (match_operator 5 "equality_operator"
6786 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6787 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6789 (label_ref (match_operand 4 "" ""))
6791 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6792 (xor:SI (match_dup 2) (match_dup 3)))
6793 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6797 if (which_alternative == 0)
6798 output_asm_insn (\"eor\\t%0, %3\", operands);
6799 else if (which_alternative == 1)
6801 output_asm_insn (\"eor\\t%1, %3\", operands);
6802 output_asm_insn (\"mov\\t%0, %1\", operands);
6806 output_asm_insn (\"eor\\t%1, %3\", operands);
6807 output_asm_insn (\"str\\t%1, %0\", operands);
6810 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6812 case 4: return \"b%d5\\t%l4\";
6813 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6814 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6817 [(set (attr "far_jump")
6819 (ior (and (eq (symbol_ref ("which_alternative"))
6821 (eq_attr "length" "8"))
6822 (eq_attr "length" "10"))
6823 (const_string "yes")
6824 (const_string "no")))
6825 (set (attr "length")
6827 (eq (symbol_ref ("which_alternative"))
6830 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6831 (le (minus (match_dup 4) (pc)) (const_int 256)))
6834 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6835 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6839 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6840 (le (minus (match_dup 4) (pc)) (const_int 256)))
6843 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6844 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6849 (define_insn "*bicsi3_cbranch_scratch"
6852 (match_operator 4 "equality_operator"
6853 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6854 (match_operand:SI 1 "s_register_operand" "0"))
6856 (label_ref (match_operand 3 "" ""))
6858 (clobber (match_scratch:SI 0 "=l"))]
6862 output_asm_insn (\"bic\\t%0, %2\", operands);
6863 switch (get_attr_length (insn))
6865 case 4: return \"b%d4\\t%l3\";
6866 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6867 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6870 [(set (attr "far_jump")
6872 (eq_attr "length" "8")
6873 (const_string "yes")
6874 (const_string "no")))
6875 (set (attr "length")
6877 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6878 (le (minus (match_dup 3) (pc)) (const_int 256)))
6881 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6882 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6887 (define_insn "*bicsi3_cbranch"
6890 (match_operator 5 "equality_operator"
6891 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6892 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6894 (label_ref (match_operand 4 "" ""))
6896 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6897 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6898 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6902 if (which_alternative == 0)
6903 output_asm_insn (\"bic\\t%0, %3\", operands);
6904 else if (which_alternative <= 2)
6906 output_asm_insn (\"bic\\t%1, %3\", operands);
6907 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6908 conditions again, since we're only testing for equality. */
6909 output_asm_insn (\"mov\\t%0, %1\", operands);
6913 output_asm_insn (\"bic\\t%1, %3\", operands);
6914 output_asm_insn (\"str\\t%1, %0\", operands);
6917 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6919 case 4: return \"b%d5\\t%l4\";
6920 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6921 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6924 [(set (attr "far_jump")
6926 (ior (and (eq (symbol_ref ("which_alternative"))
6928 (eq_attr "length" "8"))
6929 (eq_attr "length" "10"))
6930 (const_string "yes")
6931 (const_string "no")))
6932 (set (attr "length")
6934 (eq (symbol_ref ("which_alternative"))
6937 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6938 (le (minus (match_dup 4) (pc)) (const_int 256)))
6941 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6942 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6946 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6947 (le (minus (match_dup 4) (pc)) (const_int 256)))
6950 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6951 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6956 (define_insn "*cbranchne_decr1"
6958 (if_then_else (match_operator 3 "equality_operator"
6959 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6961 (label_ref (match_operand 4 "" ""))
6963 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6964 (plus:SI (match_dup 2) (const_int -1)))
6965 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6970 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6972 VOIDmode, operands[2], const1_rtx);
6973 cond[1] = operands[4];
6975 if (which_alternative == 0)
6976 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6977 else if (which_alternative == 1)
6979 /* We must provide an alternative for a hi reg because reload
6980 cannot handle output reloads on a jump instruction, but we
6981 can't subtract into that. Fortunately a mov from lo to hi
6982 does not clobber the condition codes. */
6983 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6984 output_asm_insn (\"mov\\t%0, %1\", operands);
6988 /* Similarly, but the target is memory. */
6989 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6990 output_asm_insn (\"str\\t%1, %0\", operands);
6993 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6996 output_asm_insn (\"b%d0\\t%l1\", cond);
6999 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7000 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7002 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7003 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7007 [(set (attr "far_jump")
7009 (ior (and (eq (symbol_ref ("which_alternative"))
7011 (eq_attr "length" "8"))
7012 (eq_attr "length" "10"))
7013 (const_string "yes")
7014 (const_string "no")))
7015 (set_attr_alternative "length"
7019 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7020 (le (minus (match_dup 4) (pc)) (const_int 256)))
7023 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7024 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7029 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7030 (le (minus (match_dup 4) (pc)) (const_int 256)))
7033 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7034 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7039 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7040 (le (minus (match_dup 4) (pc)) (const_int 256)))
7043 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7044 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7049 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7050 (le (minus (match_dup 4) (pc)) (const_int 256)))
7053 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7054 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7059 (define_insn "*addsi3_cbranch"
7062 (match_operator 4 "comparison_operator"
7064 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7065 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7067 (label_ref (match_operand 5 "" ""))
7070 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7071 (plus:SI (match_dup 2) (match_dup 3)))
7072 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7074 && (GET_CODE (operands[4]) == EQ
7075 || GET_CODE (operands[4]) == NE
7076 || GET_CODE (operands[4]) == GE
7077 || GET_CODE (operands[4]) == LT)"
7083 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7084 cond[1] = operands[2];
7085 cond[2] = operands[3];
7087 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7088 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7090 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7092 if (which_alternative >= 3
7093 && which_alternative < 4)
7094 output_asm_insn (\"mov\\t%0, %1\", operands);
7095 else if (which_alternative >= 4)
7096 output_asm_insn (\"str\\t%1, %0\", operands);
7098 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7101 return \"b%d4\\t%l5\";
7103 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7105 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7109 [(set (attr "far_jump")
7111 (ior (and (lt (symbol_ref ("which_alternative"))
7113 (eq_attr "length" "8"))
7114 (eq_attr "length" "10"))
7115 (const_string "yes")
7116 (const_string "no")))
7117 (set (attr "length")
7119 (lt (symbol_ref ("which_alternative"))
7122 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7123 (le (minus (match_dup 5) (pc)) (const_int 256)))
7126 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7127 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7131 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7132 (le (minus (match_dup 5) (pc)) (const_int 256)))
7135 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7136 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7141 (define_insn "*addsi3_cbranch_scratch"
7144 (match_operator 3 "comparison_operator"
7146 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7147 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7149 (label_ref (match_operand 4 "" ""))
7151 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7153 && (GET_CODE (operands[3]) == EQ
7154 || GET_CODE (operands[3]) == NE
7155 || GET_CODE (operands[3]) == GE
7156 || GET_CODE (operands[3]) == LT)"
7159 switch (which_alternative)
7162 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7165 output_asm_insn (\"cmn\t%1, %2\", operands);
7168 if (INTVAL (operands[2]) < 0)
7169 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7171 output_asm_insn (\"add\t%0, %1, %2\", operands);
7174 if (INTVAL (operands[2]) < 0)
7175 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7177 output_asm_insn (\"add\t%0, %0, %2\", operands);
7181 switch (get_attr_length (insn))
7184 return \"b%d3\\t%l4\";
7186 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7188 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7192 [(set (attr "far_jump")
7194 (eq_attr "length" "8")
7195 (const_string "yes")
7196 (const_string "no")))
7197 (set (attr "length")
7199 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7200 (le (minus (match_dup 4) (pc)) (const_int 256)))
7203 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7204 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7209 (define_insn "*subsi3_cbranch"
7212 (match_operator 4 "comparison_operator"
7214 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7215 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7217 (label_ref (match_operand 5 "" ""))
7219 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7220 (minus:SI (match_dup 2) (match_dup 3)))
7221 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7223 && (GET_CODE (operands[4]) == EQ
7224 || GET_CODE (operands[4]) == NE
7225 || GET_CODE (operands[4]) == GE
7226 || GET_CODE (operands[4]) == LT)"
7229 if (which_alternative == 0)
7230 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7231 else if (which_alternative == 1)
7233 /* We must provide an alternative for a hi reg because reload
7234 cannot handle output reloads on a jump instruction, but we
7235 can't subtract into that. Fortunately a mov from lo to hi
7236 does not clobber the condition codes. */
7237 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7238 output_asm_insn (\"mov\\t%0, %1\", operands);
7242 /* Similarly, but the target is memory. */
7243 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7244 output_asm_insn (\"str\\t%1, %0\", operands);
7247 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7250 return \"b%d4\\t%l5\";
7252 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7254 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7258 [(set (attr "far_jump")
7260 (ior (and (eq (symbol_ref ("which_alternative"))
7262 (eq_attr "length" "8"))
7263 (eq_attr "length" "10"))
7264 (const_string "yes")
7265 (const_string "no")))
7266 (set (attr "length")
7268 (eq (symbol_ref ("which_alternative"))
7271 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7272 (le (minus (match_dup 5) (pc)) (const_int 256)))
7275 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7276 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7280 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7281 (le (minus (match_dup 5) (pc)) (const_int 256)))
7284 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7285 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7290 (define_insn "*subsi3_cbranch_scratch"
7293 (match_operator 0 "arm_comparison_operator"
7294 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7295 (match_operand:SI 2 "nonmemory_operand" "l"))
7297 (label_ref (match_operand 3 "" ""))
7300 && (GET_CODE (operands[0]) == EQ
7301 || GET_CODE (operands[0]) == NE
7302 || GET_CODE (operands[0]) == GE
7303 || GET_CODE (operands[0]) == LT)"
7305 output_asm_insn (\"cmp\\t%1, %2\", operands);
7306 switch (get_attr_length (insn))
7308 case 4: return \"b%d0\\t%l3\";
7309 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7310 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7313 [(set (attr "far_jump")
7315 (eq_attr "length" "8")
7316 (const_string "yes")
7317 (const_string "no")))
7318 (set (attr "length")
7320 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7321 (le (minus (match_dup 3) (pc)) (const_int 256)))
7324 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7325 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7330 ;; Comparison and test insns
7332 (define_expand "cmpsi"
7333 [(match_operand:SI 0 "s_register_operand" "")
7334 (match_operand:SI 1 "arm_add_operand" "")]
7337 arm_compare_op0 = operands[0];
7338 arm_compare_op1 = operands[1];
7343 (define_expand "cmpsf"
7344 [(match_operand:SF 0 "s_register_operand" "")
7345 (match_operand:SF 1 "arm_float_compare_operand" "")]
7346 "TARGET_32BIT && TARGET_HARD_FLOAT"
7348 arm_compare_op0 = operands[0];
7349 arm_compare_op1 = operands[1];
7354 (define_expand "cmpdf"
7355 [(match_operand:DF 0 "s_register_operand" "")
7356 (match_operand:DF 1 "arm_float_compare_operand" "")]
7357 "TARGET_32BIT && TARGET_HARD_FLOAT"
7359 arm_compare_op0 = operands[0];
7360 arm_compare_op1 = operands[1];
7365 (define_insn "*arm_cmpsi_insn"
7366 [(set (reg:CC CC_REGNUM)
7367 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7368 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7373 [(set_attr "conds" "set")]
7376 (define_insn "*arm_cmpsi_shiftsi"
7377 [(set (reg:CC CC_REGNUM)
7378 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7379 (match_operator:SI 3 "shift_operator"
7380 [(match_operand:SI 1 "s_register_operand" "r")
7381 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7384 [(set_attr "conds" "set")
7385 (set_attr "shift" "1")
7386 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7387 (const_string "alu_shift")
7388 (const_string "alu_shift_reg")))]
7391 (define_insn "*arm_cmpsi_shiftsi_swp"
7392 [(set (reg:CC_SWP CC_REGNUM)
7393 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7394 [(match_operand:SI 1 "s_register_operand" "r")
7395 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7396 (match_operand:SI 0 "s_register_operand" "r")))]
7399 [(set_attr "conds" "set")
7400 (set_attr "shift" "1")
7401 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7402 (const_string "alu_shift")
7403 (const_string "alu_shift_reg")))]
7406 (define_insn "*arm_cmpsi_negshiftsi_si"
7407 [(set (reg:CC_Z CC_REGNUM)
7409 (neg:SI (match_operator:SI 1 "shift_operator"
7410 [(match_operand:SI 2 "s_register_operand" "r")
7411 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7412 (match_operand:SI 0 "s_register_operand" "r")))]
7415 [(set_attr "conds" "set")
7416 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7417 (const_string "alu_shift")
7418 (const_string "alu_shift_reg")))]
7421 ;; Cirrus SF compare instruction
7422 (define_insn "*cirrus_cmpsf"
7423 [(set (reg:CCFP CC_REGNUM)
7424 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7425 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7426 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7427 "cfcmps%?\\tr15, %V0, %V1"
7428 [(set_attr "type" "mav_farith")
7429 (set_attr "cirrus" "compare")]
7432 ;; Cirrus DF compare instruction
7433 (define_insn "*cirrus_cmpdf"
7434 [(set (reg:CCFP CC_REGNUM)
7435 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7436 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7437 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7438 "cfcmpd%?\\tr15, %V0, %V1"
7439 [(set_attr "type" "mav_farith")
7440 (set_attr "cirrus" "compare")]
7443 ;; Cirrus DI compare instruction
7444 (define_expand "cmpdi"
7445 [(match_operand:DI 0 "cirrus_fp_register" "")
7446 (match_operand:DI 1 "cirrus_fp_register" "")]
7447 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7449 arm_compare_op0 = operands[0];
7450 arm_compare_op1 = operands[1];
7454 (define_insn "*cirrus_cmpdi"
7455 [(set (reg:CC CC_REGNUM)
7456 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7457 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7458 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7459 "cfcmp64%?\\tr15, %V0, %V1"
7460 [(set_attr "type" "mav_farith")
7461 (set_attr "cirrus" "compare")]
7464 ; This insn allows redundant compares to be removed by cse, nothing should
7465 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7466 ; is deleted later on. The match_dup will match the mode here, so that
7467 ; mode changes of the condition codes aren't lost by this even though we don't
7468 ; specify what they are.
7470 (define_insn "*deleted_compare"
7471 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7473 "\\t%@ deleted compare"
7474 [(set_attr "conds" "set")
7475 (set_attr "length" "0")]
7479 ;; Conditional branch insns
7481 (define_expand "beq"
7483 (if_then_else (eq (match_dup 1) (const_int 0))
7484 (label_ref (match_operand 0 "" ""))
7487 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7490 (define_expand "bne"
7492 (if_then_else (ne (match_dup 1) (const_int 0))
7493 (label_ref (match_operand 0 "" ""))
7496 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7499 (define_expand "bgt"
7501 (if_then_else (gt (match_dup 1) (const_int 0))
7502 (label_ref (match_operand 0 "" ""))
7505 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7508 (define_expand "ble"
7510 (if_then_else (le (match_dup 1) (const_int 0))
7511 (label_ref (match_operand 0 "" ""))
7514 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7517 (define_expand "bge"
7519 (if_then_else (ge (match_dup 1) (const_int 0))
7520 (label_ref (match_operand 0 "" ""))
7523 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7526 (define_expand "blt"
7528 (if_then_else (lt (match_dup 1) (const_int 0))
7529 (label_ref (match_operand 0 "" ""))
7532 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7535 (define_expand "bgtu"
7537 (if_then_else (gtu (match_dup 1) (const_int 0))
7538 (label_ref (match_operand 0 "" ""))
7541 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7544 (define_expand "bleu"
7546 (if_then_else (leu (match_dup 1) (const_int 0))
7547 (label_ref (match_operand 0 "" ""))
7550 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7553 (define_expand "bgeu"
7555 (if_then_else (geu (match_dup 1) (const_int 0))
7556 (label_ref (match_operand 0 "" ""))
7559 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7562 (define_expand "bltu"
7564 (if_then_else (ltu (match_dup 1) (const_int 0))
7565 (label_ref (match_operand 0 "" ""))
7568 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7571 (define_expand "bunordered"
7573 (if_then_else (unordered (match_dup 1) (const_int 0))
7574 (label_ref (match_operand 0 "" ""))
7576 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7577 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7581 (define_expand "bordered"
7583 (if_then_else (ordered (match_dup 1) (const_int 0))
7584 (label_ref (match_operand 0 "" ""))
7586 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7587 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7591 (define_expand "bungt"
7593 (if_then_else (ungt (match_dup 1) (const_int 0))
7594 (label_ref (match_operand 0 "" ""))
7596 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7597 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7600 (define_expand "bunlt"
7602 (if_then_else (unlt (match_dup 1) (const_int 0))
7603 (label_ref (match_operand 0 "" ""))
7605 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7606 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7609 (define_expand "bunge"
7611 (if_then_else (unge (match_dup 1) (const_int 0))
7612 (label_ref (match_operand 0 "" ""))
7614 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7615 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7618 (define_expand "bunle"
7620 (if_then_else (unle (match_dup 1) (const_int 0))
7621 (label_ref (match_operand 0 "" ""))
7623 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7624 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7627 ;; The following two patterns need two branch instructions, since there is
7628 ;; no single instruction that will handle all cases.
7629 (define_expand "buneq"
7631 (if_then_else (uneq (match_dup 1) (const_int 0))
7632 (label_ref (match_operand 0 "" ""))
7634 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7635 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7638 (define_expand "bltgt"
7640 (if_then_else (ltgt (match_dup 1) (const_int 0))
7641 (label_ref (match_operand 0 "" ""))
7643 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7644 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7648 ;; Patterns to match conditional branch insns.
7651 ; Special pattern to match UNEQ.
7652 (define_insn "*arm_buneq"
7654 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7655 (label_ref (match_operand 0 "" ""))
7657 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7659 gcc_assert (!arm_ccfsm_state);
7661 return \"bvs\\t%l0\;beq\\t%l0\";
7663 [(set_attr "conds" "jump_clob")
7664 (set_attr "length" "8")]
7667 ; Special pattern to match LTGT.
7668 (define_insn "*arm_bltgt"
7670 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7671 (label_ref (match_operand 0 "" ""))
7673 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7675 gcc_assert (!arm_ccfsm_state);
7677 return \"bmi\\t%l0\;bgt\\t%l0\";
7679 [(set_attr "conds" "jump_clob")
7680 (set_attr "length" "8")]
7683 (define_insn "*arm_cond_branch"
7685 (if_then_else (match_operator 1 "arm_comparison_operator"
7686 [(match_operand 2 "cc_register" "") (const_int 0)])
7687 (label_ref (match_operand 0 "" ""))
7691 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7693 arm_ccfsm_state += 2;
7696 return \"b%d1\\t%l0\";
7698 [(set_attr "conds" "use")
7699 (set_attr "type" "branch")]
7702 ; Special pattern to match reversed UNEQ.
7703 (define_insn "*arm_buneq_reversed"
7705 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7707 (label_ref (match_operand 0 "" ""))))]
7708 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7710 gcc_assert (!arm_ccfsm_state);
7712 return \"bmi\\t%l0\;bgt\\t%l0\";
7714 [(set_attr "conds" "jump_clob")
7715 (set_attr "length" "8")]
7718 ; Special pattern to match reversed LTGT.
7719 (define_insn "*arm_bltgt_reversed"
7721 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7723 (label_ref (match_operand 0 "" ""))))]
7724 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7726 gcc_assert (!arm_ccfsm_state);
7728 return \"bvs\\t%l0\;beq\\t%l0\";
7730 [(set_attr "conds" "jump_clob")
7731 (set_attr "length" "8")]
7734 (define_insn "*arm_cond_branch_reversed"
7736 (if_then_else (match_operator 1 "arm_comparison_operator"
7737 [(match_operand 2 "cc_register" "") (const_int 0)])
7739 (label_ref (match_operand 0 "" ""))))]
7742 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7744 arm_ccfsm_state += 2;
7747 return \"b%D1\\t%l0\";
7749 [(set_attr "conds" "use")
7750 (set_attr "type" "branch")]
7757 (define_expand "seq"
7758 [(set (match_operand:SI 0 "s_register_operand" "")
7759 (eq:SI (match_dup 1) (const_int 0)))]
7761 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7764 (define_expand "sne"
7765 [(set (match_operand:SI 0 "s_register_operand" "")
7766 (ne:SI (match_dup 1) (const_int 0)))]
7768 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7771 (define_expand "sgt"
7772 [(set (match_operand:SI 0 "s_register_operand" "")
7773 (gt:SI (match_dup 1) (const_int 0)))]
7775 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7778 (define_expand "sle"
7779 [(set (match_operand:SI 0 "s_register_operand" "")
7780 (le:SI (match_dup 1) (const_int 0)))]
7782 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7785 (define_expand "sge"
7786 [(set (match_operand:SI 0 "s_register_operand" "")
7787 (ge:SI (match_dup 1) (const_int 0)))]
7789 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7792 (define_expand "slt"
7793 [(set (match_operand:SI 0 "s_register_operand" "")
7794 (lt:SI (match_dup 1) (const_int 0)))]
7796 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7799 (define_expand "sgtu"
7800 [(set (match_operand:SI 0 "s_register_operand" "")
7801 (gtu:SI (match_dup 1) (const_int 0)))]
7803 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7806 (define_expand "sleu"
7807 [(set (match_operand:SI 0 "s_register_operand" "")
7808 (leu:SI (match_dup 1) (const_int 0)))]
7810 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7813 (define_expand "sgeu"
7814 [(set (match_operand:SI 0 "s_register_operand" "")
7815 (geu:SI (match_dup 1) (const_int 0)))]
7817 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7820 (define_expand "sltu"
7821 [(set (match_operand:SI 0 "s_register_operand" "")
7822 (ltu:SI (match_dup 1) (const_int 0)))]
7824 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7827 (define_expand "sunordered"
7828 [(set (match_operand:SI 0 "s_register_operand" "")
7829 (unordered:SI (match_dup 1) (const_int 0)))]
7830 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7831 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7835 (define_expand "sordered"
7836 [(set (match_operand:SI 0 "s_register_operand" "")
7837 (ordered:SI (match_dup 1) (const_int 0)))]
7838 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7839 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7843 (define_expand "sungt"
7844 [(set (match_operand:SI 0 "s_register_operand" "")
7845 (ungt:SI (match_dup 1) (const_int 0)))]
7846 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7847 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7851 (define_expand "sunge"
7852 [(set (match_operand:SI 0 "s_register_operand" "")
7853 (unge:SI (match_dup 1) (const_int 0)))]
7854 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7855 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7859 (define_expand "sunlt"
7860 [(set (match_operand:SI 0 "s_register_operand" "")
7861 (unlt:SI (match_dup 1) (const_int 0)))]
7862 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7863 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7867 (define_expand "sunle"
7868 [(set (match_operand:SI 0 "s_register_operand" "")
7869 (unle:SI (match_dup 1) (const_int 0)))]
7870 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7871 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7875 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7876 ;;; simple ARM instructions.
7878 ; (define_expand "suneq"
7879 ; [(set (match_operand:SI 0 "s_register_operand" "")
7880 ; (uneq:SI (match_dup 1) (const_int 0)))]
7881 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7882 ; "gcc_unreachable ();"
7885 ; (define_expand "sltgt"
7886 ; [(set (match_operand:SI 0 "s_register_operand" "")
7887 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7888 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7889 ; "gcc_unreachable ();"
7892 (define_insn "*mov_scc"
7893 [(set (match_operand:SI 0 "s_register_operand" "=r")
7894 (match_operator:SI 1 "arm_comparison_operator"
7895 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7897 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7898 [(set_attr "conds" "use")
7899 (set_attr "length" "8")]
7902 (define_insn "*mov_negscc"
7903 [(set (match_operand:SI 0 "s_register_operand" "=r")
7904 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7905 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7907 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7908 [(set_attr "conds" "use")
7909 (set_attr "length" "8")]
7912 (define_insn "*mov_notscc"
7913 [(set (match_operand:SI 0 "s_register_operand" "=r")
7914 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7915 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7917 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7918 [(set_attr "conds" "use")
7919 (set_attr "length" "8")]
7922 (define_expand "cstoresi4"
7923 [(set (match_operand:SI 0 "s_register_operand" "")
7924 (match_operator:SI 1 "arm_comparison_operator"
7925 [(match_operand:SI 2 "s_register_operand" "")
7926 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7929 rtx op3, scratch, scratch2;
7931 if (operands[3] == const0_rtx)
7933 switch (GET_CODE (operands[1]))
7936 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7940 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7944 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7945 NULL_RTX, 0, OPTAB_WIDEN);
7946 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7947 NULL_RTX, 0, OPTAB_WIDEN);
7948 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7949 operands[0], 1, OPTAB_WIDEN);
7953 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7955 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7956 NULL_RTX, 1, OPTAB_WIDEN);
7960 scratch = expand_binop (SImode, ashr_optab, operands[2],
7961 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7962 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7963 NULL_RTX, 0, OPTAB_WIDEN);
7964 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7968 /* LT is handled by generic code. No need for unsigned with 0. */
7975 switch (GET_CODE (operands[1]))
7978 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7979 NULL_RTX, 0, OPTAB_WIDEN);
7980 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7984 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7985 NULL_RTX, 0, OPTAB_WIDEN);
7986 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7990 op3 = force_reg (SImode, operands[3]);
7992 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7993 NULL_RTX, 1, OPTAB_WIDEN);
7994 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7995 NULL_RTX, 0, OPTAB_WIDEN);
7996 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8002 if (!thumb1_cmp_operand (op3, SImode))
8003 op3 = force_reg (SImode, op3);
8004 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8005 NULL_RTX, 0, OPTAB_WIDEN);
8006 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8007 NULL_RTX, 1, OPTAB_WIDEN);
8008 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8013 op3 = force_reg (SImode, operands[3]);
8014 scratch = force_reg (SImode, const0_rtx);
8015 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8021 if (!thumb1_cmp_operand (op3, SImode))
8022 op3 = force_reg (SImode, op3);
8023 scratch = force_reg (SImode, const0_rtx);
8024 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8030 if (!thumb1_cmp_operand (op3, SImode))
8031 op3 = force_reg (SImode, op3);
8032 scratch = gen_reg_rtx (SImode);
8033 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8034 emit_insn (gen_negsi2 (operands[0], scratch));
8038 op3 = force_reg (SImode, operands[3]);
8039 scratch = gen_reg_rtx (SImode);
8040 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8041 emit_insn (gen_negsi2 (operands[0], scratch));
8044 /* No good sequences for GT, LT. */
8051 (define_expand "cstoresi_eq0_thumb1"
8053 [(set (match_operand:SI 0 "s_register_operand" "")
8054 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8056 (clobber (match_dup:SI 2))])]
8058 "operands[2] = gen_reg_rtx (SImode);"
8061 (define_expand "cstoresi_ne0_thumb1"
8063 [(set (match_operand:SI 0 "s_register_operand" "")
8064 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8066 (clobber (match_dup:SI 2))])]
8068 "operands[2] = gen_reg_rtx (SImode);"
8071 (define_insn "*cstoresi_eq0_thumb1_insn"
8072 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8073 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8075 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8078 neg\\t%0, %1\;adc\\t%0, %0, %1
8079 neg\\t%2, %1\;adc\\t%0, %1, %2"
8080 [(set_attr "length" "4")]
8083 (define_insn "*cstoresi_ne0_thumb1_insn"
8084 [(set (match_operand:SI 0 "s_register_operand" "=l")
8085 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8087 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8089 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8090 [(set_attr "length" "4")]
8093 (define_insn "cstoresi_nltu_thumb1"
8094 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8095 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8096 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8098 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8099 [(set_attr "length" "4")]
8102 ;; Used as part of the expansion of thumb les sequence.
8103 (define_insn "thumb1_addsi3_addgeu"
8104 [(set (match_operand:SI 0 "s_register_operand" "=l")
8105 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8106 (match_operand:SI 2 "s_register_operand" "l"))
8107 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8108 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8110 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8111 [(set_attr "length" "4")]
8115 ;; Conditional move insns
8117 (define_expand "movsicc"
8118 [(set (match_operand:SI 0 "s_register_operand" "")
8119 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8120 (match_operand:SI 2 "arm_not_operand" "")
8121 (match_operand:SI 3 "arm_not_operand" "")))]
8125 enum rtx_code code = GET_CODE (operands[1]);
8128 if (code == UNEQ || code == LTGT)
8131 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8132 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8136 (define_expand "movsfcc"
8137 [(set (match_operand:SF 0 "s_register_operand" "")
8138 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8139 (match_operand:SF 2 "s_register_operand" "")
8140 (match_operand:SF 3 "nonmemory_operand" "")))]
8144 enum rtx_code code = GET_CODE (operands[1]);
8147 if (code == UNEQ || code == LTGT)
8150 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8151 Otherwise, ensure it is a valid FP add operand */
8152 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8153 || (!arm_float_add_operand (operands[3], SFmode)))
8154 operands[3] = force_reg (SFmode, operands[3]);
8156 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8157 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8161 (define_expand "movdfcc"
8162 [(set (match_operand:DF 0 "s_register_operand" "")
8163 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8164 (match_operand:DF 2 "s_register_operand" "")
8165 (match_operand:DF 3 "arm_float_add_operand" "")))]
8166 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8169 enum rtx_code code = GET_CODE (operands[1]);
8172 if (code == UNEQ || code == LTGT)
8175 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8176 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8180 (define_insn "*movsicc_insn"
8181 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8183 (match_operator 3 "arm_comparison_operator"
8184 [(match_operand 4 "cc_register" "") (const_int 0)])
8185 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8186 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8193 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8194 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8195 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8196 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8197 [(set_attr "length" "4,4,4,4,8,8,8,8")
8198 (set_attr "conds" "use")]
8201 (define_insn "*movsfcc_soft_insn"
8202 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8203 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8204 [(match_operand 4 "cc_register" "") (const_int 0)])
8205 (match_operand:SF 1 "s_register_operand" "0,r")
8206 (match_operand:SF 2 "s_register_operand" "r,0")))]
8207 "TARGET_ARM && TARGET_SOFT_FLOAT"
8211 [(set_attr "conds" "use")]
8215 ;; Jump and linkage insns
8217 (define_expand "jump"
8219 (label_ref (match_operand 0 "" "")))]
8224 (define_insn "*arm_jump"
8226 (label_ref (match_operand 0 "" "")))]
8230 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8232 arm_ccfsm_state += 2;
8235 return \"b%?\\t%l0\";
8238 [(set_attr "predicable" "yes")]
8241 (define_insn "*thumb_jump"
8243 (label_ref (match_operand 0 "" "")))]
8246 if (get_attr_length (insn) == 2)
8248 return \"bl\\t%l0\\t%@ far jump\";
8250 [(set (attr "far_jump")
8252 (eq_attr "length" "4")
8253 (const_string "yes")
8254 (const_string "no")))
8255 (set (attr "length")
8257 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8258 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8263 (define_expand "call"
8264 [(parallel [(call (match_operand 0 "memory_operand" "")
8265 (match_operand 1 "general_operand" ""))
8266 (use (match_operand 2 "" ""))
8267 (clobber (reg:SI LR_REGNUM))])]
8273 /* In an untyped call, we can get NULL for operand 2. */
8274 if (operands[2] == NULL_RTX)
8275 operands[2] = const0_rtx;
8277 /* Decide if we should generate indirect calls by loading the
8278 32-bit address of the callee into a register before performing the
8280 callee = XEXP (operands[0], 0);
8281 if (GET_CODE (callee) == SYMBOL_REF
8282 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8284 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8286 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8287 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8292 (define_expand "call_internal"
8293 [(parallel [(call (match_operand 0 "memory_operand" "")
8294 (match_operand 1 "general_operand" ""))
8295 (use (match_operand 2 "" ""))
8296 (clobber (reg:SI LR_REGNUM))])])
8298 (define_insn "*call_reg_armv5"
8299 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8300 (match_operand 1 "" ""))
8301 (use (match_operand 2 "" ""))
8302 (clobber (reg:SI LR_REGNUM))]
8303 "TARGET_ARM && arm_arch5"
8305 [(set_attr "type" "call")]
8308 (define_insn "*call_reg_arm"
8309 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8310 (match_operand 1 "" ""))
8311 (use (match_operand 2 "" ""))
8312 (clobber (reg:SI LR_REGNUM))]
8313 "TARGET_ARM && !arm_arch5"
8315 return output_call (operands);
8317 ;; length is worst case, normally it is only two
8318 [(set_attr "length" "12")
8319 (set_attr "type" "call")]
8322 (define_insn "*call_mem"
8323 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8324 (match_operand 1 "" ""))
8325 (use (match_operand 2 "" ""))
8326 (clobber (reg:SI LR_REGNUM))]
8329 return output_call_mem (operands);
8331 [(set_attr "length" "12")
8332 (set_attr "type" "call")]
8335 (define_insn "*call_reg_thumb1_v5"
8336 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8337 (match_operand 1 "" ""))
8338 (use (match_operand 2 "" ""))
8339 (clobber (reg:SI LR_REGNUM))]
8340 "TARGET_THUMB1 && arm_arch5"
8342 [(set_attr "length" "2")
8343 (set_attr "type" "call")]
8346 (define_insn "*call_reg_thumb1"
8347 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8348 (match_operand 1 "" ""))
8349 (use (match_operand 2 "" ""))
8350 (clobber (reg:SI LR_REGNUM))]
8351 "TARGET_THUMB1 && !arm_arch5"
8354 if (!TARGET_CALLER_INTERWORKING)
8355 return thumb_call_via_reg (operands[0]);
8356 else if (operands[1] == const0_rtx)
8357 return \"bl\\t%__interwork_call_via_%0\";
8358 else if (frame_pointer_needed)
8359 return \"bl\\t%__interwork_r7_call_via_%0\";
8361 return \"bl\\t%__interwork_r11_call_via_%0\";
8363 [(set_attr "type" "call")]
8366 (define_expand "call_value"
8367 [(parallel [(set (match_operand 0 "" "")
8368 (call (match_operand 1 "memory_operand" "")
8369 (match_operand 2 "general_operand" "")))
8370 (use (match_operand 3 "" ""))
8371 (clobber (reg:SI LR_REGNUM))])]
8377 /* In an untyped call, we can get NULL for operand 2. */
8378 if (operands[3] == 0)
8379 operands[3] = const0_rtx;
8381 /* Decide if we should generate indirect calls by loading the
8382 32-bit address of the callee into a register before performing the
8384 callee = XEXP (operands[1], 0);
8385 if (GET_CODE (callee) == SYMBOL_REF
8386 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8388 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8390 pat = gen_call_value_internal (operands[0], operands[1],
8391 operands[2], operands[3]);
8392 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8397 (define_expand "call_value_internal"
8398 [(parallel [(set (match_operand 0 "" "")
8399 (call (match_operand 1 "memory_operand" "")
8400 (match_operand 2 "general_operand" "")))
8401 (use (match_operand 3 "" ""))
8402 (clobber (reg:SI LR_REGNUM))])])
8404 (define_insn "*call_value_reg_armv5"
8405 [(set (match_operand 0 "" "")
8406 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8407 (match_operand 2 "" "")))
8408 (use (match_operand 3 "" ""))
8409 (clobber (reg:SI LR_REGNUM))]
8410 "TARGET_ARM && arm_arch5"
8412 [(set_attr "type" "call")]
8415 (define_insn "*call_value_reg_arm"
8416 [(set (match_operand 0 "" "")
8417 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8418 (match_operand 2 "" "")))
8419 (use (match_operand 3 "" ""))
8420 (clobber (reg:SI LR_REGNUM))]
8421 "TARGET_ARM && !arm_arch5"
8423 return output_call (&operands[1]);
8425 [(set_attr "length" "12")
8426 (set_attr "type" "call")]
8429 (define_insn "*call_value_mem"
8430 [(set (match_operand 0 "" "")
8431 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8432 (match_operand 2 "" "")))
8433 (use (match_operand 3 "" ""))
8434 (clobber (reg:SI LR_REGNUM))]
8435 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8437 return output_call_mem (&operands[1]);
8439 [(set_attr "length" "12")
8440 (set_attr "type" "call")]
8443 (define_insn "*call_value_reg_thumb1_v5"
8444 [(set (match_operand 0 "" "")
8445 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8446 (match_operand 2 "" "")))
8447 (use (match_operand 3 "" ""))
8448 (clobber (reg:SI LR_REGNUM))]
8449 "TARGET_THUMB1 && arm_arch5"
8451 [(set_attr "length" "2")
8452 (set_attr "type" "call")]
8455 (define_insn "*call_value_reg_thumb1"
8456 [(set (match_operand 0 "" "")
8457 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8458 (match_operand 2 "" "")))
8459 (use (match_operand 3 "" ""))
8460 (clobber (reg:SI LR_REGNUM))]
8461 "TARGET_THUMB1 && !arm_arch5"
8464 if (!TARGET_CALLER_INTERWORKING)
8465 return thumb_call_via_reg (operands[1]);
8466 else if (operands[2] == const0_rtx)
8467 return \"bl\\t%__interwork_call_via_%1\";
8468 else if (frame_pointer_needed)
8469 return \"bl\\t%__interwork_r7_call_via_%1\";
8471 return \"bl\\t%__interwork_r11_call_via_%1\";
8473 [(set_attr "type" "call")]
8476 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8477 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8479 (define_insn "*call_symbol"
8480 [(call (mem:SI (match_operand:SI 0 "" ""))
8481 (match_operand 1 "" ""))
8482 (use (match_operand 2 "" ""))
8483 (clobber (reg:SI LR_REGNUM))]
8485 && (GET_CODE (operands[0]) == SYMBOL_REF)
8486 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8489 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8491 [(set_attr "type" "call")]
8494 (define_insn "*call_value_symbol"
8495 [(set (match_operand 0 "" "")
8496 (call (mem:SI (match_operand:SI 1 "" ""))
8497 (match_operand:SI 2 "" "")))
8498 (use (match_operand 3 "" ""))
8499 (clobber (reg:SI LR_REGNUM))]
8501 && (GET_CODE (operands[1]) == SYMBOL_REF)
8502 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8505 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8507 [(set_attr "type" "call")]
8510 (define_insn "*call_insn"
8511 [(call (mem:SI (match_operand:SI 0 "" ""))
8512 (match_operand:SI 1 "" ""))
8513 (use (match_operand 2 "" ""))
8514 (clobber (reg:SI LR_REGNUM))]
8516 && GET_CODE (operands[0]) == SYMBOL_REF
8517 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8519 [(set_attr "length" "4")
8520 (set_attr "type" "call")]
8523 (define_insn "*call_value_insn"
8524 [(set (match_operand 0 "" "")
8525 (call (mem:SI (match_operand 1 "" ""))
8526 (match_operand 2 "" "")))
8527 (use (match_operand 3 "" ""))
8528 (clobber (reg:SI LR_REGNUM))]
8530 && GET_CODE (operands[1]) == SYMBOL_REF
8531 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8533 [(set_attr "length" "4")
8534 (set_attr "type" "call")]
8537 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8538 (define_expand "sibcall"
8539 [(parallel [(call (match_operand 0 "memory_operand" "")
8540 (match_operand 1 "general_operand" ""))
8542 (use (match_operand 2 "" ""))])]
8546 if (operands[2] == NULL_RTX)
8547 operands[2] = const0_rtx;
8551 (define_expand "sibcall_value"
8552 [(parallel [(set (match_operand 0 "" "")
8553 (call (match_operand 1 "memory_operand" "")
8554 (match_operand 2 "general_operand" "")))
8556 (use (match_operand 3 "" ""))])]
8560 if (operands[3] == NULL_RTX)
8561 operands[3] = const0_rtx;
8565 (define_insn "*sibcall_insn"
8566 [(call (mem:SI (match_operand:SI 0 "" "X"))
8567 (match_operand 1 "" ""))
8569 (use (match_operand 2 "" ""))]
8570 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8572 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8574 [(set_attr "type" "call")]
8577 (define_insn "*sibcall_value_insn"
8578 [(set (match_operand 0 "" "")
8579 (call (mem:SI (match_operand:SI 1 "" "X"))
8580 (match_operand 2 "" "")))
8582 (use (match_operand 3 "" ""))]
8583 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8585 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8587 [(set_attr "type" "call")]
8590 ;; Often the return insn will be the same as loading from memory, so set attr
8591 (define_insn "return"
8593 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8596 if (arm_ccfsm_state == 2)
8598 arm_ccfsm_state += 2;
8601 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8603 [(set_attr "type" "load1")
8604 (set_attr "length" "12")
8605 (set_attr "predicable" "yes")]
8608 (define_insn "*cond_return"
8610 (if_then_else (match_operator 0 "arm_comparison_operator"
8611 [(match_operand 1 "cc_register" "") (const_int 0)])
8614 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8617 if (arm_ccfsm_state == 2)
8619 arm_ccfsm_state += 2;
8622 return output_return_instruction (operands[0], TRUE, FALSE);
8624 [(set_attr "conds" "use")
8625 (set_attr "length" "12")
8626 (set_attr "type" "load1")]
8629 (define_insn "*cond_return_inverted"
8631 (if_then_else (match_operator 0 "arm_comparison_operator"
8632 [(match_operand 1 "cc_register" "") (const_int 0)])
8635 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8638 if (arm_ccfsm_state == 2)
8640 arm_ccfsm_state += 2;
8643 return output_return_instruction (operands[0], TRUE, TRUE);
8645 [(set_attr "conds" "use")
8646 (set_attr "length" "12")
8647 (set_attr "type" "load1")]
8650 ;; Generate a sequence of instructions to determine if the processor is
8651 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8654 (define_expand "return_addr_mask"
8656 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8658 (set (match_operand:SI 0 "s_register_operand" "")
8659 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8661 (const_int 67108860)))] ; 0x03fffffc
8664 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8667 (define_insn "*check_arch2"
8668 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8669 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8672 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8673 [(set_attr "length" "8")
8674 (set_attr "conds" "set")]
8677 ;; Call subroutine returning any type.
8679 (define_expand "untyped_call"
8680 [(parallel [(call (match_operand 0 "" "")
8682 (match_operand 1 "" "")
8683 (match_operand 2 "" "")])]
8688 rtx par = gen_rtx_PARALLEL (VOIDmode,
8689 rtvec_alloc (XVECLEN (operands[2], 0)));
8690 rtx addr = gen_reg_rtx (Pmode);
8694 emit_move_insn (addr, XEXP (operands[1], 0));
8695 mem = change_address (operands[1], BLKmode, addr);
8697 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8699 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8701 /* Default code only uses r0 as a return value, but we could
8702 be using anything up to 4 registers. */
8703 if (REGNO (src) == R0_REGNUM)
8704 src = gen_rtx_REG (TImode, R0_REGNUM);
8706 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8708 size += GET_MODE_SIZE (GET_MODE (src));
8711 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8716 for (i = 0; i < XVECLEN (par, 0); i++)
8718 HOST_WIDE_INT offset = 0;
8719 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8722 emit_move_insn (addr, plus_constant (addr, size));
8724 mem = change_address (mem, GET_MODE (reg), NULL);
8725 if (REGNO (reg) == R0_REGNUM)
8727 /* On thumb we have to use a write-back instruction. */
8728 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8729 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8730 size = TARGET_ARM ? 16 : 0;
8734 emit_move_insn (mem, reg);
8735 size = GET_MODE_SIZE (GET_MODE (reg));
8739 /* The optimizer does not know that the call sets the function value
8740 registers we stored in the result block. We avoid problems by
8741 claiming that all hard registers are used and clobbered at this
8743 emit_insn (gen_blockage ());
8749 (define_expand "untyped_return"
8750 [(match_operand:BLK 0 "memory_operand" "")
8751 (match_operand 1 "" "")]
8756 rtx addr = gen_reg_rtx (Pmode);
8760 emit_move_insn (addr, XEXP (operands[0], 0));
8761 mem = change_address (operands[0], BLKmode, addr);
8763 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8765 HOST_WIDE_INT offset = 0;
8766 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8769 emit_move_insn (addr, plus_constant (addr, size));
8771 mem = change_address (mem, GET_MODE (reg), NULL);
8772 if (REGNO (reg) == R0_REGNUM)
8774 /* On thumb we have to use a write-back instruction. */
8775 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8776 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8777 size = TARGET_ARM ? 16 : 0;
8781 emit_move_insn (reg, mem);
8782 size = GET_MODE_SIZE (GET_MODE (reg));
8786 /* Emit USE insns before the return. */
8787 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8788 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8790 /* Construct the return. */
8791 expand_naked_return ();
8797 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8798 ;; all of memory. This blocks insns from being moved across this point.
8800 (define_insn "blockage"
8801 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8804 [(set_attr "length" "0")
8805 (set_attr "type" "block")]
8808 (define_expand "casesi"
8809 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8810 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8811 (match_operand:SI 2 "const_int_operand" "") ; total range
8812 (match_operand:SI 3 "" "") ; table label
8813 (match_operand:SI 4 "" "")] ; Out of range label
8818 if (operands[1] != const0_rtx)
8820 reg = gen_reg_rtx (SImode);
8822 emit_insn (gen_addsi3 (reg, operands[0],
8823 GEN_INT (-INTVAL (operands[1]))));
8827 if (!const_ok_for_arm (INTVAL (operands[2])))
8828 operands[2] = force_reg (SImode, operands[2]);
8832 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8833 operands[3], operands[4]));
8837 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8838 operands[2], operands[3], operands[4]));
8842 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8843 operands[3], operands[4]));
8849 ;; The USE in this pattern is needed to tell flow analysis that this is
8850 ;; a CASESI insn. It has no other purpose.
8851 (define_insn "arm_casesi_internal"
8852 [(parallel [(set (pc)
8854 (leu (match_operand:SI 0 "s_register_operand" "r")
8855 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8856 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8857 (label_ref (match_operand 2 "" ""))))
8858 (label_ref (match_operand 3 "" ""))))
8859 (clobber (reg:CC CC_REGNUM))
8860 (use (label_ref (match_dup 2)))])]
8864 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8865 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8867 [(set_attr "conds" "clob")
8868 (set_attr "length" "12")]
8871 (define_expand "indirect_jump"
8873 (match_operand:SI 0 "s_register_operand" ""))]
8876 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8877 address and use bx. */
8881 tmp = gen_reg_rtx (SImode);
8882 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8888 ;; NB Never uses BX.
8889 (define_insn "*arm_indirect_jump"
8891 (match_operand:SI 0 "s_register_operand" "r"))]
8893 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8894 [(set_attr "predicable" "yes")]
8897 (define_insn "*load_indirect_jump"
8899 (match_operand:SI 0 "memory_operand" "m"))]
8901 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8902 [(set_attr "type" "load1")
8903 (set_attr "pool_range" "4096")
8904 (set_attr "neg_pool_range" "4084")
8905 (set_attr "predicable" "yes")]
8908 ;; NB Never uses BX.
8909 (define_insn "*thumb1_indirect_jump"
8911 (match_operand:SI 0 "register_operand" "l*r"))]
8914 [(set_attr "conds" "clob")
8915 (set_attr "length" "2")]
8925 if (TARGET_UNIFIED_ASM)
8928 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8929 return \"mov\\tr8, r8\";
8931 [(set (attr "length")
8932 (if_then_else (eq_attr "is_thumb" "yes")
8938 ;; Patterns to allow combination of arithmetic, cond code and shifts
8940 (define_insn "*arith_shiftsi"
8941 [(set (match_operand:SI 0 "s_register_operand" "=r")
8942 (match_operator:SI 1 "shiftable_operator"
8943 [(match_operator:SI 3 "shift_operator"
8944 [(match_operand:SI 4 "s_register_operand" "r")
8945 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8946 (match_operand:SI 2 "s_register_operand" "r")]))]
8948 "%i1%?\\t%0, %2, %4%S3"
8949 [(set_attr "predicable" "yes")
8950 (set_attr "shift" "4")
8951 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8952 (const_string "alu_shift")
8953 (const_string "alu_shift_reg")))]
8957 [(set (match_operand:SI 0 "s_register_operand" "")
8958 (match_operator:SI 1 "shiftable_operator"
8959 [(match_operator:SI 2 "shiftable_operator"
8960 [(match_operator:SI 3 "shift_operator"
8961 [(match_operand:SI 4 "s_register_operand" "")
8962 (match_operand:SI 5 "reg_or_int_operand" "")])
8963 (match_operand:SI 6 "s_register_operand" "")])
8964 (match_operand:SI 7 "arm_rhs_operand" "")]))
8965 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8968 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8971 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8974 (define_insn "*arith_shiftsi_compare0"
8975 [(set (reg:CC_NOOV CC_REGNUM)
8976 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8977 [(match_operator:SI 3 "shift_operator"
8978 [(match_operand:SI 4 "s_register_operand" "r")
8979 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8980 (match_operand:SI 2 "s_register_operand" "r")])
8982 (set (match_operand:SI 0 "s_register_operand" "=r")
8983 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8986 "%i1%.\\t%0, %2, %4%S3"
8987 [(set_attr "conds" "set")
8988 (set_attr "shift" "4")
8989 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8990 (const_string "alu_shift")
8991 (const_string "alu_shift_reg")))]
8994 (define_insn "*arith_shiftsi_compare0_scratch"
8995 [(set (reg:CC_NOOV CC_REGNUM)
8996 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8997 [(match_operator:SI 3 "shift_operator"
8998 [(match_operand:SI 4 "s_register_operand" "r")
8999 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9000 (match_operand:SI 2 "s_register_operand" "r")])
9002 (clobber (match_scratch:SI 0 "=r"))]
9004 "%i1%.\\t%0, %2, %4%S3"
9005 [(set_attr "conds" "set")
9006 (set_attr "shift" "4")
9007 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9008 (const_string "alu_shift")
9009 (const_string "alu_shift_reg")))]
9012 (define_insn "*sub_shiftsi"
9013 [(set (match_operand:SI 0 "s_register_operand" "=r")
9014 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9015 (match_operator:SI 2 "shift_operator"
9016 [(match_operand:SI 3 "s_register_operand" "r")
9017 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9019 "sub%?\\t%0, %1, %3%S2"
9020 [(set_attr "predicable" "yes")
9021 (set_attr "shift" "3")
9022 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9023 (const_string "alu_shift")
9024 (const_string "alu_shift_reg")))]
9027 (define_insn "*sub_shiftsi_compare0"
9028 [(set (reg:CC_NOOV CC_REGNUM)
9030 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9031 (match_operator:SI 2 "shift_operator"
9032 [(match_operand:SI 3 "s_register_operand" "r")
9033 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9035 (set (match_operand:SI 0 "s_register_operand" "=r")
9036 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9039 "sub%.\\t%0, %1, %3%S2"
9040 [(set_attr "conds" "set")
9041 (set_attr "shift" "3")
9042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9043 (const_string "alu_shift")
9044 (const_string "alu_shift_reg")))]
9047 (define_insn "*sub_shiftsi_compare0_scratch"
9048 [(set (reg:CC_NOOV CC_REGNUM)
9050 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9051 (match_operator:SI 2 "shift_operator"
9052 [(match_operand:SI 3 "s_register_operand" "r")
9053 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9055 (clobber (match_scratch:SI 0 "=r"))]
9057 "sub%.\\t%0, %1, %3%S2"
9058 [(set_attr "conds" "set")
9059 (set_attr "shift" "3")
9060 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9061 (const_string "alu_shift")
9062 (const_string "alu_shift_reg")))]
9067 (define_insn "*and_scc"
9068 [(set (match_operand:SI 0 "s_register_operand" "=r")
9069 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9070 [(match_operand 3 "cc_register" "") (const_int 0)])
9071 (match_operand:SI 2 "s_register_operand" "r")))]
9073 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9074 [(set_attr "conds" "use")
9075 (set_attr "length" "8")]
9078 (define_insn "*ior_scc"
9079 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9080 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9081 [(match_operand 3 "cc_register" "") (const_int 0)])
9082 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9086 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9087 [(set_attr "conds" "use")
9088 (set_attr "length" "4,8")]
9091 (define_insn "*compare_scc"
9092 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9093 (match_operator:SI 1 "arm_comparison_operator"
9094 [(match_operand:SI 2 "s_register_operand" "r,r")
9095 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9096 (clobber (reg:CC CC_REGNUM))]
9099 if (operands[3] == const0_rtx)
9101 if (GET_CODE (operands[1]) == LT)
9102 return \"mov\\t%0, %2, lsr #31\";
9104 if (GET_CODE (operands[1]) == GE)
9105 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9107 if (GET_CODE (operands[1]) == EQ)
9108 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9111 if (GET_CODE (operands[1]) == NE)
9113 if (which_alternative == 1)
9114 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9115 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9117 if (which_alternative == 1)
9118 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9120 output_asm_insn (\"cmp\\t%2, %3\", operands);
9121 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9123 [(set_attr "conds" "clob")
9124 (set_attr "length" "12")]
9127 (define_insn "*cond_move"
9128 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9129 (if_then_else:SI (match_operator 3 "equality_operator"
9130 [(match_operator 4 "arm_comparison_operator"
9131 [(match_operand 5 "cc_register" "") (const_int 0)])
9133 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9134 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9137 if (GET_CODE (operands[3]) == NE)
9139 if (which_alternative != 1)
9140 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9141 if (which_alternative != 0)
9142 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9145 if (which_alternative != 0)
9146 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9147 if (which_alternative != 1)
9148 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9151 [(set_attr "conds" "use")
9152 (set_attr "length" "4,4,8")]
9155 (define_insn "*cond_arith"
9156 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9157 (match_operator:SI 5 "shiftable_operator"
9158 [(match_operator:SI 4 "arm_comparison_operator"
9159 [(match_operand:SI 2 "s_register_operand" "r,r")
9160 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9161 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9162 (clobber (reg:CC CC_REGNUM))]
9165 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9166 return \"%i5\\t%0, %1, %2, lsr #31\";
9168 output_asm_insn (\"cmp\\t%2, %3\", operands);
9169 if (GET_CODE (operands[5]) == AND)
9170 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9171 else if (GET_CODE (operands[5]) == MINUS)
9172 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9173 else if (which_alternative != 0)
9174 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9175 return \"%i5%d4\\t%0, %1, #1\";
9177 [(set_attr "conds" "clob")
9178 (set_attr "length" "12")]
9181 (define_insn "*cond_sub"
9182 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9183 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9184 (match_operator:SI 4 "arm_comparison_operator"
9185 [(match_operand:SI 2 "s_register_operand" "r,r")
9186 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9187 (clobber (reg:CC CC_REGNUM))]
9190 output_asm_insn (\"cmp\\t%2, %3\", operands);
9191 if (which_alternative != 0)
9192 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9193 return \"sub%d4\\t%0, %1, #1\";
9195 [(set_attr "conds" "clob")
9196 (set_attr "length" "8,12")]
9199 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9200 (define_insn "*cmp_ite0"
9201 [(set (match_operand 6 "dominant_cc_register" "")
9204 (match_operator 4 "arm_comparison_operator"
9205 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9206 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9207 (match_operator:SI 5 "arm_comparison_operator"
9208 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9209 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9215 static const char * const opcodes[4][2] =
9217 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9218 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9219 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9220 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9221 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9222 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9223 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9224 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9227 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9229 return opcodes[which_alternative][swap];
9231 [(set_attr "conds" "set")
9232 (set_attr "length" "8")]
9235 (define_insn "*cmp_ite1"
9236 [(set (match_operand 6 "dominant_cc_register" "")
9239 (match_operator 4 "arm_comparison_operator"
9240 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9241 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9242 (match_operator:SI 5 "arm_comparison_operator"
9243 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9244 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9250 static const char * const opcodes[4][2] =
9252 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9253 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9254 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9255 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9256 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9257 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9258 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9259 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9262 comparison_dominates_p (GET_CODE (operands[5]),
9263 reverse_condition (GET_CODE (operands[4])));
9265 return opcodes[which_alternative][swap];
9267 [(set_attr "conds" "set")
9268 (set_attr "length" "8")]
9271 (define_insn "*cmp_and"
9272 [(set (match_operand 6 "dominant_cc_register" "")
9275 (match_operator 4 "arm_comparison_operator"
9276 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9277 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9278 (match_operator:SI 5 "arm_comparison_operator"
9279 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9280 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9285 static const char *const opcodes[4][2] =
9287 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9288 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9289 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9290 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9291 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9292 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9293 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9294 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9297 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9299 return opcodes[which_alternative][swap];
9301 [(set_attr "conds" "set")
9302 (set_attr "predicable" "no")
9303 (set_attr "length" "8")]
9306 (define_insn "*cmp_ior"
9307 [(set (match_operand 6 "dominant_cc_register" "")
9310 (match_operator 4 "arm_comparison_operator"
9311 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9312 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9313 (match_operator:SI 5 "arm_comparison_operator"
9314 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9315 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9320 static const char *const opcodes[4][2] =
9322 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9323 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9324 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9325 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9326 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9327 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9328 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9329 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9332 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9334 return opcodes[which_alternative][swap];
9337 [(set_attr "conds" "set")
9338 (set_attr "length" "8")]
9341 (define_insn_and_split "*ior_scc_scc"
9342 [(set (match_operand:SI 0 "s_register_operand" "=r")
9343 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9344 [(match_operand:SI 1 "s_register_operand" "r")
9345 (match_operand:SI 2 "arm_add_operand" "rIL")])
9346 (match_operator:SI 6 "arm_comparison_operator"
9347 [(match_operand:SI 4 "s_register_operand" "r")
9348 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9349 (clobber (reg:CC CC_REGNUM))]
9351 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9354 "TARGET_ARM && reload_completed"
9358 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9359 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9361 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9363 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9366 [(set_attr "conds" "clob")
9367 (set_attr "length" "16")])
9369 ; If the above pattern is followed by a CMP insn, then the compare is
9370 ; redundant, since we can rework the conditional instruction that follows.
9371 (define_insn_and_split "*ior_scc_scc_cmp"
9372 [(set (match_operand 0 "dominant_cc_register" "")
9373 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9374 [(match_operand:SI 1 "s_register_operand" "r")
9375 (match_operand:SI 2 "arm_add_operand" "rIL")])
9376 (match_operator:SI 6 "arm_comparison_operator"
9377 [(match_operand:SI 4 "s_register_operand" "r")
9378 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9380 (set (match_operand:SI 7 "s_register_operand" "=r")
9381 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9382 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9385 "TARGET_ARM && reload_completed"
9389 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9390 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9392 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9394 [(set_attr "conds" "set")
9395 (set_attr "length" "16")])
9397 (define_insn_and_split "*and_scc_scc"
9398 [(set (match_operand:SI 0 "s_register_operand" "=r")
9399 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9400 [(match_operand:SI 1 "s_register_operand" "r")
9401 (match_operand:SI 2 "arm_add_operand" "rIL")])
9402 (match_operator:SI 6 "arm_comparison_operator"
9403 [(match_operand:SI 4 "s_register_operand" "r")
9404 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9405 (clobber (reg:CC CC_REGNUM))]
9407 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9410 "TARGET_ARM && reload_completed
9411 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9416 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9417 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9419 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9421 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9424 [(set_attr "conds" "clob")
9425 (set_attr "length" "16")])
9427 ; If the above pattern is followed by a CMP insn, then the compare is
9428 ; redundant, since we can rework the conditional instruction that follows.
9429 (define_insn_and_split "*and_scc_scc_cmp"
9430 [(set (match_operand 0 "dominant_cc_register" "")
9431 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9432 [(match_operand:SI 1 "s_register_operand" "r")
9433 (match_operand:SI 2 "arm_add_operand" "rIL")])
9434 (match_operator:SI 6 "arm_comparison_operator"
9435 [(match_operand:SI 4 "s_register_operand" "r")
9436 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9438 (set (match_operand:SI 7 "s_register_operand" "=r")
9439 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9440 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9443 "TARGET_ARM && reload_completed"
9447 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9448 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9450 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9452 [(set_attr "conds" "set")
9453 (set_attr "length" "16")])
9455 ;; If there is no dominance in the comparison, then we can still save an
9456 ;; instruction in the AND case, since we can know that the second compare
9457 ;; need only zero the value if false (if true, then the value is already
9459 (define_insn_and_split "*and_scc_scc_nodom"
9460 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9461 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9462 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9463 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9464 (match_operator:SI 6 "arm_comparison_operator"
9465 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9466 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9467 (clobber (reg:CC CC_REGNUM))]
9469 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9472 "TARGET_ARM && reload_completed"
9473 [(parallel [(set (match_dup 0)
9474 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9475 (clobber (reg:CC CC_REGNUM))])
9476 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9478 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9481 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9482 operands[4], operands[5]),
9484 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9486 [(set_attr "conds" "clob")
9487 (set_attr "length" "20")])
9490 [(set (reg:CC_NOOV CC_REGNUM)
9491 (compare:CC_NOOV (ior:SI
9492 (and:SI (match_operand:SI 0 "s_register_operand" "")
9494 (match_operator:SI 1 "comparison_operator"
9495 [(match_operand:SI 2 "s_register_operand" "")
9496 (match_operand:SI 3 "arm_add_operand" "")]))
9498 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9501 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9503 (set (reg:CC_NOOV CC_REGNUM)
9504 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9509 [(set (reg:CC_NOOV CC_REGNUM)
9510 (compare:CC_NOOV (ior:SI
9511 (match_operator:SI 1 "comparison_operator"
9512 [(match_operand:SI 2 "s_register_operand" "")
9513 (match_operand:SI 3 "arm_add_operand" "")])
9514 (and:SI (match_operand:SI 0 "s_register_operand" "")
9517 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9520 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9522 (set (reg:CC_NOOV CC_REGNUM)
9523 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9526 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9528 (define_insn "*negscc"
9529 [(set (match_operand:SI 0 "s_register_operand" "=r")
9530 (neg:SI (match_operator 3 "arm_comparison_operator"
9531 [(match_operand:SI 1 "s_register_operand" "r")
9532 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9533 (clobber (reg:CC CC_REGNUM))]
9536 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9537 return \"mov\\t%0, %1, asr #31\";
9539 if (GET_CODE (operands[3]) == NE)
9540 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9542 output_asm_insn (\"cmp\\t%1, %2\", operands);
9543 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9544 return \"mvn%d3\\t%0, #0\";
9546 [(set_attr "conds" "clob")
9547 (set_attr "length" "12")]
9550 (define_insn "movcond"
9551 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9553 (match_operator 5 "arm_comparison_operator"
9554 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9555 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9556 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9557 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9558 (clobber (reg:CC CC_REGNUM))]
9561 if (GET_CODE (operands[5]) == LT
9562 && (operands[4] == const0_rtx))
9564 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9566 if (operands[2] == const0_rtx)
9567 return \"and\\t%0, %1, %3, asr #31\";
9568 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9570 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9572 if (operands[1] == const0_rtx)
9573 return \"bic\\t%0, %2, %3, asr #31\";
9574 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9576 /* The only case that falls through to here is when both ops 1 & 2
9580 if (GET_CODE (operands[5]) == GE
9581 && (operands[4] == const0_rtx))
9583 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9585 if (operands[2] == const0_rtx)
9586 return \"bic\\t%0, %1, %3, asr #31\";
9587 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9589 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9591 if (operands[1] == const0_rtx)
9592 return \"and\\t%0, %2, %3, asr #31\";
9593 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9595 /* The only case that falls through to here is when both ops 1 & 2
9598 if (GET_CODE (operands[4]) == CONST_INT
9599 && !const_ok_for_arm (INTVAL (operands[4])))
9600 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9602 output_asm_insn (\"cmp\\t%3, %4\", operands);
9603 if (which_alternative != 0)
9604 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9605 if (which_alternative != 1)
9606 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9609 [(set_attr "conds" "clob")
9610 (set_attr "length" "8,8,12")]
9613 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9615 (define_insn "*ifcompare_plus_move"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9617 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9618 [(match_operand:SI 4 "s_register_operand" "r,r")
9619 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9621 (match_operand:SI 2 "s_register_operand" "r,r")
9622 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9623 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9624 (clobber (reg:CC CC_REGNUM))]
9627 [(set_attr "conds" "clob")
9628 (set_attr "length" "8,12")]
9631 (define_insn "*if_plus_move"
9632 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9634 (match_operator 4 "arm_comparison_operator"
9635 [(match_operand 5 "cc_register" "") (const_int 0)])
9637 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9638 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9639 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9643 sub%d4\\t%0, %2, #%n3
9644 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9645 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9646 [(set_attr "conds" "use")
9647 (set_attr "length" "4,4,8,8")
9648 (set_attr "type" "*,*,*,*")]
9651 (define_insn "*ifcompare_move_plus"
9652 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9653 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9654 [(match_operand:SI 4 "s_register_operand" "r,r")
9655 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9656 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9658 (match_operand:SI 2 "s_register_operand" "r,r")
9659 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9660 (clobber (reg:CC CC_REGNUM))]
9663 [(set_attr "conds" "clob")
9664 (set_attr "length" "8,12")]
9667 (define_insn "*if_move_plus"
9668 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9670 (match_operator 4 "arm_comparison_operator"
9671 [(match_operand 5 "cc_register" "") (const_int 0)])
9672 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9674 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9675 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9679 sub%D4\\t%0, %2, #%n3
9680 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9681 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9682 [(set_attr "conds" "use")
9683 (set_attr "length" "4,4,8,8")
9684 (set_attr "type" "*,*,*,*")]
9687 (define_insn "*ifcompare_arith_arith"
9688 [(set (match_operand:SI 0 "s_register_operand" "=r")
9689 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9690 [(match_operand:SI 5 "s_register_operand" "r")
9691 (match_operand:SI 6 "arm_add_operand" "rIL")])
9692 (match_operator:SI 8 "shiftable_operator"
9693 [(match_operand:SI 1 "s_register_operand" "r")
9694 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9695 (match_operator:SI 7 "shiftable_operator"
9696 [(match_operand:SI 3 "s_register_operand" "r")
9697 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9698 (clobber (reg:CC CC_REGNUM))]
9701 [(set_attr "conds" "clob")
9702 (set_attr "length" "12")]
9705 (define_insn "*if_arith_arith"
9706 [(set (match_operand:SI 0 "s_register_operand" "=r")
9707 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9708 [(match_operand 8 "cc_register" "") (const_int 0)])
9709 (match_operator:SI 6 "shiftable_operator"
9710 [(match_operand:SI 1 "s_register_operand" "r")
9711 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9712 (match_operator:SI 7 "shiftable_operator"
9713 [(match_operand:SI 3 "s_register_operand" "r")
9714 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9716 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9717 [(set_attr "conds" "use")
9718 (set_attr "length" "8")]
9721 (define_insn "*ifcompare_arith_move"
9722 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9723 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9724 [(match_operand:SI 2 "s_register_operand" "r,r")
9725 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9726 (match_operator:SI 7 "shiftable_operator"
9727 [(match_operand:SI 4 "s_register_operand" "r,r")
9728 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9729 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9730 (clobber (reg:CC CC_REGNUM))]
9733 /* If we have an operation where (op x 0) is the identity operation and
9734 the conditional operator is LT or GE and we are comparing against zero and
9735 everything is in registers then we can do this in two instructions. */
9736 if (operands[3] == const0_rtx
9737 && GET_CODE (operands[7]) != AND
9738 && GET_CODE (operands[5]) == REG
9739 && GET_CODE (operands[1]) == REG
9740 && REGNO (operands[1]) == REGNO (operands[4])
9741 && REGNO (operands[4]) != REGNO (operands[0]))
9743 if (GET_CODE (operands[6]) == LT)
9744 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9745 else if (GET_CODE (operands[6]) == GE)
9746 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9748 if (GET_CODE (operands[3]) == CONST_INT
9749 && !const_ok_for_arm (INTVAL (operands[3])))
9750 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9752 output_asm_insn (\"cmp\\t%2, %3\", operands);
9753 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9754 if (which_alternative != 0)
9755 return \"mov%D6\\t%0, %1\";
9758 [(set_attr "conds" "clob")
9759 (set_attr "length" "8,12")]
9762 (define_insn "*if_arith_move"
9763 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9764 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9765 [(match_operand 6 "cc_register" "") (const_int 0)])
9766 (match_operator:SI 5 "shiftable_operator"
9767 [(match_operand:SI 2 "s_register_operand" "r,r")
9768 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9769 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9773 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9774 [(set_attr "conds" "use")
9775 (set_attr "length" "4,8")
9776 (set_attr "type" "*,*")]
9779 (define_insn "*ifcompare_move_arith"
9780 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9781 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9782 [(match_operand:SI 4 "s_register_operand" "r,r")
9783 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9784 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9785 (match_operator:SI 7 "shiftable_operator"
9786 [(match_operand:SI 2 "s_register_operand" "r,r")
9787 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9788 (clobber (reg:CC CC_REGNUM))]
9791 /* If we have an operation where (op x 0) is the identity operation and
9792 the conditional operator is LT or GE and we are comparing against zero and
9793 everything is in registers then we can do this in two instructions */
9794 if (operands[5] == const0_rtx
9795 && GET_CODE (operands[7]) != AND
9796 && GET_CODE (operands[3]) == REG
9797 && GET_CODE (operands[1]) == REG
9798 && REGNO (operands[1]) == REGNO (operands[2])
9799 && REGNO (operands[2]) != REGNO (operands[0]))
9801 if (GET_CODE (operands[6]) == GE)
9802 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9803 else if (GET_CODE (operands[6]) == LT)
9804 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9807 if (GET_CODE (operands[5]) == CONST_INT
9808 && !const_ok_for_arm (INTVAL (operands[5])))
9809 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9811 output_asm_insn (\"cmp\\t%4, %5\", operands);
9813 if (which_alternative != 0)
9814 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9815 return \"%I7%D6\\t%0, %2, %3\";
9817 [(set_attr "conds" "clob")
9818 (set_attr "length" "8,12")]
9821 (define_insn "*if_move_arith"
9822 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9824 (match_operator 4 "arm_comparison_operator"
9825 [(match_operand 6 "cc_register" "") (const_int 0)])
9826 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9827 (match_operator:SI 5 "shiftable_operator"
9828 [(match_operand:SI 2 "s_register_operand" "r,r")
9829 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9833 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9834 [(set_attr "conds" "use")
9835 (set_attr "length" "4,8")
9836 (set_attr "type" "*,*")]
9839 (define_insn "*ifcompare_move_not"
9840 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9842 (match_operator 5 "arm_comparison_operator"
9843 [(match_operand:SI 3 "s_register_operand" "r,r")
9844 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9845 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9847 (match_operand:SI 2 "s_register_operand" "r,r"))))
9848 (clobber (reg:CC CC_REGNUM))]
9851 [(set_attr "conds" "clob")
9852 (set_attr "length" "8,12")]
9855 (define_insn "*if_move_not"
9856 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9858 (match_operator 4 "arm_comparison_operator"
9859 [(match_operand 3 "cc_register" "") (const_int 0)])
9860 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9861 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9865 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9866 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9867 [(set_attr "conds" "use")
9868 (set_attr "length" "4,8,8")]
9871 (define_insn "*ifcompare_not_move"
9872 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9874 (match_operator 5 "arm_comparison_operator"
9875 [(match_operand:SI 3 "s_register_operand" "r,r")
9876 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9878 (match_operand:SI 2 "s_register_operand" "r,r"))
9879 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9880 (clobber (reg:CC CC_REGNUM))]
9883 [(set_attr "conds" "clob")
9884 (set_attr "length" "8,12")]
9887 (define_insn "*if_not_move"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9890 (match_operator 4 "arm_comparison_operator"
9891 [(match_operand 3 "cc_register" "") (const_int 0)])
9892 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9893 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9897 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9898 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9899 [(set_attr "conds" "use")
9900 (set_attr "length" "4,8,8")]
9903 (define_insn "*ifcompare_shift_move"
9904 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9906 (match_operator 6 "arm_comparison_operator"
9907 [(match_operand:SI 4 "s_register_operand" "r,r")
9908 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9909 (match_operator:SI 7 "shift_operator"
9910 [(match_operand:SI 2 "s_register_operand" "r,r")
9911 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9912 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9913 (clobber (reg:CC CC_REGNUM))]
9916 [(set_attr "conds" "clob")
9917 (set_attr "length" "8,12")]
9920 (define_insn "*if_shift_move"
9921 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9923 (match_operator 5 "arm_comparison_operator"
9924 [(match_operand 6 "cc_register" "") (const_int 0)])
9925 (match_operator:SI 4 "shift_operator"
9926 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9927 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9928 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9932 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9933 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9934 [(set_attr "conds" "use")
9935 (set_attr "shift" "2")
9936 (set_attr "length" "4,8,8")
9937 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9938 (const_string "alu_shift")
9939 (const_string "alu_shift_reg")))]
9942 (define_insn "*ifcompare_move_shift"
9943 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9945 (match_operator 6 "arm_comparison_operator"
9946 [(match_operand:SI 4 "s_register_operand" "r,r")
9947 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9948 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9949 (match_operator:SI 7 "shift_operator"
9950 [(match_operand:SI 2 "s_register_operand" "r,r")
9951 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9952 (clobber (reg:CC CC_REGNUM))]
9955 [(set_attr "conds" "clob")
9956 (set_attr "length" "8,12")]
9959 (define_insn "*if_move_shift"
9960 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9962 (match_operator 5 "arm_comparison_operator"
9963 [(match_operand 6 "cc_register" "") (const_int 0)])
9964 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9965 (match_operator:SI 4 "shift_operator"
9966 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9967 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9971 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9972 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9973 [(set_attr "conds" "use")
9974 (set_attr "shift" "2")
9975 (set_attr "length" "4,8,8")
9976 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9977 (const_string "alu_shift")
9978 (const_string "alu_shift_reg")))]
9981 (define_insn "*ifcompare_shift_shift"
9982 [(set (match_operand:SI 0 "s_register_operand" "=r")
9984 (match_operator 7 "arm_comparison_operator"
9985 [(match_operand:SI 5 "s_register_operand" "r")
9986 (match_operand:SI 6 "arm_add_operand" "rIL")])
9987 (match_operator:SI 8 "shift_operator"
9988 [(match_operand:SI 1 "s_register_operand" "r")
9989 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9990 (match_operator:SI 9 "shift_operator"
9991 [(match_operand:SI 3 "s_register_operand" "r")
9992 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9993 (clobber (reg:CC CC_REGNUM))]
9996 [(set_attr "conds" "clob")
9997 (set_attr "length" "12")]
10000 (define_insn "*if_shift_shift"
10001 [(set (match_operand:SI 0 "s_register_operand" "=r")
10003 (match_operator 5 "arm_comparison_operator"
10004 [(match_operand 8 "cc_register" "") (const_int 0)])
10005 (match_operator:SI 6 "shift_operator"
10006 [(match_operand:SI 1 "s_register_operand" "r")
10007 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10008 (match_operator:SI 7 "shift_operator"
10009 [(match_operand:SI 3 "s_register_operand" "r")
10010 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10012 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10013 [(set_attr "conds" "use")
10014 (set_attr "shift" "1")
10015 (set_attr "length" "8")
10016 (set (attr "type") (if_then_else
10017 (and (match_operand 2 "const_int_operand" "")
10018 (match_operand 4 "const_int_operand" ""))
10019 (const_string "alu_shift")
10020 (const_string "alu_shift_reg")))]
10023 (define_insn "*ifcompare_not_arith"
10024 [(set (match_operand:SI 0 "s_register_operand" "=r")
10026 (match_operator 6 "arm_comparison_operator"
10027 [(match_operand:SI 4 "s_register_operand" "r")
10028 (match_operand:SI 5 "arm_add_operand" "rIL")])
10029 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10030 (match_operator:SI 7 "shiftable_operator"
10031 [(match_operand:SI 2 "s_register_operand" "r")
10032 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10033 (clobber (reg:CC CC_REGNUM))]
10036 [(set_attr "conds" "clob")
10037 (set_attr "length" "12")]
10040 (define_insn "*if_not_arith"
10041 [(set (match_operand:SI 0 "s_register_operand" "=r")
10043 (match_operator 5 "arm_comparison_operator"
10044 [(match_operand 4 "cc_register" "") (const_int 0)])
10045 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10046 (match_operator:SI 6 "shiftable_operator"
10047 [(match_operand:SI 2 "s_register_operand" "r")
10048 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10050 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10051 [(set_attr "conds" "use")
10052 (set_attr "length" "8")]
10055 (define_insn "*ifcompare_arith_not"
10056 [(set (match_operand:SI 0 "s_register_operand" "=r")
10058 (match_operator 6 "arm_comparison_operator"
10059 [(match_operand:SI 4 "s_register_operand" "r")
10060 (match_operand:SI 5 "arm_add_operand" "rIL")])
10061 (match_operator:SI 7 "shiftable_operator"
10062 [(match_operand:SI 2 "s_register_operand" "r")
10063 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10064 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10065 (clobber (reg:CC CC_REGNUM))]
10068 [(set_attr "conds" "clob")
10069 (set_attr "length" "12")]
10072 (define_insn "*if_arith_not"
10073 [(set (match_operand:SI 0 "s_register_operand" "=r")
10075 (match_operator 5 "arm_comparison_operator"
10076 [(match_operand 4 "cc_register" "") (const_int 0)])
10077 (match_operator:SI 6 "shiftable_operator"
10078 [(match_operand:SI 2 "s_register_operand" "r")
10079 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10080 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10082 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10083 [(set_attr "conds" "use")
10084 (set_attr "length" "8")]
10087 (define_insn "*ifcompare_neg_move"
10088 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090 (match_operator 5 "arm_comparison_operator"
10091 [(match_operand:SI 3 "s_register_operand" "r,r")
10092 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10093 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10094 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10095 (clobber (reg:CC CC_REGNUM))]
10098 [(set_attr "conds" "clob")
10099 (set_attr "length" "8,12")]
10102 (define_insn "*if_neg_move"
10103 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10105 (match_operator 4 "arm_comparison_operator"
10106 [(match_operand 3 "cc_register" "") (const_int 0)])
10107 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10108 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10111 rsb%d4\\t%0, %2, #0
10112 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10113 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10114 [(set_attr "conds" "use")
10115 (set_attr "length" "4,8,8")]
10118 (define_insn "*ifcompare_move_neg"
10119 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10121 (match_operator 5 "arm_comparison_operator"
10122 [(match_operand:SI 3 "s_register_operand" "r,r")
10123 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10124 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10125 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10126 (clobber (reg:CC CC_REGNUM))]
10129 [(set_attr "conds" "clob")
10130 (set_attr "length" "8,12")]
10133 (define_insn "*if_move_neg"
10134 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10136 (match_operator 4 "arm_comparison_operator"
10137 [(match_operand 3 "cc_register" "") (const_int 0)])
10138 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10139 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10142 rsb%D4\\t%0, %2, #0
10143 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10144 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10145 [(set_attr "conds" "use")
10146 (set_attr "length" "4,8,8")]
10149 (define_insn "*arith_adjacentmem"
10150 [(set (match_operand:SI 0 "s_register_operand" "=r")
10151 (match_operator:SI 1 "shiftable_operator"
10152 [(match_operand:SI 2 "memory_operand" "m")
10153 (match_operand:SI 3 "memory_operand" "m")]))
10154 (clobber (match_scratch:SI 4 "=r"))]
10155 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10161 HOST_WIDE_INT val1 = 0, val2 = 0;
10163 if (REGNO (operands[0]) > REGNO (operands[4]))
10165 ldm[1] = operands[4];
10166 ldm[2] = operands[0];
10170 ldm[1] = operands[0];
10171 ldm[2] = operands[4];
10174 base_reg = XEXP (operands[2], 0);
10176 if (!REG_P (base_reg))
10178 val1 = INTVAL (XEXP (base_reg, 1));
10179 base_reg = XEXP (base_reg, 0);
10182 if (!REG_P (XEXP (operands[3], 0)))
10183 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10185 arith[0] = operands[0];
10186 arith[3] = operands[1];
10200 if (val1 !=0 && val2 != 0)
10204 if (val1 == 4 || val2 == 4)
10205 /* Other val must be 8, since we know they are adjacent and neither
10207 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10208 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10210 ldm[0] = ops[0] = operands[4];
10212 ops[2] = GEN_INT (val1);
10213 output_add_immediate (ops);
10215 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10217 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10221 /* Offset is out of range for a single add, so use two ldr. */
10224 ops[2] = GEN_INT (val1);
10225 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10227 ops[2] = GEN_INT (val2);
10228 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10231 else if (val1 != 0)
10234 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10236 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10241 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10243 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10245 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10248 [(set_attr "length" "12")
10249 (set_attr "predicable" "yes")
10250 (set_attr "type" "load1")]
10253 ; This pattern is never tried by combine, so do it as a peephole
10256 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10257 (match_operand:SI 1 "arm_general_register_operand" ""))
10258 (set (reg:CC CC_REGNUM)
10259 (compare:CC (match_dup 1) (const_int 0)))]
10261 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10262 (set (match_dup 0) (match_dup 1))])]
10266 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10267 ; reversed, check that the memory references aren't volatile.
10270 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10271 (match_operand:SI 4 "memory_operand" "m"))
10272 (set (match_operand:SI 1 "s_register_operand" "=rk")
10273 (match_operand:SI 5 "memory_operand" "m"))
10274 (set (match_operand:SI 2 "s_register_operand" "=rk")
10275 (match_operand:SI 6 "memory_operand" "m"))
10276 (set (match_operand:SI 3 "s_register_operand" "=rk")
10277 (match_operand:SI 7 "memory_operand" "m"))]
10278 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10280 return emit_ldm_seq (operands, 4);
10285 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10286 (match_operand:SI 3 "memory_operand" "m"))
10287 (set (match_operand:SI 1 "s_register_operand" "=rk")
10288 (match_operand:SI 4 "memory_operand" "m"))
10289 (set (match_operand:SI 2 "s_register_operand" "=rk")
10290 (match_operand:SI 5 "memory_operand" "m"))]
10291 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10293 return emit_ldm_seq (operands, 3);
10298 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10299 (match_operand:SI 2 "memory_operand" "m"))
10300 (set (match_operand:SI 1 "s_register_operand" "=rk")
10301 (match_operand:SI 3 "memory_operand" "m"))]
10302 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10304 return emit_ldm_seq (operands, 2);
10309 [(set (match_operand:SI 4 "memory_operand" "=m")
10310 (match_operand:SI 0 "s_register_operand" "rk"))
10311 (set (match_operand:SI 5 "memory_operand" "=m")
10312 (match_operand:SI 1 "s_register_operand" "rk"))
10313 (set (match_operand:SI 6 "memory_operand" "=m")
10314 (match_operand:SI 2 "s_register_operand" "rk"))
10315 (set (match_operand:SI 7 "memory_operand" "=m")
10316 (match_operand:SI 3 "s_register_operand" "rk"))]
10317 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10319 return emit_stm_seq (operands, 4);
10324 [(set (match_operand:SI 3 "memory_operand" "=m")
10325 (match_operand:SI 0 "s_register_operand" "rk"))
10326 (set (match_operand:SI 4 "memory_operand" "=m")
10327 (match_operand:SI 1 "s_register_operand" "rk"))
10328 (set (match_operand:SI 5 "memory_operand" "=m")
10329 (match_operand:SI 2 "s_register_operand" "rk"))]
10330 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10332 return emit_stm_seq (operands, 3);
10337 [(set (match_operand:SI 2 "memory_operand" "=m")
10338 (match_operand:SI 0 "s_register_operand" "rk"))
10339 (set (match_operand:SI 3 "memory_operand" "=m")
10340 (match_operand:SI 1 "s_register_operand" "rk"))]
10341 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10343 return emit_stm_seq (operands, 2);
10348 [(set (match_operand:SI 0 "s_register_operand" "")
10349 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10351 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10352 [(match_operand:SI 3 "s_register_operand" "")
10353 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10354 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10356 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10357 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10362 ;; This split can be used because CC_Z mode implies that the following
10363 ;; branch will be an equality, or an unsigned inequality, so the sign
10364 ;; extension is not needed.
10367 [(set (reg:CC_Z CC_REGNUM)
10369 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10371 (match_operand 1 "const_int_operand" "")))
10372 (clobber (match_scratch:SI 2 ""))]
10374 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10375 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10376 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10377 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10379 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10382 ;; ??? Check the patterns above for Thumb-2 usefulness
10384 (define_expand "prologue"
10385 [(clobber (const_int 0))]
10388 arm_expand_prologue ();
10390 thumb1_expand_prologue ();
10395 (define_expand "epilogue"
10396 [(clobber (const_int 0))]
10399 if (crtl->calls_eh_return)
10400 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10402 thumb1_expand_epilogue ();
10403 else if (USE_RETURN_INSN (FALSE))
10405 emit_jump_insn (gen_return ());
10408 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10410 gen_rtx_RETURN (VOIDmode)),
10411 VUNSPEC_EPILOGUE));
10416 ;; Note - although unspec_volatile's USE all hard registers,
10417 ;; USEs are ignored after relaod has completed. Thus we need
10418 ;; to add an unspec of the link register to ensure that flow
10419 ;; does not think that it is unused by the sibcall branch that
10420 ;; will replace the standard function epilogue.
10421 (define_insn "sibcall_epilogue"
10422 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10423 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10426 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10427 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10428 return arm_output_epilogue (next_nonnote_insn (insn));
10430 ;; Length is absolute worst case
10431 [(set_attr "length" "44")
10432 (set_attr "type" "block")
10433 ;; We don't clobber the conditions, but the potential length of this
10434 ;; operation is sufficient to make conditionalizing the sequence
10435 ;; unlikely to be profitable.
10436 (set_attr "conds" "clob")]
10439 (define_insn "*epilogue_insns"
10440 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10444 return arm_output_epilogue (NULL);
10445 else /* TARGET_THUMB1 */
10446 return thumb_unexpanded_epilogue ();
10448 ; Length is absolute worst case
10449 [(set_attr "length" "44")
10450 (set_attr "type" "block")
10451 ;; We don't clobber the conditions, but the potential length of this
10452 ;; operation is sufficient to make conditionalizing the sequence
10453 ;; unlikely to be profitable.
10454 (set_attr "conds" "clob")]
10457 (define_expand "eh_epilogue"
10458 [(use (match_operand:SI 0 "register_operand" ""))
10459 (use (match_operand:SI 1 "register_operand" ""))
10460 (use (match_operand:SI 2 "register_operand" ""))]
10464 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10465 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10467 rtx ra = gen_rtx_REG (Pmode, 2);
10469 emit_move_insn (ra, operands[2]);
10472 /* This is a hack -- we may have crystalized the function type too
10474 cfun->machine->func_type = 0;
10478 ;; This split is only used during output to reduce the number of patterns
10479 ;; that need assembler instructions adding to them. We allowed the setting
10480 ;; of the conditions to be implicit during rtl generation so that
10481 ;; the conditional compare patterns would work. However this conflicts to
10482 ;; some extent with the conditional data operations, so we have to split them
10485 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10486 ;; conditional execution sufficient?
10489 [(set (match_operand:SI 0 "s_register_operand" "")
10490 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10491 [(match_operand 2 "" "") (match_operand 3 "" "")])
10493 (match_operand 4 "" "")))
10494 (clobber (reg:CC CC_REGNUM))]
10495 "TARGET_ARM && reload_completed"
10496 [(set (match_dup 5) (match_dup 6))
10497 (cond_exec (match_dup 7)
10498 (set (match_dup 0) (match_dup 4)))]
10501 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10502 operands[2], operands[3]);
10503 enum rtx_code rc = GET_CODE (operands[1]);
10505 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10506 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10507 if (mode == CCFPmode || mode == CCFPEmode)
10508 rc = reverse_condition_maybe_unordered (rc);
10510 rc = reverse_condition (rc);
10512 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10517 [(set (match_operand:SI 0 "s_register_operand" "")
10518 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10519 [(match_operand 2 "" "") (match_operand 3 "" "")])
10520 (match_operand 4 "" "")
10522 (clobber (reg:CC CC_REGNUM))]
10523 "TARGET_ARM && reload_completed"
10524 [(set (match_dup 5) (match_dup 6))
10525 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10526 (set (match_dup 0) (match_dup 4)))]
10529 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10530 operands[2], operands[3]);
10532 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10533 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10538 [(set (match_operand:SI 0 "s_register_operand" "")
10539 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10540 [(match_operand 2 "" "") (match_operand 3 "" "")])
10541 (match_operand 4 "" "")
10542 (match_operand 5 "" "")))
10543 (clobber (reg:CC CC_REGNUM))]
10544 "TARGET_ARM && reload_completed"
10545 [(set (match_dup 6) (match_dup 7))
10546 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10547 (set (match_dup 0) (match_dup 4)))
10548 (cond_exec (match_dup 8)
10549 (set (match_dup 0) (match_dup 5)))]
10552 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10553 operands[2], operands[3]);
10554 enum rtx_code rc = GET_CODE (operands[1]);
10556 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10557 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10558 if (mode == CCFPmode || mode == CCFPEmode)
10559 rc = reverse_condition_maybe_unordered (rc);
10561 rc = reverse_condition (rc);
10563 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10568 [(set (match_operand:SI 0 "s_register_operand" "")
10569 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10570 [(match_operand:SI 2 "s_register_operand" "")
10571 (match_operand:SI 3 "arm_add_operand" "")])
10572 (match_operand:SI 4 "arm_rhs_operand" "")
10574 (match_operand:SI 5 "s_register_operand" ""))))
10575 (clobber (reg:CC CC_REGNUM))]
10576 "TARGET_ARM && reload_completed"
10577 [(set (match_dup 6) (match_dup 7))
10578 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10579 (set (match_dup 0) (match_dup 4)))
10580 (cond_exec (match_dup 8)
10581 (set (match_dup 0) (not:SI (match_dup 5))))]
10584 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10585 operands[2], operands[3]);
10586 enum rtx_code rc = GET_CODE (operands[1]);
10588 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10589 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10590 if (mode == CCFPmode || mode == CCFPEmode)
10591 rc = reverse_condition_maybe_unordered (rc);
10593 rc = reverse_condition (rc);
10595 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10599 (define_insn "*cond_move_not"
10600 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10601 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10602 [(match_operand 3 "cc_register" "") (const_int 0)])
10603 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10605 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10609 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10610 [(set_attr "conds" "use")
10611 (set_attr "length" "4,8")]
10614 ;; The next two patterns occur when an AND operation is followed by a
10615 ;; scc insn sequence
10617 (define_insn "*sign_extract_onebit"
10618 [(set (match_operand:SI 0 "s_register_operand" "=r")
10619 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10621 (match_operand:SI 2 "const_int_operand" "n")))
10622 (clobber (reg:CC CC_REGNUM))]
10625 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10626 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10627 return \"mvnne\\t%0, #0\";
10629 [(set_attr "conds" "clob")
10630 (set_attr "length" "8")]
10633 (define_insn "*not_signextract_onebit"
10634 [(set (match_operand:SI 0 "s_register_operand" "=r")
10636 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10638 (match_operand:SI 2 "const_int_operand" "n"))))
10639 (clobber (reg:CC CC_REGNUM))]
10642 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10643 output_asm_insn (\"tst\\t%1, %2\", operands);
10644 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10645 return \"movne\\t%0, #0\";
10647 [(set_attr "conds" "clob")
10648 (set_attr "length" "12")]
10650 ;; ??? The above patterns need auditing for Thumb-2
10652 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10653 ;; expressions. For simplicity, the first register is also in the unspec
10655 (define_insn "*push_multi"
10656 [(match_parallel 2 "multi_register_push"
10657 [(set (match_operand:BLK 0 "memory_operand" "=m")
10658 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10659 UNSPEC_PUSH_MULT))])]
10663 int num_saves = XVECLEN (operands[2], 0);
10665 /* For the StrongARM at least it is faster to
10666 use STR to store only a single register.
10667 In Thumb mode always use push, and the assembler will pick
10668 something appropriate. */
10669 if (num_saves == 1 && TARGET_ARM)
10670 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10677 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10679 strcpy (pattern, \"push\\t{%1\");
10681 for (i = 1; i < num_saves; i++)
10683 strcat (pattern, \", %|\");
10685 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10688 strcat (pattern, \"}\");
10689 output_asm_insn (pattern, operands);
10694 [(set_attr "type" "store4")]
10697 (define_insn "stack_tie"
10698 [(set (mem:BLK (scratch))
10699 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10700 (match_operand:SI 1 "s_register_operand" "rk")]
10704 [(set_attr "length" "0")]
10707 ;; Similarly for the floating point registers
10708 (define_insn "*push_fp_multi"
10709 [(match_parallel 2 "multi_register_push"
10710 [(set (match_operand:BLK 0 "memory_operand" "=m")
10711 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10712 UNSPEC_PUSH_MULT))])]
10713 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10718 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10719 output_asm_insn (pattern, operands);
10722 [(set_attr "type" "f_store")]
10725 ;; Special patterns for dealing with the constant pool
10727 (define_insn "align_4"
10728 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10731 assemble_align (32);
10736 (define_insn "align_8"
10737 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10740 assemble_align (64);
10745 (define_insn "consttable_end"
10746 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10749 making_const_table = FALSE;
10754 (define_insn "consttable_1"
10755 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10758 making_const_table = TRUE;
10759 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10760 assemble_zeros (3);
10763 [(set_attr "length" "4")]
10766 (define_insn "consttable_2"
10767 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10770 making_const_table = TRUE;
10771 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10772 assemble_zeros (2);
10775 [(set_attr "length" "4")]
10778 (define_insn "consttable_4"
10779 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10783 making_const_table = TRUE;
10784 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10789 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10790 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10794 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10799 [(set_attr "length" "4")]
10802 (define_insn "consttable_8"
10803 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10807 making_const_table = TRUE;
10808 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10813 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10814 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10818 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10823 [(set_attr "length" "8")]
10826 (define_insn "consttable_16"
10827 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10831 making_const_table = TRUE;
10832 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10837 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10838 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10842 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10847 [(set_attr "length" "16")]
10850 ;; Miscellaneous Thumb patterns
10852 (define_expand "tablejump"
10853 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10854 (use (label_ref (match_operand 1 "" "")))])]
10859 /* Hopefully, CSE will eliminate this copy. */
10860 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10861 rtx reg2 = gen_reg_rtx (SImode);
10863 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10864 operands[0] = reg2;
10869 ;; NB never uses BX.
10870 (define_insn "*thumb1_tablejump"
10871 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10872 (use (label_ref (match_operand 1 "" "")))]
10875 [(set_attr "length" "2")]
10878 ;; V5 Instructions,
10880 (define_insn "clzsi2"
10881 [(set (match_operand:SI 0 "s_register_operand" "=r")
10882 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10883 "TARGET_32BIT && arm_arch5"
10885 [(set_attr "predicable" "yes")
10886 (set_attr "insn" "clz")])
10888 ;; V5E instructions.
10890 (define_insn "prefetch"
10891 [(prefetch (match_operand:SI 0 "address_operand" "p")
10892 (match_operand:SI 1 "" "")
10893 (match_operand:SI 2 "" ""))]
10894 "TARGET_32BIT && arm_arch5e"
10897 ;; General predication pattern
10900 [(match_operator 0 "arm_comparison_operator"
10901 [(match_operand 1 "cc_register" "")
10907 (define_insn "prologue_use"
10908 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10910 "%@ %0 needed for prologue"
10914 ;; Patterns for exception handling
10916 (define_expand "eh_return"
10917 [(use (match_operand 0 "general_operand" ""))]
10922 emit_insn (gen_arm_eh_return (operands[0]));
10924 emit_insn (gen_thumb_eh_return (operands[0]));
10929 ;; We can't expand this before we know where the link register is stored.
10930 (define_insn_and_split "arm_eh_return"
10931 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10933 (clobber (match_scratch:SI 1 "=&r"))]
10936 "&& reload_completed"
10940 arm_set_return_address (operands[0], operands[1]);
10945 (define_insn_and_split "thumb_eh_return"
10946 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10948 (clobber (match_scratch:SI 1 "=&l"))]
10951 "&& reload_completed"
10955 thumb_set_return_address (operands[0], operands[1]);
10963 (define_insn "load_tp_hard"
10964 [(set (match_operand:SI 0 "register_operand" "=r")
10965 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10967 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10968 [(set_attr "predicable" "yes")]
10971 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10972 (define_insn "load_tp_soft"
10973 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10974 (clobber (reg:SI LR_REGNUM))
10975 (clobber (reg:SI IP_REGNUM))
10976 (clobber (reg:CC CC_REGNUM))]
10978 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10979 [(set_attr "conds" "clob")]
10982 ;; Load the FPA co-processor patterns
10984 ;; Load the Maverick co-processor patterns
10985 (include "cirrus.md")
10986 ;; Vector bits common to IWMMXT and Neon
10987 (include "vec-common.md")
10988 ;; Load the Intel Wireless Multimedia Extension patterns
10989 (include "iwmmxt.md")
10990 ;; Load the VFP co-processor patterns
10992 ;; Thumb-2 patterns
10993 (include "thumb2.md")
10995 (include "neon.md")