1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9 ;; This file is part of GCC.
10
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
15
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
20
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
24
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26
27 \f
28 ;;---------------------------------------------------------------------------
29 ;; Constants
30
31 ;; Register numbers
32 (define_constants
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
42 ]
43 )
44 ;; 3rd operand to select_dominance_cc_mode
45 (define_constants
46 [(DOM_CC_X_AND_Y 0)
47 (DOM_CC_NX_OR_Y 1)
48 (DOM_CC_X_OR_Y 2)
49 ]
50 )
51
52 ;; UNSPEC Usage:
53 ;; Note: sin and cos are no longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
55
56 (define_constants
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNSPEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
66 ; expressions.
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
71 ; GLOBAL_OFFSET_TABLE. The operation is fully
72 ; described by the RTL but must be wrapped to
73 ; prevent combine from trying to rip it apart.
74 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
75 ; being scheduled before the stack adjustment insn.
76 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
77 ; this unspec is used to prevent the deletion of
78 ; instructions setting registers for EH handling
79 ; and stack frame generation. Operand 0 is the
80 ; register to "use".
81 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
82 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
83 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
84 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
85 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
86 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
87 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
88 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
89 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
90 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
91 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
92 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
93 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
94 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
95 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
96 ; instruction stream.
97 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
98 ; generate correct unwind information.
99 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
100 ; correctly for PIC usage.
101 ]
102 )
103
104 ;; UNSPEC_VOLATILE Usage:
105
106 (define_constants
107 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
108 ; insn in the code.
109 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
110 ; instruction epilogue sequence that isn't expanded
111 ; into normal RTL. Used for both normal and sibcall
112 ; epilogues.
113 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
114 ; for inlined constants.
115 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
116 ; table.
117 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
118 ; an 8-bit object.
119 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
120 ; a 16-bit object.
121 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
122 ; a 32-bit object.
123 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
124 ; a 64-bit object.
125 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
126 ; a 128-bit object.
127 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
128 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
129 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
130 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
131 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
132 (VUNSPEC_WCMP_GT 14) ; Used by the iWMMXt WCMPGT instructions
133 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
134 ; handling.
135 ]
136 )
137 \f
138 ;;---------------------------------------------------------------------------
139 ;; Attributes
140
141 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
142 ; generating ARM code. This is used to control the length of some insn
143 ; patterns that share the same RTL in both ARM and Thumb code.
144 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
145
146 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
147 ; scheduling decisions for the load unit and the multiplier.
148 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
149
150 ; IS_XSCALE is set to 'yes' when compiling for XScale.
151 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
152
153 ;; Operand number of an input operand that is shifted. Zero if the
154 ;; given instruction does not shift one of its input operands.
155 (define_attr "shift" "" (const_int 0))
156
157 ; Floating Point Unit. If we only have floating point emulation, then there
158 ; is no point in scheduling the floating point insns. (Well, for best
159 ; performance we should try to group them together).
160 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
161 (const (symbol_ref "arm_fpu_attr")))
162
163 ; LENGTH of an instruction (in bytes)
164 (define_attr "length" "" (const_int 4))
165
166 ; POOL_RANGE is how far away from a constant pool entry this insn
167 ; can be placed. If the distance is zero, then this insn will never
168 ; reference the pool.
169 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
170 ; before its address.
171 (define_attr "pool_range" "" (const_int 0))
172 (define_attr "neg_pool_range" "" (const_int 0))
173
174 ; An assembler sequence may clobber the condition codes without us knowing.
175 ; If such an insn references the pool, then we have no way of knowing how,
176 ; so use the most conservative value for pool_range.
177 (define_asm_attributes
178 [(set_attr "conds" "clob")
179 (set_attr "length" "4")
180 (set_attr "pool_range" "250")])
181
182 ;; The instruction used to implement a particular pattern. This
183 ;; information is used by pipeline descriptions to provide accurate
184 ;; scheduling information.
185
186 (define_attr "insn"
187 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
188 (const_string "other"))
189
190 ; TYPE attribute is used to detect floating point instructions which, if
191 ; running on a co-processor, can run in parallel with other, basic instructions.
192 ; If write-buffer scheduling is enabled then it can also be used in the
193 ; scheduling of writes.
194
195 ; Classification of each insn
196 ; alu any alu instruction that doesn't hit memory or fp
197 ; regs or have a shifted source operand
198 ; alu_shift any data instruction that doesn't hit memory or fp
199 ; regs, but has a source operand shifted by a constant
200 ; alu_shift_reg any data instruction that doesn't hit memory or fp
201 ; regs, but has a source operand shifted by a register value
202 ; mult a multiply instruction
203 ; block blockage insn, this blocks all functional units
204 ; float a floating point arithmetic operation (subject to expansion)
205 ; fdivd DFmode floating point division
206 ; fdivs SFmode floating point division
207 ; fmul Floating point multiply
208 ; ffmul Fast floating point multiply
209 ; farith Floating point arithmetic (4 cycle)
210 ; ffarith Fast floating point arithmetic (2 cycle)
211 ; float_em a floating point arithmetic operation that is normally emulated
212 ; even on a machine with an fpa.
213 ; f_load a floating point load from memory
214 ; f_store a floating point store to memory
215 ; f_load[sd] single/double load from memory
216 ; f_store[sd] single/double store to memory
217 ; f_flag a transfer of co-processor flags to the CPSR
218 ; f_mem_r a transfer of a floating point register to a real reg via mem
219 ; r_mem_f the reverse of f_mem_r
220 ; f_2_r fast transfer float to arm (no memory needed)
221 ; r_2_f fast transfer arm to float
222 ; f_cvt convert floating<->integral
223 ; branch a branch
224 ; call a subroutine call
225 ; load_byte load byte(s) from memory to arm registers
226 ; load1 load 1 word from memory to arm registers
227 ; load2 load 2 words from memory to arm registers
228 ; load3 load 3 words from memory to arm registers
229 ; load4 load 4 words from memory to arm registers
230 ; store1 store 1 word to memory from arm registers
231 ; store2 store 2 words
232 ; store3 store 3 words
233 ; store4 store 4 (or more) words
234 ; Additions for Cirrus Maverick co-processor:
235 ; mav_farith Floating point arithmetic (4 cycle)
236 ; mav_dmult Double multiplies (7 cycle)
237 ;
238
239 (define_attr "type"
240 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
241 (if_then_else
242 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
243 (const_string "mult")
244 (const_string "alu")))
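;; As an illustration (not a pattern from this file), an insn that loads a
;; single word would override the default classification explicitly with
;; [(set_attr "type" "load1")], so that the pipeline descriptions included
;; later in this file can schedule it accurately.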
245
246 ; Load scheduling, set from the arm_ld_sched variable
247 ; initialized by arm_override_options()
248 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
249
250 ; condition codes: this one is used by final_prescan_insn to speed up
251 ; conditionalizing instructions. It saves having to scan the rtl to see if
252 ; it uses or alters the condition codes.
253 ;
254 ; USE means that the condition codes are used by the insn in the process of
255 ; outputting code; this means (at present) that we can't use the insn in
256 ; inlined branches
257 ;
258 ; SET means that the purpose of the insn is to set the condition codes in a
259 ; well defined manner.
260 ;
261 ; CLOB means that the condition codes are altered in an undefined manner, if
262 ; they are altered at all
263 ;
264 ; JUMP_CLOB is used when the condition cannot be represented by a single
265 ; instruction (UNEQ and LTGT). These cannot be predicated.
266 ;
267 ; NOCOND means that the condition codes are neither altered nor affect the
268 ; output of this insn
269
270 (define_attr "conds" "use,set,clob,jump_clob,nocond"
271 (if_then_else (eq_attr "type" "call")
272 (const_string "clob")
273 (const_string "nocond")))
274
275 ; Predicable means that the insn can be conditionally executed based on
276 ; an automatically added predicate (additional patterns are generated by
277 ; gen...). We default to 'no' because no Thumb patterns match this rule
278 ; and not all ARM patterns do.
279 (define_attr "predicable" "no,yes" (const_string "no"))
280
281 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
282 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
283 ; suffer blockages enough to warrant modelling this (and it can adversely
284 ; affect the schedule).
285 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
286
287 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
288 ; to stall the processor. Used with model_wbuf above.
289 (define_attr "write_conflict" "no,yes"
290 (if_then_else (eq_attr "type"
291 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
292 (const_string "yes")
293 (const_string "no")))
294
295 ; Classify the insns into those that take one cycle and those that take more
296 ; than one on the main cpu execution unit.
297 (define_attr "core_cycles" "single,multi"
298 (if_then_else (eq_attr "type"
299 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
300 (const_string "single")
301 (const_string "multi")))
302
303 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
304 ;; distant label. Only applicable to Thumb code.
305 (define_attr "far_jump" "yes,no" (const_string "no"))
306
307
308 ;; The number of machine instructions this pattern expands to.
309 ;; Used for Thumb-2 conditional execution.
310 (define_attr "ce_count" "" (const_int 1))
311
312 ;;---------------------------------------------------------------------------
313 ;; Mode iterators
314
315 ; A list of modes that are exactly 64 bits in size. We use this to expand
316 ; some splits that are the same for all modes when operating on ARM
317 ; registers.
318 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
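;; As a sketch of how the iterator is used (illustrative, not a pattern from
;; this file): a template written against it, e.g.
;;   (define_split
;;     [(set (match_operand:ANY64 0 "s_register_operand" "")
;;           (match_operand:ANY64 1 "s_register_operand" ""))]
;;     ...)
;; is instantiated once for each of DI, DF, V8QI, V4HI, V2SI and V2SF.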
319
320 ;;---------------------------------------------------------------------------
321 ;; Predicates
322
323 (include "predicates.md")
324 (include "constraints.md")
325
326 ;;---------------------------------------------------------------------------
327 ;; Pipeline descriptions
328
329 ;; Processor type. This is created automatically from arm-cores.def.
330 (include "arm-tune.md")
331
332 ;; True if the generic scheduling description should be used.
333
334 (define_attr "generic_sched" "yes,no"
335 (const (if_then_else
336 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexr4")
337 (const_string "no")
338 (const_string "yes"))))
339
340 (define_attr "generic_vfp" "yes,no"
341 (const (if_then_else
342 (and (eq_attr "fpu" "vfp")
343 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8"))
344 (const_string "yes")
345 (const_string "no"))))
346
347 (include "arm-generic.md")
348 (include "arm926ejs.md")
349 (include "arm1020e.md")
350 (include "arm1026ejs.md")
351 (include "arm1136jfs.md")
352 (include "cortex-a8.md")
353 (include "cortex-r4.md")
354
355 \f
356 ;;---------------------------------------------------------------------------
357 ;; Insn patterns
358 ;;
359 ;; Addition insns.
360
361 ;; Note: For DImode insns, there is normally no reason why operands should
362 ;; not be in the same register; what we don't want is for something being
363 ;; written to partially overlap something that is an input.
364 ;; Cirrus 64-bit additions should not be split because we have a native
365 ;; 64-bit addition instruction.
366
367 (define_expand "adddi3"
368 [(parallel
369 [(set (match_operand:DI 0 "s_register_operand" "")
370 (plus:DI (match_operand:DI 1 "s_register_operand" "")
371 (match_operand:DI 2 "s_register_operand" "")))
372 (clobber (reg:CC CC_REGNUM))])]
373 "TARGET_EITHER"
374 "
375 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
376 {
377 if (!cirrus_fp_register (operands[0], DImode))
378 operands[0] = force_reg (DImode, operands[0]);
379 if (!cirrus_fp_register (operands[1], DImode))
380 operands[1] = force_reg (DImode, operands[1]);
381 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
382 DONE;
383 }
384
385 if (TARGET_THUMB1)
386 {
387 if (GET_CODE (operands[1]) != REG)
388 operands[1] = force_reg (SImode, operands[1]);
389 if (GET_CODE (operands[2]) != REG)
390 operands[2] = force_reg (SImode, operands[2]);
391 }
392 "
393 )
394
395 (define_insn "*thumb1_adddi3"
396 [(set (match_operand:DI 0 "register_operand" "=l")
397 (plus:DI (match_operand:DI 1 "register_operand" "%0")
398 (match_operand:DI 2 "register_operand" "l")))
399 (clobber (reg:CC CC_REGNUM))
400 ]
401 "TARGET_THUMB1"
402 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
403 [(set_attr "length" "4")]
404 )
405
406 (define_insn_and_split "*arm_adddi3"
407 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
408 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
409 (match_operand:DI 2 "s_register_operand" "r, 0")))
410 (clobber (reg:CC CC_REGNUM))]
411 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
412 "#"
413 "TARGET_32BIT && reload_completed"
414 [(parallel [(set (reg:CC_C CC_REGNUM)
415 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
416 (match_dup 1)))
417 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
418 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
419 (plus:SI (match_dup 4) (match_dup 5))))]
420 "
421 {
422 operands[3] = gen_highpart (SImode, operands[0]);
423 operands[0] = gen_lowpart (SImode, operands[0]);
424 operands[4] = gen_highpart (SImode, operands[1]);
425 operands[1] = gen_lowpart (SImode, operands[1]);
426 operands[5] = gen_highpart (SImode, operands[2]);
427 operands[2] = gen_lowpart (SImode, operands[2]);
428 }"
429 [(set_attr "conds" "clob")
430 (set_attr "length" "8")]
431 )
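;; After the split above, the resulting sequence is (register names are
;; illustrative only):
;;      adds    r0, r0, r2      @ low word; sets the carry flag
;;      adc     r1, r1, r3      @ high word plus carry in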
432
433 (define_insn_and_split "*adddi_sesidi_di"
434 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
435 (plus:DI (sign_extend:DI
436 (match_operand:SI 2 "s_register_operand" "r,r"))
437 (match_operand:DI 1 "s_register_operand" "r,0")))
438 (clobber (reg:CC CC_REGNUM))]
439 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
440 "#"
441 "TARGET_32BIT && reload_completed"
442 [(parallel [(set (reg:CC_C CC_REGNUM)
443 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
444 (match_dup 1)))
445 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
446 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
447 (plus:SI (ashiftrt:SI (match_dup 2)
448 (const_int 31))
449 (match_dup 4))))]
450 "
451 {
452 operands[3] = gen_highpart (SImode, operands[0]);
453 operands[0] = gen_lowpart (SImode, operands[0]);
454 operands[4] = gen_highpart (SImode, operands[1]);
455 operands[1] = gen_lowpart (SImode, operands[1]);
456 operands[2] = gen_lowpart (SImode, operands[2]);
457 }"
458 [(set_attr "conds" "clob")
459 (set_attr "length" "8")]
460 )
461
462 (define_insn_and_split "*adddi_zesidi_di"
463 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
464 (plus:DI (zero_extend:DI
465 (match_operand:SI 2 "s_register_operand" "r,r"))
466 (match_operand:DI 1 "s_register_operand" "r,0")))
467 (clobber (reg:CC CC_REGNUM))]
468 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
469 "#"
470 "TARGET_32BIT && reload_completed"
471 [(parallel [(set (reg:CC_C CC_REGNUM)
472 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
473 (match_dup 1)))
474 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
475 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
476 (plus:SI (match_dup 4) (const_int 0))))]
477 "
478 {
479 operands[3] = gen_highpart (SImode, operands[0]);
480 operands[0] = gen_lowpart (SImode, operands[0]);
481 operands[4] = gen_highpart (SImode, operands[1]);
482 operands[1] = gen_lowpart (SImode, operands[1]);
483 operands[2] = gen_lowpart (SImode, operands[2]);
484 }"
485 [(set_attr "conds" "clob")
486 (set_attr "length" "8")]
487 )
488
489 (define_expand "addsi3"
490 [(set (match_operand:SI 0 "s_register_operand" "")
491 (plus:SI (match_operand:SI 1 "s_register_operand" "")
492 (match_operand:SI 2 "reg_or_int_operand" "")))]
493 "TARGET_EITHER"
494 "
495 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
496 {
497 arm_split_constant (PLUS, SImode, NULL_RTX,
498 INTVAL (operands[2]), operands[0], operands[1],
499 optimize && can_create_pseudo_p ());
500 DONE;
501 }
502 "
503 )
504
505 ; If there is a scratch available, this will be faster than synthesizing the
506 ; addition.
507 (define_peephole2
508 [(match_scratch:SI 3 "r")
509 (set (match_operand:SI 0 "arm_general_register_operand" "")
510 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
511 (match_operand:SI 2 "const_int_operand" "")))]
512 "TARGET_32BIT &&
513 !(const_ok_for_arm (INTVAL (operands[2]))
514 || const_ok_for_arm (-INTVAL (operands[2])))
515 && const_ok_for_arm (~INTVAL (operands[2]))"
516 [(set (match_dup 3) (match_dup 2))
517 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
518 ""
519 )
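;; For example (the constant is illustrative): adding #0xffff00ff cannot be
;; done as a single add or sub immediate, but its bitwise complement 0xff00
;; is a valid immediate, so with a free scratch register this becomes
;;      mvn     r3, #0xff00     @ r3 = 0xffff00ff
;;      add     r0, r1, r3
;; instead of a longer synthesized-constant sequence.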
520
521 (define_insn_and_split "*arm_addsi3"
522 [(set (match_operand:SI 0 "s_register_operand" "=r, !k,r, !k,r")
523 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k,rk,!k,rk")
524 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,L, L,?n")))]
525 "TARGET_32BIT"
526 "@
527 add%?\\t%0, %1, %2
528 add%?\\t%0, %1, %2
529 sub%?\\t%0, %1, #%n2
530 sub%?\\t%0, %1, #%n2
531 #"
532 "TARGET_32BIT &&
533 GET_CODE (operands[2]) == CONST_INT
534 && !(const_ok_for_arm (INTVAL (operands[2]))
535 || const_ok_for_arm (-INTVAL (operands[2])))"
536 [(clobber (const_int 0))]
537 "
538 arm_split_constant (PLUS, SImode, curr_insn,
539 INTVAL (operands[2]), operands[0],
540 operands[1], 0);
541 DONE;
542 "
543 [(set_attr "length" "4,4,4,4,16")
544 (set_attr "predicable" "yes")]
545 )
546
547 ;; Register group 'k' is a single register group containing only the stack
548 ;; register. Trying to reload it will always fail catastrophically,
549 ;; so never allow those alternatives to match if reloading is needed.
550
551 (define_insn "*thumb1_addsi3"
552 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
553 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
554 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
555 "TARGET_THUMB1"
556 "*
557 static const char * const asms[] =
558 {
559 \"add\\t%0, %0, %2\",
560 \"sub\\t%0, %0, #%n2\",
561 \"add\\t%0, %1, %2\",
562 \"add\\t%0, %0, %2\",
563 \"add\\t%0, %0, %2\",
564 \"add\\t%0, %1, %2\",
565 \"add\\t%0, %1, %2\"
566 };
567 if ((which_alternative == 2 || which_alternative == 6)
568 && GET_CODE (operands[2]) == CONST_INT
569 && INTVAL (operands[2]) < 0)
570 return \"sub\\t%0, %1, #%n2\";
571 return asms[which_alternative];
572 "
573 [(set_attr "length" "2")]
574 )
575
576 ;; Reloading and elimination of the frame pointer can
577 ;; sometimes cause this optimization to be missed.
578 (define_peephole2
579 [(set (match_operand:SI 0 "arm_general_register_operand" "")
580 (match_operand:SI 1 "const_int_operand" ""))
581 (set (match_dup 0)
582 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
583 "TARGET_THUMB1
584 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
585 && (INTVAL (operands[1]) & 3) == 0"
586 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
587 ""
588 )
589
590 ;; ??? Make Thumb-2 variants which prefer low regs
591 (define_insn "*addsi3_compare0"
592 [(set (reg:CC_NOOV CC_REGNUM)
593 (compare:CC_NOOV
594 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
595 (match_operand:SI 2 "arm_add_operand" "rI,L"))
596 (const_int 0)))
597 (set (match_operand:SI 0 "s_register_operand" "=r,r")
598 (plus:SI (match_dup 1) (match_dup 2)))]
599 "TARGET_32BIT"
600 "@
601 add%.\\t%0, %1, %2
602 sub%.\\t%0, %1, #%n2"
603 [(set_attr "conds" "set")]
604 )
605
606 (define_insn "*addsi3_compare0_scratch"
607 [(set (reg:CC_NOOV CC_REGNUM)
608 (compare:CC_NOOV
609 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
610 (match_operand:SI 1 "arm_add_operand" "rI,L"))
611 (const_int 0)))]
612 "TARGET_32BIT"
613 "@
614 cmn%?\\t%0, %1
615 cmp%?\\t%0, #%n1"
616 [(set_attr "conds" "set")]
617 )
618
619 (define_insn "*compare_negsi_si"
620 [(set (reg:CC_Z CC_REGNUM)
621 (compare:CC_Z
622 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
623 (match_operand:SI 1 "s_register_operand" "r")))]
624 "TARGET_32BIT"
625 "cmn%?\\t%1, %0"
626 [(set_attr "conds" "set")]
627 )
628
629 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
630 ;; addend is a constant.
631 (define_insn "*cmpsi2_addneg"
632 [(set (reg:CC CC_REGNUM)
633 (compare:CC
634 (match_operand:SI 1 "s_register_operand" "r,r")
635 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
636 (set (match_operand:SI 0 "s_register_operand" "=r,r")
637 (plus:SI (match_dup 1)
638 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
639 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
640 "@
641 sub%.\\t%0, %1, %2
642 add%.\\t%0, %1, #%n2"
643 [(set_attr "conds" "set")]
644 )
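;; For example (illustrative): comparing r1 with 5 while also needing r1 - 5
;; in r0 matches this pattern and emits the single instruction
;;      subs    r0, r1, #5
;; which sets the flags exactly as "cmp r1, #5" would.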
645
646 ;; Convert the sequence
647 ;; sub rd, rn, #1
648 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
649 ;; bne dest
650 ;; into
651 ;; subs rd, rn, #1
652 ;; bcs dest ((unsigned)rn >= 1)
653 ;; similarly for the beq variant using bcc.
654 ;; This is a common looping idiom (while (n--))
655 (define_peephole2
656 [(set (match_operand:SI 0 "arm_general_register_operand" "")
657 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
658 (const_int -1)))
659 (set (match_operand 2 "cc_register" "")
660 (compare (match_dup 0) (const_int -1)))
661 (set (pc)
662 (if_then_else (match_operator 3 "equality_operator"
663 [(match_dup 2) (const_int 0)])
664 (match_operand 4 "" "")
665 (match_operand 5 "" "")))]
666 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
667 [(parallel[
668 (set (match_dup 2)
669 (compare:CC
670 (match_dup 1) (const_int 1)))
671 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
672 (set (pc)
673 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
674 (match_dup 4)
675 (match_dup 5)))]
676 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
677 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
678 ? GEU : LTU),
679 VOIDmode,
680 operands[2], const0_rtx);"
681 )
682
683 ;; The next four insns work because they compare the result with one of
684 ;; the operands, and we know that the use of the condition code is
685 ;; either GEU or LTU, so we can use the carry flag from the addition
686 ;; instead of doing the compare a second time.
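;; A worked example of that (values are illustrative): with r1 = 0xfffffffe
;; and r2 = 3, "adds r0, r1, r2" leaves r0 = 1 and sets the carry flag; the
;; wrapped result is below r1 (LTU), while an un-wrapped result would have
;; been GEU, so the flags from the addition already answer the comparison of
;; the result against the operand.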
687 (define_insn "*addsi3_compare_op1"
688 [(set (reg:CC_C CC_REGNUM)
689 (compare:CC_C
690 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
691 (match_operand:SI 2 "arm_add_operand" "rI,L"))
692 (match_dup 1)))
693 (set (match_operand:SI 0 "s_register_operand" "=r,r")
694 (plus:SI (match_dup 1) (match_dup 2)))]
695 "TARGET_32BIT"
696 "@
697 add%.\\t%0, %1, %2
698 sub%.\\t%0, %1, #%n2"
699 [(set_attr "conds" "set")]
700 )
701
702 (define_insn "*addsi3_compare_op2"
703 [(set (reg:CC_C CC_REGNUM)
704 (compare:CC_C
705 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
706 (match_operand:SI 2 "arm_add_operand" "rI,L"))
707 (match_dup 2)))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1) (match_dup 2)))]
710 "TARGET_32BIT"
711 "@
712 add%.\\t%0, %1, %2
713 sub%.\\t%0, %1, #%n2"
714 [(set_attr "conds" "set")]
715 )
716
717 (define_insn "*compare_addsi2_op0"
718 [(set (reg:CC_C CC_REGNUM)
719 (compare:CC_C
720 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
721 (match_operand:SI 1 "arm_add_operand" "rI,L"))
722 (match_dup 0)))]
723 "TARGET_32BIT"
724 "@
725 cmn%?\\t%0, %1
726 cmp%?\\t%0, #%n1"
727 [(set_attr "conds" "set")]
728 )
729
730 (define_insn "*compare_addsi2_op1"
731 [(set (reg:CC_C CC_REGNUM)
732 (compare:CC_C
733 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
734 (match_operand:SI 1 "arm_add_operand" "rI,L"))
735 (match_dup 1)))]
736 "TARGET_32BIT"
737 "@
738 cmn%?\\t%0, %1
739 cmp%?\\t%0, #%n1"
740 [(set_attr "conds" "set")]
741 )
742
743 (define_insn "*addsi3_carryin"
744 [(set (match_operand:SI 0 "s_register_operand" "=r")
745 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
746 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
747 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
748 "TARGET_32BIT"
749 "adc%?\\t%0, %1, %2"
750 [(set_attr "conds" "use")]
751 )
752
753 (define_insn "*addsi3_carryin_shift"
754 [(set (match_operand:SI 0 "s_register_operand" "=r")
755 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
756 (plus:SI
757 (match_operator:SI 2 "shift_operator"
758 [(match_operand:SI 3 "s_register_operand" "r")
759 (match_operand:SI 4 "reg_or_int_operand" "rM")])
760 (match_operand:SI 1 "s_register_operand" "r"))))]
761 "TARGET_32BIT"
762 "adc%?\\t%0, %1, %3%S2"
763 [(set_attr "conds" "use")
764 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
765 (const_string "alu_shift")
766 (const_string "alu_shift_reg")))]
767 )
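;; For instance (illustrative operands), this pattern covers
;;      adc     r0, r1, r2, lsl #3
;; i.e. r0 = r1 + (r2 << 3) + carry, with the shift folded into the adc.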
768
769 (define_insn "*addsi3_carryin_alt1"
770 [(set (match_operand:SI 0 "s_register_operand" "=r")
771 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
772 (match_operand:SI 2 "arm_rhs_operand" "rI"))
773 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
774 "TARGET_32BIT"
775 "adc%?\\t%0, %1, %2"
776 [(set_attr "conds" "use")]
777 )
778
779 (define_insn "*addsi3_carryin_alt2"
780 [(set (match_operand:SI 0 "s_register_operand" "=r")
781 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
782 (match_operand:SI 1 "s_register_operand" "r"))
783 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
784 "TARGET_32BIT"
785 "adc%?\\t%0, %1, %2"
786 [(set_attr "conds" "use")]
787 )
788
789 (define_insn "*addsi3_carryin_alt3"
790 [(set (match_operand:SI 0 "s_register_operand" "=r")
791 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
792 (match_operand:SI 2 "arm_rhs_operand" "rI"))
793 (match_operand:SI 1 "s_register_operand" "r")))]
794 "TARGET_32BIT"
795 "adc%?\\t%0, %1, %2"
796 [(set_attr "conds" "use")]
797 )
798
799 (define_expand "incscc"
800 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
801 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
802 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
803 (match_operand:SI 1 "s_register_operand" "0,?r")))]
804 "TARGET_32BIT"
805 ""
806 )
807
808 (define_insn "*arm_incscc"
809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
810 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
811 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
812 (match_operand:SI 1 "s_register_operand" "0,?r")))]
813 "TARGET_ARM"
814 "@
815 add%d2\\t%0, %1, #1
816 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
817 [(set_attr "conds" "use")
818 (set_attr "length" "4,8")]
819 )
820
821 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
822 (define_split
823 [(set (match_operand:SI 0 "s_register_operand" "")
824 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
825 (match_operand:SI 2 "s_register_operand" ""))
826 (const_int -1)))
827 (clobber (match_operand:SI 3 "s_register_operand" ""))]
828 "TARGET_32BIT"
829 [(set (match_dup 3) (match_dup 1))
830 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
831 "
832 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
833 ")
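;; A worked instance (illustrative): with x = 1, (1 << y) - 1 is the mask of
;; the y low bits; the rewrite loads ~(x - 1) = 0xffffffff into the scratch
;; and computes ~(0xffffffff << y), e.g.
;;      mvn     r3, #0
;;      mvn     r0, r3, lsl r2
;; which yields the same mask without materializing 1 << y first.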
834
835 (define_expand "addsf3"
836 [(set (match_operand:SF 0 "s_register_operand" "")
837 (plus:SF (match_operand:SF 1 "s_register_operand" "")
838 (match_operand:SF 2 "arm_float_add_operand" "")))]
839 "TARGET_32BIT && TARGET_HARD_FLOAT"
840 "
841 if (TARGET_MAVERICK
842 && !cirrus_fp_register (operands[2], SFmode))
843 operands[2] = force_reg (SFmode, operands[2]);
844 ")
845
846 (define_expand "adddf3"
847 [(set (match_operand:DF 0 "s_register_operand" "")
848 (plus:DF (match_operand:DF 1 "s_register_operand" "")
849 (match_operand:DF 2 "arm_float_add_operand" "")))]
850 "TARGET_32BIT && TARGET_HARD_FLOAT"
851 "
852 if (TARGET_MAVERICK
853 && !cirrus_fp_register (operands[2], DFmode))
854 operands[2] = force_reg (DFmode, operands[2]);
855 ")
856
857 (define_expand "subdi3"
858 [(parallel
859 [(set (match_operand:DI 0 "s_register_operand" "")
860 (minus:DI (match_operand:DI 1 "s_register_operand" "")
861 (match_operand:DI 2 "s_register_operand" "")))
862 (clobber (reg:CC CC_REGNUM))])]
863 "TARGET_EITHER"
864 "
865 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
866 && TARGET_32BIT
867 && cirrus_fp_register (operands[0], DImode)
868 && cirrus_fp_register (operands[1], DImode))
869 {
870 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
871 DONE;
872 }
873
874 if (TARGET_THUMB1)
875 {
876 if (GET_CODE (operands[1]) != REG)
877 operands[1] = force_reg (SImode, operands[1]);
878 if (GET_CODE (operands[2]) != REG)
879 operands[2] = force_reg (SImode, operands[2]);
880 }
881 "
882 )
883
884 (define_insn "*arm_subdi3"
885 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
886 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
887 (match_operand:DI 2 "s_register_operand" "r,0,0")))
888 (clobber (reg:CC CC_REGNUM))]
889 "TARGET_32BIT"
890 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
891 [(set_attr "conds" "clob")
892 (set_attr "length" "8")]
893 )
894
895 (define_insn "*thumb_subdi3"
896 [(set (match_operand:DI 0 "register_operand" "=l")
897 (minus:DI (match_operand:DI 1 "register_operand" "0")
898 (match_operand:DI 2 "register_operand" "l")))
899 (clobber (reg:CC CC_REGNUM))]
900 "TARGET_THUMB1"
901 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
902 [(set_attr "length" "4")]
903 )
904
905 (define_insn "*subdi_di_zesidi"
906 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
907 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
908 (zero_extend:DI
909 (match_operand:SI 2 "s_register_operand" "r,r"))))
910 (clobber (reg:CC CC_REGNUM))]
911 "TARGET_32BIT"
912 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
913 [(set_attr "conds" "clob")
914 (set_attr "length" "8")]
915 )
916
917 (define_insn "*subdi_di_sesidi"
918 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
919 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
920 (sign_extend:DI
921 (match_operand:SI 2 "s_register_operand" "r,r"))))
922 (clobber (reg:CC CC_REGNUM))]
923 "TARGET_32BIT"
924 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
925 [(set_attr "conds" "clob")
926 (set_attr "length" "8")]
927 )
928
929 (define_insn "*subdi_zesidi_di"
930 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
931 (minus:DI (zero_extend:DI
932 (match_operand:SI 2 "s_register_operand" "r,r"))
933 (match_operand:DI 1 "s_register_operand" "?r,0")))
934 (clobber (reg:CC CC_REGNUM))]
935 "TARGET_ARM"
936 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
937 [(set_attr "conds" "clob")
938 (set_attr "length" "8")]
939 )
940
941 (define_insn "*subdi_sesidi_di"
942 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
943 (minus:DI (sign_extend:DI
944 (match_operand:SI 2 "s_register_operand" "r,r"))
945 (match_operand:DI 1 "s_register_operand" "?r,0")))
946 (clobber (reg:CC CC_REGNUM))]
947 "TARGET_ARM"
948 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
949 [(set_attr "conds" "clob")
950 (set_attr "length" "8")]
951 )
952
953 (define_insn "*subdi_zesidi_zesidi"
954 [(set (match_operand:DI 0 "s_register_operand" "=r")
955 (minus:DI (zero_extend:DI
956 (match_operand:SI 1 "s_register_operand" "r"))
957 (zero_extend:DI
958 (match_operand:SI 2 "s_register_operand" "r"))))
959 (clobber (reg:CC CC_REGNUM))]
960 "TARGET_32BIT"
961 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
962 [(set_attr "conds" "clob")
963 (set_attr "length" "8")]
964 )
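;; Note on the template above: "sbc %R0, %1, %1" computes %1 - %1 - !carry,
;; i.e. 0 when the low-word subtraction did not borrow and 0xffffffff when
;; it did, which is the correct high word for the difference of two
;; zero-extended values.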
965
966 (define_expand "subsi3"
967 [(set (match_operand:SI 0 "s_register_operand" "")
968 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
969 (match_operand:SI 2 "s_register_operand" "")))]
970 "TARGET_EITHER"
971 "
972 if (GET_CODE (operands[1]) == CONST_INT)
973 {
974 if (TARGET_32BIT)
975 {
976 arm_split_constant (MINUS, SImode, NULL_RTX,
977 INTVAL (operands[1]), operands[0],
978 operands[2], optimize && can_create_pseudo_p ());
979 DONE;
980 }
981 else /* TARGET_THUMB1 */
982 operands[1] = force_reg (SImode, operands[1]);
983 }
984 "
985 )
986
987 (define_insn "*thumb1_subsi3_insn"
988 [(set (match_operand:SI 0 "register_operand" "=l")
989 (minus:SI (match_operand:SI 1 "register_operand" "l")
990 (match_operand:SI 2 "register_operand" "l")))]
991 "TARGET_THUMB1"
992 "sub\\t%0, %1, %2"
993 [(set_attr "length" "2")]
994 )
995
996 ; ??? Check Thumb-2 split length
997 (define_insn_and_split "*arm_subsi3_insn"
998 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
999 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1000 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1001 "TARGET_32BIT"
1002 "@
1003 rsb%?\\t%0, %2, %1
1004 sub%?\\t%0, %1, %2
1005 #"
1006 "TARGET_32BIT
1007 && GET_CODE (operands[1]) == CONST_INT
1008 && !const_ok_for_arm (INTVAL (operands[1]))"
1009 [(clobber (const_int 0))]
1010 "
1011 arm_split_constant (MINUS, SImode, curr_insn,
1012 INTVAL (operands[1]), operands[0], operands[2], 0);
1013 DONE;
1014 "
1015 [(set_attr "length" "4,4,16")
1016 (set_attr "predicable" "yes")]
1017 )
1018
1019 (define_peephole2
1020 [(match_scratch:SI 3 "r")
1021 (set (match_operand:SI 0 "arm_general_register_operand" "")
1022 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1023 (match_operand:SI 2 "arm_general_register_operand" "")))]
1024 "TARGET_32BIT
1025 && !const_ok_for_arm (INTVAL (operands[1]))
1026 && const_ok_for_arm (~INTVAL (operands[1]))"
1027 [(set (match_dup 3) (match_dup 1))
1028 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1029 ""
1030 )
1031
1032 (define_insn "*subsi3_compare0"
1033 [(set (reg:CC_NOOV CC_REGNUM)
1034 (compare:CC_NOOV
1035 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1036 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1037 (const_int 0)))
1038 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1039 (minus:SI (match_dup 1) (match_dup 2)))]
1040 "TARGET_32BIT"
1041 "@
1042 sub%.\\t%0, %1, %2
1043 rsb%.\\t%0, %2, %1"
1044 [(set_attr "conds" "set")]
1045 )
1046
1047 (define_expand "decscc"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1049 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1050 (match_operator:SI 2 "arm_comparison_operator"
1051 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1052 "TARGET_32BIT"
1053 ""
1054 )
1055
1056 (define_insn "*arm_decscc"
1057 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1058 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1059 (match_operator:SI 2 "arm_comparison_operator"
1060 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1061 "TARGET_ARM"
1062 "@
1063 sub%d2\\t%0, %1, #1
1064 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1065 [(set_attr "conds" "use")
1066 (set_attr "length" "*,8")]
1067 )
1068
1069 (define_expand "subsf3"
1070 [(set (match_operand:SF 0 "s_register_operand" "")
1071 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1072 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1073 "TARGET_32BIT && TARGET_HARD_FLOAT"
1074 "
1075 if (TARGET_MAVERICK)
1076 {
1077 if (!cirrus_fp_register (operands[1], SFmode))
1078 operands[1] = force_reg (SFmode, operands[1]);
1079 if (!cirrus_fp_register (operands[2], SFmode))
1080 operands[2] = force_reg (SFmode, operands[2]);
1081 }
1082 ")
1083
1084 (define_expand "subdf3"
1085 [(set (match_operand:DF 0 "s_register_operand" "")
1086 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1087 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1088 "TARGET_32BIT && TARGET_HARD_FLOAT"
1089 "
1090 if (TARGET_MAVERICK)
1091 {
1092 if (!cirrus_fp_register (operands[1], DFmode))
1093 operands[1] = force_reg (DFmode, operands[1]);
1094 if (!cirrus_fp_register (operands[2], DFmode))
1095 operands[2] = force_reg (DFmode, operands[2]);
1096 }
1097 ")
1098
1099 \f
1100 ;; Multiplication insns
1101
1102 (define_expand "mulsi3"
1103 [(set (match_operand:SI 0 "s_register_operand" "")
1104 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1105 (match_operand:SI 1 "s_register_operand" "")))]
1106 "TARGET_EITHER"
1107 ""
1108 )
1109
1110 ;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
1111 (define_insn "*arm_mulsi3"
1112 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1113 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1114 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1115 "TARGET_32BIT && !arm_arch6"
1116 "mul%?\\t%0, %2, %1"
1117 [(set_attr "insn" "mul")
1118 (set_attr "predicable" "yes")]
1119 )
1120
1121 (define_insn "*arm_mulsi3_v6"
1122 [(set (match_operand:SI 0 "s_register_operand" "=r")
1123 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1124 (match_operand:SI 2 "s_register_operand" "r")))]
1125 "TARGET_32BIT && arm_arch6"
1126 "mul%?\\t%0, %1, %2"
1127 [(set_attr "insn" "mul")
1128 (set_attr "predicable" "yes")]
1129 )
1130
1131 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1132 ; 1 and 2 are the same, because reload will make operand 0 match
1133 ; operand 1 without realizing that this conflicts with operand 2. We fix
1134 ; this by adding another alternative to match this case, and then `reload'
1135 ; it ourselves. This alternative must come first.
1136 (define_insn "*thumb_mulsi3"
1137 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1138 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1139 (match_operand:SI 2 "register_operand" "l,l,l")))]
1140 "TARGET_THUMB1 && !arm_arch6"
1141 "*
1142 if (which_alternative < 2)
1143 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1144 else
1145 return \"mul\\t%0, %2\";
1146 "
1147 [(set_attr "length" "4,4,2")
1148 (set_attr "insn" "mul")]
1149 )
1150
1151 (define_insn "*thumb_mulsi3_v6"
1152 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1153 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1154 (match_operand:SI 2 "register_operand" "l,0,0")))]
1155 "TARGET_THUMB1 && arm_arch6"
1156 "@
1157 mul\\t%0, %2
1158 mul\\t%0, %1
1159 mul\\t%0, %1"
1160 [(set_attr "length" "2")
1161 (set_attr "insn" "mul")]
1162 )
1163
1164 (define_insn "*mulsi3_compare0"
1165 [(set (reg:CC_NOOV CC_REGNUM)
1166 (compare:CC_NOOV (mult:SI
1167 (match_operand:SI 2 "s_register_operand" "r,r")
1168 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1169 (const_int 0)))
1170 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1171 (mult:SI (match_dup 2) (match_dup 1)))]
1172 "TARGET_ARM && !arm_arch6"
1173 "mul%.\\t%0, %2, %1"
1174 [(set_attr "conds" "set")
1175 (set_attr "insn" "muls")]
1176 )
1177
1178 (define_insn "*mulsi3_compare0_v6"
1179 [(set (reg:CC_NOOV CC_REGNUM)
1180 (compare:CC_NOOV (mult:SI
1181 (match_operand:SI 2 "s_register_operand" "r")
1182 (match_operand:SI 1 "s_register_operand" "r"))
1183 (const_int 0)))
1184 (set (match_operand:SI 0 "s_register_operand" "=r")
1185 (mult:SI (match_dup 2) (match_dup 1)))]
1186 "TARGET_ARM && arm_arch6 && optimize_size"
1187 "mul%.\\t%0, %2, %1"
1188 [(set_attr "conds" "set")
1189 (set_attr "insn" "muls")]
1190 )
1191
1192 (define_insn "*mulsi_compare0_scratch"
1193 [(set (reg:CC_NOOV CC_REGNUM)
1194 (compare:CC_NOOV (mult:SI
1195 (match_operand:SI 2 "s_register_operand" "r,r")
1196 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1197 (const_int 0)))
1198 (clobber (match_scratch:SI 0 "=&r,&r"))]
1199 "TARGET_ARM && !arm_arch6"
1200 "mul%.\\t%0, %2, %1"
1201 [(set_attr "conds" "set")
1202 (set_attr "insn" "muls")]
1203 )
1204
1205 (define_insn "*mulsi_compare0_scratch_v6"
1206 [(set (reg:CC_NOOV CC_REGNUM)
1207 (compare:CC_NOOV (mult:SI
1208 (match_operand:SI 2 "s_register_operand" "r")
1209 (match_operand:SI 1 "s_register_operand" "r"))
1210 (const_int 0)))
1211 (clobber (match_scratch:SI 0 "=r"))]
1212 "TARGET_ARM && arm_arch6 && optimize_size"
1213 "mul%.\\t%0, %2, %1"
1214 [(set_attr "conds" "set")
1215 (set_attr "insn" "muls")]
1216 )
1217
1218 ;; Unnamed templates to match MLA instruction.
1219
1220 (define_insn "*mulsi3addsi"
1221 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1222 (plus:SI
1223 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1224 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1225 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1226 "TARGET_32BIT && !arm_arch6"
1227 "mla%?\\t%0, %2, %1, %3"
1228 [(set_attr "insn" "mla")
1229 (set_attr "predicable" "yes")]
1230 )
1231
1232 (define_insn "*mulsi3addsi_v6"
1233 [(set (match_operand:SI 0 "s_register_operand" "=r")
1234 (plus:SI
1235 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1236 (match_operand:SI 1 "s_register_operand" "r"))
1237 (match_operand:SI 3 "s_register_operand" "r")))]
1238 "TARGET_32BIT && arm_arch6"
1239 "mla%?\\t%0, %2, %1, %3"
1240 [(set_attr "insn" "mla")
1241 (set_attr "predicable" "yes")]
1242 )
1243
1244 (define_insn "*mulsi3addsi_compare0"
1245 [(set (reg:CC_NOOV CC_REGNUM)
1246 (compare:CC_NOOV
1247 (plus:SI (mult:SI
1248 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1249 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1250 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1251 (const_int 0)))
1252 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1253 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1254 (match_dup 3)))]
1255 "TARGET_ARM && arm_arch6"
1256 "mla%.\\t%0, %2, %1, %3"
1257 [(set_attr "conds" "set")
1258 (set_attr "insn" "mlas")]
1259 )
1260
1261 (define_insn "*mulsi3addsi_compare0_v6"
1262 [(set (reg:CC_NOOV CC_REGNUM)
1263 (compare:CC_NOOV
1264 (plus:SI (mult:SI
1265 (match_operand:SI 2 "s_register_operand" "r")
1266 (match_operand:SI 1 "s_register_operand" "r"))
1267 (match_operand:SI 3 "s_register_operand" "r"))
1268 (const_int 0)))
1269 (set (match_operand:SI 0 "s_register_operand" "=r")
1270 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1271 (match_dup 3)))]
1272 "TARGET_ARM && arm_arch6 && optimize_size"
1273 "mla%.\\t%0, %2, %1, %3"
1274 [(set_attr "conds" "set")
1275 (set_attr "insn" "mlas")]
1276 )
1277
1278 (define_insn "*mulsi3addsi_compare0_scratch"
1279 [(set (reg:CC_NOOV CC_REGNUM)
1280 (compare:CC_NOOV
1281 (plus:SI (mult:SI
1282 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1283 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1284 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1285 (const_int 0)))
1286 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1287 "TARGET_ARM && !arm_arch6"
1288 "mla%.\\t%0, %2, %1, %3"
1289 [(set_attr "conds" "set")
1290 (set_attr "insn" "mlas")]
1291 )
1292
1293 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1294 [(set (reg:CC_NOOV CC_REGNUM)
1295 (compare:CC_NOOV
1296 (plus:SI (mult:SI
1297 (match_operand:SI 2 "s_register_operand" "r")
1298 (match_operand:SI 1 "s_register_operand" "r"))
1299 (match_operand:SI 3 "s_register_operand" "r"))
1300 (const_int 0)))
1301 (clobber (match_scratch:SI 0 "=r"))]
1302 "TARGET_ARM && arm_arch6 && optimize_size"
1303 "mla%.\\t%0, %2, %1, %3"
1304 [(set_attr "conds" "set")
1305 (set_attr "insn" "mlas")]
1306 )
1307
1308 (define_insn "*mulsi3subsi"
1309 [(set (match_operand:SI 0 "s_register_operand" "=r")
1310 (minus:SI
1311 (match_operand:SI 3 "s_register_operand" "r")
1312 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1313 (match_operand:SI 1 "s_register_operand" "r"))))]
1314 "TARGET_32BIT && arm_arch_thumb2"
1315 "mls%?\\t%0, %2, %1, %3"
1316 [(set_attr "insn" "mla")
1317 (set_attr "predicable" "yes")]
1318 )
1319
1320 ;; Unnamed template to match long long multiply-accumulate (smlal)
1321
1322 (define_insn "*mulsidi3adddi"
1323 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1324 (plus:DI
1325 (mult:DI
1326 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1327 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1328 (match_operand:DI 1 "s_register_operand" "0")))]
1329 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1330 "smlal%?\\t%Q0, %R0, %3, %2"
1331 [(set_attr "insn" "smlal")
1332 (set_attr "predicable" "yes")]
1333 )
1334
1335 (define_insn "*mulsidi3adddi_v6"
1336 [(set (match_operand:DI 0 "s_register_operand" "=r")
1337 (plus:DI
1338 (mult:DI
1339 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1340 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1341 (match_operand:DI 1 "s_register_operand" "0")))]
1342 "TARGET_32BIT && arm_arch6"
1343 "smlal%?\\t%Q0, %R0, %3, %2"
1344 [(set_attr "insn" "smlal")
1345 (set_attr "predicable" "yes")]
1346 )
1347
1348 ;; 32x32->64 widening multiply.
1349 ;; As with mulsi3, the only difference between the v3-5 and v6+
1350 ;; versions of these patterns is the requirement that the output not
1351 ;; overlap the inputs, but that still means we have to have a named
1352 ;; expander and two different starred insns.
1353
1354 (define_expand "mulsidi3"
1355 [(set (match_operand:DI 0 "s_register_operand" "")
1356 (mult:DI
1357 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1358 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1359 "TARGET_32BIT && arm_arch3m"
1360 ""
1361 )
1362
1363 (define_insn "*mulsidi3_nov6"
1364 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1365 (mult:DI
1366 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1367 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1368 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1369 "smull%?\\t%Q0, %R0, %1, %2"
1370 [(set_attr "insn" "smull")
1371 (set_attr "predicable" "yes")]
1372 )
1373
1374 (define_insn "*mulsidi3_v6"
1375 [(set (match_operand:DI 0 "s_register_operand" "=r")
1376 (mult:DI
1377 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1378 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1379 "TARGET_32BIT && arm_arch6"
1380 "smull%?\\t%Q0, %R0, %1, %2"
1381 [(set_attr "insn" "smull")
1382 (set_attr "predicable" "yes")]
1383 )
1384
1385 (define_expand "umulsidi3"
1386 [(set (match_operand:DI 0 "s_register_operand" "")
1387 (mult:DI
1388 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1389 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1390 "TARGET_32BIT && arm_arch3m"
1391 ""
1392 )
1393
1394 (define_insn "*umulsidi3_nov6"
1395 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1396 (mult:DI
1397 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1398 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1399 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1400 "umull%?\\t%Q0, %R0, %1, %2"
1401 [(set_attr "insn" "umull")
1402 (set_attr "predicable" "yes")]
1403 )
1404
1405 (define_insn "*umulsidi3_v6"
1406 [(set (match_operand:DI 0 "s_register_operand" "=r")
1407 (mult:DI
1408 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1409 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1410 "TARGET_32BIT && arm_arch6"
1411 "umull%?\\t%Q0, %R0, %1, %2"
1412 [(set_attr "insn" "umull")
1413 (set_attr "predicable" "yes")]
1414 )
1415
1416 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1417
1418 (define_insn "*umulsidi3adddi"
1419 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1420 (plus:DI
1421 (mult:DI
1422 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1423 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1424 (match_operand:DI 1 "s_register_operand" "0")))]
1425 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1426 "umlal%?\\t%Q0, %R0, %3, %2"
1427 [(set_attr "insn" "umlal")
1428 (set_attr "predicable" "yes")]
1429 )
1430
1431 (define_insn "*umulsidi3adddi_v6"
1432 [(set (match_operand:DI 0 "s_register_operand" "=r")
1433 (plus:DI
1434 (mult:DI
1435 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1436 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1437 (match_operand:DI 1 "s_register_operand" "0")))]
1438 "TARGET_32BIT && arm_arch6"
1439 "umlal%?\\t%Q0, %R0, %3, %2"
1440 [(set_attr "insn" "umlal")
1441 (set_attr "predicable" "yes")]
1442 )
1443
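;; The highpart expanders below produce only the upper 32 bits of the 64-bit
;; product; the matching smull/umull instructions still have to write the low
;; half somewhere, so it goes into the scratch operand and is then discarded.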
1444 (define_expand "smulsi3_highpart"
1445 [(parallel
1446 [(set (match_operand:SI 0 "s_register_operand" "")
1447 (truncate:SI
1448 (lshiftrt:DI
1449 (mult:DI
1450 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1451 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1452 (const_int 32))))
1453 (clobber (match_scratch:SI 3 ""))])]
1454 "TARGET_32BIT && arm_arch3m"
1455 ""
1456 )
1457
1458 (define_insn "*smulsi3_highpart_nov6"
1459 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1460 (truncate:SI
1461 (lshiftrt:DI
1462 (mult:DI
1463 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1464 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1465 (const_int 32))))
1466 (clobber (match_scratch:SI 3 "=&r,&r"))]
1467 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1468 "smull%?\\t%3, %0, %2, %1"
1469 [(set_attr "insn" "smull")
1470 (set_attr "predicable" "yes")]
1471 )
1472
1473 (define_insn "*smulsi3_highpart_v6"
1474 [(set (match_operand:SI 0 "s_register_operand" "=r")
1475 (truncate:SI
1476 (lshiftrt:DI
1477 (mult:DI
1478 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1479 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1480 (const_int 32))))
1481 (clobber (match_scratch:SI 3 "=r"))]
1482 "TARGET_32BIT && arm_arch6"
1483 "smull%?\\t%3, %0, %2, %1"
1484 [(set_attr "insn" "smull")
1485 (set_attr "predicable" "yes")]
1486 )
1487
1488 (define_expand "umulsi3_highpart"
1489 [(parallel
1490 [(set (match_operand:SI 0 "s_register_operand" "")
1491 (truncate:SI
1492 (lshiftrt:DI
1493 (mult:DI
1494 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1495 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1496 (const_int 32))))
1497 (clobber (match_scratch:SI 3 ""))])]
1498 "TARGET_32BIT && arm_arch3m"
1499 ""
1500 )
1501
1502 (define_insn "*umulsi3_highpart_nov6"
1503 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1504 (truncate:SI
1505 (lshiftrt:DI
1506 (mult:DI
1507 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1508 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1509 (const_int 32))))
1510 (clobber (match_scratch:SI 3 "=&r,&r"))]
1511 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1512 "umull%?\\t%3, %0, %2, %1"
1513 [(set_attr "insn" "umull")
1514 (set_attr "predicable" "yes")]
1515 )
1516
1517 (define_insn "*umulsi3_highpart_v6"
1518 [(set (match_operand:SI 0 "s_register_operand" "=r")
1519 (truncate:SI
1520 (lshiftrt:DI
1521 (mult:DI
1522 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1523 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1524 (const_int 32))))
1525 (clobber (match_scratch:SI 3 "=r"))]
1526 "TARGET_32BIT && arm_arch6"
1527 "umull%?\\t%3, %0, %2, %1"
1528 [(set_attr "insn" "umull")
1529 (set_attr "predicable" "yes")]
1530 )
1531
1532 (define_insn "mulhisi3"
1533 [(set (match_operand:SI 0 "s_register_operand" "=r")
1534 (mult:SI (sign_extend:SI
1535 (match_operand:HI 1 "s_register_operand" "%r"))
1536 (sign_extend:SI
1537 (match_operand:HI 2 "s_register_operand" "r"))))]
1538 "TARGET_DSP_MULTIPLY"
1539 "smulbb%?\\t%0, %1, %2"
1540 [(set_attr "insn" "smulxy")
1541 (set_attr "predicable" "yes")]
1542 )
1543
1544 (define_insn "*mulhisi3tb"
1545 [(set (match_operand:SI 0 "s_register_operand" "=r")
1546 (mult:SI (ashiftrt:SI
1547 (match_operand:SI 1 "s_register_operand" "r")
1548 (const_int 16))
1549 (sign_extend:SI
1550 (match_operand:HI 2 "s_register_operand" "r"))))]
1551 "TARGET_DSP_MULTIPLY"
1552 "smultb%?\\t%0, %1, %2"
1553 [(set_attr "insn" "smulxy")
1554 (set_attr "predicable" "yes")]
1555 )
1556
1557 (define_insn "*mulhisi3bt"
1558 [(set (match_operand:SI 0 "s_register_operand" "=r")
1559 (mult:SI (sign_extend:SI
1560 (match_operand:HI 1 "s_register_operand" "r"))
1561 (ashiftrt:SI
1562 (match_operand:SI 2 "s_register_operand" "r")
1563 (const_int 16))))]
1564 "TARGET_DSP_MULTIPLY"
1565 "smulbt%?\\t%0, %1, %2"
1566 [(set_attr "insn" "smulxy")
1567 (set_attr "predicable" "yes")]
1568 )
1569
1570 (define_insn "*mulhisi3tt"
1571 [(set (match_operand:SI 0 "s_register_operand" "=r")
1572 (mult:SI (ashiftrt:SI
1573 (match_operand:SI 1 "s_register_operand" "r")
1574 (const_int 16))
1575 (ashiftrt:SI
1576 (match_operand:SI 2 "s_register_operand" "r")
1577 (const_int 16))))]
1578 "TARGET_DSP_MULTIPLY"
1579 "smultt%?\\t%0, %1, %2"
1580 [(set_attr "insn" "smulxy")
1581 (set_attr "predicable" "yes")]
1582 )
1583
1584 (define_insn "*mulhisi3addsi"
1585 [(set (match_operand:SI 0 "s_register_operand" "=r")
1586 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1587 (mult:SI (sign_extend:SI
1588 (match_operand:HI 2 "s_register_operand" "%r"))
1589 (sign_extend:SI
1590 (match_operand:HI 3 "s_register_operand" "r")))))]
1591 "TARGET_DSP_MULTIPLY"
1592 "smlabb%?\\t%0, %2, %3, %1"
1593 [(set_attr "insn" "smlaxy")
1594 (set_attr "predicable" "yes")]
1595 )
1596
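;; Illustrative note (not from the original file): adding an accumulator,
;; as in
;;
;;   int mac16 (int acc, short *p, short *q)
;;   {
;;     return acc + p[0] * q[0];
;;   }
;;
;; would typically be matched by *mulhisi3addsi above and become a single
;; "smlabb".
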
1597 (define_insn "*mulhidi3adddi"
1598 [(set (match_operand:DI 0 "s_register_operand" "=r")
1599 (plus:DI
1600 (match_operand:DI 1 "s_register_operand" "0")
1601 (mult:DI (sign_extend:DI
1602 (match_operand:HI 2 "s_register_operand" "%r"))
1603 (sign_extend:DI
1604 (match_operand:HI 3 "s_register_operand" "r")))))]
1605 "TARGET_DSP_MULTIPLY"
1606 "smlalbb%?\\t%Q0, %R0, %2, %3"
1607 [(set_attr "insn" "smlalxy")
1608 (set_attr "predicable" "yes")])
1609
1610 (define_expand "mulsf3"
1611 [(set (match_operand:SF 0 "s_register_operand" "")
1612 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1613 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1614 "TARGET_32BIT && TARGET_HARD_FLOAT"
1615 "
1616 if (TARGET_MAVERICK
1617 && !cirrus_fp_register (operands[2], SFmode))
1618 operands[2] = force_reg (SFmode, operands[2]);
1619 ")
1620
1621 (define_expand "muldf3"
1622 [(set (match_operand:DF 0 "s_register_operand" "")
1623 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1624 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1625 "TARGET_32BIT && TARGET_HARD_FLOAT"
1626 "
1627 if (TARGET_MAVERICK
1628 && !cirrus_fp_register (operands[2], DFmode))
1629 operands[2] = force_reg (DFmode, operands[2]);
1630 ")
1631 \f
1632 ;; Division insns
1633
1634 (define_expand "divsf3"
1635 [(set (match_operand:SF 0 "s_register_operand" "")
1636 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1637 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1638 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1639 "")
1640
1641 (define_expand "divdf3"
1642 [(set (match_operand:DF 0 "s_register_operand" "")
1643 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1644 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1645 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1646 "")
1647 \f
1648 ;; Modulo insns
1649
1650 (define_expand "modsf3"
1651 [(set (match_operand:SF 0 "s_register_operand" "")
1652 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1653 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1654 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1655 "")
1656
1657 (define_expand "moddf3"
1658 [(set (match_operand:DF 0 "s_register_operand" "")
1659 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1660 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1661 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1662 "")
1663 \f
1664 ;; Boolean and, ior, xor insns
1665
1666 ;; Split up double word logical operations
1667
1668 ;; Split up simple DImode logical operations. Simply perform the logical
1669 ;; operation on the upper and lower halves of the registers.
1670 (define_split
1671 [(set (match_operand:DI 0 "s_register_operand" "")
1672 (match_operator:DI 6 "logical_binary_operator"
1673 [(match_operand:DI 1 "s_register_operand" "")
1674 (match_operand:DI 2 "s_register_operand" "")]))]
1675 "TARGET_32BIT && reload_completed
1676 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1677 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1678 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1679 "
1680 {
1681 operands[3] = gen_highpart (SImode, operands[0]);
1682 operands[0] = gen_lowpart (SImode, operands[0]);
1683 operands[4] = gen_highpart (SImode, operands[1]);
1684 operands[1] = gen_lowpart (SImode, operands[1]);
1685 operands[5] = gen_highpart (SImode, operands[2]);
1686 operands[2] = gen_lowpart (SImode, operands[2]);
1687 }"
1688 )
1689
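;; Illustrative note (not from the original file): assuming TARGET_32BIT
;; and core (non-iWMMXt) registers, a double-word logical operation such as
;;
;;   long long and64 (long long a, long long b)
;;   {
;;     return a & b;
;;   }
;;
;; is kept as a single DImode pattern until after reload and is then
;; broken up by the split above into an AND of the low words plus an AND
;; of the high words, i.e. two 32-bit instructions.
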
1690 (define_split
1691 [(set (match_operand:DI 0 "s_register_operand" "")
1692 (match_operator:DI 6 "logical_binary_operator"
1693 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1694 (match_operand:DI 1 "s_register_operand" "")]))]
1695 "TARGET_32BIT && reload_completed"
1696 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1697 (set (match_dup 3) (match_op_dup:SI 6
1698 [(ashiftrt:SI (match_dup 2) (const_int 31))
1699 (match_dup 4)]))]
1700 "
1701 {
1702 operands[3] = gen_highpart (SImode, operands[0]);
1703 operands[0] = gen_lowpart (SImode, operands[0]);
1704 operands[4] = gen_highpart (SImode, operands[1]);
1705 operands[1] = gen_lowpart (SImode, operands[1]);
1706 operands[5] = gen_highpart (SImode, operands[2]);
1707 operands[2] = gen_lowpart (SImode, operands[2]);
1708 }"
1709 )
1710
1711 ;; The zero extend of operand 2 means we can just copy the high part of
1712 ;; operand1 into operand0.
1713 (define_split
1714 [(set (match_operand:DI 0 "s_register_operand" "")
1715 (ior:DI
1716 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1717 (match_operand:DI 1 "s_register_operand" "")))]
1718 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1719 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1720 (set (match_dup 3) (match_dup 4))]
1721 "
1722 {
1723 operands[4] = gen_highpart (SImode, operands[1]);
1724 operands[3] = gen_highpart (SImode, operands[0]);
1725 operands[0] = gen_lowpart (SImode, operands[0]);
1726 operands[1] = gen_lowpart (SImode, operands[1]);
1727 }"
1728 )
1729
1730 ;; The zero extend of operand 2 means we can just copy the high part of
1731 ;; operand1 into operand0.
1732 (define_split
1733 [(set (match_operand:DI 0 "s_register_operand" "")
1734 (xor:DI
1735 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1736 (match_operand:DI 1 "s_register_operand" "")))]
1737 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1738 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1739 (set (match_dup 3) (match_dup 4))]
1740 "
1741 {
1742 operands[4] = gen_highpart (SImode, operands[1]);
1743 operands[3] = gen_highpart (SImode, operands[0]);
1744 operands[0] = gen_lowpart (SImode, operands[0]);
1745 operands[1] = gen_lowpart (SImode, operands[1]);
1746 }"
1747 )
1748
1749 (define_insn "anddi3"
1750 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1751 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1752 (match_operand:DI 2 "s_register_operand" "r,r")))]
1753 "TARGET_32BIT && ! TARGET_IWMMXT"
1754 "#"
1755 [(set_attr "length" "8")]
1756 )
1757
1758 (define_insn_and_split "*anddi_zesidi_di"
1759 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1760 (and:DI (zero_extend:DI
1761 (match_operand:SI 2 "s_register_operand" "r,r"))
1762 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1763 "TARGET_32BIT"
1764 "#"
1765 "TARGET_32BIT && reload_completed"
1766 ; The zero extend of operand 2 clears the high word of the output
1767 ; operand.
1768 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1769 (set (match_dup 3) (const_int 0))]
1770 "
1771 {
1772 operands[3] = gen_highpart (SImode, operands[0]);
1773 operands[0] = gen_lowpart (SImode, operands[0]);
1774 operands[1] = gen_lowpart (SImode, operands[1]);
1775 }"
1776 [(set_attr "length" "8")]
1777 )
1778
1779 (define_insn "*anddi_sesdi_di"
1780 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1781 (and:DI (sign_extend:DI
1782 (match_operand:SI 2 "s_register_operand" "r,r"))
1783 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1784 "TARGET_32BIT"
1785 "#"
1786 [(set_attr "length" "8")]
1787 )
1788
1789 (define_expand "andsi3"
1790 [(set (match_operand:SI 0 "s_register_operand" "")
1791 (and:SI (match_operand:SI 1 "s_register_operand" "")
1792 (match_operand:SI 2 "reg_or_int_operand" "")))]
1793 "TARGET_EITHER"
1794 "
1795 if (TARGET_32BIT)
1796 {
1797 if (GET_CODE (operands[2]) == CONST_INT)
1798 {
1799 arm_split_constant (AND, SImode, NULL_RTX,
1800 INTVAL (operands[2]), operands[0],
1801 operands[1], optimize && can_create_pseudo_p ());
1802
1803 DONE;
1804 }
1805 }
1806 else /* TARGET_THUMB1 */
1807 {
1808 if (GET_CODE (operands[2]) != CONST_INT)
1809 operands[2] = force_reg (SImode, operands[2]);
1810 else
1811 {
1812 int i;
1813
1814 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1815 {
1816 operands[2] = force_reg (SImode,
1817 GEN_INT (~INTVAL (operands[2])));
1818
1819 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1820
1821 DONE;
1822 }
1823
1824 for (i = 9; i <= 31; i++)
1825 {
1826 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1827 {
1828 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1829 const0_rtx));
1830 DONE;
1831 }
1832 else if ((((HOST_WIDE_INT) 1) << i) - 1
1833 == ~INTVAL (operands[2]))
1834 {
1835 rtx shift = GEN_INT (i);
1836 rtx reg = gen_reg_rtx (SImode);
1837
1838 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1839 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1840
1841 DONE;
1842 }
1843 }
1844
1845 operands[2] = force_reg (SImode, operands[2]);
1846 }
1847 }
1848 "
1849 )
1850
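;; Illustrative note (not from the original sources; the constants are
;; examples): on Thumb-1 the expander above picks a strategy from the
;; constant itself.  For instance
;;
;;   unsigned int mask (unsigned int x)
;;   {
;;     return x & 0xFFFFFC00;
;;   }
;;
;; has ~0xFFFFFC00 == 0x3FF == (1 << 10) - 1, so it is emitted as LSR #10
;; followed by LSL #10; a constant whose complement fits in eight bits
;; (e.g. x & 0xFFFFFF0F) instead loads the complement and uses BIC, a
;; low-bit mask such as x & 0x3FF goes through extzv as a shift pair, and
;; anything else forces the constant into a register for a plain AND.
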
1851 ; ??? Check split length for Thumb-2
1852 (define_insn_and_split "*arm_andsi3_insn"
1853 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1854 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1855 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1856 "TARGET_32BIT"
1857 "@
1858 and%?\\t%0, %1, %2
1859 bic%?\\t%0, %1, #%B2
1860 #"
1861 "TARGET_32BIT
1862 && GET_CODE (operands[2]) == CONST_INT
1863 && !(const_ok_for_arm (INTVAL (operands[2]))
1864 || const_ok_for_arm (~INTVAL (operands[2])))"
1865 [(clobber (const_int 0))]
1866 "
1867 arm_split_constant (AND, SImode, curr_insn,
1868 INTVAL (operands[2]), operands[0], operands[1], 0);
1869 DONE;
1870 "
1871 [(set_attr "length" "4,4,16")
1872 (set_attr "predicable" "yes")]
1873 )
1874
1875 (define_insn "*thumb1_andsi3_insn"
1876 [(set (match_operand:SI 0 "register_operand" "=l")
1877 (and:SI (match_operand:SI 1 "register_operand" "%0")
1878 (match_operand:SI 2 "register_operand" "l")))]
1879 "TARGET_THUMB1"
1880 "and\\t%0, %0, %2"
1881 [(set_attr "length" "2")]
1882 )
1883
1884 (define_insn "*andsi3_compare0"
1885 [(set (reg:CC_NOOV CC_REGNUM)
1886 (compare:CC_NOOV
1887 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1888 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1889 (const_int 0)))
1890 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1891 (and:SI (match_dup 1) (match_dup 2)))]
1892 "TARGET_32BIT"
1893 "@
1894 and%.\\t%0, %1, %2
1895 bic%.\\t%0, %1, #%B2"
1896 [(set_attr "conds" "set")]
1897 )
1898
1899 (define_insn "*andsi3_compare0_scratch"
1900 [(set (reg:CC_NOOV CC_REGNUM)
1901 (compare:CC_NOOV
1902 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1903 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1904 (const_int 0)))
1905 (clobber (match_scratch:SI 2 "=X,r"))]
1906 "TARGET_32BIT"
1907 "@
1908 tst%?\\t%0, %1
1909 bic%.\\t%2, %0, #%B1"
1910 [(set_attr "conds" "set")]
1911 )
1912
1913 (define_insn "*zeroextractsi_compare0_scratch"
1914 [(set (reg:CC_NOOV CC_REGNUM)
1915 (compare:CC_NOOV (zero_extract:SI
1916 (match_operand:SI 0 "s_register_operand" "r")
1917 (match_operand 1 "const_int_operand" "n")
1918 (match_operand 2 "const_int_operand" "n"))
1919 (const_int 0)))]
1920 "TARGET_32BIT
1921 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1922 && INTVAL (operands[1]) > 0
1923 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1924 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1925 "*
1926 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1927 << INTVAL (operands[2]));
1928 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1929 return \"\";
1930 "
1931 [(set_attr "conds" "set")]
1932 )
1933
1934 (define_insn_and_split "*ne_zeroextractsi"
1935 [(set (match_operand:SI 0 "s_register_operand" "=r")
1936 (ne:SI (zero_extract:SI
1937 (match_operand:SI 1 "s_register_operand" "r")
1938 (match_operand:SI 2 "const_int_operand" "n")
1939 (match_operand:SI 3 "const_int_operand" "n"))
1940 (const_int 0)))
1941 (clobber (reg:CC CC_REGNUM))]
1942 "TARGET_32BIT
1943 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1944 && INTVAL (operands[2]) > 0
1945 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1946 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1947 "#"
1948 "TARGET_32BIT
1949 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1950 && INTVAL (operands[2]) > 0
1951 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1952 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1953 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1954 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1955 (const_int 0)))
1956 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1957 (set (match_dup 0)
1958 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1959 (match_dup 0) (const_int 1)))]
1960 "
1961 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1962 << INTVAL (operands[3]));
1963 "
1964 [(set_attr "conds" "clob")
1965 (set (attr "length")
1966 (if_then_else (eq_attr "is_thumb" "yes")
1967 (const_int 12)
1968 (const_int 8)))]
1969 )
1970
1971 (define_insn_and_split "*ne_zeroextractsi_shifted"
1972 [(set (match_operand:SI 0 "s_register_operand" "=r")
1973 (ne:SI (zero_extract:SI
1974 (match_operand:SI 1 "s_register_operand" "r")
1975 (match_operand:SI 2 "const_int_operand" "n")
1976 (const_int 0))
1977 (const_int 0)))
1978 (clobber (reg:CC CC_REGNUM))]
1979 "TARGET_ARM"
1980 "#"
1981 "TARGET_ARM"
1982 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1983 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1984 (const_int 0)))
1985 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1986 (set (match_dup 0)
1987 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1988 (match_dup 0) (const_int 1)))]
1989 "
1990 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1991 "
1992 [(set_attr "conds" "clob")
1993 (set_attr "length" "8")]
1994 )
1995
1996 (define_insn_and_split "*ite_ne_zeroextractsi"
1997 [(set (match_operand:SI 0 "s_register_operand" "=r")
1998 (if_then_else:SI (ne (zero_extract:SI
1999 (match_operand:SI 1 "s_register_operand" "r")
2000 (match_operand:SI 2 "const_int_operand" "n")
2001 (match_operand:SI 3 "const_int_operand" "n"))
2002 (const_int 0))
2003 (match_operand:SI 4 "arm_not_operand" "rIK")
2004 (const_int 0)))
2005 (clobber (reg:CC CC_REGNUM))]
2006 "TARGET_ARM
2007 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2008 && INTVAL (operands[2]) > 0
2009 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2010 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2011 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2012 "#"
2013 "TARGET_ARM
2014 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2015 && INTVAL (operands[2]) > 0
2016 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2017 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2018 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2019 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2020 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2021 (const_int 0)))
2022 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2023 (set (match_dup 0)
2024 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2025 (match_dup 0) (match_dup 4)))]
2026 "
2027 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2028 << INTVAL (operands[3]));
2029 "
2030 [(set_attr "conds" "clob")
2031 (set_attr "length" "8")]
2032 )
2033
2034 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2035 [(set (match_operand:SI 0 "s_register_operand" "=r")
2036 (if_then_else:SI (ne (zero_extract:SI
2037 (match_operand:SI 1 "s_register_operand" "r")
2038 (match_operand:SI 2 "const_int_operand" "n")
2039 (const_int 0))
2040 (const_int 0))
2041 (match_operand:SI 3 "arm_not_operand" "rIK")
2042 (const_int 0)))
2043 (clobber (reg:CC CC_REGNUM))]
2044 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2045 "#"
2046 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2047 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2048 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2049 (const_int 0)))
2050 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2051 (set (match_dup 0)
2052 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2053 (match_dup 0) (match_dup 3)))]
2054 "
2055 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2056 "
2057 [(set_attr "conds" "clob")
2058 (set_attr "length" "8")]
2059 )
2060
2061 (define_split
2062 [(set (match_operand:SI 0 "s_register_operand" "")
2063 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2064 (match_operand:SI 2 "const_int_operand" "")
2065 (match_operand:SI 3 "const_int_operand" "")))
2066 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2067 "TARGET_THUMB1"
2068 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2069 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2070 "{
2071 HOST_WIDE_INT temp = INTVAL (operands[2]);
2072
2073 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2074 operands[3] = GEN_INT (32 - temp);
2075 }"
2076 )
2077
2078 ;; ??? Use the Thumb-2 bitfield insert/extract instructions where available.
2079 (define_split
2080 [(set (match_operand:SI 0 "s_register_operand" "")
2081 (match_operator:SI 1 "shiftable_operator"
2082 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2083 (match_operand:SI 3 "const_int_operand" "")
2084 (match_operand:SI 4 "const_int_operand" ""))
2085 (match_operand:SI 5 "s_register_operand" "")]))
2086 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2087 "TARGET_ARM"
2088 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2089 (set (match_dup 0)
2090 (match_op_dup 1
2091 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2092 (match_dup 5)]))]
2093 "{
2094 HOST_WIDE_INT temp = INTVAL (operands[3]);
2095
2096 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2097 operands[4] = GEN_INT (32 - temp);
2098 }"
2099 )
2100
2101 (define_split
2102 [(set (match_operand:SI 0 "s_register_operand" "")
2103 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2104 (match_operand:SI 2 "const_int_operand" "")
2105 (match_operand:SI 3 "const_int_operand" "")))]
2106 "TARGET_THUMB1"
2107 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2108 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2109 "{
2110 HOST_WIDE_INT temp = INTVAL (operands[2]);
2111
2112 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2113 operands[3] = GEN_INT (32 - temp);
2114 }"
2115 )
2116
2117 (define_split
2118 [(set (match_operand:SI 0 "s_register_operand" "")
2119 (match_operator:SI 1 "shiftable_operator"
2120 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2121 (match_operand:SI 3 "const_int_operand" "")
2122 (match_operand:SI 4 "const_int_operand" ""))
2123 (match_operand:SI 5 "s_register_operand" "")]))
2124 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2125 "TARGET_ARM"
2126 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2127 (set (match_dup 0)
2128 (match_op_dup 1
2129 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2130 (match_dup 5)]))]
2131 "{
2132 HOST_WIDE_INT temp = INTVAL (operands[3]);
2133
2134 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2135 operands[4] = GEN_INT (32 - temp);
2136 }"
2137 )
2138
2139 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2140 ;;; represented by the bitfield, then this will produce incorrect results.
2141 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2142 ;;; which have a real bit-field insert instruction, the truncation happens
2143 ;;; in the bit-field insert instruction itself. Since arm does not have a
2144 ;;; bit-field insert instruction, we would have to emit code here to truncate
2145 ;;; the value before we insert. This loses some of the advantage of having
2146 ;;; this insv pattern, so it needs to be reevaluated.
2147
2148 (define_expand "insv"
2149 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2150 (match_operand:SI 1 "general_operand" "")
2151 (match_operand:SI 2 "general_operand" ""))
2152 (match_operand:SI 3 "reg_or_int_operand" ""))]
2153 "TARGET_ARM || arm_arch_thumb2"
2154 "
2155 {
2156 int start_bit = INTVAL (operands[2]);
2157 int width = INTVAL (operands[1]);
2158 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2159 rtx target, subtarget;
2160
2161 if (arm_arch_thumb2)
2162 {
2163 bool use_bfi = TRUE;
2164
2165 if (GET_CODE (operands[3]) == CONST_INT)
2166 {
2167 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2168
2169 if (val == 0)
2170 {
2171 emit_insn (gen_insv_zero (operands[0], operands[1],
2172 operands[2]));
2173 DONE;
2174 }
2175
2176 /* See if the set can be done with a single orr instruction. */
2177 if (val == mask && const_ok_for_arm (val << start_bit))
2178 use_bfi = FALSE;
2179 }
2180
2181 if (use_bfi)
2182 {
2183 if (GET_CODE (operands[3]) != REG)
2184 operands[3] = force_reg (SImode, operands[3]);
2185
2186 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2187 operands[3]));
2188 DONE;
2189 }
2190 }
2191
2192 target = operands[0];
2193 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2194 subreg as the final target. */
2195 if (GET_CODE (target) == SUBREG)
2196 {
2197 subtarget = gen_reg_rtx (SImode);
2198 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2199 < GET_MODE_SIZE (SImode))
2200 target = SUBREG_REG (target);
2201 }
2202 else
2203 subtarget = target;
2204
2205 if (GET_CODE (operands[3]) == CONST_INT)
2206 {
2207 /* Since we are inserting a known constant, we may be able to
2208 reduce the number of bits that we have to clear so that
2209 the mask becomes simple. */
2210 /* ??? This code does not check to see if the new mask is actually
2211 simpler. It may not be. */
2212 rtx op1 = gen_reg_rtx (SImode);
2213 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2214 start of this pattern. */
2215 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2216 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2217
2218 emit_insn (gen_andsi3 (op1, operands[0],
2219 gen_int_mode (~mask2, SImode)));
2220 emit_insn (gen_iorsi3 (subtarget, op1,
2221 gen_int_mode (op3_value << start_bit, SImode)));
2222 }
2223 else if (start_bit == 0
2224 && !(const_ok_for_arm (mask)
2225 || const_ok_for_arm (~mask)))
2226 {
2227 	  /* A trick: since we are setting the bottom bits in the word,
2228 we can shift operand[3] up, operand[0] down, OR them together
2229 and rotate the result back again. This takes 3 insns, and
2230 the third might be mergeable into another op. */
2231 /* The shift up copes with the possibility that operand[3] is
2232 wider than the bitfield. */
2233 rtx op0 = gen_reg_rtx (SImode);
2234 rtx op1 = gen_reg_rtx (SImode);
2235
2236 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2237 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2238 emit_insn (gen_iorsi3 (op1, op1, op0));
2239 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2240 }
2241 else if ((width + start_bit == 32)
2242 && !(const_ok_for_arm (mask)
2243 || const_ok_for_arm (~mask)))
2244 {
2245 /* Similar trick, but slightly less efficient. */
2246
2247 rtx op0 = gen_reg_rtx (SImode);
2248 rtx op1 = gen_reg_rtx (SImode);
2249
2250 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2251 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2252 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2253 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2254 }
2255 else
2256 {
2257 rtx op0 = gen_int_mode (mask, SImode);
2258 rtx op1 = gen_reg_rtx (SImode);
2259 rtx op2 = gen_reg_rtx (SImode);
2260
2261 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2262 {
2263 rtx tmp = gen_reg_rtx (SImode);
2264
2265 emit_insn (gen_movsi (tmp, op0));
2266 op0 = tmp;
2267 }
2268
2269 /* Mask out any bits in operand[3] that are not needed. */
2270 emit_insn (gen_andsi3 (op1, operands[3], op0));
2271
2272 if (GET_CODE (op0) == CONST_INT
2273 && (const_ok_for_arm (mask << start_bit)
2274 || const_ok_for_arm (~(mask << start_bit))))
2275 {
2276 op0 = gen_int_mode (~(mask << start_bit), SImode);
2277 emit_insn (gen_andsi3 (op2, operands[0], op0));
2278 }
2279 else
2280 {
2281 if (GET_CODE (op0) == CONST_INT)
2282 {
2283 rtx tmp = gen_reg_rtx (SImode);
2284
2285 emit_insn (gen_movsi (tmp, op0));
2286 op0 = tmp;
2287 }
2288
2289 if (start_bit != 0)
2290 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2291
2292 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2293 }
2294
2295 if (start_bit != 0)
2296 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2297
2298 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2299 }
2300
2301 if (subtarget != target)
2302 {
2303 /* If TARGET is still a SUBREG, then it must be wider than a word,
2304 so we must be careful only to set the subword we were asked to. */
2305 if (GET_CODE (target) == SUBREG)
2306 emit_move_insn (target, subtarget);
2307 else
2308 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2309 }
2310
2311 DONE;
2312 }"
2313 )
2314
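;; Illustrative note (not from the original sources; the struct is made
;; up): a bit-field store such as
;;
;;   struct s { unsigned int lo : 4, mid : 12, hi : 16; };
;;
;;   void set_mid (struct s *p, unsigned int v)
;;   {
;;     p->mid = v;
;;   }
;;
;; would typically go through the insv expander above: with
;; arm_arch_thumb2 the field is written with a single "bfi" (or "bfc" when
;; the value is known to be zero), while plain ARM falls back to the
;; mask-and-or sequences built in the C code of the expander.
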
2315 (define_insn "insv_zero"
2316 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2317 (match_operand:SI 1 "const_int_operand" "M")
2318 (match_operand:SI 2 "const_int_operand" "M"))
2319 (const_int 0))]
2320 "arm_arch_thumb2"
2321 "bfc%?\t%0, %2, %1"
2322 [(set_attr "length" "4")
2323 (set_attr "predicable" "yes")]
2324 )
2325
2326 (define_insn "insv_t2"
2327 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2328 (match_operand:SI 1 "const_int_operand" "M")
2329 (match_operand:SI 2 "const_int_operand" "M"))
2330 (match_operand:SI 3 "s_register_operand" "r"))]
2331 "arm_arch_thumb2"
2332 "bfi%?\t%0, %3, %2, %1"
2333 [(set_attr "length" "4")
2334 (set_attr "predicable" "yes")]
2335 )
2336
2337 ; Constants for op 2 will never be given to these patterns.
2338 (define_insn_and_split "*anddi_notdi_di"
2339 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2340 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2341 (match_operand:DI 2 "s_register_operand" "0,r")))]
2342 "TARGET_32BIT"
2343 "#"
2344 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2345 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2346 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2347 "
2348 {
2349 operands[3] = gen_highpart (SImode, operands[0]);
2350 operands[0] = gen_lowpart (SImode, operands[0]);
2351 operands[4] = gen_highpart (SImode, operands[1]);
2352 operands[1] = gen_lowpart (SImode, operands[1]);
2353 operands[5] = gen_highpart (SImode, operands[2]);
2354 operands[2] = gen_lowpart (SImode, operands[2]);
2355 }"
2356 [(set_attr "length" "8")
2357 (set_attr "predicable" "yes")]
2358 )
2359
2360 (define_insn_and_split "*anddi_notzesidi_di"
2361 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2362 (and:DI (not:DI (zero_extend:DI
2363 (match_operand:SI 2 "s_register_operand" "r,r")))
2364 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2365 "TARGET_32BIT"
2366 "@
2367 bic%?\\t%Q0, %Q1, %2
2368 #"
2369 ; (not (zero_extend ...)) allows us to just copy the high word from
2370 ; operand1 to operand0.
2371 "TARGET_32BIT
2372 && reload_completed
2373 && operands[0] != operands[1]"
2374 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2375 (set (match_dup 3) (match_dup 4))]
2376 "
2377 {
2378 operands[3] = gen_highpart (SImode, operands[0]);
2379 operands[0] = gen_lowpart (SImode, operands[0]);
2380 operands[4] = gen_highpart (SImode, operands[1]);
2381 operands[1] = gen_lowpart (SImode, operands[1]);
2382 }"
2383 [(set_attr "length" "4,8")
2384 (set_attr "predicable" "yes")]
2385 )
2386
2387 (define_insn_and_split "*anddi_notsesidi_di"
2388 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2389 (and:DI (not:DI (sign_extend:DI
2390 (match_operand:SI 2 "s_register_operand" "r,r")))
2391 (match_operand:DI 1 "s_register_operand" "0,r")))]
2392 "TARGET_32BIT"
2393 "#"
2394 "TARGET_32BIT && reload_completed"
2395 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2396 (set (match_dup 3) (and:SI (not:SI
2397 (ashiftrt:SI (match_dup 2) (const_int 31)))
2398 (match_dup 4)))]
2399 "
2400 {
2401 operands[3] = gen_highpart (SImode, operands[0]);
2402 operands[0] = gen_lowpart (SImode, operands[0]);
2403 operands[4] = gen_highpart (SImode, operands[1]);
2404 operands[1] = gen_lowpart (SImode, operands[1]);
2405 }"
2406 [(set_attr "length" "8")
2407 (set_attr "predicable" "yes")]
2408 )
2409
2410 (define_insn "andsi_notsi_si"
2411 [(set (match_operand:SI 0 "s_register_operand" "=r")
2412 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2413 (match_operand:SI 1 "s_register_operand" "r")))]
2414 "TARGET_32BIT"
2415 "bic%?\\t%0, %1, %2"
2416 [(set_attr "predicable" "yes")]
2417 )
2418
2419 (define_insn "bicsi3"
2420 [(set (match_operand:SI 0 "register_operand" "=l")
2421 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2422 (match_operand:SI 2 "register_operand" "0")))]
2423 "TARGET_THUMB1"
2424 "bic\\t%0, %0, %1"
2425 [(set_attr "length" "2")]
2426 )
2427
2428 (define_insn "andsi_not_shiftsi_si"
2429 [(set (match_operand:SI 0 "s_register_operand" "=r")
2430 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2431 [(match_operand:SI 2 "s_register_operand" "r")
2432 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2433 (match_operand:SI 1 "s_register_operand" "r")))]
2434 "TARGET_ARM"
2435 "bic%?\\t%0, %1, %2%S4"
2436 [(set_attr "predicable" "yes")
2437 (set_attr "shift" "2")
2438 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2439 (const_string "alu_shift")
2440 (const_string "alu_shift_reg")))]
2441 )
2442
2443 (define_insn "*andsi_notsi_si_compare0"
2444 [(set (reg:CC_NOOV CC_REGNUM)
2445 (compare:CC_NOOV
2446 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2447 (match_operand:SI 1 "s_register_operand" "r"))
2448 (const_int 0)))
2449 (set (match_operand:SI 0 "s_register_operand" "=r")
2450 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2451 "TARGET_32BIT"
2452 "bic%.\\t%0, %1, %2"
2453 [(set_attr "conds" "set")]
2454 )
2455
2456 (define_insn "*andsi_notsi_si_compare0_scratch"
2457 [(set (reg:CC_NOOV CC_REGNUM)
2458 (compare:CC_NOOV
2459 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2460 (match_operand:SI 1 "s_register_operand" "r"))
2461 (const_int 0)))
2462 (clobber (match_scratch:SI 0 "=r"))]
2463 "TARGET_32BIT"
2464 "bic%.\\t%0, %1, %2"
2465 [(set_attr "conds" "set")]
2466 )
2467
2468 (define_insn "iordi3"
2469 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2470 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2471 (match_operand:DI 2 "s_register_operand" "r,r")))]
2472 "TARGET_32BIT && ! TARGET_IWMMXT"
2473 "#"
2474 [(set_attr "length" "8")
2475 (set_attr "predicable" "yes")]
2476 )
2477
2478 (define_insn "*iordi_zesidi_di"
2479 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2480 (ior:DI (zero_extend:DI
2481 (match_operand:SI 2 "s_register_operand" "r,r"))
2482 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2483 "TARGET_32BIT"
2484 "@
2485 orr%?\\t%Q0, %Q1, %2
2486 #"
2487 [(set_attr "length" "4,8")
2488 (set_attr "predicable" "yes")]
2489 )
2490
2491 (define_insn "*iordi_sesidi_di"
2492 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2493 (ior:DI (sign_extend:DI
2494 (match_operand:SI 2 "s_register_operand" "r,r"))
2495 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2496 "TARGET_32BIT"
2497 "#"
2498 [(set_attr "length" "8")
2499 (set_attr "predicable" "yes")]
2500 )
2501
2502 (define_expand "iorsi3"
2503 [(set (match_operand:SI 0 "s_register_operand" "")
2504 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2505 (match_operand:SI 2 "reg_or_int_operand" "")))]
2506 "TARGET_EITHER"
2507 "
2508 if (GET_CODE (operands[2]) == CONST_INT)
2509 {
2510 if (TARGET_32BIT)
2511 {
2512 arm_split_constant (IOR, SImode, NULL_RTX,
2513 INTVAL (operands[2]), operands[0], operands[1],
2514 optimize && can_create_pseudo_p ());
2515 DONE;
2516 }
2517 else /* TARGET_THUMB1 */
2518 operands [2] = force_reg (SImode, operands [2]);
2519 }
2520 "
2521 )
2522
2523 (define_insn_and_split "*arm_iorsi3"
2524 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2525 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2526 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2527 "TARGET_32BIT"
2528 "@
2529 orr%?\\t%0, %1, %2
2530 #"
2531 "TARGET_32BIT
2532 && GET_CODE (operands[2]) == CONST_INT
2533 && !const_ok_for_arm (INTVAL (operands[2]))"
2534 [(clobber (const_int 0))]
2535 "
2536 arm_split_constant (IOR, SImode, curr_insn,
2537 INTVAL (operands[2]), operands[0], operands[1], 0);
2538 DONE;
2539 "
2540 [(set_attr "length" "4,16")
2541 (set_attr "predicable" "yes")]
2542 )
2543
2544 (define_insn "*thumb1_iorsi3"
2545 [(set (match_operand:SI 0 "register_operand" "=l")
2546 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2547 (match_operand:SI 2 "register_operand" "l")))]
2548 "TARGET_THUMB1"
2549 "orr\\t%0, %0, %2"
2550 [(set_attr "length" "2")]
2551 )
2552
2553 (define_peephole2
2554 [(match_scratch:SI 3 "r")
2555 (set (match_operand:SI 0 "arm_general_register_operand" "")
2556 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2557 (match_operand:SI 2 "const_int_operand" "")))]
2558 "TARGET_32BIT
2559 && !const_ok_for_arm (INTVAL (operands[2]))
2560 && const_ok_for_arm (~INTVAL (operands[2]))"
2561 [(set (match_dup 3) (match_dup 2))
2562 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2563 ""
2564 )
2565
2566 (define_insn "*iorsi3_compare0"
2567 [(set (reg:CC_NOOV CC_REGNUM)
2568 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2569 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2570 (const_int 0)))
2571 (set (match_operand:SI 0 "s_register_operand" "=r")
2572 (ior:SI (match_dup 1) (match_dup 2)))]
2573 "TARGET_32BIT"
2574 "orr%.\\t%0, %1, %2"
2575 [(set_attr "conds" "set")]
2576 )
2577
2578 (define_insn "*iorsi3_compare0_scratch"
2579 [(set (reg:CC_NOOV CC_REGNUM)
2580 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2581 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2582 (const_int 0)))
2583 (clobber (match_scratch:SI 0 "=r"))]
2584 "TARGET_32BIT"
2585 "orr%.\\t%0, %1, %2"
2586 [(set_attr "conds" "set")]
2587 )
2588
2589 (define_insn "xordi3"
2590 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2591 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2592 (match_operand:DI 2 "s_register_operand" "r,r")))]
2593 "TARGET_32BIT && !TARGET_IWMMXT"
2594 "#"
2595 [(set_attr "length" "8")
2596 (set_attr "predicable" "yes")]
2597 )
2598
2599 (define_insn "*xordi_zesidi_di"
2600 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2601 (xor:DI (zero_extend:DI
2602 (match_operand:SI 2 "s_register_operand" "r,r"))
2603 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2604 "TARGET_32BIT"
2605 "@
2606 eor%?\\t%Q0, %Q1, %2
2607 #"
2608 [(set_attr "length" "4,8")
2609 (set_attr "predicable" "yes")]
2610 )
2611
2612 (define_insn "*xordi_sesidi_di"
2613 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2614 (xor:DI (sign_extend:DI
2615 (match_operand:SI 2 "s_register_operand" "r,r"))
2616 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2617 "TARGET_32BIT"
2618 "#"
2619 [(set_attr "length" "8")
2620 (set_attr "predicable" "yes")]
2621 )
2622
2623 (define_expand "xorsi3"
2624 [(set (match_operand:SI 0 "s_register_operand" "")
2625 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2626 (match_operand:SI 2 "arm_rhs_operand" "")))]
2627 "TARGET_EITHER"
2628 "if (TARGET_THUMB1)
2629 if (GET_CODE (operands[2]) == CONST_INT)
2630 operands[2] = force_reg (SImode, operands[2]);
2631 "
2632 )
2633
2634 (define_insn "*arm_xorsi3"
2635 [(set (match_operand:SI 0 "s_register_operand" "=r")
2636 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2637 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2638 "TARGET_32BIT"
2639 "eor%?\\t%0, %1, %2"
2640 [(set_attr "predicable" "yes")]
2641 )
2642
2643 (define_insn "*thumb1_xorsi3"
2644 [(set (match_operand:SI 0 "register_operand" "=l")
2645 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2646 (match_operand:SI 2 "register_operand" "l")))]
2647 "TARGET_THUMB1"
2648 "eor\\t%0, %0, %2"
2649 [(set_attr "length" "2")]
2650 )
2651
2652 (define_insn "*xorsi3_compare0"
2653 [(set (reg:CC_NOOV CC_REGNUM)
2654 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2655 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2656 (const_int 0)))
2657 (set (match_operand:SI 0 "s_register_operand" "=r")
2658 (xor:SI (match_dup 1) (match_dup 2)))]
2659 "TARGET_32BIT"
2660 "eor%.\\t%0, %1, %2"
2661 [(set_attr "conds" "set")]
2662 )
2663
2664 (define_insn "*xorsi3_compare0_scratch"
2665 [(set (reg:CC_NOOV CC_REGNUM)
2666 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2667 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2668 (const_int 0)))]
2669 "TARGET_32BIT"
2670 "teq%?\\t%0, %1"
2671 [(set_attr "conds" "set")]
2672 )
2673
2674 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
2675 ; followed by (NOT D), we can sometimes merge the final NOT into one of the
2676 ; following insns.
2677
2678 (define_split
2679 [(set (match_operand:SI 0 "s_register_operand" "")
2680 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2681 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2682 (match_operand:SI 3 "arm_rhs_operand" "")))
2683 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2684 "TARGET_32BIT"
2685 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2686 (not:SI (match_dup 3))))
2687 (set (match_dup 0) (not:SI (match_dup 4)))]
2688 ""
2689 )
2690
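;; For instance (illustrative, not from the original file), by De Morgan
;;
;;   (~a & ~b) | c  ==  ~((a | b) & ~c)
;;
;; so for
;;
;;   int f (int a, int b, int c)
;;   {
;;     return (~a & ~b) | c;
;;   }
;;
;; the split above computes D = (a | b) & ~c (an ORR followed by a BIC)
;; and then inverts D, and that final MVN can sometimes be folded into a
;; following instruction.
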
2691 (define_insn "*andsi_iorsi3_notsi"
2692 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2693 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2694 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2695 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2696 "TARGET_32BIT"
2697 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2698 [(set_attr "length" "8")
2699 (set_attr "ce_count" "2")
2700 (set_attr "predicable" "yes")]
2701 )
2702
2703 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2704 ; insns are available?
2705 (define_split
2706 [(set (match_operand:SI 0 "s_register_operand" "")
2707 (match_operator:SI 1 "logical_binary_operator"
2708 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2709 (match_operand:SI 3 "const_int_operand" "")
2710 (match_operand:SI 4 "const_int_operand" ""))
2711 (match_operator:SI 9 "logical_binary_operator"
2712 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2713 (match_operand:SI 6 "const_int_operand" ""))
2714 (match_operand:SI 7 "s_register_operand" "")])]))
2715 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2716 "TARGET_32BIT
2717 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2718 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2719 [(set (match_dup 8)
2720 (match_op_dup 1
2721 [(ashift:SI (match_dup 2) (match_dup 4))
2722 (match_dup 5)]))
2723 (set (match_dup 0)
2724 (match_op_dup 1
2725 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2726 (match_dup 7)]))]
2727 "
2728 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2729 ")
2730
2731 (define_split
2732 [(set (match_operand:SI 0 "s_register_operand" "")
2733 (match_operator:SI 1 "logical_binary_operator"
2734 [(match_operator:SI 9 "logical_binary_operator"
2735 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2736 (match_operand:SI 6 "const_int_operand" ""))
2737 (match_operand:SI 7 "s_register_operand" "")])
2738 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2739 (match_operand:SI 3 "const_int_operand" "")
2740 (match_operand:SI 4 "const_int_operand" ""))]))
2741 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2742 "TARGET_32BIT
2743 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2744 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2745 [(set (match_dup 8)
2746 (match_op_dup 1
2747 [(ashift:SI (match_dup 2) (match_dup 4))
2748 (match_dup 5)]))
2749 (set (match_dup 0)
2750 (match_op_dup 1
2751 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2752 (match_dup 7)]))]
2753 "
2754 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2755 ")
2756
2757 (define_split
2758 [(set (match_operand:SI 0 "s_register_operand" "")
2759 (match_operator:SI 1 "logical_binary_operator"
2760 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2761 (match_operand:SI 3 "const_int_operand" "")
2762 (match_operand:SI 4 "const_int_operand" ""))
2763 (match_operator:SI 9 "logical_binary_operator"
2764 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2765 (match_operand:SI 6 "const_int_operand" ""))
2766 (match_operand:SI 7 "s_register_operand" "")])]))
2767 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2768 "TARGET_32BIT
2769 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2770 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2771 [(set (match_dup 8)
2772 (match_op_dup 1
2773 [(ashift:SI (match_dup 2) (match_dup 4))
2774 (match_dup 5)]))
2775 (set (match_dup 0)
2776 (match_op_dup 1
2777 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2778 (match_dup 7)]))]
2779 "
2780 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2781 ")
2782
2783 (define_split
2784 [(set (match_operand:SI 0 "s_register_operand" "")
2785 (match_operator:SI 1 "logical_binary_operator"
2786 [(match_operator:SI 9 "logical_binary_operator"
2787 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2788 (match_operand:SI 6 "const_int_operand" ""))
2789 (match_operand:SI 7 "s_register_operand" "")])
2790 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2791 (match_operand:SI 3 "const_int_operand" "")
2792 (match_operand:SI 4 "const_int_operand" ""))]))
2793 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2794 "TARGET_32BIT
2795 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2796 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2797 [(set (match_dup 8)
2798 (match_op_dup 1
2799 [(ashift:SI (match_dup 2) (match_dup 4))
2800 (match_dup 5)]))
2801 (set (match_dup 0)
2802 (match_op_dup 1
2803 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2804 (match_dup 7)]))]
2805 "
2806 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2807 ")
2808 \f
2809
2810 ;; Minimum and maximum insns
2811
2812 (define_expand "smaxsi3"
2813 [(parallel [
2814 (set (match_operand:SI 0 "s_register_operand" "")
2815 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2816 (match_operand:SI 2 "arm_rhs_operand" "")))
2817 (clobber (reg:CC CC_REGNUM))])]
2818 "TARGET_32BIT"
2819 "
2820 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2821 {
2822 /* No need for a clobber of the condition code register here. */
2823 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2824 gen_rtx_SMAX (SImode, operands[1],
2825 operands[2])));
2826 DONE;
2827 }
2828 ")
2829
2830 (define_insn "*smax_0"
2831 [(set (match_operand:SI 0 "s_register_operand" "=r")
2832 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2833 (const_int 0)))]
2834 "TARGET_32BIT"
2835 "bic%?\\t%0, %1, %1, asr #31"
2836 [(set_attr "predicable" "yes")]
2837 )
2838
2839 (define_insn "*smax_m1"
2840 [(set (match_operand:SI 0 "s_register_operand" "=r")
2841 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2842 (const_int -1)))]
2843 "TARGET_32BIT"
2844 "orr%?\\t%0, %1, %1, asr #31"
2845 [(set_attr "predicable" "yes")]
2846 )
2847
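;; Illustrative note (not from the original sources): for
;;
;;   int clamp_low (int x)
;;   {
;;     return x < 0 ? 0 : x;
;;   }
;;
;; the conditional is recognised as smax (x, 0), so *smax_0 above emits a
;; single "bic %0, %1, %1, asr #31"; smax (x, -1) similarly uses "orr" via
;; *smax_m1.  This is why the smaxsi3 expander omits the condition-code
;; clobber for these two constants.
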
2848 (define_insn "*arm_smax_insn"
2849 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2850 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2851 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2852 (clobber (reg:CC CC_REGNUM))]
2853 "TARGET_ARM"
2854 "@
2855 cmp\\t%1, %2\;movlt\\t%0, %2
2856 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2857 [(set_attr "conds" "clob")
2858 (set_attr "length" "8,12")]
2859 )
2860
2861 (define_expand "sminsi3"
2862 [(parallel [
2863 (set (match_operand:SI 0 "s_register_operand" "")
2864 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2865 (match_operand:SI 2 "arm_rhs_operand" "")))
2866 (clobber (reg:CC CC_REGNUM))])]
2867 "TARGET_32BIT"
2868 "
2869 if (operands[2] == const0_rtx)
2870 {
2871 /* No need for a clobber of the condition code register here. */
2872 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2873 gen_rtx_SMIN (SImode, operands[1],
2874 operands[2])));
2875 DONE;
2876 }
2877 ")
2878
2879 (define_insn "*smin_0"
2880 [(set (match_operand:SI 0 "s_register_operand" "=r")
2881 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2882 (const_int 0)))]
2883 "TARGET_32BIT"
2884 "and%?\\t%0, %1, %1, asr #31"
2885 [(set_attr "predicable" "yes")]
2886 )
2887
2888 (define_insn "*arm_smin_insn"
2889 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2890 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2891 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2892 (clobber (reg:CC CC_REGNUM))]
2893 "TARGET_ARM"
2894 "@
2895 cmp\\t%1, %2\;movge\\t%0, %2
2896 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2897 [(set_attr "conds" "clob")
2898 (set_attr "length" "8,12")]
2899 )
2900
2901 (define_expand "umaxsi3"
2902 [(parallel [
2903 (set (match_operand:SI 0 "s_register_operand" "")
2904 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2905 (match_operand:SI 2 "arm_rhs_operand" "")))
2906 (clobber (reg:CC CC_REGNUM))])]
2907 "TARGET_32BIT"
2908 ""
2909 )
2910
2911 (define_insn "*arm_umaxsi3"
2912 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2913 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2914 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2915 (clobber (reg:CC CC_REGNUM))]
2916 "TARGET_ARM"
2917 "@
2918 cmp\\t%1, %2\;movcc\\t%0, %2
2919 cmp\\t%1, %2\;movcs\\t%0, %1
2920 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2921 [(set_attr "conds" "clob")
2922 (set_attr "length" "8,8,12")]
2923 )
2924
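;; Illustrative note (not from the original sources): in ARM state
;;
;;   unsigned int umax (unsigned int a, unsigned int b)
;;   {
;;     return a > b ? a : b;
;;   }
;;
;; would typically match *arm_umaxsi3 above and become "cmp" followed by a
;; conditional move ("movcc" and/or "movcs"), the exact form depending on
;; which operand already lives in the destination register.
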
2925 (define_expand "uminsi3"
2926 [(parallel [
2927 (set (match_operand:SI 0 "s_register_operand" "")
2928 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2929 (match_operand:SI 2 "arm_rhs_operand" "")))
2930 (clobber (reg:CC CC_REGNUM))])]
2931 "TARGET_32BIT"
2932 ""
2933 )
2934
2935 (define_insn "*arm_uminsi3"
2936 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2937 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2938 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2939 (clobber (reg:CC CC_REGNUM))]
2940 "TARGET_ARM"
2941 "@
2942 cmp\\t%1, %2\;movcs\\t%0, %2
2943 cmp\\t%1, %2\;movcc\\t%0, %1
2944 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2945 [(set_attr "conds" "clob")
2946 (set_attr "length" "8,8,12")]
2947 )
2948
2949 (define_insn "*store_minmaxsi"
2950 [(set (match_operand:SI 0 "memory_operand" "=m")
2951 (match_operator:SI 3 "minmax_operator"
2952 [(match_operand:SI 1 "s_register_operand" "r")
2953 (match_operand:SI 2 "s_register_operand" "r")]))
2954 (clobber (reg:CC CC_REGNUM))]
2955 "TARGET_32BIT"
2956 "*
2957 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2958 operands[1], operands[2]);
2959 output_asm_insn (\"cmp\\t%1, %2\", operands);
2960 if (TARGET_THUMB2)
2961 output_asm_insn (\"ite\t%d3\", operands);
2962 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2963 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2964 return \"\";
2965 "
2966 [(set_attr "conds" "clob")
2967 (set (attr "length")
2968 (if_then_else (eq_attr "is_thumb" "yes")
2969 (const_int 14)
2970 (const_int 12)))
2971 (set_attr "type" "store1")]
2972 )
2973
2974 ; Reject the frame pointer in operand[1], since reloading this after
2975 ; it has been eliminated can cause carnage.
2976 (define_insn "*minmax_arithsi"
2977 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2978 (match_operator:SI 4 "shiftable_operator"
2979 [(match_operator:SI 5 "minmax_operator"
2980 [(match_operand:SI 2 "s_register_operand" "r,r")
2981 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2982 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2983 (clobber (reg:CC CC_REGNUM))]
2984 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
2985 "*
2986 {
2987 enum rtx_code code = GET_CODE (operands[4]);
2988 bool need_else;
2989
2990 if (which_alternative != 0 || operands[3] != const0_rtx
2991 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2992 need_else = true;
2993 else
2994 need_else = false;
2995
2996 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2997 operands[2], operands[3]);
2998 output_asm_insn (\"cmp\\t%2, %3\", operands);
2999 if (TARGET_THUMB2)
3000 {
3001 if (need_else)
3002 output_asm_insn (\"ite\\t%d5\", operands);
3003 else
3004 output_asm_insn (\"it\\t%d5\", operands);
3005 }
3006 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3007 if (need_else)
3008 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3009 return \"\";
3010 }"
3011 [(set_attr "conds" "clob")
3012 (set (attr "length")
3013 (if_then_else (eq_attr "is_thumb" "yes")
3014 (const_int 14)
3015 (const_int 12)))]
3016 )
3017
3018 \f
3019 ;; Shift and rotation insns
3020
3021 (define_expand "ashldi3"
3022 [(set (match_operand:DI 0 "s_register_operand" "")
3023 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3024 (match_operand:SI 2 "reg_or_int_operand" "")))]
3025 "TARGET_32BIT"
3026 "
3027 if (GET_CODE (operands[2]) == CONST_INT)
3028 {
3029 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3030 {
3031 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3032 DONE;
3033 }
3034 /* Ideally we shouldn't fail here if we could know that operands[1]
3035 ends up already living in an iwmmxt register. Otherwise it's
3036 	 cheaper to generate the alternate code than to move
3037 values to iwmmxt regs and back. */
3038 FAIL;
3039 }
3040 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3041 FAIL;
3042 "
3043 )
3044
3045 (define_insn "arm_ashldi3_1bit"
3046 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3047 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3048 (const_int 1)))
3049 (clobber (reg:CC CC_REGNUM))]
3050 "TARGET_32BIT"
3051 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3052 [(set_attr "conds" "clob")
3053 (set_attr "length" "8")]
3054 )
3055
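;; Illustrative note (not from the original sources): for
;;
;;   unsigned long long shl1 (unsigned long long x)
;;   {
;;     return x << 1;
;;   }
;;
;; the expander above uses arm_ashldi3_1bit, i.e. "movs" of the low word
;; shifted left by one followed by "adc" of the high word with itself, so
;; the bit shifted out of the low word is carried into the high word.
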
3056 (define_expand "ashlsi3"
3057 [(set (match_operand:SI 0 "s_register_operand" "")
3058 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3059 (match_operand:SI 2 "arm_rhs_operand" "")))]
3060 "TARGET_EITHER"
3061 "
3062 if (GET_CODE (operands[2]) == CONST_INT
3063 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3064 {
3065 emit_insn (gen_movsi (operands[0], const0_rtx));
3066 DONE;
3067 }
3068 "
3069 )
3070
3071 (define_insn "*thumb1_ashlsi3"
3072 [(set (match_operand:SI 0 "register_operand" "=l,l")
3073 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3074 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3075 "TARGET_THUMB1"
3076 "lsl\\t%0, %1, %2"
3077 [(set_attr "length" "2")]
3078 )
3079
3080 (define_expand "ashrdi3"
3081 [(set (match_operand:DI 0 "s_register_operand" "")
3082 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3083 (match_operand:SI 2 "reg_or_int_operand" "")))]
3084 "TARGET_32BIT"
3085 "
3086 if (GET_CODE (operands[2]) == CONST_INT)
3087 {
3088 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3089 {
3090 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3091 DONE;
3092 }
3093 /* Ideally we shouldn't fail here if we could know that operands[1]
3094 ends up already living in an iwmmxt register. Otherwise it's
3095 	 cheaper to generate the alternate code than to move
3096 values to iwmmxt regs and back. */
3097 FAIL;
3098 }
3099 else if (!TARGET_REALLY_IWMMXT)
3100 FAIL;
3101 "
3102 )
3103
3104 (define_insn "arm_ashrdi3_1bit"
3105 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3106 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3107 (const_int 1)))
3108 (clobber (reg:CC CC_REGNUM))]
3109 "TARGET_32BIT"
3110 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3111 [(set_attr "conds" "clob")
3112 (set_attr "length" "8")]
3113 )
3114
3115 (define_expand "ashrsi3"
3116 [(set (match_operand:SI 0 "s_register_operand" "")
3117 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3118 (match_operand:SI 2 "arm_rhs_operand" "")))]
3119 "TARGET_EITHER"
3120 "
3121 if (GET_CODE (operands[2]) == CONST_INT
3122 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3123 operands[2] = GEN_INT (31);
3124 "
3125 )
3126
3127 (define_insn "*thumb1_ashrsi3"
3128 [(set (match_operand:SI 0 "register_operand" "=l,l")
3129 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3130 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3131 "TARGET_THUMB1"
3132 "asr\\t%0, %1, %2"
3133 [(set_attr "length" "2")]
3134 )
3135
3136 (define_expand "lshrdi3"
3137 [(set (match_operand:DI 0 "s_register_operand" "")
3138 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3139 (match_operand:SI 2 "reg_or_int_operand" "")))]
3140 "TARGET_32BIT"
3141 "
3142 if (GET_CODE (operands[2]) == CONST_INT)
3143 {
3144 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3145 {
3146 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3147 DONE;
3148 }
3149 /* Ideally we shouldn't fail here if we could know that operands[1]
3150 ends up already living in an iwmmxt register. Otherwise it's
3151 	 cheaper to generate the alternate code than to move
3152 values to iwmmxt regs and back. */
3153 FAIL;
3154 }
3155 else if (!TARGET_REALLY_IWMMXT)
3156 FAIL;
3157 "
3158 )
3159
3160 (define_insn "arm_lshrdi3_1bit"
3161 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3162 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3163 (const_int 1)))
3164 (clobber (reg:CC CC_REGNUM))]
3165 "TARGET_32BIT"
3166 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3167 [(set_attr "conds" "clob")
3168 (set_attr "length" "8")]
3169 )
3170
3171 (define_expand "lshrsi3"
3172 [(set (match_operand:SI 0 "s_register_operand" "")
3173 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3174 (match_operand:SI 2 "arm_rhs_operand" "")))]
3175 "TARGET_EITHER"
3176 "
3177 if (GET_CODE (operands[2]) == CONST_INT
3178 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3179 {
3180 emit_insn (gen_movsi (operands[0], const0_rtx));
3181 DONE;
3182 }
3183 "
3184 )
3185
3186 (define_insn "*thumb1_lshrsi3"
3187 [(set (match_operand:SI 0 "register_operand" "=l,l")
3188 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3189 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3190 "TARGET_THUMB1"
3191 "lsr\\t%0, %1, %2"
3192 [(set_attr "length" "2")]
3193 )
3194
3195 (define_expand "rotlsi3"
3196 [(set (match_operand:SI 0 "s_register_operand" "")
3197 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3198 (match_operand:SI 2 "reg_or_int_operand" "")))]
3199 "TARGET_32BIT"
3200 "
3201 if (GET_CODE (operands[2]) == CONST_INT)
3202 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3203 else
3204 {
3205 rtx reg = gen_reg_rtx (SImode);
3206 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3207 operands[2] = reg;
3208 }
3209 "
3210 )
3211
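;; Illustrative note (not from the original sources): ARM only has a
;; rotate-right operation, so a left rotate such as
;;
;;   unsigned int rotl8 (unsigned int x)
;;   {
;;     return (x << 8) | (x >> 24);
;;   }
;;
;; is normally recognised as a rotate and rewritten by the expander above
;; as a rotate right by 24; for a variable amount, 32 minus the amount is
;; computed into a register first.
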
3212 (define_expand "rotrsi3"
3213 [(set (match_operand:SI 0 "s_register_operand" "")
3214 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3215 (match_operand:SI 2 "arm_rhs_operand" "")))]
3216 "TARGET_EITHER"
3217 "
3218 if (TARGET_32BIT)
3219 {
3220 if (GET_CODE (operands[2]) == CONST_INT
3221 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3222 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3223 }
3224 else /* TARGET_THUMB1 */
3225 {
3226 if (GET_CODE (operands [2]) == CONST_INT)
3227 operands [2] = force_reg (SImode, operands[2]);
3228 }
3229 "
3230 )
3231
3232 (define_insn "*thumb1_rotrsi3"
3233 [(set (match_operand:SI 0 "register_operand" "=l")
3234 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3235 (match_operand:SI 2 "register_operand" "l")))]
3236 "TARGET_THUMB1"
3237 "ror\\t%0, %0, %2"
3238 [(set_attr "length" "2")]
3239 )
3240
3241 (define_insn "*arm_shiftsi3"
3242 [(set (match_operand:SI 0 "s_register_operand" "=r")
3243 (match_operator:SI 3 "shift_operator"
3244 [(match_operand:SI 1 "s_register_operand" "r")
3245 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3246 "TARGET_32BIT"
3247 "* return arm_output_shift(operands, 0);"
3248 [(set_attr "predicable" "yes")
3249 (set_attr "shift" "1")
3250 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3251 (const_string "alu_shift")
3252 (const_string "alu_shift_reg")))]
3253 )
3254
3255 (define_insn "*shiftsi3_compare0"
3256 [(set (reg:CC_NOOV CC_REGNUM)
3257 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3258 [(match_operand:SI 1 "s_register_operand" "r")
3259 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3260 (const_int 0)))
3261 (set (match_operand:SI 0 "s_register_operand" "=r")
3262 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3263 "TARGET_32BIT"
3264 "* return arm_output_shift(operands, 1);"
3265 [(set_attr "conds" "set")
3266 (set_attr "shift" "1")
3267 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3268 (const_string "alu_shift")
3269 (const_string "alu_shift_reg")))]
3270 )
3271
3272 (define_insn "*shiftsi3_compare0_scratch"
3273 [(set (reg:CC_NOOV CC_REGNUM)
3274 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3275 [(match_operand:SI 1 "s_register_operand" "r")
3276 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3277 (const_int 0)))
3278 (clobber (match_scratch:SI 0 "=r"))]
3279 "TARGET_32BIT"
3280 "* return arm_output_shift(operands, 1);"
3281 [(set_attr "conds" "set")
3282 (set_attr "shift" "1")]
3283 )
3284
3285 (define_insn "*arm_notsi_shiftsi"
3286 [(set (match_operand:SI 0 "s_register_operand" "=r")
3287 (not:SI (match_operator:SI 3 "shift_operator"
3288 [(match_operand:SI 1 "s_register_operand" "r")
3289 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3290 "TARGET_ARM"
3291 "mvn%?\\t%0, %1%S3"
3292 [(set_attr "predicable" "yes")
3293 (set_attr "shift" "1")
3294 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3295 (const_string "alu_shift")
3296 (const_string "alu_shift_reg")))]
3297 )
3298
3299 (define_insn "*arm_notsi_shiftsi_compare0"
3300 [(set (reg:CC_NOOV CC_REGNUM)
3301 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3302 [(match_operand:SI 1 "s_register_operand" "r")
3303 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3304 (const_int 0)))
3305 (set (match_operand:SI 0 "s_register_operand" "=r")
3306 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3307 "TARGET_ARM"
3308 "mvn%.\\t%0, %1%S3"
3309 [(set_attr "conds" "set")
3310 (set_attr "shift" "1")
3311 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3312 (const_string "alu_shift")
3313 (const_string "alu_shift_reg")))]
3314 )
3315
3316 (define_insn "*arm_not_shiftsi_compare0_scratch"
3317 [(set (reg:CC_NOOV CC_REGNUM)
3318 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3319 [(match_operand:SI 1 "s_register_operand" "r")
3320 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3321 (const_int 0)))
3322 (clobber (match_scratch:SI 0 "=r"))]
3323 "TARGET_ARM"
3324 "mvn%.\\t%0, %1%S3"
3325 [(set_attr "conds" "set")
3326 (set_attr "shift" "1")
3327 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3328 (const_string "alu_shift")
3329 (const_string "alu_shift_reg")))]
3330 )
3331
3332 ;; We don't really have extzv, but defining this using shifts helps
3333 ;; to reduce register pressure later on.
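;; As a rough worked example of the shift pair used by the expander
;; below (register names arbitrary): extracting a field of width 8 at
;; bit position 4 uses lshift = 32 - 8 - 4 and rshift = 32 - 8, i.e.
;;
;;      lsl     r0, r1, #20     @ field now occupies the top 8 bits
;;      lsr     r0, r0, #24     @ zero-extended field in the low 8 bits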
3334
3335 (define_expand "extzv"
3336 [(set (match_dup 4)
3337 (ashift:SI (match_operand:SI 1 "register_operand" "")
3338 (match_operand:SI 2 "const_int_operand" "")))
3339 (set (match_operand:SI 0 "register_operand" "")
3340 (lshiftrt:SI (match_dup 4)
3341 (match_operand:SI 3 "const_int_operand" "")))]
3342 "TARGET_THUMB1 || arm_arch_thumb2"
3343 "
3344 {
3345 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3346 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3347
3348 if (arm_arch_thumb2)
3349 {
3350 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3351 operands[3]));
3352 DONE;
3353 }
3354
3355 operands[3] = GEN_INT (rshift);
3356
3357 if (lshift == 0)
3358 {
3359 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3360 DONE;
3361 }
3362
3363 operands[2] = GEN_INT (lshift);
3364 operands[4] = gen_reg_rtx (SImode);
3365 }"
3366 )
3367
3368 (define_insn "extv"
3369 [(set (match_operand:SI 0 "s_register_operand" "=r")
3370 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3371 (match_operand:SI 2 "const_int_operand" "M")
3372 (match_operand:SI 3 "const_int_operand" "M")))]
3373 "arm_arch_thumb2"
3374 "sbfx%?\t%0, %1, %3, %2"
3375 [(set_attr "length" "4")
3376 (set_attr "predicable" "yes")]
3377 )
3378
3379 (define_insn "extzv_t2"
3380 [(set (match_operand:SI 0 "s_register_operand" "=r")
3381 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3382 (match_operand:SI 2 "const_int_operand" "M")
3383 (match_operand:SI 3 "const_int_operand" "M")))]
3384 "arm_arch_thumb2"
3385 "ubfx%?\t%0, %1, %3, %2"
3386 [(set_attr "length" "4")
3387 (set_attr "predicable" "yes")]
3388 )
3389
3390 \f
3391 ;; Unary arithmetic insns
3392
3393 (define_expand "negdi2"
3394 [(parallel
3395 [(set (match_operand:DI 0 "s_register_operand" "")
3396 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3397 (clobber (reg:CC CC_REGNUM))])]
3398 "TARGET_EITHER"
3399 "
3400 if (TARGET_THUMB1)
3401 {
3402 if (GET_CODE (operands[1]) != REG)
3403 operands[1] = force_reg (SImode, operands[1]);
3404 }
3405 "
3406 )
3407
3408 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3409 ;; The second alternative is to allow the common case of a *full* overlap.
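;; For illustration (register numbers arbitrary), with operand 0 in
;; {r1, r2} and operand 1 in {r0, r1} the pattern below would emit
;;
;;      rsbs    r1, r0, #0      @ clobbers r1 ...
;;      rsc     r2, r1, #0      @ ... which still held the high word of op1
;;
;; hence the early-clobber; a full overlap is safe because each
;; instruction reads its sources before writing its destination.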
3410 (define_insn "*arm_negdi2"
3411 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3412 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3413 (clobber (reg:CC CC_REGNUM))]
3414 "TARGET_ARM"
3415 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3416 [(set_attr "conds" "clob")
3417 (set_attr "length" "8")]
3418 )
3419
3420 (define_insn "*thumb1_negdi2"
3421 [(set (match_operand:DI 0 "register_operand" "=&l")
3422 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3423 (clobber (reg:CC CC_REGNUM))]
3424 "TARGET_THUMB1"
3425 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3426 [(set_attr "length" "6")]
3427 )
3428
3429 (define_expand "negsi2"
3430 [(set (match_operand:SI 0 "s_register_operand" "")
3431 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3432 "TARGET_EITHER"
3433 ""
3434 )
3435
3436 (define_insn "*arm_negsi2"
3437 [(set (match_operand:SI 0 "s_register_operand" "=r")
3438 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3439 "TARGET_32BIT"
3440 "rsb%?\\t%0, %1, #0"
3441 [(set_attr "predicable" "yes")]
3442 )
3443
3444 (define_insn "*thumb1_negsi2"
3445 [(set (match_operand:SI 0 "register_operand" "=l")
3446 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3447 "TARGET_THUMB1"
3448 "neg\\t%0, %1"
3449 [(set_attr "length" "2")]
3450 )
3451
3452 (define_expand "negsf2"
3453 [(set (match_operand:SF 0 "s_register_operand" "")
3454 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3455 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3456 ""
3457 )
3458
3459 (define_expand "negdf2"
3460 [(set (match_operand:DF 0 "s_register_operand" "")
3461 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3462 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3463 "")
3464
3465 ;; abssi2 doesn't really clobber the condition codes if a different register
3466 ;; is being set. To keep things simple, assume during rtl manipulations that
3467 ;; it does, but tell the final scan operator the truth. Similarly for
3468 ;; (neg (abs...))
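;; For reference, the branchless alternative below relies on the
;; identity abs(x) = (x ^ m) - m with m = x >> 31 (arithmetic): for
;; negative x, m is all ones and (x ^ m) - m = ~x + 1 = -x, while for
;; non-negative x, m is zero and the expression is just x.  Only the
;; cmp/rsblt alternative really needs the condition codes.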
3469
3470 (define_expand "abssi2"
3471 [(parallel
3472 [(set (match_operand:SI 0 "s_register_operand" "")
3473 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3474 (clobber (match_dup 2))])]
3475 "TARGET_EITHER"
3476 "
3477 if (TARGET_THUMB1)
3478 operands[2] = gen_rtx_SCRATCH (SImode);
3479 else
3480 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3481 ")
3482
3483 (define_insn "*arm_abssi2"
3484 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3485 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3486 (clobber (reg:CC CC_REGNUM))]
3487 "TARGET_ARM"
3488 "@
3489 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3490 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3491 [(set_attr "conds" "clob,*")
3492 (set_attr "shift" "1")
3493 ;; predicable can't be set based on the variant, so left as no
3494 (set_attr "length" "8")]
3495 )
3496
3497 (define_insn_and_split "*thumb1_abssi2"
3498 [(set (match_operand:SI 0 "s_register_operand" "=l")
3499 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3500 (clobber (match_scratch:SI 2 "=&l"))]
3501 "TARGET_THUMB1"
3502 "#"
3503 "TARGET_THUMB1 && reload_completed"
3504 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3505 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3506 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3507 ""
3508 [(set_attr "length" "6")]
3509 )
3510
3511 (define_insn "*arm_neg_abssi2"
3512 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3513 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3514 (clobber (reg:CC CC_REGNUM))]
3515 "TARGET_ARM"
3516 "@
3517 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3518 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3519 [(set_attr "conds" "clob,*")
3520 (set_attr "shift" "1")
3521 ;; predicable can't be set based on the variant, so left as no
3522 (set_attr "length" "8")]
3523 )
3524
3525 (define_insn_and_split "*thumb1_neg_abssi2"
3526 [(set (match_operand:SI 0 "s_register_operand" "=l")
3527 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3528 (clobber (match_scratch:SI 2 "=&l"))]
3529 "TARGET_THUMB1"
3530 "#"
3531 "TARGET_THUMB1 && reload_completed"
3532 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3533 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3534 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3535 ""
3536 [(set_attr "length" "6")]
3537 )
3538
3539 (define_expand "abssf2"
3540 [(set (match_operand:SF 0 "s_register_operand" "")
3541 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3542 "TARGET_32BIT && TARGET_HARD_FLOAT"
3543 "")
3544
3545 (define_expand "absdf2"
3546 [(set (match_operand:DF 0 "s_register_operand" "")
3547 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3548 "TARGET_32BIT && TARGET_HARD_FLOAT"
3549 "")
3550
3551 (define_expand "sqrtsf2"
3552 [(set (match_operand:SF 0 "s_register_operand" "")
3553 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3554 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3555 "")
3556
3557 (define_expand "sqrtdf2"
3558 [(set (match_operand:DF 0 "s_register_operand" "")
3559 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3560 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3561 "")
3562
3563 (define_insn_and_split "one_cmpldi2"
3564 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3565 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3566 "TARGET_32BIT"
3567 "#"
3568 "TARGET_32BIT && reload_completed"
3569 [(set (match_dup 0) (not:SI (match_dup 1)))
3570 (set (match_dup 2) (not:SI (match_dup 3)))]
3571 "
3572 {
3573 operands[2] = gen_highpart (SImode, operands[0]);
3574 operands[0] = gen_lowpart (SImode, operands[0]);
3575 operands[3] = gen_highpart (SImode, operands[1]);
3576 operands[1] = gen_lowpart (SImode, operands[1]);
3577 }"
3578 [(set_attr "length" "8")
3579 (set_attr "predicable" "yes")]
3580 )
3581
3582 (define_expand "one_cmplsi2"
3583 [(set (match_operand:SI 0 "s_register_operand" "")
3584 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3585 "TARGET_EITHER"
3586 ""
3587 )
3588
3589 (define_insn "*arm_one_cmplsi2"
3590 [(set (match_operand:SI 0 "s_register_operand" "=r")
3591 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3592 "TARGET_32BIT"
3593 "mvn%?\\t%0, %1"
3594 [(set_attr "predicable" "yes")]
3595 )
3596
3597 (define_insn "*thumb1_one_cmplsi2"
3598 [(set (match_operand:SI 0 "register_operand" "=l")
3599 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3600 "TARGET_THUMB1"
3601 "mvn\\t%0, %1"
3602 [(set_attr "length" "2")]
3603 )
3604
3605 (define_insn "*notsi_compare0"
3606 [(set (reg:CC_NOOV CC_REGNUM)
3607 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3608 (const_int 0)))
3609 (set (match_operand:SI 0 "s_register_operand" "=r")
3610 (not:SI (match_dup 1)))]
3611 "TARGET_32BIT"
3612 "mvn%.\\t%0, %1"
3613 [(set_attr "conds" "set")]
3614 )
3615
3616 (define_insn "*notsi_compare0_scratch"
3617 [(set (reg:CC_NOOV CC_REGNUM)
3618 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3619 (const_int 0)))
3620 (clobber (match_scratch:SI 0 "=r"))]
3621 "TARGET_32BIT"
3622 "mvn%.\\t%0, %1"
3623 [(set_attr "conds" "set")]
3624 )
3625 \f
3626 ;; Fixed <--> Floating conversion insns
3627
3628 (define_expand "floatsisf2"
3629 [(set (match_operand:SF 0 "s_register_operand" "")
3630 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3631 "TARGET_32BIT && TARGET_HARD_FLOAT"
3632 "
3633 if (TARGET_MAVERICK)
3634 {
3635 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3636 DONE;
3637 }
3638 ")
3639
3640 (define_expand "floatsidf2"
3641 [(set (match_operand:DF 0 "s_register_operand" "")
3642 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3643 "TARGET_32BIT && TARGET_HARD_FLOAT"
3644 "
3645 if (TARGET_MAVERICK)
3646 {
3647 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3648 DONE;
3649 }
3650 ")
3651
3652 (define_expand "fix_truncsfsi2"
3653 [(set (match_operand:SI 0 "s_register_operand" "")
3654 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3655 "TARGET_32BIT && TARGET_HARD_FLOAT"
3656 "
3657 if (TARGET_MAVERICK)
3658 {
3659 if (!cirrus_fp_register (operands[0], SImode))
3660 operands[0] = force_reg (SImode, operands[0]);
3661 if (!cirrus_fp_register (operands[1], SFmode))
3662 operands[1] = force_reg (SFmode, operands[1]);
3663 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3664 DONE;
3665 }
3666 ")
3667
3668 (define_expand "fix_truncdfsi2"
3669 [(set (match_operand:SI 0 "s_register_operand" "")
3670 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3671 "TARGET_32BIT && TARGET_HARD_FLOAT"
3672 "
3673 if (TARGET_MAVERICK)
3674 {
3675 if (!cirrus_fp_register (operands[1], DFmode))
3676 operands[1] = force_reg (DFmode, operands[1]);
3677 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3678 DONE;
3679 }
3680 ")
3681
3682 ;; Truncation insns
3683
3684 (define_expand "truncdfsf2"
3685 [(set (match_operand:SF 0 "s_register_operand" "")
3686 (float_truncate:SF
3687 (match_operand:DF 1 "s_register_operand" "")))]
3688 "TARGET_32BIT && TARGET_HARD_FLOAT"
3689 ""
3690 )
3691 \f
3692 ;; Zero and sign extension instructions.
3693
3694 (define_expand "zero_extendsidi2"
3695 [(set (match_operand:DI 0 "s_register_operand" "")
3696 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3697 "TARGET_32BIT"
3698 ""
3699 )
3700
3701 (define_insn "*arm_zero_extendsidi2"
3702 [(set (match_operand:DI 0 "s_register_operand" "=r")
3703 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3704 "TARGET_ARM"
3705 "*
3706 if (REGNO (operands[1])
3707 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3708 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3709 return \"mov%?\\t%R0, #0\";
3710 "
3711 [(set_attr "length" "8")
3712 (set_attr "predicable" "yes")]
3713 )
3714
3715 (define_expand "zero_extendqidi2"
3716 [(set (match_operand:DI 0 "s_register_operand" "")
3717 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3718 "TARGET_32BIT"
3719 ""
3720 )
3721
3722 (define_insn "*arm_zero_extendqidi2"
3723 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3724 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3725 "TARGET_ARM"
3726 "@
3727 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3728 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3729 [(set_attr "length" "8")
3730 (set_attr "predicable" "yes")
3731 (set_attr "type" "*,load_byte")
3732 (set_attr "pool_range" "*,4092")
3733 (set_attr "neg_pool_range" "*,4084")]
3734 )
3735
3736 (define_expand "extendsidi2"
3737 [(set (match_operand:DI 0 "s_register_operand" "")
3738 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3739 "TARGET_32BIT"
3740 ""
3741 )
3742
3743 (define_insn "*arm_extendsidi2"
3744 [(set (match_operand:DI 0 "s_register_operand" "=r")
3745 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3746 "TARGET_ARM"
3747 "*
3748 if (REGNO (operands[1])
3749 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3750 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3751 return \"mov%?\\t%R0, %Q0, asr #31\";
3752 "
3753 [(set_attr "length" "8")
3754 (set_attr "shift" "1")
3755 (set_attr "predicable" "yes")]
3756 )
3757
3758 (define_expand "zero_extendhisi2"
3759 [(set (match_dup 2)
3760 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3761 (const_int 16)))
3762 (set (match_operand:SI 0 "s_register_operand" "")
3763 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3764 "TARGET_EITHER"
3765 "
3766 {
3767 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3768 {
3769 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3770 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3771 DONE;
3772 }
3773
3774 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3775 {
3776 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3777 DONE;
3778 }
3779
3780 if (!s_register_operand (operands[1], HImode))
3781 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3782
3783 if (arm_arch6)
3784 {
3785 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3786 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3787 DONE;
3788 }
3789
3790 operands[1] = gen_lowpart (SImode, operands[1]);
3791 operands[2] = gen_reg_rtx (SImode);
3792 }"
3793 )
3794
3795 (define_insn "*thumb1_zero_extendhisi2"
3796 [(set (match_operand:SI 0 "register_operand" "=l")
3797 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3798 "TARGET_THUMB1 && !arm_arch6"
3799 "*
3800 rtx mem = XEXP (operands[1], 0);
3801
3802 if (GET_CODE (mem) == CONST)
3803 mem = XEXP (mem, 0);
3804
3805 if (GET_CODE (mem) == LABEL_REF)
3806 return \"ldr\\t%0, %1\";
3807
3808 if (GET_CODE (mem) == PLUS)
3809 {
3810 rtx a = XEXP (mem, 0);
3811 rtx b = XEXP (mem, 1);
3812
3813 /* This can happen due to bugs in reload. */
3814 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3815 {
3816 rtx ops[2];
3817 ops[0] = operands[0];
3818 ops[1] = a;
3819
3820 output_asm_insn (\"mov %0, %1\", ops);
3821
3822 XEXP (mem, 0) = operands[0];
3823 }
3824
3825 else if ( GET_CODE (a) == LABEL_REF
3826 && GET_CODE (b) == CONST_INT)
3827 return \"ldr\\t%0, %1\";
3828 }
3829
3830 return \"ldrh\\t%0, %1\";
3831 "
3832 [(set_attr "length" "4")
3833 (set_attr "type" "load_byte")
3834 (set_attr "pool_range" "60")]
3835 )
3836
3837 (define_insn "*thumb1_zero_extendhisi2_v6"
3838 [(set (match_operand:SI 0 "register_operand" "=l,l")
3839 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3840 "TARGET_THUMB1 && arm_arch6"
3841 "*
3842 rtx mem;
3843
3844 if (which_alternative == 0)
3845 return \"uxth\\t%0, %1\";
3846
3847 mem = XEXP (operands[1], 0);
3848
3849 if (GET_CODE (mem) == CONST)
3850 mem = XEXP (mem, 0);
3851
3852 if (GET_CODE (mem) == LABEL_REF)
3853 return \"ldr\\t%0, %1\";
3854
3855 if (GET_CODE (mem) == PLUS)
3856 {
3857 rtx a = XEXP (mem, 0);
3858 rtx b = XEXP (mem, 1);
3859
3860 /* This can happen due to bugs in reload. */
3861 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3862 {
3863 rtx ops[2];
3864 ops[0] = operands[0];
3865 ops[1] = a;
3866
3867 output_asm_insn (\"mov %0, %1\", ops);
3868
3869 XEXP (mem, 0) = operands[0];
3870 }
3871
3872 else if ( GET_CODE (a) == LABEL_REF
3873 && GET_CODE (b) == CONST_INT)
3874 return \"ldr\\t%0, %1\";
3875 }
3876
3877 return \"ldrh\\t%0, %1\";
3878 "
3879 [(set_attr "length" "2,4")
3880 (set_attr "type" "alu_shift,load_byte")
3881 (set_attr "pool_range" "*,60")]
3882 )
3883
3884 (define_insn "*arm_zero_extendhisi2"
3885 [(set (match_operand:SI 0 "s_register_operand" "=r")
3886 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3887 "TARGET_ARM && arm_arch4 && !arm_arch6"
3888 "ldr%(h%)\\t%0, %1"
3889 [(set_attr "type" "load_byte")
3890 (set_attr "predicable" "yes")
3891 (set_attr "pool_range" "256")
3892 (set_attr "neg_pool_range" "244")]
3893 )
3894
3895 (define_insn "*arm_zero_extendhisi2_v6"
3896 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3897 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3898 "TARGET_ARM && arm_arch6"
3899 "@
3900 uxth%?\\t%0, %1
3901 ldr%(h%)\\t%0, %1"
3902 [(set_attr "type" "alu_shift,load_byte")
3903 (set_attr "predicable" "yes")
3904 (set_attr "pool_range" "*,256")
3905 (set_attr "neg_pool_range" "*,244")]
3906 )
3907
3908 (define_insn "*arm_zero_extendhisi2addsi"
3909 [(set (match_operand:SI 0 "s_register_operand" "=r")
3910 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3911 (match_operand:SI 2 "s_register_operand" "r")))]
3912 "TARGET_INT_SIMD"
3913 "uxtah%?\\t%0, %2, %1"
3914 [(set_attr "type" "alu_shift")
3915 (set_attr "predicable" "yes")]
3916 )
3917
3918 (define_expand "zero_extendqisi2"
3919 [(set (match_operand:SI 0 "s_register_operand" "")
3920 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3921 "TARGET_EITHER"
3922 "
3923 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3924 {
3925 if (TARGET_ARM)
3926 {
3927 emit_insn (gen_andsi3 (operands[0],
3928 gen_lowpart (SImode, operands[1]),
3929 GEN_INT (255)));
3930 }
3931 else /* TARGET_THUMB */
3932 {
3933 rtx temp = gen_reg_rtx (SImode);
3934 rtx ops[3];
3935
3936 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3937 operands[1] = gen_lowpart (SImode, operands[1]);
3938
3939 ops[0] = temp;
3940 ops[1] = operands[1];
3941 ops[2] = GEN_INT (24);
3942
3943 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3944 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3945
3946 ops[0] = operands[0];
3947 ops[1] = temp;
3948 ops[2] = GEN_INT (24);
3949
3950 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3951 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3952 }
3953 DONE;
3954 }
3955 "
3956 )
3957
3958 (define_insn "*thumb1_zero_extendqisi2"
3959 [(set (match_operand:SI 0 "register_operand" "=l")
3960 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3961 "TARGET_THUMB1 && !arm_arch6"
3962 "ldrb\\t%0, %1"
3963 [(set_attr "length" "2")
3964 (set_attr "type" "load_byte")
3965 (set_attr "pool_range" "32")]
3966 )
3967
3968 (define_insn "*thumb1_zero_extendqisi2_v6"
3969 [(set (match_operand:SI 0 "register_operand" "=l,l")
3970 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3971 "TARGET_THUMB1 && arm_arch6"
3972 "@
3973 uxtb\\t%0, %1
3974 ldrb\\t%0, %1"
3975 [(set_attr "length" "2,2")
3976 (set_attr "type" "alu_shift,load_byte")
3977 (set_attr "pool_range" "*,32")]
3978 )
3979
3980 (define_insn "*arm_zero_extendqisi2"
3981 [(set (match_operand:SI 0 "s_register_operand" "=r")
3982 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3983 "TARGET_ARM && !arm_arch6"
3984 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3985 [(set_attr "type" "load_byte")
3986 (set_attr "predicable" "yes")
3987 (set_attr "pool_range" "4096")
3988 (set_attr "neg_pool_range" "4084")]
3989 )
3990
3991 (define_insn "*arm_zero_extendqisi2_v6"
3992 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3993 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3994 "TARGET_ARM && arm_arch6"
3995 "@
3996 uxtb%(%)\\t%0, %1
3997 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3998 [(set_attr "type" "alu_shift,load_byte")
3999 (set_attr "predicable" "yes")
4000 (set_attr "pool_range" "*,4096")
4001 (set_attr "neg_pool_range" "*,4084")]
4002 )
4003
4004 (define_insn "*arm_zero_extendqisi2addsi"
4005 [(set (match_operand:SI 0 "s_register_operand" "=r")
4006 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4007 (match_operand:SI 2 "s_register_operand" "r")))]
4008 "TARGET_INT_SIMD"
4009 "uxtab%?\\t%0, %2, %1"
4010 [(set_attr "predicable" "yes")
4011 (set_attr "insn" "xtab")
4012 (set_attr "type" "alu_shift")]
4013 )
4014
4015 (define_split
4016 [(set (match_operand:SI 0 "s_register_operand" "")
4017 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4018 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4019 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4020 [(set (match_dup 2) (match_dup 1))
4021 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4022 ""
4023 )
4024
4025 (define_split
4026 [(set (match_operand:SI 0 "s_register_operand" "")
4027 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4028 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4029 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4030 [(set (match_dup 2) (match_dup 1))
4031 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4032 ""
4033 )
4034
4035 (define_insn "*compareqi_eq0"
4036 [(set (reg:CC_Z CC_REGNUM)
4037 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4038 (const_int 0)))]
4039 "TARGET_32BIT"
4040 "tst\\t%0, #255"
4041 [(set_attr "conds" "set")]
4042 )
4043
4044 (define_expand "extendhisi2"
4045 [(set (match_dup 2)
4046 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4047 (const_int 16)))
4048 (set (match_operand:SI 0 "s_register_operand" "")
4049 (ashiftrt:SI (match_dup 2)
4050 (const_int 16)))]
4051 "TARGET_EITHER"
4052 "
4053 {
4054 if (GET_CODE (operands[1]) == MEM)
4055 {
4056 if (TARGET_THUMB1)
4057 {
4058 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4059 DONE;
4060 }
4061 else if (arm_arch4)
4062 {
4063 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4064 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4065 DONE;
4066 }
4067 }
4068
4069 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4070 {
4071 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4072 DONE;
4073 }
4074
4075 if (!s_register_operand (operands[1], HImode))
4076 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4077
4078 if (arm_arch6)
4079 {
4080 if (TARGET_THUMB1)
4081 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4082 else
4083 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4084 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4085
4086 DONE;
4087 }
4088
4089 operands[1] = gen_lowpart (SImode, operands[1]);
4090 operands[2] = gen_reg_rtx (SImode);
4091 }"
4092 )
4093
4094 (define_insn "thumb1_extendhisi2"
4095 [(set (match_operand:SI 0 "register_operand" "=l")
4096 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4097 (clobber (match_scratch:SI 2 "=&l"))]
4098 "TARGET_THUMB1 && !arm_arch6"
4099 "*
4100 {
4101 rtx ops[4];
4102 rtx mem = XEXP (operands[1], 0);
4103
4104 /* This code used to try to use 'V', and fix the address only if it was
4105 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4106 range of QImode offsets, and offsettable_address_p does a QImode
4107 address check. */
4108
4109 if (GET_CODE (mem) == CONST)
4110 mem = XEXP (mem, 0);
4111
4112 if (GET_CODE (mem) == LABEL_REF)
4113 return \"ldr\\t%0, %1\";
4114
4115 if (GET_CODE (mem) == PLUS)
4116 {
4117 rtx a = XEXP (mem, 0);
4118 rtx b = XEXP (mem, 1);
4119
4120 if (GET_CODE (a) == LABEL_REF
4121 && GET_CODE (b) == CONST_INT)
4122 return \"ldr\\t%0, %1\";
4123
4124 if (GET_CODE (b) == REG)
4125 return \"ldrsh\\t%0, %1\";
4126
4127 ops[1] = a;
4128 ops[2] = b;
4129 }
4130 else
4131 {
4132 ops[1] = mem;
4133 ops[2] = const0_rtx;
4134 }
4135
4136 gcc_assert (GET_CODE (ops[1]) == REG);
4137
4138 ops[0] = operands[0];
4139 ops[3] = operands[2];
4140 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4141 return \"\";
4142 }"
4143 [(set_attr "length" "4")
4144 (set_attr "type" "load_byte")
4145 (set_attr "pool_range" "1020")]
4146 )
4147
4148 ;; We used to have an early-clobber on the scratch register here.
4149 ;; However, there's a bug somewhere in reload which means that this
4150 ;; can be partially ignored during spill allocation if the memory
4151 ;; address also needs reloading; this causes us to die later on when
4152 ;; we try to verify the operands. Fortunately, we don't really need
4153 ;; the early-clobber: we can always use operand 0 if operand 2
4154 ;; overlaps the address.
4155 (define_insn "*thumb1_extendhisi2_insn_v6"
4156 [(set (match_operand:SI 0 "register_operand" "=l,l")
4157 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4158 (clobber (match_scratch:SI 2 "=X,l"))]
4159 "TARGET_THUMB1 && arm_arch6"
4160 "*
4161 {
4162 rtx ops[4];
4163 rtx mem;
4164
4165 if (which_alternative == 0)
4166 return \"sxth\\t%0, %1\";
4167
4168 mem = XEXP (operands[1], 0);
4169
4170 /* This code used to try to use 'V', and fix the address only if it was
4171 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4172 range of QImode offsets, and offsettable_address_p does a QImode
4173 address check. */
4174
4175 if (GET_CODE (mem) == CONST)
4176 mem = XEXP (mem, 0);
4177
4178 if (GET_CODE (mem) == LABEL_REF)
4179 return \"ldr\\t%0, %1\";
4180
4181 if (GET_CODE (mem) == PLUS)
4182 {
4183 rtx a = XEXP (mem, 0);
4184 rtx b = XEXP (mem, 1);
4185
4186 if (GET_CODE (a) == LABEL_REF
4187 && GET_CODE (b) == CONST_INT)
4188 return \"ldr\\t%0, %1\";
4189
4190 if (GET_CODE (b) == REG)
4191 return \"ldrsh\\t%0, %1\";
4192
4193 ops[1] = a;
4194 ops[2] = b;
4195 }
4196 else
4197 {
4198 ops[1] = mem;
4199 ops[2] = const0_rtx;
4200 }
4201
4202 gcc_assert (GET_CODE (ops[1]) == REG);
4203
4204 ops[0] = operands[0];
4205 if (reg_mentioned_p (operands[2], ops[1]))
4206 ops[3] = ops[0];
4207 else
4208 ops[3] = operands[2];
4209 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4210 return \"\";
4211 }"
4212 [(set_attr "length" "2,4")
4213 (set_attr "type" "alu_shift,load_byte")
4214 (set_attr "pool_range" "*,1020")]
4215 )
4216
4217 ;; This pattern will only be used when ldsh is not available
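;; Roughly, for little-endian targets the expansion below amounts to
;; (register names arbitrary):
;;
;;      ldrb    r2, [rA]        @ low byte
;;      ldrb    r3, [rA, #1]    @ high, sign-carrying byte
;;      mov     r6, r3, lsl #24
;;      orr     r0, r2, r6, asr #16
;;
;; big-endian targets simply swap which byte supplies the sign.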
4218 (define_expand "extendhisi2_mem"
4219 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4220 (set (match_dup 3)
4221 (zero_extend:SI (match_dup 7)))
4222 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4223 (set (match_operand:SI 0 "" "")
4224 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4225 "TARGET_ARM"
4226 "
4227 {
4228 rtx mem1, mem2;
4229 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4230
4231 mem1 = change_address (operands[1], QImode, addr);
4232 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4233 operands[0] = gen_lowpart (SImode, operands[0]);
4234 operands[1] = mem1;
4235 operands[2] = gen_reg_rtx (SImode);
4236 operands[3] = gen_reg_rtx (SImode);
4237 operands[6] = gen_reg_rtx (SImode);
4238 operands[7] = mem2;
4239
4240 if (BYTES_BIG_ENDIAN)
4241 {
4242 operands[4] = operands[2];
4243 operands[5] = operands[3];
4244 }
4245 else
4246 {
4247 operands[4] = operands[3];
4248 operands[5] = operands[2];
4249 }
4250 }"
4251 )
4252
4253 (define_insn "*arm_extendhisi2"
4254 [(set (match_operand:SI 0 "s_register_operand" "=r")
4255 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4256 "TARGET_ARM && arm_arch4 && !arm_arch6"
4257 "ldr%(sh%)\\t%0, %1"
4258 [(set_attr "type" "load_byte")
4259 (set_attr "predicable" "yes")
4260 (set_attr "pool_range" "256")
4261 (set_attr "neg_pool_range" "244")]
4262 )
4263
4264 ;; ??? Check Thumb-2 pool range
4265 (define_insn "*arm_extendhisi2_v6"
4266 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4267 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4268 "TARGET_32BIT && arm_arch6"
4269 "@
4270 sxth%?\\t%0, %1
4271 ldr%(sh%)\\t%0, %1"
4272 [(set_attr "type" "alu_shift,load_byte")
4273 (set_attr "predicable" "yes")
4274 (set_attr "pool_range" "*,256")
4275 (set_attr "neg_pool_range" "*,244")]
4276 )
4277
4278 (define_insn "*arm_extendhisi2addsi"
4279 [(set (match_operand:SI 0 "s_register_operand" "=r")
4280 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4281 (match_operand:SI 2 "s_register_operand" "r")))]
4282 "TARGET_INT_SIMD"
4283 "sxtah%?\\t%0, %2, %1"
4284 )
4285
4286 (define_expand "extendqihi2"
4287 [(set (match_dup 2)
4288 (ashift:SI (match_operand:QI 1 "general_operand" "")
4289 (const_int 24)))
4290 (set (match_operand:HI 0 "s_register_operand" "")
4291 (ashiftrt:SI (match_dup 2)
4292 (const_int 24)))]
4293 "TARGET_ARM"
4294 "
4295 {
4296 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4297 {
4298 emit_insn (gen_rtx_SET (VOIDmode,
4299 operands[0],
4300 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4301 DONE;
4302 }
4303 if (!s_register_operand (operands[1], QImode))
4304 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4305 operands[0] = gen_lowpart (SImode, operands[0]);
4306 operands[1] = gen_lowpart (SImode, operands[1]);
4307 operands[2] = gen_reg_rtx (SImode);
4308 }"
4309 )
4310
4311 (define_insn "*arm_extendqihi_insn"
4312 [(set (match_operand:HI 0 "s_register_operand" "=r")
4313 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4314 "TARGET_ARM && arm_arch4"
4315 "ldr%(sb%)\\t%0, %1"
4316 [(set_attr "type" "load_byte")
4317 (set_attr "predicable" "yes")
4318 (set_attr "pool_range" "256")
4319 (set_attr "neg_pool_range" "244")]
4320 )
4321
4322 (define_expand "extendqisi2"
4323 [(set (match_dup 2)
4324 (ashift:SI (match_operand:QI 1 "general_operand" "")
4325 (const_int 24)))
4326 (set (match_operand:SI 0 "s_register_operand" "")
4327 (ashiftrt:SI (match_dup 2)
4328 (const_int 24)))]
4329 "TARGET_EITHER"
4330 "
4331 {
4332 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4333 {
4334 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4335 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4336 DONE;
4337 }
4338
4339 if (!s_register_operand (operands[1], QImode))
4340 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4341
4342 if (arm_arch6)
4343 {
4344 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4345 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4346 DONE;
4347 }
4348
4349 operands[1] = gen_lowpart (SImode, operands[1]);
4350 operands[2] = gen_reg_rtx (SImode);
4351 }"
4352 )
4353
4354 (define_insn "*arm_extendqisi"
4355 [(set (match_operand:SI 0 "s_register_operand" "=r")
4356 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4357 "TARGET_ARM && arm_arch4 && !arm_arch6"
4358 "ldr%(sb%)\\t%0, %1"
4359 [(set_attr "type" "load_byte")
4360 (set_attr "predicable" "yes")
4361 (set_attr "pool_range" "256")
4362 (set_attr "neg_pool_range" "244")]
4363 )
4364
4365 (define_insn "*arm_extendqisi_v6"
4366 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4367 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4368 "TARGET_ARM && arm_arch6"
4369 "@
4370 sxtb%?\\t%0, %1
4371 ldr%(sb%)\\t%0, %1"
4372 [(set_attr "type" "alu_shift,load_byte")
4373 (set_attr "predicable" "yes")
4374 (set_attr "pool_range" "*,256")
4375 (set_attr "neg_pool_range" "*,244")]
4376 )
4377
4378 (define_insn "*arm_extendqisi2addsi"
4379 [(set (match_operand:SI 0 "s_register_operand" "=r")
4380 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4381 (match_operand:SI 2 "s_register_operand" "r")))]
4382 "TARGET_INT_SIMD"
4383 "sxtab%?\\t%0, %2, %1"
4384 [(set_attr "type" "alu_shift")
4385 (set_attr "insn" "xtab")
4386 (set_attr "predicable" "yes")]
4387 )
4388
4389 (define_insn "*thumb1_extendqisi2"
4390 [(set (match_operand:SI 0 "register_operand" "=l,l")
4391 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4392 "TARGET_THUMB1 && !arm_arch6"
4393 "*
4394 {
4395 rtx ops[3];
4396 rtx mem = XEXP (operands[1], 0);
4397
4398 if (GET_CODE (mem) == CONST)
4399 mem = XEXP (mem, 0);
4400
4401 if (GET_CODE (mem) == LABEL_REF)
4402 return \"ldr\\t%0, %1\";
4403
4404 if (GET_CODE (mem) == PLUS
4405 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4406 return \"ldr\\t%0, %1\";
4407
4408 if (which_alternative == 0)
4409 return \"ldrsb\\t%0, %1\";
4410
4411 ops[0] = operands[0];
4412
4413 if (GET_CODE (mem) == PLUS)
4414 {
4415 rtx a = XEXP (mem, 0);
4416 rtx b = XEXP (mem, 1);
4417
4418 ops[1] = a;
4419 ops[2] = b;
4420
4421 if (GET_CODE (a) == REG)
4422 {
4423 if (GET_CODE (b) == REG)
4424 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4425 else if (REGNO (a) == REGNO (ops[0]))
4426 {
4427 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4428 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4429 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4430 }
4431 else
4432 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4433 }
4434 else
4435 {
4436 gcc_assert (GET_CODE (b) == REG);
4437 if (REGNO (b) == REGNO (ops[0]))
4438 {
4439 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4440 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4441 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4442 }
4443 else
4444 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4445 }
4446 }
4447 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4448 {
4449 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4450 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4451 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4452 }
4453 else
4454 {
4455 ops[1] = mem;
4456 ops[2] = const0_rtx;
4457
4458 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4459 }
4460 return \"\";
4461 }"
4462 [(set_attr "length" "2,6")
4463 (set_attr "type" "load_byte,load_byte")
4464 (set_attr "pool_range" "32,32")]
4465 )
4466
4467 (define_insn "*thumb1_extendqisi2_v6"
4468 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4469 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4470 "TARGET_THUMB1 && arm_arch6"
4471 "*
4472 {
4473 rtx ops[3];
4474 rtx mem;
4475
4476 if (which_alternative == 0)
4477 return \"sxtb\\t%0, %1\";
4478
4479 mem = XEXP (operands[1], 0);
4480
4481 if (GET_CODE (mem) == CONST)
4482 mem = XEXP (mem, 0);
4483
4484 if (GET_CODE (mem) == LABEL_REF)
4485 return \"ldr\\t%0, %1\";
4486
4487 if (GET_CODE (mem) == PLUS
4488 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4489 return \"ldr\\t%0, %1\";
4490
4491 if (which_alternative == 1)
4492 return \"ldrsb\\t%0, %1\";
4493
4494 ops[0] = operands[0];
4495
4496 if (GET_CODE (mem) == PLUS)
4497 {
4498 rtx a = XEXP (mem, 0);
4499 rtx b = XEXP (mem, 1);
4500
4501 ops[1] = a;
4502 ops[2] = b;
4503
4504 if (GET_CODE (a) == REG)
4505 {
4506 if (GET_CODE (b) == REG)
4507 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4508 else if (REGNO (a) == REGNO (ops[0]))
4509 {
4510 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4511 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4512 }
4513 else
4514 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4515 }
4516 else
4517 {
4518 gcc_assert (GET_CODE (b) == REG);
4519 if (REGNO (b) == REGNO (ops[0]))
4520 {
4521 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4522 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4523 }
4524 else
4525 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4526 }
4527 }
4528 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4529 {
4530 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4531 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4532 }
4533 else
4534 {
4535 ops[1] = mem;
4536 ops[2] = const0_rtx;
4537
4538 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4539 }
4540 return \"\";
4541 }"
4542 [(set_attr "length" "2,2,4")
4543 (set_attr "type" "alu_shift,load_byte,load_byte")
4544 (set_attr "pool_range" "*,32,32")]
4545 )
4546
4547 (define_expand "extendsfdf2"
4548 [(set (match_operand:DF 0 "s_register_operand" "")
4549 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4550 "TARGET_32BIT && TARGET_HARD_FLOAT"
4551 ""
4552 )
4553 \f
4554 ;; Move insns (including loads and stores)
4555
4556 ;; XXX Just some ideas about movti.
4557 ;; I don't think these are a good idea on the ARM; there just aren't enough
4558 ;; registers.
4559 ;;(define_expand "loadti"
4560 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4561 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4562 ;; "" "")
4563
4564 ;;(define_expand "storeti"
4565 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4566 ;; (match_operand:TI 1 "s_register_operand" ""))]
4567 ;; "" "")
4568
4569 ;;(define_expand "movti"
4570 ;; [(set (match_operand:TI 0 "general_operand" "")
4571 ;; (match_operand:TI 1 "general_operand" ""))]
4572 ;; ""
4573 ;; "
4574 ;;{
4575 ;; rtx insn;
4576 ;;
4577 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4578 ;; operands[1] = copy_to_reg (operands[1]);
4579 ;; if (GET_CODE (operands[0]) == MEM)
4580 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4581 ;; else if (GET_CODE (operands[1]) == MEM)
4582 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4583 ;; else
4584 ;; FAIL;
4585 ;;
4586 ;; emit_insn (insn);
4587 ;; DONE;
4588 ;;}")
4589
4590 ;; Recognize garbage generated above.
4591
4592 ;;(define_insn ""
4593 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4594 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4595 ;; ""
4596 ;; "*
4597 ;; {
4598 ;; register mem = (which_alternative < 3);
4599 ;; register const char *template;
4600 ;;
4601 ;; operands[mem] = XEXP (operands[mem], 0);
4602 ;; switch (which_alternative)
4603 ;; {
4604 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4605 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4606 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4607 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4608 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4609 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4610 ;; }
4611 ;; output_asm_insn (template, operands);
4612 ;; return \"\";
4613 ;; }")
4614
4615 (define_expand "movdi"
4616 [(set (match_operand:DI 0 "general_operand" "")
4617 (match_operand:DI 1 "general_operand" ""))]
4618 "TARGET_EITHER"
4619 "
4620 if (can_create_pseudo_p ())
4621 {
4622 if (GET_CODE (operands[0]) != REG)
4623 operands[1] = force_reg (DImode, operands[1]);
4624 }
4625 "
4626 )
4627
4628 (define_insn "*arm_movdi"
4629 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4630 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4631 "TARGET_ARM
4632 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4633 && !TARGET_IWMMXT
4634 && ( register_operand (operands[0], DImode)
4635 || register_operand (operands[1], DImode))"
4636 "*
4637 switch (which_alternative)
4638 {
4639 case 0:
4640 case 1:
4641 case 2:
4642 return \"#\";
4643 default:
4644 return output_move_double (operands);
4645 }
4646 "
4647 [(set_attr "length" "8,12,16,8,8")
4648 (set_attr "type" "*,*,*,load2,store2")
4649 (set_attr "pool_range" "*,*,*,1020,*")
4650 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4651 )
4652
4653 (define_split
4654 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4655 (match_operand:ANY64 1 "const_double_operand" ""))]
4656 "TARGET_32BIT
4657 && reload_completed
4658 && (arm_const_double_inline_cost (operands[1])
4659 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4660 [(const_int 0)]
4661 "
4662 arm_split_constant (SET, SImode, curr_insn,
4663 INTVAL (gen_lowpart (SImode, operands[1])),
4664 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4665 arm_split_constant (SET, SImode, curr_insn,
4666 INTVAL (gen_highpart_mode (SImode,
4667 GET_MODE (operands[0]),
4668 operands[1])),
4669 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4670 DONE;
4671 "
4672 )
4673
4674 ; If optimizing for size, or if we have load delay slots, then
4675 ; we want to split the constant into two separate operations.
4676 ; In both cases this may split a trivial part into a single data op
4677 ; leaving a single complex constant to load. We can also get longer
4678 ; offsets in a LDR which means we get better chances of sharing the pool
4679 ; entries. Finally, we can normally do a better job of scheduling
4680 ; LDR instructions than we can with LDM.
4681 ; This pattern will only match if the one above did not.
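; As a rough example, a 64-bit constant whose low word is a valid ARM
; immediate but whose high word is not might then become
;
;      mov     r0, #42         @ trivial half as a single data op
;      ldr     r1, .LCn        @ complex half loaded from the pool
;
; instead of one LDM/LDRD of an eight-byte pool entry.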
4682 (define_split
4683 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4684 (match_operand:ANY64 1 "const_double_operand" ""))]
4685 "TARGET_ARM && reload_completed
4686 && arm_const_double_by_parts (operands[1])"
4687 [(set (match_dup 0) (match_dup 1))
4688 (set (match_dup 2) (match_dup 3))]
4689 "
4690 operands[2] = gen_highpart (SImode, operands[0]);
4691 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4692 operands[1]);
4693 operands[0] = gen_lowpart (SImode, operands[0]);
4694 operands[1] = gen_lowpart (SImode, operands[1]);
4695 "
4696 )
4697
4698 (define_split
4699 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4700 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4701 "TARGET_EITHER && reload_completed"
4702 [(set (match_dup 0) (match_dup 1))
4703 (set (match_dup 2) (match_dup 3))]
4704 "
4705 operands[2] = gen_highpart (SImode, operands[0]);
4706 operands[3] = gen_highpart (SImode, operands[1]);
4707 operands[0] = gen_lowpart (SImode, operands[0]);
4708 operands[1] = gen_lowpart (SImode, operands[1]);
4709
4710 /* Handle a partial overlap. */
4711 if (rtx_equal_p (operands[0], operands[3]))
4712 {
4713 rtx tmp0 = operands[0];
4714 rtx tmp1 = operands[1];
4715
4716 operands[0] = operands[2];
4717 operands[1] = operands[3];
4718 operands[2] = tmp0;
4719 operands[3] = tmp1;
4720 }
4721 "
4722 )
4723
4724 ;; We can't actually do base+index doubleword loads if the index and
4725 ;; destination overlap. Split here so that we at least have chance to
4726 ;; schedule.
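;; For example (register numbers arbitrary), a doubleword load such as
;;
;;      ldrd    r0, [r0, r1]    @ destination overlaps base and index
;;
;; cannot be issued directly, so the split below first forms the address
;; in the low destination register and then loads from it:
;;
;;      add     r0, r0, r1
;;      ldrd    r0, [r0]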
4727 (define_split
4728 [(set (match_operand:DI 0 "s_register_operand" "")
4729 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4730 (match_operand:SI 2 "s_register_operand" ""))))]
4731 "TARGET_LDRD
4732 && reg_overlap_mentioned_p (operands[0], operands[1])
4733 && reg_overlap_mentioned_p (operands[0], operands[2])"
4734 [(set (match_dup 4)
4735 (plus:SI (match_dup 1)
4736 (match_dup 2)))
4737 (set (match_dup 0)
4738 (mem:DI (match_dup 4)))]
4739 "
4740 operands[4] = gen_rtx_REG (SImode, REGNO (operands[0]));
4741 "
4742 )
4743
4744 ;;; ??? This should have alternatives for constants.
4745 ;;; ??? This was originally identical to the movdf_insn pattern.
4746 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4747 ;;; thumb_reorg with a memory reference.
4748 (define_insn "*thumb1_movdi_insn"
4749 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4750 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4751 "TARGET_THUMB1
4752 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4753 && ( register_operand (operands[0], DImode)
4754 || register_operand (operands[1], DImode))"
4755 "*
4756 {
4757 switch (which_alternative)
4758 {
4759 default:
4760 case 0:
4761 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4762 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4763 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4764 case 1:
4765 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4766 case 2:
4767 operands[1] = GEN_INT (- INTVAL (operands[1]));
4768 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4769 case 3:
4770 return \"ldmia\\t%1, {%0, %H0}\";
4771 case 4:
4772 return \"stmia\\t%0, {%1, %H1}\";
4773 case 5:
4774 return thumb_load_double_from_address (operands);
4775 case 6:
4776 operands[2] = gen_rtx_MEM (SImode,
4777 plus_constant (XEXP (operands[0], 0), 4));
4778 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4779 return \"\";
4780 case 7:
4781 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4782 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4783 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4784 }
4785 }"
4786 [(set_attr "length" "4,4,6,2,2,6,4,4")
4787 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4788 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4789 )
4790
4791 (define_expand "movsi"
4792 [(set (match_operand:SI 0 "general_operand" "")
4793 (match_operand:SI 1 "general_operand" ""))]
4794 "TARGET_EITHER"
4795 "
4796 {
4797 rtx base, offset, tmp;
4798
4799 if (TARGET_32BIT)
4800 {
4801 /* Everything except mem = const or mem = mem can be done easily. */
4802 if (GET_CODE (operands[0]) == MEM)
4803 operands[1] = force_reg (SImode, operands[1]);
4804 if (arm_general_register_operand (operands[0], SImode)
4805 && GET_CODE (operands[1]) == CONST_INT
4806 && !(const_ok_for_arm (INTVAL (operands[1]))
4807 || const_ok_for_arm (~INTVAL (operands[1]))))
4808 {
4809 arm_split_constant (SET, SImode, NULL_RTX,
4810 INTVAL (operands[1]), operands[0], NULL_RTX,
4811 optimize && can_create_pseudo_p ());
4812 DONE;
4813 }
4814 }
4815 else /* TARGET_THUMB1... */
4816 {
4817 if (can_create_pseudo_p ())
4818 {
4819 if (GET_CODE (operands[0]) != REG)
4820 operands[1] = force_reg (SImode, operands[1]);
4821 }
4822 }
4823
4824 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4825 {
4826 split_const (operands[1], &base, &offset);
4827 if (GET_CODE (base) == SYMBOL_REF
4828 && !offset_within_block_p (base, INTVAL (offset)))
4829 {
4830 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4831 emit_move_insn (tmp, base);
4832 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4833 DONE;
4834 }
4835 }
4836
4837 /* Recognize the case where operand[1] is a reference to thread-local
4838 data and load its address to a register. */
4839 if (arm_tls_referenced_p (operands[1]))
4840 {
4841 rtx tmp = operands[1];
4842 rtx addend = NULL;
4843
4844 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4845 {
4846 addend = XEXP (XEXP (tmp, 0), 1);
4847 tmp = XEXP (XEXP (tmp, 0), 0);
4848 }
4849
4850 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4851 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4852
4853 tmp = legitimize_tls_address (tmp,
4854 !can_create_pseudo_p () ? operands[0] : 0);
4855 if (addend)
4856 {
4857 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4858 tmp = force_operand (tmp, operands[0]);
4859 }
4860 operands[1] = tmp;
4861 }
4862 else if (flag_pic
4863 && (CONSTANT_P (operands[1])
4864 || symbol_mentioned_p (operands[1])
4865 || label_mentioned_p (operands[1])))
4866 operands[1] = legitimize_pic_address (operands[1], SImode,
4867 (!can_create_pseudo_p ()
4868 ? operands[0]
4869 : 0));
4870 }
4871 "
4872 )
4873
4874 (define_insn "*arm_movsi_insn"
4875 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4876 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4877 "TARGET_ARM && ! TARGET_IWMMXT
4878 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4879 && ( register_operand (operands[0], SImode)
4880 || register_operand (operands[1], SImode))"
4881 "@
4882 mov%?\\t%0, %1
4883 mov%?\\t%0, %1
4884 mvn%?\\t%0, #%B1
4885 movw%?\\t%0, %1
4886 ldr%?\\t%0, %1
4887 str%?\\t%1, %0"
4888 [(set_attr "type" "*,*,*,*,load1,store1")
4889 (set_attr "predicable" "yes")
4890 (set_attr "pool_range" "*,*,*,*,4096,*")
4891 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
4892 )
4893
4894 (define_split
4895 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4896 (match_operand:SI 1 "const_int_operand" ""))]
4897 "TARGET_32BIT
4898 && (!(const_ok_for_arm (INTVAL (operands[1]))
4899 || const_ok_for_arm (~INTVAL (operands[1]))))"
4900 [(clobber (const_int 0))]
4901 "
4902 arm_split_constant (SET, SImode, NULL_RTX,
4903 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4904 DONE;
4905 "
4906 )
4907
4908 (define_insn "*thumb1_movsi_insn"
4909 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
4910 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
4911 "TARGET_THUMB1
4912 && ( register_operand (operands[0], SImode)
4913 || register_operand (operands[1], SImode))"
4914 "@
4915 mov %0, %1
4916 mov %0, %1
4917 #
4918 #
4919 ldmia\\t%1, {%0}
4920 stmia\\t%0, {%1}
4921 ldr\\t%0, %1
4922 str\\t%1, %0
4923 mov\\t%0, %1"
4924 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4925 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4926 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
4927 )
4928
4929 (define_split
4930 [(set (match_operand:SI 0 "register_operand" "")
4931 (match_operand:SI 1 "const_int_operand" ""))]
4932 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4933 [(set (match_dup 0) (match_dup 1))
4934 (set (match_dup 0) (neg:SI (match_dup 0)))]
4935 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4936 )
4937
4938 (define_split
4939 [(set (match_operand:SI 0 "register_operand" "")
4940 (match_operand:SI 1 "const_int_operand" ""))]
4941 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4942 [(set (match_dup 0) (match_dup 1))
4943 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4944 "
4945 {
4946 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4947 unsigned HOST_WIDE_INT mask = 0xff;
4948 int i;
4949
4950 for (i = 0; i < 25; i++)
4951 if ((val & (mask << i)) == val)
4952 break;
4953
4954 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4955 if (i == 0)
4956 FAIL;
4957
4958 operands[1] = GEN_INT (val >> i);
4959 operands[2] = GEN_INT (i);
4960 }"
4961 )
4962
4963 ;; When generating pic, we need to load the symbol offset into a register.
4964 ;; So that the optimizer does not confuse this with a normal symbol load
4965 ;; we use an unspec. The offset will be loaded from a constant pool entry,
4966 ;; since that is the only type of relocation we can use.
4967
4968 ;; The rather odd constraints on the following are to force reload to leave
4969 ;; the insn alone, and to force the minipool generation pass to then move
4970 ;; the GOT symbol to memory.
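;; For illustration (label and register names arbitrary), an ARM-mode
;; PIC address is typically built from the two patterns below as
;;
;;      ldr     r4, .LCn        @ pic_load_addr_arm: offset from the pool
;; .LPICm:
;;      add     r4, pc, r4      @ pic_add_dot_plus_eight
;;
;; where .LCn holds the target minus (.LPICm + 8), so the add recovers
;; the address at run time.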
4971
4972 (define_insn "pic_load_addr_arm"
4973 [(set (match_operand:SI 0 "s_register_operand" "=r")
4974 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4975 "TARGET_ARM && flag_pic"
4976 "ldr%?\\t%0, %1"
4977 [(set_attr "type" "load1")
4978 (set (attr "pool_range") (const_int 4096))
4979 (set (attr "neg_pool_range") (const_int 4084))]
4980 )
4981
4982 (define_insn "pic_load_addr_thumb1"
4983 [(set (match_operand:SI 0 "s_register_operand" "=l")
4984 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4985 "TARGET_THUMB1 && flag_pic"
4986 "ldr\\t%0, %1"
4987 [(set_attr "type" "load1")
4988 (set (attr "pool_range") (const_int 1024))]
4989 )
4990
4991 (define_insn "pic_add_dot_plus_four"
4992 [(set (match_operand:SI 0 "register_operand" "=r")
4993 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
4994 (const (plus:SI (pc) (const_int 4))))
4995 (match_operand 2 "" "")]
4996 UNSPEC_PIC_BASE))]
4997 "TARGET_THUMB1"
4998 "*
4999 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5000 INTVAL (operands[2]));
5001 return \"add\\t%0, %|pc\";
5002 "
5003 [(set_attr "length" "2")]
5004 )
5005
5006 (define_insn "pic_add_dot_plus_eight"
5007 [(set (match_operand:SI 0 "register_operand" "=r")
5008 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5009 (const (plus:SI (pc) (const_int 8))))
5010 (match_operand 2 "" "")]
5011 UNSPEC_PIC_BASE))]
5012 "TARGET_ARM"
5013 "*
5014 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5015 INTVAL (operands[2]));
5016 return \"add%?\\t%0, %|pc, %1\";
5017 "
5018 [(set_attr "predicable" "yes")]
5019 )
5020
5021 (define_insn "tls_load_dot_plus_eight"
5022 [(set (match_operand:SI 0 "register_operand" "+r")
5023 (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5024 (const (plus:SI (pc) (const_int 8))))
5025 (match_operand 2 "" "")]
5026 UNSPEC_PIC_BASE)))]
5027 "TARGET_ARM"
5028 "*
5029 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5030 INTVAL (operands[2]));
5031 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5032 "
5033 [(set_attr "predicable" "yes")]
5034 )
5035
5036 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5037 ;; followed by a load. These sequences can be crunched down to
5038 ;; tls_load_dot_plus_eight by a peephole.
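;; For example (register numbers arbitrary), when r3 is dead after the
;; load, the two-instruction sequence
;;
;;      add     r3, pc, r2
;;      ldr     r0, [r3]
;;
;; is rewritten by the peephole below into the single
;;
;;      ldr     r0, [pc, r2]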
5039
5040 (define_peephole2
5041 [(parallel [(set (match_operand:SI 0 "register_operand" "")
5042 (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
5043 (const (plus:SI (pc) (const_int 8))))]
5044 UNSPEC_PIC_BASE))
5045 (use (label_ref (match_operand 1 "" "")))])
5046 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5047 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5048 [(parallel [(set (match_dup 2)
5049 (mem:SI (unspec:SI [(plus:SI (match_dup 3)
5050 (const (plus:SI (pc) (const_int 8))))]
5051 UNSPEC_PIC_BASE)))
5052 (use (label_ref (match_dup 1)))])]
5053 ""
5054 )
5055
5056 (define_insn "pic_offset_arm"
5057 [(set (match_operand:SI 0 "register_operand" "=r")
5058 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5059 (unspec:SI [(match_operand:SI 2 "" "X")]
5060 UNSPEC_PIC_OFFSET))))]
5061 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5062 "ldr%?\\t%0, [%1,%2]"
5063 [(set_attr "type" "load1")]
5064 )
5065
5066 (define_expand "builtin_setjmp_receiver"
5067 [(label_ref (match_operand 0 "" ""))]
5068 "flag_pic"
5069 "
5070 {
5071 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5072 register. */
5073 if (arm_pic_register != INVALID_REGNUM)
5074 arm_load_pic_register (1UL << 3);
5075 DONE;
5076 }")
5077
5078 ;; If we are copying one reg to another, we can set the condition codes
5079 ;; according to its value. Such a move is common after a return from a
5080 ;; subroutine when the result is being tested against zero.
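;; For instance (a sketch, with hypothetical registers), copying r1 into r0
;; while testing the value against zero needs only
;;         subs    r0, r1, #0
;; and when source and destination are already the same register a plain
;;         cmp     r0, #0
;; suffices, as in the pattern below.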
5081
5082 (define_insn "*movsi_compare0"
5083 [(set (reg:CC CC_REGNUM)
5084 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5085 (const_int 0)))
5086 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5087 (match_dup 1))]
5088 "TARGET_32BIT"
5089 "@
5090 cmp%?\\t%0, #0
5091 sub%.\\t%0, %1, #0"
5092 [(set_attr "conds" "set")]
5093 )
5094
5095 ;; Subroutine to store a half word from a register into memory.
5096 ;; Operand 0 is the source register (HImode)
5097 ;; Operand 1 is the destination address in a register (SImode)
5098
5099 ;; In both this routine and the next, we must be careful not to spill
5100 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5101 ;; can generate unrecognizable rtl.
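;; As a rough little-endian sketch (hypothetical registers), storing the
;; HImode value in r1 at the address in r2 becomes two byte stores plus a
;; shift into a scratch register:
;;         strb    r1, [r2]                @ low byte
;;         mov     r3, r1, asr #8
;;         strb    r3, [r2, #1]            @ high byte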
5102
5103 (define_expand "storehi"
5104 [;; store the low byte
5105 (set (match_operand 1 "" "") (match_dup 3))
5106 ;; extract the high byte
5107 (set (match_dup 2)
5108 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5109 ;; store the high byte
5110 (set (match_dup 4) (match_dup 5))]
5111 "TARGET_ARM"
5112 "
5113 {
5114 rtx op1 = operands[1];
5115 rtx addr = XEXP (op1, 0);
5116 enum rtx_code code = GET_CODE (addr);
5117
5118 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5119 || code == MINUS)
5120 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5121
5122 operands[4] = adjust_address (op1, QImode, 1);
5123 operands[1] = adjust_address (operands[1], QImode, 0);
5124 operands[3] = gen_lowpart (QImode, operands[0]);
5125 operands[0] = gen_lowpart (SImode, operands[0]);
5126 operands[2] = gen_reg_rtx (SImode);
5127 operands[5] = gen_lowpart (QImode, operands[2]);
5128 }"
5129 )
5130
5131 (define_expand "storehi_bigend"
5132 [(set (match_dup 4) (match_dup 3))
5133 (set (match_dup 2)
5134 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5135 (set (match_operand 1 "" "") (match_dup 5))]
5136 "TARGET_ARM"
5137 "
5138 {
5139 rtx op1 = operands[1];
5140 rtx addr = XEXP (op1, 0);
5141 enum rtx_code code = GET_CODE (addr);
5142
5143 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5144 || code == MINUS)
5145 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5146
5147 operands[4] = adjust_address (op1, QImode, 1);
5148 operands[1] = adjust_address (operands[1], QImode, 0);
5149 operands[3] = gen_lowpart (QImode, operands[0]);
5150 operands[0] = gen_lowpart (SImode, operands[0]);
5151 operands[2] = gen_reg_rtx (SImode);
5152 operands[5] = gen_lowpart (QImode, operands[2]);
5153 }"
5154 )
5155
5156 ;; Subroutine to store a half word integer constant into memory.
5157 (define_expand "storeinthi"
5158 [(set (match_operand 0 "" "")
5159 (match_operand 1 "" ""))
5160 (set (match_dup 3) (match_dup 2))]
5161 "TARGET_ARM"
5162 "
5163 {
5164 HOST_WIDE_INT value = INTVAL (operands[1]);
5165 rtx addr = XEXP (operands[0], 0);
5166 rtx op0 = operands[0];
5167 enum rtx_code code = GET_CODE (addr);
5168
5169 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5170 || code == MINUS)
5171 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5172
5173 operands[1] = gen_reg_rtx (SImode);
5174 if (BYTES_BIG_ENDIAN)
5175 {
5176 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5177 if ((value & 255) == ((value >> 8) & 255))
5178 operands[2] = operands[1];
5179 else
5180 {
5181 operands[2] = gen_reg_rtx (SImode);
5182 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5183 }
5184 }
5185 else
5186 {
5187 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5188 if ((value & 255) == ((value >> 8) & 255))
5189 operands[2] = operands[1];
5190 else
5191 {
5192 operands[2] = gen_reg_rtx (SImode);
5193 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5194 }
5195 }
5196
5197 operands[3] = adjust_address (op0, QImode, 1);
5198 operands[0] = adjust_address (operands[0], QImode, 0);
5199 operands[2] = gen_lowpart (QImode, operands[2]);
5200 operands[1] = gen_lowpart (QImode, operands[1]);
5201 }"
5202 )
5203
5204 (define_expand "storehi_single_op"
5205 [(set (match_operand:HI 0 "memory_operand" "")
5206 (match_operand:HI 1 "general_operand" ""))]
5207 "TARGET_32BIT && arm_arch4"
5208 "
5209 if (!s_register_operand (operands[1], HImode))
5210 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5211 "
5212 )
5213
5214 (define_expand "movhi"
5215 [(set (match_operand:HI 0 "general_operand" "")
5216 (match_operand:HI 1 "general_operand" ""))]
5217 "TARGET_EITHER"
5218 "
5219 if (TARGET_ARM)
5220 {
5221 if (can_create_pseudo_p ())
5222 {
5223 if (GET_CODE (operands[0]) == MEM)
5224 {
5225 if (arm_arch4)
5226 {
5227 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5228 DONE;
5229 }
5230 if (GET_CODE (operands[1]) == CONST_INT)
5231 emit_insn (gen_storeinthi (operands[0], operands[1]));
5232 else
5233 {
5234 if (GET_CODE (operands[1]) == MEM)
5235 operands[1] = force_reg (HImode, operands[1]);
5236 if (BYTES_BIG_ENDIAN)
5237 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5238 else
5239 emit_insn (gen_storehi (operands[1], operands[0]));
5240 }
5241 DONE;
5242 }
5243 /* Sign extend a constant, and keep it in an SImode reg. */
5244 else if (GET_CODE (operands[1]) == CONST_INT)
5245 {
5246 rtx reg = gen_reg_rtx (SImode);
5247 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5248
5249 /* If the constant is already valid, leave it alone. */
5250 if (!const_ok_for_arm (val))
5251 {
5252 /* If setting all the top bits will make the constant
5253 loadable in a single instruction, then set them.
5254 Otherwise, sign extend the number. */
5255
5256 if (const_ok_for_arm (~(val | ~0xffff)))
5257 val |= ~0xffff;
5258 else if (val & 0x8000)
5259 val |= ~0xffff;
5260 }
5261
5262 emit_insn (gen_movsi (reg, GEN_INT (val)));
5263 operands[1] = gen_lowpart (HImode, reg);
5264 }
5265 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5266 && GET_CODE (operands[1]) == MEM)
5267 {
5268 rtx reg = gen_reg_rtx (SImode);
5269
5270 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5271 operands[1] = gen_lowpart (HImode, reg);
5272 }
5273 else if (!arm_arch4)
5274 {
5275 if (GET_CODE (operands[1]) == MEM)
5276 {
5277 rtx base;
5278 rtx offset = const0_rtx;
5279 rtx reg = gen_reg_rtx (SImode);
5280
5281 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5282 || (GET_CODE (base) == PLUS
5283 && (GET_CODE (offset = XEXP (base, 1))
5284 == CONST_INT)
5285 && ((INTVAL (offset) & 1) != 1)
5286 && GET_CODE (base = XEXP (base, 0)) == REG))
5287 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5288 {
5289 rtx new_rtx;
5290
5291 new_rtx = widen_memory_access (operands[1], SImode,
5292 ((INTVAL (offset) & ~3)
5293 - INTVAL (offset)));
5294 emit_insn (gen_movsi (reg, new_rtx));
5295 if (((INTVAL (offset) & 2) != 0)
5296 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5297 {
5298 rtx reg2 = gen_reg_rtx (SImode);
5299
5300 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5301 reg = reg2;
5302 }
5303 }
5304 else
5305 emit_insn (gen_movhi_bytes (reg, operands[1]));
5306
5307 operands[1] = gen_lowpart (HImode, reg);
5308 }
5309 }
5310 }
5311 /* Handle loading a large integer during reload. */
5312 else if (GET_CODE (operands[1]) == CONST_INT
5313 && !const_ok_for_arm (INTVAL (operands[1]))
5314 && !const_ok_for_arm (~INTVAL (operands[1])))
5315 {
5316 /* Writing a constant to memory needs a scratch, which should
5317 be handled with SECONDARY_RELOADs. */
5318 gcc_assert (GET_CODE (operands[0]) == REG);
5319
5320 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5321 emit_insn (gen_movsi (operands[0], operands[1]));
5322 DONE;
5323 }
5324 }
5325 else if (TARGET_THUMB2)
5326 {
5327 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5328 if (can_create_pseudo_p ())
5329 {
5330 if (GET_CODE (operands[0]) != REG)
5331 operands[1] = force_reg (HImode, operands[1]);
5332 /* Zero extend a constant, and keep it in an SImode reg. */
5333 else if (GET_CODE (operands[1]) == CONST_INT)
5334 {
5335 rtx reg = gen_reg_rtx (SImode);
5336 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5337
5338 emit_insn (gen_movsi (reg, GEN_INT (val)));
5339 operands[1] = gen_lowpart (HImode, reg);
5340 }
5341 }
5342 }
5343 else /* TARGET_THUMB1 */
5344 {
5345 if (can_create_pseudo_p ())
5346 {
5347 if (GET_CODE (operands[1]) == CONST_INT)
5348 {
5349 rtx reg = gen_reg_rtx (SImode);
5350
5351 emit_insn (gen_movsi (reg, operands[1]));
5352 operands[1] = gen_lowpart (HImode, reg);
5353 }
5354
5355 /* ??? We shouldn't really get invalid addresses here, but this can
5356 happen if we are passed an address relative to the SP (never OK for
5357 HImode/QImode) or to a virtual register (rejected by
5358 GO_IF_LEGITIMATE_ADDRESS for HImode/QImode). */
5359 /* ??? This should perhaps be fixed elsewhere, for instance, in
5360 fixup_stack_1, by checking for other kinds of invalid addresses,
5361 e.g. a bare reference to a virtual register. This may confuse the
5362 alpha though, which must handle this case differently. */
5363 if (GET_CODE (operands[0]) == MEM
5364 && !memory_address_p (GET_MODE (operands[0]),
5365 XEXP (operands[0], 0)))
5366 operands[0]
5367 = replace_equiv_address (operands[0],
5368 copy_to_reg (XEXP (operands[0], 0)));
5369
5370 if (GET_CODE (operands[1]) == MEM
5371 && !memory_address_p (GET_MODE (operands[1]),
5372 XEXP (operands[1], 0)))
5373 operands[1]
5374 = replace_equiv_address (operands[1],
5375 copy_to_reg (XEXP (operands[1], 0)));
5376
5377 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5378 {
5379 rtx reg = gen_reg_rtx (SImode);
5380
5381 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5382 operands[1] = gen_lowpart (HImode, reg);
5383 }
5384
5385 if (GET_CODE (operands[0]) == MEM)
5386 operands[1] = force_reg (HImode, operands[1]);
5387 }
5388 else if (GET_CODE (operands[1]) == CONST_INT
5389 && !satisfies_constraint_I (operands[1]))
5390 {
5391 /* Handle loading a large integer during reload. */
5392
5393 /* Writing a constant to memory needs a scratch, which should
5394 be handled with SECONDARY_RELOADs. */
5395 gcc_assert (GET_CODE (operands[0]) == REG);
5396
5397 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5398 emit_insn (gen_movsi (operands[0], operands[1]));
5399 DONE;
5400 }
5401 }
5402 "
5403 )
5404
5405 (define_insn "*thumb1_movhi_insn"
5406 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5407 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5408 "TARGET_THUMB1
5409 && ( register_operand (operands[0], HImode)
5410 || register_operand (operands[1], HImode))"
5411 "*
5412 switch (which_alternative)
5413 {
5414 case 0: return \"add %0, %1, #0\";
5415 case 2: return \"strh %1, %0\";
5416 case 3: return \"mov %0, %1\";
5417 case 4: return \"mov %0, %1\";
5418 case 5: return \"mov %0, %1\";
5419 default: gcc_unreachable ();
5420 case 1:
5421 /* The stack pointer can end up being taken as an index register.
5422 Catch this case here and deal with it. */
5423 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5424 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5425 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5426 {
5427 rtx ops[2];
5428 ops[0] = operands[0];
5429 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5430
5431 output_asm_insn (\"mov %0, %1\", ops);
5432
5433 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5434
5435 }
5436 return \"ldrh %0, %1\";
5437 }"
5438 [(set_attr "length" "2,4,2,2,2,2")
5439 (set_attr "type" "*,load1,store1,*,*,*")]
5440 )
5441
5442
5443 (define_expand "movhi_bytes"
5444 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5445 (set (match_dup 3)
5446 (zero_extend:SI (match_dup 6)))
5447 (set (match_operand:SI 0 "" "")
5448 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5449 "TARGET_ARM"
5450 "
5451 {
5452 rtx mem1, mem2;
5453 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5454
5455 mem1 = change_address (operands[1], QImode, addr);
5456 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5457 operands[0] = gen_lowpart (SImode, operands[0]);
5458 operands[1] = mem1;
5459 operands[2] = gen_reg_rtx (SImode);
5460 operands[3] = gen_reg_rtx (SImode);
5461 operands[6] = mem2;
5462
5463 if (BYTES_BIG_ENDIAN)
5464 {
5465 operands[4] = operands[2];
5466 operands[5] = operands[3];
5467 }
5468 else
5469 {
5470 operands[4] = operands[3];
5471 operands[5] = operands[2];
5472 }
5473 }"
5474 )
5475
5476 (define_expand "movhi_bigend"
5477 [(set (match_dup 2)
5478 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5479 (const_int 16)))
5480 (set (match_dup 3)
5481 (ashiftrt:SI (match_dup 2) (const_int 16)))
5482 (set (match_operand:HI 0 "s_register_operand" "")
5483 (match_dup 4))]
5484 "TARGET_ARM"
5485 "
5486 operands[2] = gen_reg_rtx (SImode);
5487 operands[3] = gen_reg_rtx (SImode);
5488 operands[4] = gen_lowpart (HImode, operands[3]);
5489 "
5490 )
5491
5492 ;; Pattern to recognize the insns generated by the default case above.
5493 (define_insn "*movhi_insn_arch4"
5494 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5495 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5496 "TARGET_ARM
5497 && arm_arch4
5498 && (GET_CODE (operands[1]) != CONST_INT
5499 || const_ok_for_arm (INTVAL (operands[1]))
5500 || const_ok_for_arm (~INTVAL (operands[1])))"
5501 "@
5502 mov%?\\t%0, %1\\t%@ movhi
5503 mvn%?\\t%0, #%B1\\t%@ movhi
5504 str%(h%)\\t%1, %0\\t%@ movhi
5505 ldr%(h%)\\t%0, %1\\t%@ movhi"
5506 [(set_attr "type" "*,*,store1,load1")
5507 (set_attr "predicable" "yes")
5508 (set_attr "pool_range" "*,*,*,256")
5509 (set_attr "neg_pool_range" "*,*,*,244")]
5510 )
5511
5512 (define_insn "*movhi_bytes"
5513 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5514 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5515 "TARGET_ARM"
5516 "@
5517 mov%?\\t%0, %1\\t%@ movhi
5518 mvn%?\\t%0, #%B1\\t%@ movhi"
5519 [(set_attr "predicable" "yes")]
5520 )
5521
5522 (define_expand "thumb_movhi_clobber"
5523 [(set (match_operand:HI 0 "memory_operand" "")
5524 (match_operand:HI 1 "register_operand" ""))
5525 (clobber (match_operand:DI 2 "register_operand" ""))]
5526 "TARGET_THUMB1"
5527 "
5528 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5529 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5530 {
5531 emit_insn (gen_movhi (operands[0], operands[1]));
5532 DONE;
5533 }
5534 /* XXX Fixme, need to handle other cases here as well. */
5535 gcc_unreachable ();
5536 "
5537 )
5538
5539 ;; We use a DImode scratch because we may occasionally need an additional
5540 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5541 ;; to take any notice of the "o" constraint on the reload_memory_operand operand.
5542 (define_expand "reload_outhi"
5543 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5544 (match_operand:HI 1 "s_register_operand" "r")
5545 (match_operand:DI 2 "s_register_operand" "=&l")])]
5546 "TARGET_EITHER"
5547 "if (TARGET_ARM)
5548 arm_reload_out_hi (operands);
5549 else
5550 thumb_reload_out_hi (operands);
5551 DONE;
5552 "
5553 )
5554
5555 (define_expand "reload_inhi"
5556 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5557 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5558 (match_operand:DI 2 "s_register_operand" "=&r")])]
5559 "TARGET_EITHER"
5560 "
5561 if (TARGET_ARM)
5562 arm_reload_in_hi (operands);
5563 else
5564 thumb_reload_out_hi (operands);
5565 DONE;
5566 ")
5567
5568 (define_expand "movqi"
5569 [(set (match_operand:QI 0 "general_operand" "")
5570 (match_operand:QI 1 "general_operand" ""))]
5571 "TARGET_EITHER"
5572 "
5573 /* Everything except mem = const or mem = mem can be done easily.  */
5574
5575 if (can_create_pseudo_p ())
5576 {
5577 if (GET_CODE (operands[1]) == CONST_INT)
5578 {
5579 rtx reg = gen_reg_rtx (SImode);
5580
5581 emit_insn (gen_movsi (reg, operands[1]));
5582 operands[1] = gen_lowpart (QImode, reg);
5583 }
5584
5585 if (TARGET_THUMB)
5586 {
5587 /* ??? We shouldn't really get invalid addresses here, but this can
5588 happen if we are passed an address relative to the SP (never OK for
5589 HImode/QImode) or to a virtual register (rejected by
5590 GO_IF_LEGITIMATE_ADDRESS for HImode/QImode). */
5591 /* ??? This should perhaps be fixed elsewhere, for instance, in
5592 fixup_stack_1, by checking for other kinds of invalid addresses,
5593 e.g. a bare reference to a virtual register. This may confuse the
5594 alpha though, which must handle this case differently. */
5595 if (GET_CODE (operands[0]) == MEM
5596 && !memory_address_p (GET_MODE (operands[0]),
5597 XEXP (operands[0], 0)))
5598 operands[0]
5599 = replace_equiv_address (operands[0],
5600 copy_to_reg (XEXP (operands[0], 0)));
5601 if (GET_CODE (operands[1]) == MEM
5602 && !memory_address_p (GET_MODE (operands[1]),
5603 XEXP (operands[1], 0)))
5604 operands[1]
5605 = replace_equiv_address (operands[1],
5606 copy_to_reg (XEXP (operands[1], 0)));
5607 }
5608
5609 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5610 {
5611 rtx reg = gen_reg_rtx (SImode);
5612
5613 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5614 operands[1] = gen_lowpart (QImode, reg);
5615 }
5616
5617 if (GET_CODE (operands[0]) == MEM)
5618 operands[1] = force_reg (QImode, operands[1]);
5619 }
5620 else if (TARGET_THUMB
5621 && GET_CODE (operands[1]) == CONST_INT
5622 && !satisfies_constraint_I (operands[1]))
5623 {
5624 /* Handle loading a large integer during reload. */
5625
5626 /* Writing a constant to memory needs a scratch, which should
5627 be handled with SECONDARY_RELOADs. */
5628 gcc_assert (GET_CODE (operands[0]) == REG);
5629
5630 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5631 emit_insn (gen_movsi (operands[0], operands[1]));
5632 DONE;
5633 }
5634 "
5635 )
5636
5637
5638 (define_insn "*arm_movqi_insn"
5639 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5640 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5641 "TARGET_32BIT
5642 && ( register_operand (operands[0], QImode)
5643 || register_operand (operands[1], QImode))"
5644 "@
5645 mov%?\\t%0, %1
5646 mvn%?\\t%0, #%B1
5647 ldr%(b%)\\t%0, %1
5648 str%(b%)\\t%1, %0"
5649 [(set_attr "type" "*,*,load1,store1")
5650 (set_attr "predicable" "yes")]
5651 )
5652
5653 (define_insn "*thumb1_movqi_insn"
5654 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5655 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5656 "TARGET_THUMB1
5657 && ( register_operand (operands[0], QImode)
5658 || register_operand (operands[1], QImode))"
5659 "@
5660 add\\t%0, %1, #0
5661 ldrb\\t%0, %1
5662 strb\\t%1, %0
5663 mov\\t%0, %1
5664 mov\\t%0, %1
5665 mov\\t%0, %1"
5666 [(set_attr "length" "2")
5667 (set_attr "type" "*,load1,store1,*,*,*")
5668 (set_attr "pool_range" "*,32,*,*,*,*")]
5669 )
5670
5671 (define_expand "movsf"
5672 [(set (match_operand:SF 0 "general_operand" "")
5673 (match_operand:SF 1 "general_operand" ""))]
5674 "TARGET_EITHER"
5675 "
5676 if (TARGET_32BIT)
5677 {
5678 if (GET_CODE (operands[0]) == MEM)
5679 operands[1] = force_reg (SFmode, operands[1]);
5680 }
5681 else /* TARGET_THUMB1 */
5682 {
5683 if (can_create_pseudo_p ())
5684 {
5685 if (GET_CODE (operands[0]) != REG)
5686 operands[1] = force_reg (SFmode, operands[1]);
5687 }
5688 }
5689 "
5690 )
5691
5692 ;; Transform a move of a floating-point constant into a core register
5693 ;; into the equivalent SImode operation.
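;; For example (sketch), after reload
;;     (set (reg:SF r0) (const_double 1.0))
;; is rewritten by the split below as the integer move
;;     (set (reg:SI r0) (const_int 0x3f800000))
;; i.e. the IEEE single-precision bit pattern of 1.0 handled as an ordinary
;; SImode constant.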
5694 (define_split
5695 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5696 (match_operand:SF 1 "immediate_operand" ""))]
5697 "TARGET_32BIT
5698 && reload_completed
5699 && GET_CODE (operands[1]) == CONST_DOUBLE"
5700 [(set (match_dup 2) (match_dup 3))]
5701 "
5702 operands[2] = gen_lowpart (SImode, operands[0]);
5703 operands[3] = gen_lowpart (SImode, operands[1]);
5704 if (operands[2] == 0 || operands[3] == 0)
5705 FAIL;
5706 "
5707 )
5708
5709 (define_insn "*arm_movsf_soft_insn"
5710 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5711 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5712 "TARGET_ARM
5713 && TARGET_SOFT_FLOAT
5714 && (GET_CODE (operands[0]) != MEM
5715 || register_operand (operands[1], SFmode))"
5716 "@
5717 mov%?\\t%0, %1
5718 ldr%?\\t%0, %1\\t%@ float
5719 str%?\\t%1, %0\\t%@ float"
5720 [(set_attr "length" "4,4,4")
5721 (set_attr "predicable" "yes")
5722 (set_attr "type" "*,load1,store1")
5723 (set_attr "pool_range" "*,4096,*")
5724 (set_attr "neg_pool_range" "*,4084,*")]
5725 )
5726
5727 ;;; ??? This should have alternatives for constants.
5728 (define_insn "*thumb1_movsf_insn"
5729 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5730 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5731 "TARGET_THUMB1
5732 && ( register_operand (operands[0], SFmode)
5733 || register_operand (operands[1], SFmode))"
5734 "@
5735 add\\t%0, %1, #0
5736 ldmia\\t%1, {%0}
5737 stmia\\t%0, {%1}
5738 ldr\\t%0, %1
5739 str\\t%1, %0
5740 mov\\t%0, %1
5741 mov\\t%0, %1"
5742 [(set_attr "length" "2")
5743 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5744 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5745 )
5746
5747 (define_expand "movdf"
5748 [(set (match_operand:DF 0 "general_operand" "")
5749 (match_operand:DF 1 "general_operand" ""))]
5750 "TARGET_EITHER"
5751 "
5752 if (TARGET_32BIT)
5753 {
5754 if (GET_CODE (operands[0]) == MEM)
5755 operands[1] = force_reg (DFmode, operands[1]);
5756 }
5757 else /* TARGET_THUMB */
5758 {
5759 if (can_create_pseudo_p ())
5760 {
5761 if (GET_CODE (operands[0]) != REG)
5762 operands[1] = force_reg (DFmode, operands[1]);
5763 }
5764 }
5765 "
5766 )
5767
5768 ;; Reloading a DFmode value stored in integer registers to memory can
5769 ;; require a scratch register.
5770 (define_expand "reload_outdf"
5771 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5772 (match_operand:DF 1 "s_register_operand" "r")
5773 (match_operand:SI 2 "s_register_operand" "=&r")]
5774 "TARGET_32BIT"
5775 "
5776 {
5777 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5778
5779 if (code == REG)
5780 operands[2] = XEXP (operands[0], 0);
5781 else if (code == POST_INC || code == PRE_DEC)
5782 {
5783 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5784 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5785 emit_insn (gen_movdi (operands[0], operands[1]));
5786 DONE;
5787 }
5788 else if (code == PRE_INC)
5789 {
5790 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5791
5792 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5793 operands[2] = reg;
5794 }
5795 else if (code == POST_DEC)
5796 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5797 else
5798 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5799 XEXP (XEXP (operands[0], 0), 1)));
5800
5801 emit_insn (gen_rtx_SET (VOIDmode,
5802 replace_equiv_address (operands[0], operands[2]),
5803 operands[1]));
5804
5805 if (code == POST_DEC)
5806 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5807
5808 DONE;
5809 }"
5810 )
5811
5812 (define_insn "*movdf_soft_insn"
5813 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5814 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5815 "TARGET_ARM && TARGET_SOFT_FLOAT
5816 && ( register_operand (operands[0], DFmode)
5817 || register_operand (operands[1], DFmode))"
5818 "*
5819 switch (which_alternative)
5820 {
5821 case 0:
5822 case 1:
5823 case 2:
5824 return \"#\";
5825 default:
5826 return output_move_double (operands);
5827 }
5828 "
5829 [(set_attr "length" "8,12,16,8,8")
5830 (set_attr "type" "*,*,*,load2,store2")
5831 (set_attr "pool_range" "1020")
5832 (set_attr "neg_pool_range" "1008")]
5833 )
5834
5835 ;;; ??? This should have alternatives for constants.
5836 ;;; ??? This was originally identical to the movdi_insn pattern.
5837 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5838 ;;; thumb_reorg with a memory reference.
5839 (define_insn "*thumb_movdf_insn"
5840 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5841 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5842 "TARGET_THUMB1
5843 && ( register_operand (operands[0], DFmode)
5844 || register_operand (operands[1], DFmode))"
5845 "*
5846 switch (which_alternative)
5847 {
5848 default:
5849 case 0:
5850 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5851 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5852 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5853 case 1:
5854 return \"ldmia\\t%1, {%0, %H0}\";
5855 case 2:
5856 return \"stmia\\t%0, {%1, %H1}\";
5857 case 3:
5858 return thumb_load_double_from_address (operands);
5859 case 4:
5860 operands[2] = gen_rtx_MEM (SImode,
5861 plus_constant (XEXP (operands[0], 0), 4));
5862 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5863 return \"\";
5864 case 5:
5865 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5866 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5867 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5868 }
5869 "
5870 [(set_attr "length" "4,2,2,6,4,4")
5871 (set_attr "type" "*,load2,store2,load2,store2,*")
5872 (set_attr "pool_range" "*,*,*,1020,*,*")]
5873 )
5874
5875 (define_expand "movxf"
5876 [(set (match_operand:XF 0 "general_operand" "")
5877 (match_operand:XF 1 "general_operand" ""))]
5878 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5879 "
5880 if (GET_CODE (operands[0]) == MEM)
5881 operands[1] = force_reg (XFmode, operands[1]);
5882 "
5883 )
5884
5885 \f
5886
5887 ;; load- and store-multiple insns
5888 ;; The ARM can load/store any set of registers, provided that they are in
5889 ;; ascending order, but that is beyond GCC, so we stick with what it knows.
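;; For example (sketch), *ldmsi4 below matches a parallel of four loads from
;; consecutive words and emits
;;         ldmia   r1, {r4, r5, r6, r7}    @ [r1], [r1, #4], [r1, #8], [r1, #12]
;; while the write-back variants update the base register as well, e.g.
;;         ldmia   r1!, {r4, r5, r6, r7}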
5890
5891 (define_expand "load_multiple"
5892 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5893 (match_operand:SI 1 "" ""))
5894 (use (match_operand:SI 2 "" ""))])]
5895 "TARGET_32BIT"
5896 {
5897 HOST_WIDE_INT offset = 0;
5898
5899 /* Support only fixed point registers. */
5900 if (GET_CODE (operands[2]) != CONST_INT
5901 || INTVAL (operands[2]) > 14
5902 || INTVAL (operands[2]) < 2
5903 || GET_CODE (operands[1]) != MEM
5904 || GET_CODE (operands[0]) != REG
5905 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5906 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5907 FAIL;
5908
5909 operands[3]
5910 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5911 force_reg (SImode, XEXP (operands[1], 0)),
5912 TRUE, FALSE, operands[1], &offset);
5913 })
5914
5915 ;; Load multiple with write-back
5916
5917 (define_insn "*ldmsi_postinc4"
5918 [(match_parallel 0 "load_multiple_operation"
5919 [(set (match_operand:SI 1 "s_register_operand" "=r")
5920 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5921 (const_int 16)))
5922 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5923 (mem:SI (match_dup 2)))
5924 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5925 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5926 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5927 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5928 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5929 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5930 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5931 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5932 [(set_attr "type" "load4")
5933 (set_attr "predicable" "yes")]
5934 )
5935
5936 (define_insn "*ldmsi_postinc4_thumb1"
5937 [(match_parallel 0 "load_multiple_operation"
5938 [(set (match_operand:SI 1 "s_register_operand" "=l")
5939 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5940 (const_int 16)))
5941 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5942 (mem:SI (match_dup 2)))
5943 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5944 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5945 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5946 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5947 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5948 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5949 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5950 "ldmia\\t%1!, {%3, %4, %5, %6}"
5951 [(set_attr "type" "load4")]
5952 )
5953
5954 (define_insn "*ldmsi_postinc3"
5955 [(match_parallel 0 "load_multiple_operation"
5956 [(set (match_operand:SI 1 "s_register_operand" "=r")
5957 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5958 (const_int 12)))
5959 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5960 (mem:SI (match_dup 2)))
5961 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5962 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5963 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5964 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5965 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5966 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
5967 [(set_attr "type" "load3")
5968 (set_attr "predicable" "yes")]
5969 )
5970
5971 (define_insn "*ldmsi_postinc2"
5972 [(match_parallel 0 "load_multiple_operation"
5973 [(set (match_operand:SI 1 "s_register_operand" "=r")
5974 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5975 (const_int 8)))
5976 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5977 (mem:SI (match_dup 2)))
5978 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5979 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5980 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5981 "ldm%(ia%)\\t%1!, {%3, %4}"
5982 [(set_attr "type" "load2")
5983 (set_attr "predicable" "yes")]
5984 )
5985
5986 ;; Ordinary load multiple
5987
5988 (define_insn "*ldmsi4"
5989 [(match_parallel 0 "load_multiple_operation"
5990 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5991 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5992 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5993 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5994 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5995 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5996 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5997 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5998 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5999 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6000 [(set_attr "type" "load4")
6001 (set_attr "predicable" "yes")]
6002 )
6003
6004 (define_insn "*ldmsi3"
6005 [(match_parallel 0 "load_multiple_operation"
6006 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6007 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6008 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6009 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6010 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6011 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6012 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6013 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6014 [(set_attr "type" "load3")
6015 (set_attr "predicable" "yes")]
6016 )
6017
6018 (define_insn "*ldmsi2"
6019 [(match_parallel 0 "load_multiple_operation"
6020 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6021 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6022 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6023 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6024 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6025 "ldm%(ia%)\\t%1, {%2, %3}"
6026 [(set_attr "type" "load2")
6027 (set_attr "predicable" "yes")]
6028 )
6029
6030 (define_expand "store_multiple"
6031 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6032 (match_operand:SI 1 "" ""))
6033 (use (match_operand:SI 2 "" ""))])]
6034 "TARGET_32BIT"
6035 {
6036 HOST_WIDE_INT offset = 0;
6037
6038 /* Support only fixed point registers. */
6039 if (GET_CODE (operands[2]) != CONST_INT
6040 || INTVAL (operands[2]) > 14
6041 || INTVAL (operands[2]) < 2
6042 || GET_CODE (operands[1]) != REG
6043 || GET_CODE (operands[0]) != MEM
6044 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6045 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6046 FAIL;
6047
6048 operands[3]
6049 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6050 force_reg (SImode, XEXP (operands[0], 0)),
6051 TRUE, FALSE, operands[0], &offset);
6052 })
6053
6054 ;; Store multiple with write-back
6055
6056 (define_insn "*stmsi_postinc4"
6057 [(match_parallel 0 "store_multiple_operation"
6058 [(set (match_operand:SI 1 "s_register_operand" "=r")
6059 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6060 (const_int 16)))
6061 (set (mem:SI (match_dup 2))
6062 (match_operand:SI 3 "arm_hard_register_operand" ""))
6063 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6064 (match_operand:SI 4 "arm_hard_register_operand" ""))
6065 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6066 (match_operand:SI 5 "arm_hard_register_operand" ""))
6067 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6068 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6069 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6070 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6071 [(set_attr "predicable" "yes")
6072 (set_attr "type" "store4")]
6073 )
6074
6075 (define_insn "*stmsi_postinc4_thumb1"
6076 [(match_parallel 0 "store_multiple_operation"
6077 [(set (match_operand:SI 1 "s_register_operand" "=l")
6078 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6079 (const_int 16)))
6080 (set (mem:SI (match_dup 2))
6081 (match_operand:SI 3 "arm_hard_register_operand" ""))
6082 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6083 (match_operand:SI 4 "arm_hard_register_operand" ""))
6084 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6085 (match_operand:SI 5 "arm_hard_register_operand" ""))
6086 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6087 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6088 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6089 "stmia\\t%1!, {%3, %4, %5, %6}"
6090 [(set_attr "type" "store4")]
6091 )
6092
6093 (define_insn "*stmsi_postinc3"
6094 [(match_parallel 0 "store_multiple_operation"
6095 [(set (match_operand:SI 1 "s_register_operand" "=r")
6096 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6097 (const_int 12)))
6098 (set (mem:SI (match_dup 2))
6099 (match_operand:SI 3 "arm_hard_register_operand" ""))
6100 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6101 (match_operand:SI 4 "arm_hard_register_operand" ""))
6102 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6103 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6104 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6105 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6106 [(set_attr "predicable" "yes")
6107 (set_attr "type" "store3")]
6108 )
6109
6110 (define_insn "*stmsi_postinc2"
6111 [(match_parallel 0 "store_multiple_operation"
6112 [(set (match_operand:SI 1 "s_register_operand" "=r")
6113 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6114 (const_int 8)))
6115 (set (mem:SI (match_dup 2))
6116 (match_operand:SI 3 "arm_hard_register_operand" ""))
6117 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6118 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6119 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6120 "stm%(ia%)\\t%1!, {%3, %4}"
6121 [(set_attr "predicable" "yes")
6122 (set_attr "type" "store2")]
6123 )
6124
6125 ;; Ordinary store multiple
6126
6127 (define_insn "*stmsi4"
6128 [(match_parallel 0 "store_multiple_operation"
6129 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6130 (match_operand:SI 2 "arm_hard_register_operand" ""))
6131 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6132 (match_operand:SI 3 "arm_hard_register_operand" ""))
6133 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6134 (match_operand:SI 4 "arm_hard_register_operand" ""))
6135 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6136 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6137 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6138 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6139 [(set_attr "predicable" "yes")
6140 (set_attr "type" "store4")]
6141 )
6142
6143 (define_insn "*stmsi3"
6144 [(match_parallel 0 "store_multiple_operation"
6145 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6146 (match_operand:SI 2 "arm_hard_register_operand" ""))
6147 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6148 (match_operand:SI 3 "arm_hard_register_operand" ""))
6149 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6150 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6151 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6152 "stm%(ia%)\\t%1, {%2, %3, %4}"
6153 [(set_attr "predicable" "yes")
6154 (set_attr "type" "store3")]
6155 )
6156
6157 (define_insn "*stmsi2"
6158 [(match_parallel 0 "store_multiple_operation"
6159 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6160 (match_operand:SI 2 "arm_hard_register_operand" ""))
6161 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6162 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6163 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6164 "stm%(ia%)\\t%1, {%2, %3}"
6165 [(set_attr "predicable" "yes")
6166 (set_attr "type" "store2")]
6167 )
6168
6169 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6170 ;; We could let this apply to smaller blocks as well, but it clobbers so
6171 ;; many registers that there is then probably a better way.
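;; For example (sketch), a word-aligned 12-byte Thumb-1 copy is matched by
;; movmem12b below and comes out roughly as
;;         ldmia   r1!, {r3, r4, r5}
;;         stmia   r0!, {r3, r4, r5}
;; leaving both pointers just past the copied block.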
6172
6173 (define_expand "movmemqi"
6174 [(match_operand:BLK 0 "general_operand" "")
6175 (match_operand:BLK 1 "general_operand" "")
6176 (match_operand:SI 2 "const_int_operand" "")
6177 (match_operand:SI 3 "const_int_operand" "")]
6178 "TARGET_EITHER"
6179 "
6180 if (TARGET_32BIT)
6181 {
6182 if (arm_gen_movmemqi (operands))
6183 DONE;
6184 FAIL;
6185 }
6186 else /* TARGET_THUMB1 */
6187 {
6188 if ( INTVAL (operands[3]) != 4
6189 || INTVAL (operands[2]) > 48)
6190 FAIL;
6191
6192 thumb_expand_movmemqi (operands);
6193 DONE;
6194 }
6195 "
6196 )
6197
6198 ;; Thumb block-move insns
6199
6200 (define_insn "movmem12b"
6201 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6202 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6203 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6204 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6205 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6206 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6207 (set (match_operand:SI 0 "register_operand" "=l")
6208 (plus:SI (match_dup 2) (const_int 12)))
6209 (set (match_operand:SI 1 "register_operand" "=l")
6210 (plus:SI (match_dup 3) (const_int 12)))
6211 (clobber (match_scratch:SI 4 "=&l"))
6212 (clobber (match_scratch:SI 5 "=&l"))
6213 (clobber (match_scratch:SI 6 "=&l"))]
6214 "TARGET_THUMB1"
6215 "* return thumb_output_move_mem_multiple (3, operands);"
6216 [(set_attr "length" "4")
6217 ; This isn't entirely accurate... It loads as well, but in terms of
6218 ; scheduling the following insn it is better to consider it as a store
6219 (set_attr "type" "store3")]
6220 )
6221
6222 (define_insn "movmem8b"
6223 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6224 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6225 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6226 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6227 (set (match_operand:SI 0 "register_operand" "=l")
6228 (plus:SI (match_dup 2) (const_int 8)))
6229 (set (match_operand:SI 1 "register_operand" "=l")
6230 (plus:SI (match_dup 3) (const_int 8)))
6231 (clobber (match_scratch:SI 4 "=&l"))
6232 (clobber (match_scratch:SI 5 "=&l"))]
6233 "TARGET_THUMB1"
6234 "* return thumb_output_move_mem_multiple (2, operands);"
6235 [(set_attr "length" "4")
6236 ; This isn't entirely accurate... It loads as well, but in terms of
6237 ; scheduling the following insn it is better to consider it as a store
6238 (set_attr "type" "store2")]
6239 )
6240
6241 \f
6242
6243 ;; Compare & branch insns
6244 ;; The range calculations are as follows:
6245 ;; For forward branches, the address calculation returns the address of
6246 ;; the next instruction. This is 2 beyond the branch instruction.
6247 ;; For backward branches, the address calculation returns the address of
6248 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6249 ;; instruction for the shortest sequence, and 4 before the branch instruction
6250 ;; if we have to jump around an unconditional branch.
6251 ;; To the basic branch range the PC offset must be added (this is +4).
6252 ;; So for forward branches we have
6253 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6254 ;; And for backward branches we have
6255 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6256 ;;
6257 ;; For a 'b'       pos_range = 2046, neg_range = -2048, giving (-2040 -> 2048).
6258 ;; For a 'b<cond>'  pos_range = 254,  neg_range = -256,  giving (-250 -> 256).
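;; Working the numbers through as a check:
;;   for 'b':        2048 = 2046 - 2 + 4     -2040 = -2048 - (-4) + 4
;;   for 'b<cond>':   256 =  254 - 2 + 4      -250 =  -256 - (-2) + 4
;; These are exactly the bounds tested by the "length" attributes of the
;; cbranch patterns below.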
6259
6260 (define_expand "cbranchsi4"
6261 [(set (pc) (if_then_else
6262 (match_operator 0 "arm_comparison_operator"
6263 [(match_operand:SI 1 "s_register_operand" "")
6264 (match_operand:SI 2 "nonmemory_operand" "")])
6265 (label_ref (match_operand 3 "" ""))
6266 (pc)))]
6267 "TARGET_THUMB1"
6268 "
6269 if (thumb1_cmpneg_operand (operands[2], SImode))
6270 {
6271 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6272 operands[3], operands[0]));
6273 DONE;
6274 }
6275 if (!thumb1_cmp_operand (operands[2], SImode))
6276 operands[2] = force_reg (SImode, operands[2]);
6277 ")
6278
6279 (define_insn "*cbranchsi4_insn"
6280 [(set (pc) (if_then_else
6281 (match_operator 0 "arm_comparison_operator"
6282 [(match_operand:SI 1 "s_register_operand" "l,*h")
6283 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6284 (label_ref (match_operand 3 "" ""))
6285 (pc)))]
6286 "TARGET_THUMB1"
6287 "*
6288 output_asm_insn (\"cmp\\t%1, %2\", operands);
6289
6290 switch (get_attr_length (insn))
6291 {
6292 case 4: return \"b%d0\\t%l3\";
6293 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6294 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6295 }
6296 "
6297 [(set (attr "far_jump")
6298 (if_then_else
6299 (eq_attr "length" "8")
6300 (const_string "yes")
6301 (const_string "no")))
6302 (set (attr "length")
6303 (if_then_else
6304 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6305 (le (minus (match_dup 3) (pc)) (const_int 256)))
6306 (const_int 4)
6307 (if_then_else
6308 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6309 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6310 (const_int 6)
6311 (const_int 8))))]
6312 )
6313
6314 (define_insn "cbranchsi4_scratch"
6315 [(set (pc) (if_then_else
6316 (match_operator 4 "arm_comparison_operator"
6317 [(match_operand:SI 1 "s_register_operand" "l,0")
6318 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6319 (label_ref (match_operand 3 "" ""))
6320 (pc)))
6321 (clobber (match_scratch:SI 0 "=l,l"))]
6322 "TARGET_THUMB1"
6323 "*
6324 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6325
6326 switch (get_attr_length (insn))
6327 {
6328 case 4: return \"b%d4\\t%l3\";
6329 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6330 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6331 }
6332 "
6333 [(set (attr "far_jump")
6334 (if_then_else
6335 (eq_attr "length" "8")
6336 (const_string "yes")
6337 (const_string "no")))
6338 (set (attr "length")
6339 (if_then_else
6340 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6341 (le (minus (match_dup 3) (pc)) (const_int 256)))
6342 (const_int 4)
6343 (if_then_else
6344 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6345 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6346 (const_int 6)
6347 (const_int 8))))]
6348 )
6349 (define_insn "*movsi_cbranchsi4"
6350 [(set (pc)
6351 (if_then_else
6352 (match_operator 3 "arm_comparison_operator"
6353 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6354 (const_int 0)])
6355 (label_ref (match_operand 2 "" ""))
6356 (pc)))
6357 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6358 (match_dup 1))]
6359 "TARGET_THUMB1"
6360 "*{
6361 if (which_alternative == 0)
6362 output_asm_insn (\"cmp\t%0, #0\", operands);
6363 else if (which_alternative == 1)
6364 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6365 else
6366 {
6367 output_asm_insn (\"cmp\t%1, #0\", operands);
6368 if (which_alternative == 2)
6369 output_asm_insn (\"mov\t%0, %1\", operands);
6370 else
6371 output_asm_insn (\"str\t%1, %0\", operands);
6372 }
6373 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6374 {
6375 case 4: return \"b%d3\\t%l2\";
6376 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6377 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6378 }
6379 }"
6380 [(set (attr "far_jump")
6381 (if_then_else
6382 (ior (and (gt (symbol_ref ("which_alternative"))
6383 (const_int 1))
6384 (eq_attr "length" "8"))
6385 (eq_attr "length" "10"))
6386 (const_string "yes")
6387 (const_string "no")))
6388 (set (attr "length")
6389 (if_then_else
6390 (le (symbol_ref ("which_alternative"))
6391 (const_int 1))
6392 (if_then_else
6393 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6394 (le (minus (match_dup 2) (pc)) (const_int 256)))
6395 (const_int 4)
6396 (if_then_else
6397 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6398 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6399 (const_int 6)
6400 (const_int 8)))
6401 (if_then_else
6402 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6403 (le (minus (match_dup 2) (pc)) (const_int 256)))
6404 (const_int 6)
6405 (if_then_else
6406 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6407 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6408 (const_int 8)
6409 (const_int 10)))))]
6410 )
6411
6412 (define_insn "*negated_cbranchsi4"
6413 [(set (pc)
6414 (if_then_else
6415 (match_operator 0 "equality_operator"
6416 [(match_operand:SI 1 "s_register_operand" "l")
6417 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6418 (label_ref (match_operand 3 "" ""))
6419 (pc)))]
6420 "TARGET_THUMB1"
6421 "*
6422 output_asm_insn (\"cmn\\t%1, %2\", operands);
6423 switch (get_attr_length (insn))
6424 {
6425 case 4: return \"b%d0\\t%l3\";
6426 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6427 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6428 }
6429 "
6430 [(set (attr "far_jump")
6431 (if_then_else
6432 (eq_attr "length" "8")
6433 (const_string "yes")
6434 (const_string "no")))
6435 (set (attr "length")
6436 (if_then_else
6437 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6438 (le (minus (match_dup 3) (pc)) (const_int 256)))
6439 (const_int 4)
6440 (if_then_else
6441 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6442 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6443 (const_int 6)
6444 (const_int 8))))]
6445 )
6446
6447 (define_insn "*tbit_cbranch"
6448 [(set (pc)
6449 (if_then_else
6450 (match_operator 0 "equality_operator"
6451 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6452 (const_int 1)
6453 (match_operand:SI 2 "const_int_operand" "i"))
6454 (const_int 0)])
6455 (label_ref (match_operand 3 "" ""))
6456 (pc)))
6457 (clobber (match_scratch:SI 4 "=l"))]
6458 "TARGET_THUMB1"
6459 "*
6460 {
6461 rtx op[3];
6462 op[0] = operands[4];
6463 op[1] = operands[1];
6464 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6465
6466 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6467 switch (get_attr_length (insn))
6468 {
6469 case 4: return \"b%d0\\t%l3\";
6470 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6471 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6472 }
6473 }"
6474 [(set (attr "far_jump")
6475 (if_then_else
6476 (eq_attr "length" "8")
6477 (const_string "yes")
6478 (const_string "no")))
6479 (set (attr "length")
6480 (if_then_else
6481 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6482 (le (minus (match_dup 3) (pc)) (const_int 256)))
6483 (const_int 4)
6484 (if_then_else
6485 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6486 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6487 (const_int 6)
6488 (const_int 8))))]
6489 )
6490
6491 (define_insn "*tlobits_cbranch"
6492 [(set (pc)
6493 (if_then_else
6494 (match_operator 0 "equality_operator"
6495 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6496 (match_operand:SI 2 "const_int_operand" "i")
6497 (const_int 0))
6498 (const_int 0)])
6499 (label_ref (match_operand 3 "" ""))
6500 (pc)))
6501 (clobber (match_scratch:SI 4 "=l"))]
6502 "TARGET_THUMB1"
6503 "*
6504 {
6505 rtx op[3];
6506 op[0] = operands[4];
6507 op[1] = operands[1];
6508 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6509
6510 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6511 switch (get_attr_length (insn))
6512 {
6513 case 4: return \"b%d0\\t%l3\";
6514 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6515 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6516 }
6517 }"
6518 [(set (attr "far_jump")
6519 (if_then_else
6520 (eq_attr "length" "8")
6521 (const_string "yes")
6522 (const_string "no")))
6523 (set (attr "length")
6524 (if_then_else
6525 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6526 (le (minus (match_dup 3) (pc)) (const_int 256)))
6527 (const_int 4)
6528 (if_then_else
6529 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6530 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6531 (const_int 6)
6532 (const_int 8))))]
6533 )
6534
6535 (define_insn "*tstsi3_cbranch"
6536 [(set (pc)
6537 (if_then_else
6538 (match_operator 3 "equality_operator"
6539 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6540 (match_operand:SI 1 "s_register_operand" "l"))
6541 (const_int 0)])
6542 (label_ref (match_operand 2 "" ""))
6543 (pc)))]
6544 "TARGET_THUMB1"
6545 "*
6546 {
6547 output_asm_insn (\"tst\\t%0, %1\", operands);
6548 switch (get_attr_length (insn))
6549 {
6550 case 4: return \"b%d3\\t%l2\";
6551 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6552 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6553 }
6554 }"
6555 [(set (attr "far_jump")
6556 (if_then_else
6557 (eq_attr "length" "8")
6558 (const_string "yes")
6559 (const_string "no")))
6560 (set (attr "length")
6561 (if_then_else
6562 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6563 (le (minus (match_dup 2) (pc)) (const_int 256)))
6564 (const_int 4)
6565 (if_then_else
6566 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6567 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6568 (const_int 6)
6569 (const_int 8))))]
6570 )
6571
6572 (define_insn "*andsi3_cbranch"
6573 [(set (pc)
6574 (if_then_else
6575 (match_operator 5 "equality_operator"
6576 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6577 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6578 (const_int 0)])
6579 (label_ref (match_operand 4 "" ""))
6580 (pc)))
6581 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6582 (and:SI (match_dup 2) (match_dup 3)))
6583 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6584 "TARGET_THUMB1"
6585 "*
6586 {
6587 if (which_alternative == 0)
6588 output_asm_insn (\"and\\t%0, %3\", operands);
6589 else if (which_alternative == 1)
6590 {
6591 output_asm_insn (\"and\\t%1, %3\", operands);
6592 output_asm_insn (\"mov\\t%0, %1\", operands);
6593 }
6594 else
6595 {
6596 output_asm_insn (\"and\\t%1, %3\", operands);
6597 output_asm_insn (\"str\\t%1, %0\", operands);
6598 }
6599
6600 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6601 {
6602 case 4: return \"b%d5\\t%l4\";
6603 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6604 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6605 }
6606 }"
6607 [(set (attr "far_jump")
6608 (if_then_else
6609 (ior (and (eq (symbol_ref ("which_alternative"))
6610 (const_int 0))
6611 (eq_attr "length" "8"))
6612 (eq_attr "length" "10"))
6613 (const_string "yes")
6614 (const_string "no")))
6615 (set (attr "length")
6616 (if_then_else
6617 (eq (symbol_ref ("which_alternative"))
6618 (const_int 0))
6619 (if_then_else
6620 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6621 (le (minus (match_dup 4) (pc)) (const_int 256)))
6622 (const_int 4)
6623 (if_then_else
6624 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6625 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6626 (const_int 6)
6627 (const_int 8)))
6628 (if_then_else
6629 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6630 (le (minus (match_dup 4) (pc)) (const_int 256)))
6631 (const_int 6)
6632 (if_then_else
6633 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6634 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6635 (const_int 8)
6636 (const_int 10)))))]
6637 )
6638
6639 (define_insn "*orrsi3_cbranch_scratch"
6640 [(set (pc)
6641 (if_then_else
6642 (match_operator 4 "equality_operator"
6643 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6644 (match_operand:SI 2 "s_register_operand" "l"))
6645 (const_int 0)])
6646 (label_ref (match_operand 3 "" ""))
6647 (pc)))
6648 (clobber (match_scratch:SI 0 "=l"))]
6649 "TARGET_THUMB1"
6650 "*
6651 {
6652 output_asm_insn (\"orr\\t%0, %2\", operands);
6653 switch (get_attr_length (insn))
6654 {
6655 case 4: return \"b%d4\\t%l3\";
6656 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6657 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6658 }
6659 }"
6660 [(set (attr "far_jump")
6661 (if_then_else
6662 (eq_attr "length" "8")
6663 (const_string "yes")
6664 (const_string "no")))
6665 (set (attr "length")
6666 (if_then_else
6667 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6668 (le (minus (match_dup 3) (pc)) (const_int 256)))
6669 (const_int 4)
6670 (if_then_else
6671 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6672 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6673 (const_int 6)
6674 (const_int 8))))]
6675 )
6676
6677 (define_insn "*orrsi3_cbranch"
6678 [(set (pc)
6679 (if_then_else
6680 (match_operator 5 "equality_operator"
6681 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6682 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6683 (const_int 0)])
6684 (label_ref (match_operand 4 "" ""))
6685 (pc)))
6686 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6687 (ior:SI (match_dup 2) (match_dup 3)))
6688 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6689 "TARGET_THUMB1"
6690 "*
6691 {
6692 if (which_alternative == 0)
6693 output_asm_insn (\"orr\\t%0, %3\", operands);
6694 else if (which_alternative == 1)
6695 {
6696 output_asm_insn (\"orr\\t%1, %3\", operands);
6697 output_asm_insn (\"mov\\t%0, %1\", operands);
6698 }
6699 else
6700 {
6701 output_asm_insn (\"orr\\t%1, %3\", operands);
6702 output_asm_insn (\"str\\t%1, %0\", operands);
6703 }
6704
6705 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6706 {
6707 case 4: return \"b%d5\\t%l4\";
6708 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6709 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6710 }
6711 }"
6712 [(set (attr "far_jump")
6713 (if_then_else
6714 (ior (and (eq (symbol_ref ("which_alternative"))
6715 (const_int 0))
6716 (eq_attr "length" "8"))
6717 (eq_attr "length" "10"))
6718 (const_string "yes")
6719 (const_string "no")))
6720 (set (attr "length")
6721 (if_then_else
6722 (eq (symbol_ref ("which_alternative"))
6723 (const_int 0))
6724 (if_then_else
6725 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6726 (le (minus (match_dup 4) (pc)) (const_int 256)))
6727 (const_int 4)
6728 (if_then_else
6729 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6730 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6731 (const_int 6)
6732 (const_int 8)))
6733 (if_then_else
6734 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6735 (le (minus (match_dup 4) (pc)) (const_int 256)))
6736 (const_int 6)
6737 (if_then_else
6738 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6739 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6740 (const_int 8)
6741 (const_int 10)))))]
6742 )
6743
6744 (define_insn "*xorsi3_cbranch_scratch"
6745 [(set (pc)
6746 (if_then_else
6747 (match_operator 4 "equality_operator"
6748 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6749 (match_operand:SI 2 "s_register_operand" "l"))
6750 (const_int 0)])
6751 (label_ref (match_operand 3 "" ""))
6752 (pc)))
6753 (clobber (match_scratch:SI 0 "=l"))]
6754 "TARGET_THUMB1"
6755 "*
6756 {
6757 output_asm_insn (\"eor\\t%0, %2\", operands);
6758 switch (get_attr_length (insn))
6759 {
6760 case 4: return \"b%d4\\t%l3\";
6761 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6762 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6763 }
6764 }"
6765 [(set (attr "far_jump")
6766 (if_then_else
6767 (eq_attr "length" "8")
6768 (const_string "yes")
6769 (const_string "no")))
6770 (set (attr "length")
6771 (if_then_else
6772 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6773 (le (minus (match_dup 3) (pc)) (const_int 256)))
6774 (const_int 4)
6775 (if_then_else
6776 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6777 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6778 (const_int 6)
6779 (const_int 8))))]
6780 )
6781
6782 (define_insn "*xorsi3_cbranch"
6783 [(set (pc)
6784 (if_then_else
6785 (match_operator 5 "equality_operator"
6786 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6787 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6788 (const_int 0)])
6789 (label_ref (match_operand 4 "" ""))
6790 (pc)))
6791 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6792 (xor:SI (match_dup 2) (match_dup 3)))
6793 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6794 "TARGET_THUMB1"
6795 "*
6796 {
6797 if (which_alternative == 0)
6798 output_asm_insn (\"eor\\t%0, %3\", operands);
6799 else if (which_alternative == 1)
6800 {
6801 output_asm_insn (\"eor\\t%1, %3\", operands);
6802 output_asm_insn (\"mov\\t%0, %1\", operands);
6803 }
6804 else
6805 {
6806 output_asm_insn (\"eor\\t%1, %3\", operands);
6807 output_asm_insn (\"str\\t%1, %0\", operands);
6808 }
6809
6810 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6811 {
6812 case 4: return \"b%d5\\t%l4\";
6813 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6814 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6815 }
6816 }"
6817 [(set (attr "far_jump")
6818 (if_then_else
6819 (ior (and (eq (symbol_ref ("which_alternative"))
6820 (const_int 0))
6821 (eq_attr "length" "8"))
6822 (eq_attr "length" "10"))
6823 (const_string "yes")
6824 (const_string "no")))
6825 (set (attr "length")
6826 (if_then_else
6827 (eq (symbol_ref ("which_alternative"))
6828 (const_int 0))
6829 (if_then_else
6830 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6831 (le (minus (match_dup 4) (pc)) (const_int 256)))
6832 (const_int 4)
6833 (if_then_else
6834 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6835 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6836 (const_int 6)
6837 (const_int 8)))
6838 (if_then_else
6839 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6840 (le (minus (match_dup 4) (pc)) (const_int 256)))
6841 (const_int 6)
6842 (if_then_else
6843 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6844 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6845 (const_int 8)
6846 (const_int 10)))))]
6847 )
6848
6849 (define_insn "*bicsi3_cbranch_scratch"
6850 [(set (pc)
6851 (if_then_else
6852 (match_operator 4 "equality_operator"
6853 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6854 (match_operand:SI 1 "s_register_operand" "0"))
6855 (const_int 0)])
6856 (label_ref (match_operand 3 "" ""))
6857 (pc)))
6858 (clobber (match_scratch:SI 0 "=l"))]
6859 "TARGET_THUMB1"
6860 "*
6861 {
6862 output_asm_insn (\"bic\\t%0, %2\", operands);
6863 switch (get_attr_length (insn))
6864 {
6865 case 4: return \"b%d4\\t%l3\";
6866 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6867 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6868 }
6869 }"
6870 [(set (attr "far_jump")
6871 (if_then_else
6872 (eq_attr "length" "8")
6873 (const_string "yes")
6874 (const_string "no")))
6875 (set (attr "length")
6876 (if_then_else
6877 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6878 (le (minus (match_dup 3) (pc)) (const_int 256)))
6879 (const_int 4)
6880 (if_then_else
6881 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6882 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6883 (const_int 6)
6884 (const_int 8))))]
6885 )
6886
6887 (define_insn "*bicsi3_cbranch"
6888 [(set (pc)
6889 (if_then_else
6890 (match_operator 5 "equality_operator"
6891 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6892 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6893 (const_int 0)])
6894 (label_ref (match_operand 4 "" ""))
6895 (pc)))
6896 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6897 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6898 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6899 "TARGET_THUMB1"
6900 "*
6901 {
6902 if (which_alternative == 0)
6903 output_asm_insn (\"bic\\t%0, %3\", operands);
6904 else if (which_alternative <= 2)
6905 {
6906 output_asm_insn (\"bic\\t%1, %3\", operands);
6907 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6908 conditions again, since we're only testing for equality. */
6909 output_asm_insn (\"mov\\t%0, %1\", operands);
6910 }
6911 else
6912 {
6913 output_asm_insn (\"bic\\t%1, %3\", operands);
6914 output_asm_insn (\"str\\t%1, %0\", operands);
6915 }
6916
6917 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6918 {
6919 case 4: return \"b%d5\\t%l4\";
6920 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6921 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6922 }
6923 }"
6924 [(set (attr "far_jump")
6925 (if_then_else
6926 (ior (and (eq (symbol_ref ("which_alternative"))
6927 (const_int 0))
6928 (eq_attr "length" "8"))
6929 (eq_attr "length" "10"))
6930 (const_string "yes")
6931 (const_string "no")))
6932 (set (attr "length")
6933 (if_then_else
6934 (eq (symbol_ref ("which_alternative"))
6935 (const_int 0))
6936 (if_then_else
6937 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6938 (le (minus (match_dup 4) (pc)) (const_int 256)))
6939 (const_int 4)
6940 (if_then_else
6941 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6942 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6943 (const_int 6)
6944 (const_int 8)))
6945 (if_then_else
6946 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6947 (le (minus (match_dup 4) (pc)) (const_int 256)))
6948 (const_int 6)
6949 (if_then_else
6950 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6951 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6952 (const_int 8)
6953 (const_int 10)))))]
6954 )
6955
6956 (define_insn "*cbranchne_decr1"
6957 [(set (pc)
6958 (if_then_else (match_operator 3 "equality_operator"
6959 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6960 (const_int 0)])
6961 (label_ref (match_operand 4 "" ""))
6962 (pc)))
6963 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6964 (plus:SI (match_dup 2) (const_int -1)))
6965 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6966 "TARGET_THUMB1"
6967 "*
6968 {
6969 rtx cond[2];
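  /* The branch condition tests operands[2] against zero, but the SUB
     emitted below sets the flags from operands[2] - 1.  X != 0 is
     equivalent to X >= 1 unsigned, and X == 0 to X < 1 unsigned, so
     branch on GEU/LTU of (X, 1) instead of NE/EQ of (X, 0).  */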
6970 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6971 ? GEU : LTU),
6972 VOIDmode, operands[2], const1_rtx);
6973 cond[1] = operands[4];
6974
6975 if (which_alternative == 0)
6976 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6977 else if (which_alternative == 1)
6978 {
6979 /* We must provide an alternative for a hi reg because reload
6980 cannot handle output reloads on a jump instruction, but we
6981 can't subtract into that. Fortunately a mov from lo to hi
6982 does not clobber the condition codes. */
6983 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6984 output_asm_insn (\"mov\\t%0, %1\", operands);
6985 }
6986 else
6987 {
6988 /* Similarly, but the target is memory. */
6989 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6990 output_asm_insn (\"str\\t%1, %0\", operands);
6991 }
6992
6993 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6994 {
6995 case 4:
6996 output_asm_insn (\"b%d0\\t%l1\", cond);
6997 return \"\";
6998 case 6:
6999 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7000 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7001 default:
7002 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7003 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7004 }
7005 }
7006 "
7007 [(set (attr "far_jump")
7008 (if_then_else
7009 (ior (and (eq (symbol_ref ("which_alternative"))
7010 (const_int 0))
7011 (eq_attr "length" "8"))
7012 (eq_attr "length" "10"))
7013 (const_string "yes")
7014 (const_string "no")))
7015 (set_attr_alternative "length"
7016 [
7017 ;; Alternative 0
7018 (if_then_else
7019 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7020 (le (minus (match_dup 4) (pc)) (const_int 256)))
7021 (const_int 4)
7022 (if_then_else
7023 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7024 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7025 (const_int 6)
7026 (const_int 8)))
7027 ;; Alternative 1
7028 (if_then_else
7029 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7030 (le (minus (match_dup 4) (pc)) (const_int 256)))
7031 (const_int 6)
7032 (if_then_else
7033 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7034 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7035 (const_int 8)
7036 (const_int 10)))
7037 ;; Alternative 2
7038 (if_then_else
7039 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7040 (le (minus (match_dup 4) (pc)) (const_int 256)))
7041 (const_int 6)
7042 (if_then_else
7043 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7044 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7045 (const_int 8)
7046 (const_int 10)))
7047 ;; Alternative 3
7048 (if_then_else
7049 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7050 (le (minus (match_dup 4) (pc)) (const_int 256)))
7051 (const_int 6)
7052 (if_then_else
7053 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7054 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7055 (const_int 8)
7056 (const_int 10)))])]
7057 )
7058
7059 (define_insn "*addsi3_cbranch"
7060 [(set (pc)
7061 (if_then_else
7062 (match_operator 4 "comparison_operator"
7063 [(plus:SI
7064 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7065 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7066 (const_int 0)])
7067 (label_ref (match_operand 5 "" ""))
7068 (pc)))
7069 (set
7070 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7071 (plus:SI (match_dup 2) (match_dup 3)))
7072 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7073 "TARGET_THUMB1
7074 && (GET_CODE (operands[4]) == EQ
7075 || GET_CODE (operands[4]) == NE
7076 || GET_CODE (operands[4]) == GE
7077 || GET_CODE (operands[4]) == LT)"
7078 "*
7079 {
7080 rtx cond[3];
7081
7082
7083 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7084 cond[1] = operands[2];
7085 cond[2] = operands[3];
7086
7087 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7088 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7089 else
7090 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7091
7092 if (which_alternative >= 3
7093 && which_alternative < 4)
7094 output_asm_insn (\"mov\\t%0, %1\", operands);
7095 else if (which_alternative >= 4)
7096 output_asm_insn (\"str\\t%1, %0\", operands);
7097
7098 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7099 {
7100 case 4:
7101 return \"b%d4\\t%l5\";
7102 case 6:
7103 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7104 default:
7105 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7106 }
7107 }
7108 "
7109 [(set (attr "far_jump")
7110 (if_then_else
7111 (ior (and (lt (symbol_ref ("which_alternative"))
7112 (const_int 3))
7113 (eq_attr "length" "8"))
7114 (eq_attr "length" "10"))
7115 (const_string "yes")
7116 (const_string "no")))
7117 (set (attr "length")
7118 (if_then_else
7119 (lt (symbol_ref ("which_alternative"))
7120 (const_int 3))
7121 (if_then_else
7122 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7123 (le (minus (match_dup 5) (pc)) (const_int 256)))
7124 (const_int 4)
7125 (if_then_else
7126 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7127 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7128 (const_int 6)
7129 (const_int 8)))
7130 (if_then_else
7131 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7132 (le (minus (match_dup 5) (pc)) (const_int 256)))
7133 (const_int 6)
7134 (if_then_else
7135 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7136 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7137 (const_int 8)
7138 (const_int 10)))))]
7139 )
7140
7141 (define_insn "*addsi3_cbranch_scratch"
7142 [(set (pc)
7143 (if_then_else
7144 (match_operator 3 "comparison_operator"
7145 [(plus:SI
7146 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7147 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7148 (const_int 0)])
7149 (label_ref (match_operand 4 "" ""))
7150 (pc)))
7151 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7152 "TARGET_THUMB1
7153 && (GET_CODE (operands[3]) == EQ
7154 || GET_CODE (operands[3]) == NE
7155 || GET_CODE (operands[3]) == GE
7156 || GET_CODE (operands[3]) == LT)"
7157 "*
7158 {
7159 switch (which_alternative)
7160 {
7161 case 0:
7162 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7163 break;
7164 case 1:
7165 output_asm_insn (\"cmn\t%1, %2\", operands);
7166 break;
7167 case 2:
7168 if (INTVAL (operands[2]) < 0)
7169 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7170 else
7171 output_asm_insn (\"add\t%0, %1, %2\", operands);
7172 break;
7173 case 3:
7174 if (INTVAL (operands[2]) < 0)
7175 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7176 else
7177 output_asm_insn (\"add\t%0, %0, %2\", operands);
7178 break;
7179 }
7180
7181 switch (get_attr_length (insn))
7182 {
7183 case 4:
7184 return \"b%d3\\t%l4\";
7185 case 6:
7186 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7187 default:
7188 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7189 }
7190 }
7191 "
7192 [(set (attr "far_jump")
7193 (if_then_else
7194 (eq_attr "length" "8")
7195 (const_string "yes")
7196 (const_string "no")))
7197 (set (attr "length")
7198 (if_then_else
7199 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7200 (le (minus (match_dup 4) (pc)) (const_int 256)))
7201 (const_int 4)
7202 (if_then_else
7203 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7204 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7205 (const_int 6)
7206 (const_int 8))))]
7207 )
7208
7209 (define_insn "*subsi3_cbranch"
7210 [(set (pc)
7211 (if_then_else
7212 (match_operator 4 "comparison_operator"
7213 [(minus:SI
7214 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7215 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7216 (const_int 0)])
7217 (label_ref (match_operand 5 "" ""))
7218 (pc)))
7219 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7220 (minus:SI (match_dup 2) (match_dup 3)))
7221 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7222 "TARGET_THUMB1
7223 && (GET_CODE (operands[4]) == EQ
7224 || GET_CODE (operands[4]) == NE
7225 || GET_CODE (operands[4]) == GE
7226 || GET_CODE (operands[4]) == LT)"
7227 "*
7228 {
7229 if (which_alternative == 0)
7230 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7231 else if (which_alternative == 1)
7232 {
7233 /* We must provide an alternative for a hi reg because reload
7234 cannot handle output reloads on a jump instruction, but we
7235 can't subtract into that. Fortunately a mov from lo to hi
7236 does not clobber the condition codes. */
7237 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7238 output_asm_insn (\"mov\\t%0, %1\", operands);
7239 }
7240 else
7241 {
7242 /* Similarly, but the target is memory. */
7243 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7244 output_asm_insn (\"str\\t%1, %0\", operands);
7245 }
7246
7247 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7248 {
7249 case 4:
7250 return \"b%d4\\t%l5\";
7251 case 6:
7252 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7253 default:
7254 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7255 }
7256 }
7257 "
7258 [(set (attr "far_jump")
7259 (if_then_else
7260 (ior (and (eq (symbol_ref ("which_alternative"))
7261 (const_int 0))
7262 (eq_attr "length" "8"))
7263 (eq_attr "length" "10"))
7264 (const_string "yes")
7265 (const_string "no")))
7266 (set (attr "length")
7267 (if_then_else
7268 (eq (symbol_ref ("which_alternative"))
7269 (const_int 0))
7270 (if_then_else
7271 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7272 (le (minus (match_dup 5) (pc)) (const_int 256)))
7273 (const_int 4)
7274 (if_then_else
7275 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7276 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7277 (const_int 6)
7278 (const_int 8)))
7279 (if_then_else
7280 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7281 (le (minus (match_dup 5) (pc)) (const_int 256)))
7282 (const_int 6)
7283 (if_then_else
7284 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7285 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7286 (const_int 8)
7287 (const_int 10)))))]
7288 )
7289
7290 (define_insn "*subsi3_cbranch_scratch"
7291 [(set (pc)
7292 (if_then_else
7293 (match_operator 0 "arm_comparison_operator"
7294 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7295 (match_operand:SI 2 "nonmemory_operand" "l"))
7296 (const_int 0)])
7297 (label_ref (match_operand 3 "" ""))
7298 (pc)))]
7299 "TARGET_THUMB1
7300 && (GET_CODE (operands[0]) == EQ
7301 || GET_CODE (operands[0]) == NE
7302 || GET_CODE (operands[0]) == GE
7303 || GET_CODE (operands[0]) == LT)"
7304 "*
7305 output_asm_insn (\"cmp\\t%1, %2\", operands);
7306 switch (get_attr_length (insn))
7307 {
7308 case 4: return \"b%d0\\t%l3\";
7309 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7310 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7311 }
7312 "
7313 [(set (attr "far_jump")
7314 (if_then_else
7315 (eq_attr "length" "8")
7316 (const_string "yes")
7317 (const_string "no")))
7318 (set (attr "length")
7319 (if_then_else
7320 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7321 (le (minus (match_dup 3) (pc)) (const_int 256)))
7322 (const_int 4)
7323 (if_then_else
7324 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7325 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7326 (const_int 6)
7327 (const_int 8))))]
7328 )
7329
7330 ;; Comparison and test insns
7331
7332 (define_expand "cmpsi"
7333 [(match_operand:SI 0 "s_register_operand" "")
7334 (match_operand:SI 1 "arm_add_operand" "")]
7335 "TARGET_32BIT"
7336 "{
7337 arm_compare_op0 = operands[0];
7338 arm_compare_op1 = operands[1];
7339 DONE;
7340 }"
7341 )
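;; Like the other cmp* expanders here, cmpsi only records its operands;
;; the real comparison is emitted later by the branch, store-flag or
;; conditional-move expander that consumes it, via arm_gen_compare_reg.
;; As an illustrative sketch (not from the original sources),
;; "if (a > b) goto L" expands cmpsi followed by bgt, which together
;; emit roughly
;;   (set (reg:CC CC_REGNUM) (compare:CC a b))
;;   (set (pc) (if_then_else (gt (reg:CC CC_REGNUM) (const_int 0))
;;                           (label_ref L) (pc)))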
7342
7343 (define_expand "cmpsf"
7344 [(match_operand:SF 0 "s_register_operand" "")
7345 (match_operand:SF 1 "arm_float_compare_operand" "")]
7346 "TARGET_32BIT && TARGET_HARD_FLOAT"
7347 "
7348 arm_compare_op0 = operands[0];
7349 arm_compare_op1 = operands[1];
7350 DONE;
7351 "
7352 )
7353
7354 (define_expand "cmpdf"
7355 [(match_operand:DF 0 "s_register_operand" "")
7356 (match_operand:DF 1 "arm_float_compare_operand" "")]
7357 "TARGET_32BIT && TARGET_HARD_FLOAT"
7358 "
7359 arm_compare_op0 = operands[0];
7360 arm_compare_op1 = operands[1];
7361 DONE;
7362 "
7363 )
7364
7365 (define_insn "*arm_cmpsi_insn"
7366 [(set (reg:CC CC_REGNUM)
7367 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7368 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7369 "TARGET_32BIT"
7370 "@
7371 cmp%?\\t%0, %1
7372 cmn%?\\t%0, #%n1"
7373 [(set_attr "conds" "set")]
7374 )
7375
7376 (define_insn "*arm_cmpsi_shiftsi"
7377 [(set (reg:CC CC_REGNUM)
7378 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7379 (match_operator:SI 3 "shift_operator"
7380 [(match_operand:SI 1 "s_register_operand" "r")
7381 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7382 "TARGET_ARM"
7383 "cmp%?\\t%0, %1%S3"
7384 [(set_attr "conds" "set")
7385 (set_attr "shift" "1")
7386 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7387 (const_string "alu_shift")
7388 (const_string "alu_shift_reg")))]
7389 )
7390
7391 (define_insn "*arm_cmpsi_shiftsi_swp"
7392 [(set (reg:CC_SWP CC_REGNUM)
7393 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7394 [(match_operand:SI 1 "s_register_operand" "r")
7395 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7396 (match_operand:SI 0 "s_register_operand" "r")))]
7397 "TARGET_ARM"
7398 "cmp%?\\t%0, %1%S3"
7399 [(set_attr "conds" "set")
7400 (set_attr "shift" "1")
7401 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7402 (const_string "alu_shift")
7403 (const_string "alu_shift_reg")))]
7404 )
7405
7406 (define_insn "*arm_cmpsi_negshiftsi_si"
7407 [(set (reg:CC_Z CC_REGNUM)
7408 (compare:CC_Z
7409 (neg:SI (match_operator:SI 1 "shift_operator"
7410 [(match_operand:SI 2 "s_register_operand" "r")
7411 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7412 (match_operand:SI 0 "s_register_operand" "r")))]
7413 "TARGET_ARM"
7414 "cmn%?\\t%0, %2%S1"
7415 [(set_attr "conds" "set")
7416 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7417 (const_string "alu_shift")
7418 (const_string "alu_shift_reg")))]
7419 )
7420
7421 ;; Cirrus SF compare instruction
7422 (define_insn "*cirrus_cmpsf"
7423 [(set (reg:CCFP CC_REGNUM)
7424 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7425 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7426 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7427 "cfcmps%?\\tr15, %V0, %V1"
7428 [(set_attr "type" "mav_farith")
7429 (set_attr "cirrus" "compare")]
7430 )
7431
7432 ;; Cirrus DF compare instruction
7433 (define_insn "*cirrus_cmpdf"
7434 [(set (reg:CCFP CC_REGNUM)
7435 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7436 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7437 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7438 "cfcmpd%?\\tr15, %V0, %V1"
7439 [(set_attr "type" "mav_farith")
7440 (set_attr "cirrus" "compare")]
7441 )
7442
7443 ;; Cirrus DI compare instruction
7444 (define_expand "cmpdi"
7445 [(match_operand:DI 0 "cirrus_fp_register" "")
7446 (match_operand:DI 1 "cirrus_fp_register" "")]
7447 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7448 "{
7449 arm_compare_op0 = operands[0];
7450 arm_compare_op1 = operands[1];
7451 DONE;
7452 }")
7453
7454 (define_insn "*cirrus_cmpdi"
7455 [(set (reg:CC CC_REGNUM)
7456 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7457 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7458 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7459 "cfcmp64%?\\tr15, %V0, %V1"
7460 [(set_attr "type" "mav_farith")
7461 (set_attr "cirrus" "compare")]
7462 )
7463
7464 ; This insn allows redundant compares to be removed by cse; nothing should
7465 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7466 ; is deleted later on. The match_dup will match the mode here, so that
7467 ; mode changes of the condition codes aren't lost by this even though we don't
7468 ; specify what they are.
7469
7470 (define_insn "*deleted_compare"
7471 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7472 "TARGET_32BIT"
7473 "\\t%@ deleted compare"
7474 [(set_attr "conds" "set")
7475 (set_attr "length" "0")]
7476 )
7477
7478 \f
7479 ;; Conditional branch insns
7480
7481 (define_expand "beq"
7482 [(set (pc)
7483 (if_then_else (eq (match_dup 1) (const_int 0))
7484 (label_ref (match_operand 0 "" ""))
7485 (pc)))]
7486 "TARGET_32BIT"
7487 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7488 )
7489
7490 (define_expand "bne"
7491 [(set (pc)
7492 (if_then_else (ne (match_dup 1) (const_int 0))
7493 (label_ref (match_operand 0 "" ""))
7494 (pc)))]
7495 "TARGET_32BIT"
7496 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7497 )
7498
7499 (define_expand "bgt"
7500 [(set (pc)
7501 (if_then_else (gt (match_dup 1) (const_int 0))
7502 (label_ref (match_operand 0 "" ""))
7503 (pc)))]
7504 "TARGET_32BIT"
7505 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7506 )
7507
7508 (define_expand "ble"
7509 [(set (pc)
7510 (if_then_else (le (match_dup 1) (const_int 0))
7511 (label_ref (match_operand 0 "" ""))
7512 (pc)))]
7513 "TARGET_32BIT"
7514 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7515 )
7516
7517 (define_expand "bge"
7518 [(set (pc)
7519 (if_then_else (ge (match_dup 1) (const_int 0))
7520 (label_ref (match_operand 0 "" ""))
7521 (pc)))]
7522 "TARGET_32BIT"
7523 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7524 )
7525
7526 (define_expand "blt"
7527 [(set (pc)
7528 (if_then_else (lt (match_dup 1) (const_int 0))
7529 (label_ref (match_operand 0 "" ""))
7530 (pc)))]
7531 "TARGET_32BIT"
7532 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7533 )
7534
7535 (define_expand "bgtu"
7536 [(set (pc)
7537 (if_then_else (gtu (match_dup 1) (const_int 0))
7538 (label_ref (match_operand 0 "" ""))
7539 (pc)))]
7540 "TARGET_32BIT"
7541 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7542 )
7543
7544 (define_expand "bleu"
7545 [(set (pc)
7546 (if_then_else (leu (match_dup 1) (const_int 0))
7547 (label_ref (match_operand 0 "" ""))
7548 (pc)))]
7549 "TARGET_32BIT"
7550 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7551 )
7552
7553 (define_expand "bgeu"
7554 [(set (pc)
7555 (if_then_else (geu (match_dup 1) (const_int 0))
7556 (label_ref (match_operand 0 "" ""))
7557 (pc)))]
7558 "TARGET_32BIT"
7559 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7560 )
7561
7562 (define_expand "bltu"
7563 [(set (pc)
7564 (if_then_else (ltu (match_dup 1) (const_int 0))
7565 (label_ref (match_operand 0 "" ""))
7566 (pc)))]
7567 "TARGET_32BIT"
7568 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7569 )
7570
7571 (define_expand "bunordered"
7572 [(set (pc)
7573 (if_then_else (unordered (match_dup 1) (const_int 0))
7574 (label_ref (match_operand 0 "" ""))
7575 (pc)))]
7576 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7577 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7578 arm_compare_op1);"
7579 )
7580
7581 (define_expand "bordered"
7582 [(set (pc)
7583 (if_then_else (ordered (match_dup 1) (const_int 0))
7584 (label_ref (match_operand 0 "" ""))
7585 (pc)))]
7586 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7587 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7588 arm_compare_op1);"
7589 )
7590
7591 (define_expand "bungt"
7592 [(set (pc)
7593 (if_then_else (ungt (match_dup 1) (const_int 0))
7594 (label_ref (match_operand 0 "" ""))
7595 (pc)))]
7596 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7597 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7598 )
7599
7600 (define_expand "bunlt"
7601 [(set (pc)
7602 (if_then_else (unlt (match_dup 1) (const_int 0))
7603 (label_ref (match_operand 0 "" ""))
7604 (pc)))]
7605 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7606 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7607 )
7608
7609 (define_expand "bunge"
7610 [(set (pc)
7611 (if_then_else (unge (match_dup 1) (const_int 0))
7612 (label_ref (match_operand 0 "" ""))
7613 (pc)))]
7614 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7615 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7616 )
7617
7618 (define_expand "bunle"
7619 [(set (pc)
7620 (if_then_else (unle (match_dup 1) (const_int 0))
7621 (label_ref (match_operand 0 "" ""))
7622 (pc)))]
7623 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7624 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7625 )
7626
7627 ;; The following two patterns need two branch instructions, since there is
7628 ;; no single instruction that will handle all cases.
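;; As an explanatory note (not from the original sources): with the
;; usual FPA/VFP flag encoding, UNEQ (unordered or equal) is caught by
;; V set or Z set, hence the bvs/beq pair, while LTGT (less or greater)
;; is caught by N set or the GT condition, hence the bmi/bgt pair used
;; in the corresponding insn patterns further down.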
7629 (define_expand "buneq"
7630 [(set (pc)
7631 (if_then_else (uneq (match_dup 1) (const_int 0))
7632 (label_ref (match_operand 0 "" ""))
7633 (pc)))]
7634 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7635 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7636 )
7637
7638 (define_expand "bltgt"
7639 [(set (pc)
7640 (if_then_else (ltgt (match_dup 1) (const_int 0))
7641 (label_ref (match_operand 0 "" ""))
7642 (pc)))]
7643 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7644 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7645 )
7646
7647 ;;
7648 ;; Patterns to match conditional branch insns.
7649 ;;
7650
7651 ; Special pattern to match UNEQ.
7652 (define_insn "*arm_buneq"
7653 [(set (pc)
7654 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7655 (label_ref (match_operand 0 "" ""))
7656 (pc)))]
7657 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7658 "*
7659 gcc_assert (!arm_ccfsm_state);
7660
7661 return \"bvs\\t%l0\;beq\\t%l0\";
7662 "
7663 [(set_attr "conds" "jump_clob")
7664 (set_attr "length" "8")]
7665 )
7666
7667 ; Special pattern to match LTGT.
7668 (define_insn "*arm_bltgt"
7669 [(set (pc)
7670 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7671 (label_ref (match_operand 0 "" ""))
7672 (pc)))]
7673 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7674 "*
7675 gcc_assert (!arm_ccfsm_state);
7676
7677 return \"bmi\\t%l0\;bgt\\t%l0\";
7678 "
7679 [(set_attr "conds" "jump_clob")
7680 (set_attr "length" "8")]
7681 )
7682
7683 (define_insn "*arm_cond_branch"
7684 [(set (pc)
7685 (if_then_else (match_operator 1 "arm_comparison_operator"
7686 [(match_operand 2 "cc_register" "") (const_int 0)])
7687 (label_ref (match_operand 0 "" ""))
7688 (pc)))]
7689 "TARGET_32BIT"
7690 "*
7691 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7692 {
7693 arm_ccfsm_state += 2;
7694 return \"\";
7695 }
7696 return \"b%d1\\t%l0\";
7697 "
7698 [(set_attr "conds" "use")
7699 (set_attr "type" "branch")]
7700 )
7701
7702 ; Special pattern to match reversed UNEQ.
7703 (define_insn "*arm_buneq_reversed"
7704 [(set (pc)
7705 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7706 (pc)
7707 (label_ref (match_operand 0 "" ""))))]
7708 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7709 "*
7710 gcc_assert (!arm_ccfsm_state);
7711
7712 return \"bmi\\t%l0\;bgt\\t%l0\";
7713 "
7714 [(set_attr "conds" "jump_clob")
7715 (set_attr "length" "8")]
7716 )
7717
7718 ; Special pattern to match reversed LTGT.
7719 (define_insn "*arm_bltgt_reversed"
7720 [(set (pc)
7721 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7722 (pc)
7723 (label_ref (match_operand 0 "" ""))))]
7724 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7725 "*
7726 gcc_assert (!arm_ccfsm_state);
7727
7728 return \"bvs\\t%l0\;beq\\t%l0\";
7729 "
7730 [(set_attr "conds" "jump_clob")
7731 (set_attr "length" "8")]
7732 )
7733
7734 (define_insn "*arm_cond_branch_reversed"
7735 [(set (pc)
7736 (if_then_else (match_operator 1 "arm_comparison_operator"
7737 [(match_operand 2 "cc_register" "") (const_int 0)])
7738 (pc)
7739 (label_ref (match_operand 0 "" ""))))]
7740 "TARGET_32BIT"
7741 "*
7742 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7743 {
7744 arm_ccfsm_state += 2;
7745 return \"\";
7746 }
7747 return \"b%D1\\t%l0\";
7748 "
7749 [(set_attr "conds" "use")
7750 (set_attr "type" "branch")]
7751 )
7752
7753 \f
7754
7755 ; scc insns
7756
7757 (define_expand "seq"
7758 [(set (match_operand:SI 0 "s_register_operand" "")
7759 (eq:SI (match_dup 1) (const_int 0)))]
7760 "TARGET_32BIT"
7761 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7762 )
7763
7764 (define_expand "sne"
7765 [(set (match_operand:SI 0 "s_register_operand" "")
7766 (ne:SI (match_dup 1) (const_int 0)))]
7767 "TARGET_32BIT"
7768 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7769 )
7770
7771 (define_expand "sgt"
7772 [(set (match_operand:SI 0 "s_register_operand" "")
7773 (gt:SI (match_dup 1) (const_int 0)))]
7774 "TARGET_32BIT"
7775 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7776 )
7777
7778 (define_expand "sle"
7779 [(set (match_operand:SI 0 "s_register_operand" "")
7780 (le:SI (match_dup 1) (const_int 0)))]
7781 "TARGET_32BIT"
7782 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7783 )
7784
7785 (define_expand "sge"
7786 [(set (match_operand:SI 0 "s_register_operand" "")
7787 (ge:SI (match_dup 1) (const_int 0)))]
7788 "TARGET_32BIT"
7789 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7790 )
7791
7792 (define_expand "slt"
7793 [(set (match_operand:SI 0 "s_register_operand" "")
7794 (lt:SI (match_dup 1) (const_int 0)))]
7795 "TARGET_32BIT"
7796 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7797 )
7798
7799 (define_expand "sgtu"
7800 [(set (match_operand:SI 0 "s_register_operand" "")
7801 (gtu:SI (match_dup 1) (const_int 0)))]
7802 "TARGET_32BIT"
7803 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7804 )
7805
7806 (define_expand "sleu"
7807 [(set (match_operand:SI 0 "s_register_operand" "")
7808 (leu:SI (match_dup 1) (const_int 0)))]
7809 "TARGET_32BIT"
7810 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7811 )
7812
7813 (define_expand "sgeu"
7814 [(set (match_operand:SI 0 "s_register_operand" "")
7815 (geu:SI (match_dup 1) (const_int 0)))]
7816 "TARGET_32BIT"
7817 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7818 )
7819
7820 (define_expand "sltu"
7821 [(set (match_operand:SI 0 "s_register_operand" "")
7822 (ltu:SI (match_dup 1) (const_int 0)))]
7823 "TARGET_32BIT"
7824 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7825 )
7826
7827 (define_expand "sunordered"
7828 [(set (match_operand:SI 0 "s_register_operand" "")
7829 (unordered:SI (match_dup 1) (const_int 0)))]
7830 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7831 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7832 arm_compare_op1);"
7833 )
7834
7835 (define_expand "sordered"
7836 [(set (match_operand:SI 0 "s_register_operand" "")
7837 (ordered:SI (match_dup 1) (const_int 0)))]
7838 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7839 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7840 arm_compare_op1);"
7841 )
7842
7843 (define_expand "sungt"
7844 [(set (match_operand:SI 0 "s_register_operand" "")
7845 (ungt:SI (match_dup 1) (const_int 0)))]
7846 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7847 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7848 arm_compare_op1);"
7849 )
7850
7851 (define_expand "sunge"
7852 [(set (match_operand:SI 0 "s_register_operand" "")
7853 (unge:SI (match_dup 1) (const_int 0)))]
7854 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7855 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7856 arm_compare_op1);"
7857 )
7858
7859 (define_expand "sunlt"
7860 [(set (match_operand:SI 0 "s_register_operand" "")
7861 (unlt:SI (match_dup 1) (const_int 0)))]
7862 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7863 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7864 arm_compare_op1);"
7865 )
7866
7867 (define_expand "sunle"
7868 [(set (match_operand:SI 0 "s_register_operand" "")
7869 (unle:SI (match_dup 1) (const_int 0)))]
7870 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7871 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7872 arm_compare_op1);"
7873 )
7874
7875 ;;; DO NOT add patterns for SUNEQ or SLTGT; these can't be represented with
7876 ;;; simple ARM instructions.
7877 ;
7878 ; (define_expand "suneq"
7879 ; [(set (match_operand:SI 0 "s_register_operand" "")
7880 ; (uneq:SI (match_dup 1) (const_int 0)))]
7881 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7882 ; "gcc_unreachable ();"
7883 ; )
7884 ;
7885 ; (define_expand "sltgt"
7886 ; [(set (match_operand:SI 0 "s_register_operand" "")
7887 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7888 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7889 ; "gcc_unreachable ();"
7890 ; )
7891
7892 (define_insn "*mov_scc"
7893 [(set (match_operand:SI 0 "s_register_operand" "=r")
7894 (match_operator:SI 1 "arm_comparison_operator"
7895 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7896 "TARGET_ARM"
7897 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7898 [(set_attr "conds" "use")
7899 (set_attr "length" "8")]
7900 )
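;; For example (illustrative only), "x = (a < b)" can become
;;	cmp	a, b
;;	movge	x, #0
;;	movlt	x, #1
;; once the comparison above it has set the condition flags.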
7901
7902 (define_insn "*mov_negscc"
7903 [(set (match_operand:SI 0 "s_register_operand" "=r")
7904 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7905 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7906 "TARGET_ARM"
7907 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7908 [(set_attr "conds" "use")
7909 (set_attr "length" "8")]
7910 )
7911
7912 (define_insn "*mov_notscc"
7913 [(set (match_operand:SI 0 "s_register_operand" "=r")
7914 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7915 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7916 "TARGET_ARM"
7917 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7918 [(set_attr "conds" "use")
7919 (set_attr "length" "8")]
7920 )
7921
7922 (define_expand "cstoresi4"
7923 [(set (match_operand:SI 0 "s_register_operand" "")
7924 (match_operator:SI 1 "arm_comparison_operator"
7925 [(match_operand:SI 2 "s_register_operand" "")
7926 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7927 "TARGET_THUMB1"
7928 "{
7929 rtx op3, scratch, scratch2;
7930
7931 if (operands[3] == const0_rtx)
7932 {
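      /* Comparisons against zero are open-coded with shift tricks,
         since Thumb-1 cannot conditionally set a register:
           LE:  (x | (x - 1)) >> 31         (logical shift)
           GE:  (~x) >> 31                  (logical shift)
           GT:  ((x >> 31) - x) >> 31       (arithmetic, then logical)  */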
7933 switch (GET_CODE (operands[1]))
7934 {
7935 case EQ:
7936 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7937 break;
7938
7939 case NE:
7940 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7941 break;
7942
7943 case LE:
7944 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7945 NULL_RTX, 0, OPTAB_WIDEN);
7946 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7947 NULL_RTX, 0, OPTAB_WIDEN);
7948 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7949 operands[0], 1, OPTAB_WIDEN);
7950 break;
7951
7952 case GE:
7953 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7954 NULL_RTX, 1);
7955 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7956 operands[0], 1, OPTAB_WIDEN);
7957 break;
7958
7959 case GT:
7960 scratch = expand_binop (SImode, ashr_optab, operands[2],
7961 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7962 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7963 NULL_RTX, 0, OPTAB_WIDEN);
7964 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7965 0, OPTAB_WIDEN);
7966 break;
7967
7968 /* LT is handled by generic code. No need for unsigned with 0. */
7969 default:
7970 FAIL;
7971 }
7972 DONE;
7973 }
7974
7975 switch (GET_CODE (operands[1]))
7976 {
7977 case EQ:
7978 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7979 NULL_RTX, 0, OPTAB_WIDEN);
7980 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7981 break;
7982
7983 case NE:
7984 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7985 NULL_RTX, 0, OPTAB_WIDEN);
7986 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7987 break;
7988
7989 case LE:
7990 op3 = force_reg (SImode, operands[3]);
7991
7992 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7993 NULL_RTX, 1, OPTAB_WIDEN);
7994 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7995 NULL_RTX, 0, OPTAB_WIDEN);
7996 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7997 op3, operands[2]));
7998 break;
7999
8000 case GE:
8001 op3 = operands[3];
8002 if (!thumb1_cmp_operand (op3, SImode))
8003 op3 = force_reg (SImode, op3);
8004 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8005 NULL_RTX, 0, OPTAB_WIDEN);
8006 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8007 NULL_RTX, 1, OPTAB_WIDEN);
8008 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8009 operands[2], op3));
8010 break;
8011
8012 case LEU:
8013 op3 = force_reg (SImode, operands[3]);
8014 scratch = force_reg (SImode, const0_rtx);
8015 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8016 op3, operands[2]));
8017 break;
8018
8019 case GEU:
8020 op3 = operands[3];
8021 if (!thumb1_cmp_operand (op3, SImode))
8022 op3 = force_reg (SImode, op3);
8023 scratch = force_reg (SImode, const0_rtx);
8024 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8025 operands[2], op3));
8026 break;
8027
8028 case LTU:
8029 op3 = operands[3];
8030 if (!thumb1_cmp_operand (op3, SImode))
8031 op3 = force_reg (SImode, op3);
8032 scratch = gen_reg_rtx (SImode);
8033 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8034 emit_insn (gen_negsi2 (operands[0], scratch));
8035 break;
8036
8037 case GTU:
8038 op3 = force_reg (SImode, operands[3]);
8039 scratch = gen_reg_rtx (SImode);
8040 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8041 emit_insn (gen_negsi2 (operands[0], scratch));
8042 break;
8043
8044 /* No good sequences for GT, LT. */
8045 default:
8046 FAIL;
8047 }
8048 DONE;
8049 }")
8050
8051 (define_expand "cstoresi_eq0_thumb1"
8052 [(parallel
8053 [(set (match_operand:SI 0 "s_register_operand" "")
8054 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8055 (const_int 0)))
8056 (clobber (match_dup:SI 2))])]
8057 "TARGET_THUMB1"
8058 "operands[2] = gen_reg_rtx (SImode);"
8059 )
8060
8061 (define_expand "cstoresi_ne0_thumb1"
8062 [(parallel
8063 [(set (match_operand:SI 0 "s_register_operand" "")
8064 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8065 (const_int 0)))
8066 (clobber (match_dup:SI 2))])]
8067 "TARGET_THUMB1"
8068 "operands[2] = gen_reg_rtx (SImode);"
8069 )
8070
8071 (define_insn "*cstoresi_eq0_thumb1_insn"
8072 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8073 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8074 (const_int 0)))
8075 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8076 "TARGET_THUMB1"
8077 "@
8078 neg\\t%0, %1\;adc\\t%0, %0, %1
8079 neg\\t%2, %1\;adc\\t%0, %1, %2"
8080 [(set_attr "length" "4")]
8081 )
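;; Explanatory note (not from the original sources): "neg" here computes
;; 0 - %1 and sets the carry flag exactly when %1 is zero, so the
;; following "adc" yields %1 + (0 - %1) + C = C, i.e. 1 when the input
;; is zero and 0 otherwise; the first alternative is the same trick
;; performed in place.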
8082
8083 (define_insn "*cstoresi_ne0_thumb1_insn"
8084 [(set (match_operand:SI 0 "s_register_operand" "=l")
8085 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8086 (const_int 0)))
8087 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8088 "TARGET_THUMB1"
8089 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8090 [(set_attr "length" "4")]
8091 )
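;; Explanatory note (not from the original sources): "sub %2, %1, #1"
;; sets the carry flag exactly when %1 >= 1 unsigned, i.e. when %1 is
;; non-zero, and "sbc %0, %1, %2" then computes %1 - (%1 - 1) - !C = C,
;; giving 1 for a non-zero input and 0 otherwise.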
8092
8093 (define_insn "cstoresi_nltu_thumb1"
8094 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8095 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8096 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8097 "TARGET_THUMB1"
8098 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8099 [(set_attr "length" "4")]
8100 )
8101
8102 ;; Used as part of the expansion of the Thumb-1 cstore sequences for LE, GE, LEU and GEU.
8103 (define_insn "thumb1_addsi3_addgeu"
8104 [(set (match_operand:SI 0 "s_register_operand" "=l")
8105 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8106 (match_operand:SI 2 "s_register_operand" "l"))
8107 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8108 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8109 "TARGET_THUMB1"
8110 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8111 [(set_attr "length" "4")]
8112 )
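;; Explanatory note (not from the original sources): "cmp %3, %4" sets
;; the carry flag to (%3 >= %4) unsigned, and "adc %0, %1, %2" folds
;; that 0/1 value into the sum, matching the (geu ...) term in the RTL
;; above.  The cstoresi4 LE/GE cases feed the operands' sign bits in as
;; %1 and %2 to build the signed comparison result.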
8113
8114 \f
8115 ;; Conditional move insns
8116
8117 (define_expand "movsicc"
8118 [(set (match_operand:SI 0 "s_register_operand" "")
8119 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8120 (match_operand:SI 2 "arm_not_operand" "")
8121 (match_operand:SI 3 "arm_not_operand" "")))]
8122 "TARGET_32BIT"
8123 "
8124 {
8125 enum rtx_code code = GET_CODE (operands[1]);
8126 rtx ccreg;
8127
8128 if (code == UNEQ || code == LTGT)
8129 FAIL;
8130
8131 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8132 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8133 }"
8134 )
8135
8136 (define_expand "movsfcc"
8137 [(set (match_operand:SF 0 "s_register_operand" "")
8138 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8139 (match_operand:SF 2 "s_register_operand" "")
8140 (match_operand:SF 3 "nonmemory_operand" "")))]
8141 "TARGET_32BIT"
8142 "
8143 {
8144 enum rtx_code code = GET_CODE (operands[1]);
8145 rtx ccreg;
8146
8147 if (code == UNEQ || code == LTGT)
8148 FAIL;
8149
8150 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8151 Otherwise, ensure it is a valid FP add operand */
8152 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8153 || (!arm_float_add_operand (operands[3], SFmode)))
8154 operands[3] = force_reg (SFmode, operands[3]);
8155
8156 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8157 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8158 }"
8159 )
8160
8161 (define_expand "movdfcc"
8162 [(set (match_operand:DF 0 "s_register_operand" "")
8163 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8164 (match_operand:DF 2 "s_register_operand" "")
8165 (match_operand:DF 3 "arm_float_add_operand" "")))]
8166 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8167 "
8168 {
8169 enum rtx_code code = GET_CODE (operands[1]);
8170 rtx ccreg;
8171
8172 if (code == UNEQ || code == LTGT)
8173 FAIL;
8174
8175 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8176 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8177 }"
8178 )
8179
8180 (define_insn "*movsicc_insn"
8181 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8182 (if_then_else:SI
8183 (match_operator 3 "arm_comparison_operator"
8184 [(match_operand 4 "cc_register" "") (const_int 0)])
8185 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8186 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8187 "TARGET_ARM"
8188 "@
8189 mov%D3\\t%0, %2
8190 mvn%D3\\t%0, #%B2
8191 mov%d3\\t%0, %1
8192 mvn%d3\\t%0, #%B1
8193 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8194 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8195 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8196 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8197 [(set_attr "length" "4,4,4,4,8,8,8,8")
8198 (set_attr "conds" "use")]
8199 )
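;; For example (illustrative only), "x = (a == b) ? 10 : 20" can use the
;; two-instruction alternatives above:
;;	cmp	a, b
;;	moveq	x, #10
;;	movne	x, #20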
8200
8201 (define_insn "*movsfcc_soft_insn"
8202 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8203 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8204 [(match_operand 4 "cc_register" "") (const_int 0)])
8205 (match_operand:SF 1 "s_register_operand" "0,r")
8206 (match_operand:SF 2 "s_register_operand" "r,0")))]
8207 "TARGET_ARM && TARGET_SOFT_FLOAT"
8208 "@
8209 mov%D3\\t%0, %2
8210 mov%d3\\t%0, %1"
8211 [(set_attr "conds" "use")]
8212 )
8213
8214 \f
8215 ;; Jump and linkage insns
8216
8217 (define_expand "jump"
8218 [(set (pc)
8219 (label_ref (match_operand 0 "" "")))]
8220 "TARGET_EITHER"
8221 ""
8222 )
8223
8224 (define_insn "*arm_jump"
8225 [(set (pc)
8226 (label_ref (match_operand 0 "" "")))]
8227 "TARGET_32BIT"
8228 "*
8229 {
8230 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8231 {
8232 arm_ccfsm_state += 2;
8233 return \"\";
8234 }
8235 return \"b%?\\t%l0\";
8236 }
8237 "
8238 [(set_attr "predicable" "yes")]
8239 )
8240
8241 (define_insn "*thumb_jump"
8242 [(set (pc)
8243 (label_ref (match_operand 0 "" "")))]
8244 "TARGET_THUMB1"
8245 "*
8246 if (get_attr_length (insn) == 2)
8247 return \"b\\t%l0\";
8248 return \"bl\\t%l0\\t%@ far jump\";
8249 "
8250 [(set (attr "far_jump")
8251 (if_then_else
8252 (eq_attr "length" "4")
8253 (const_string "yes")
8254 (const_string "no")))
8255 (set (attr "length")
8256 (if_then_else
8257 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8258 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8259 (const_int 2)
8260 (const_int 4)))]
8261 )
8262
8263 (define_expand "call"
8264 [(parallel [(call (match_operand 0 "memory_operand" "")
8265 (match_operand 1 "general_operand" ""))
8266 (use (match_operand 2 "" ""))
8267 (clobber (reg:SI LR_REGNUM))])]
8268 "TARGET_EITHER"
8269 "
8270 {
8271 rtx callee, pat;
8272
8273 /* In an untyped call, we can get NULL for operand 2. */
8274 if (operands[2] == NULL_RTX)
8275 operands[2] = const0_rtx;
8276
8277 /* Decide if we should generate indirect calls by loading the
8278 32-bit address of the callee into a register before performing the
8279 branch and link. */
8280 callee = XEXP (operands[0], 0);
8281 if (GET_CODE (callee) == SYMBOL_REF
8282 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8283 : !REG_P (callee))
8284 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8285
8286 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8287 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8288 DONE;
8289 }"
8290 )
8291
8292 (define_expand "call_internal"
8293 [(parallel [(call (match_operand 0 "memory_operand" "")
8294 (match_operand 1 "general_operand" ""))
8295 (use (match_operand 2 "" ""))
8296 (clobber (reg:SI LR_REGNUM))])])
8297
8298 (define_insn "*call_reg_armv5"
8299 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8300 (match_operand 1 "" ""))
8301 (use (match_operand 2 "" ""))
8302 (clobber (reg:SI LR_REGNUM))]
8303 "TARGET_ARM && arm_arch5"
8304 "blx%?\\t%0"
8305 [(set_attr "type" "call")]
8306 )
8307
8308 (define_insn "*call_reg_arm"
8309 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8310 (match_operand 1 "" ""))
8311 (use (match_operand 2 "" ""))
8312 (clobber (reg:SI LR_REGNUM))]
8313 "TARGET_ARM && !arm_arch5"
8314 "*
8315 return output_call (operands);
8316 "
8317 ;; length is worst case; normally it is only two instructions (8 bytes)
8318 [(set_attr "length" "12")
8319 (set_attr "type" "call")]
8320 )
8321
8322 (define_insn "*call_mem"
8323 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8324 (match_operand 1 "" ""))
8325 (use (match_operand 2 "" ""))
8326 (clobber (reg:SI LR_REGNUM))]
8327 "TARGET_ARM"
8328 "*
8329 return output_call_mem (operands);
8330 "
8331 [(set_attr "length" "12")
8332 (set_attr "type" "call")]
8333 )
8334
8335 (define_insn "*call_reg_thumb1_v5"
8336 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8337 (match_operand 1 "" ""))
8338 (use (match_operand 2 "" ""))
8339 (clobber (reg:SI LR_REGNUM))]
8340 "TARGET_THUMB1 && arm_arch5"
8341 "blx\\t%0"
8342 [(set_attr "length" "2")
8343 (set_attr "type" "call")]
8344 )
8345
8346 (define_insn "*call_reg_thumb1"
8347 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8348 (match_operand 1 "" ""))
8349 (use (match_operand 2 "" ""))
8350 (clobber (reg:SI LR_REGNUM))]
8351 "TARGET_THUMB1 && !arm_arch5"
8352 "*
8353 {
8354 if (!TARGET_CALLER_INTERWORKING)
8355 return thumb_call_via_reg (operands[0]);
8356 else if (operands[1] == const0_rtx)
8357 return \"bl\\t%__interwork_call_via_%0\";
8358 else if (frame_pointer_needed)
8359 return \"bl\\t%__interwork_r7_call_via_%0\";
8360 else
8361 return \"bl\\t%__interwork_r11_call_via_%0\";
8362 }"
8363 [(set_attr "type" "call")]
8364 )
8365
8366 (define_expand "call_value"
8367 [(parallel [(set (match_operand 0 "" "")
8368 (call (match_operand 1 "memory_operand" "")
8369 (match_operand 2 "general_operand" "")))
8370 (use (match_operand 3 "" ""))
8371 (clobber (reg:SI LR_REGNUM))])]
8372 "TARGET_EITHER"
8373 "
8374 {
8375 rtx pat, callee;
8376
8377 /* In an untyped call, we can get NULL for operand 3. */
8378 if (operands[3] == 0)
8379 operands[3] = const0_rtx;
8380
8381 /* Decide if we should generate indirect calls by loading the
8382 32-bit address of the callee into a register before performing the
8383 branch and link. */
8384 callee = XEXP (operands[1], 0);
8385 if (GET_CODE (callee) == SYMBOL_REF
8386 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8387 : !REG_P (callee))
8388 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8389
8390 pat = gen_call_value_internal (operands[0], operands[1],
8391 operands[2], operands[3]);
8392 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8393 DONE;
8394 }"
8395 )
8396
8397 (define_expand "call_value_internal"
8398 [(parallel [(set (match_operand 0 "" "")
8399 (call (match_operand 1 "memory_operand" "")
8400 (match_operand 2 "general_operand" "")))
8401 (use (match_operand 3 "" ""))
8402 (clobber (reg:SI LR_REGNUM))])])
8403
8404 (define_insn "*call_value_reg_armv5"
8405 [(set (match_operand 0 "" "")
8406 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8407 (match_operand 2 "" "")))
8408 (use (match_operand 3 "" ""))
8409 (clobber (reg:SI LR_REGNUM))]
8410 "TARGET_ARM && arm_arch5"
8411 "blx%?\\t%1"
8412 [(set_attr "type" "call")]
8413 )
8414
8415 (define_insn "*call_value_reg_arm"
8416 [(set (match_operand 0 "" "")
8417 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8418 (match_operand 2 "" "")))
8419 (use (match_operand 3 "" ""))
8420 (clobber (reg:SI LR_REGNUM))]
8421 "TARGET_ARM && !arm_arch5"
8422 "*
8423 return output_call (&operands[1]);
8424 "
8425 [(set_attr "length" "12")
8426 (set_attr "type" "call")]
8427 )
8428
8429 (define_insn "*call_value_mem"
8430 [(set (match_operand 0 "" "")
8431 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8432 (match_operand 2 "" "")))
8433 (use (match_operand 3 "" ""))
8434 (clobber (reg:SI LR_REGNUM))]
8435 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8436 "*
8437 return output_call_mem (&operands[1]);
8438 "
8439 [(set_attr "length" "12")
8440 (set_attr "type" "call")]
8441 )
8442
8443 (define_insn "*call_value_reg_thumb1_v5"
8444 [(set (match_operand 0 "" "")
8445 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8446 (match_operand 2 "" "")))
8447 (use (match_operand 3 "" ""))
8448 (clobber (reg:SI LR_REGNUM))]
8449 "TARGET_THUMB1 && arm_arch5"
8450 "blx\\t%1"
8451 [(set_attr "length" "2")
8452 (set_attr "type" "call")]
8453 )
8454
8455 (define_insn "*call_value_reg_thumb1"
8456 [(set (match_operand 0 "" "")
8457 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8458 (match_operand 2 "" "")))
8459 (use (match_operand 3 "" ""))
8460 (clobber (reg:SI LR_REGNUM))]
8461 "TARGET_THUMB1 && !arm_arch5"
8462 "*
8463 {
8464 if (!TARGET_CALLER_INTERWORKING)
8465 return thumb_call_via_reg (operands[1]);
8466 else if (operands[2] == const0_rtx)
8467 return \"bl\\t%__interwork_call_via_%1\";
8468 else if (frame_pointer_needed)
8469 return \"bl\\t%__interwork_r7_call_via_%1\";
8470 else
8471 return \"bl\\t%__interwork_r11_call_via_%1\";
8472 }"
8473 [(set_attr "type" "call")]
8474 )
8475
8476 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8477 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8478
8479 (define_insn "*call_symbol"
8480 [(call (mem:SI (match_operand:SI 0 "" ""))
8481 (match_operand 1 "" ""))
8482 (use (match_operand 2 "" ""))
8483 (clobber (reg:SI LR_REGNUM))]
8484 "TARGET_ARM
8485 && (GET_CODE (operands[0]) == SYMBOL_REF)
8486 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8487 "*
8488 {
8489 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8490 }"
8491 [(set_attr "type" "call")]
8492 )
8493
8494 (define_insn "*call_value_symbol"
8495 [(set (match_operand 0 "" "")
8496 (call (mem:SI (match_operand:SI 1 "" ""))
8497 (match_operand:SI 2 "" "")))
8498 (use (match_operand 3 "" ""))
8499 (clobber (reg:SI LR_REGNUM))]
8500 "TARGET_ARM
8501 && (GET_CODE (operands[1]) == SYMBOL_REF)
8502 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8503 "*
8504 {
8505 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8506 }"
8507 [(set_attr "type" "call")]
8508 )
8509
8510 (define_insn "*call_insn"
8511 [(call (mem:SI (match_operand:SI 0 "" ""))
8512 (match_operand:SI 1 "" ""))
8513 (use (match_operand 2 "" ""))
8514 (clobber (reg:SI LR_REGNUM))]
8515 "TARGET_THUMB
8516 && GET_CODE (operands[0]) == SYMBOL_REF
8517 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8518 "bl\\t%a0"
8519 [(set_attr "length" "4")
8520 (set_attr "type" "call")]
8521 )
8522
8523 (define_insn "*call_value_insn"
8524 [(set (match_operand 0 "" "")
8525 (call (mem:SI (match_operand 1 "" ""))
8526 (match_operand 2 "" "")))
8527 (use (match_operand 3 "" ""))
8528 (clobber (reg:SI LR_REGNUM))]
8529 "TARGET_THUMB
8530 && GET_CODE (operands[1]) == SYMBOL_REF
8531 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8532 "bl\\t%a1"
8533 [(set_attr "length" "4")
8534 (set_attr "type" "call")]
8535 )
8536
8537 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8538 (define_expand "sibcall"
8539 [(parallel [(call (match_operand 0 "memory_operand" "")
8540 (match_operand 1 "general_operand" ""))
8541 (return)
8542 (use (match_operand 2 "" ""))])]
8543 "TARGET_ARM"
8544 "
8545 {
8546 if (operands[2] == NULL_RTX)
8547 operands[2] = const0_rtx;
8548 }"
8549 )
8550
8551 (define_expand "sibcall_value"
8552 [(parallel [(set (match_operand 0 "" "")
8553 (call (match_operand 1 "memory_operand" "")
8554 (match_operand 2 "general_operand" "")))
8555 (return)
8556 (use (match_operand 3 "" ""))])]
8557 "TARGET_ARM"
8558 "
8559 {
8560 if (operands[3] == NULL_RTX)
8561 operands[3] = const0_rtx;
8562 }"
8563 )
8564
8565 (define_insn "*sibcall_insn"
8566 [(call (mem:SI (match_operand:SI 0 "" "X"))
8567 (match_operand 1 "" ""))
8568 (return)
8569 (use (match_operand 2 "" ""))]
8570 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8571 "*
8572 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8573 "
8574 [(set_attr "type" "call")]
8575 )
8576
8577 (define_insn "*sibcall_value_insn"
8578 [(set (match_operand 0 "" "")
8579 (call (mem:SI (match_operand:SI 1 "" "X"))
8580 (match_operand 2 "" "")))
8581 (return)
8582 (use (match_operand 3 "" ""))]
8583 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8584 "*
8585 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8586 "
8587 [(set_attr "type" "call")]
8588 )
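
;; Illustrative sketch only (the source, register use and output are
;; assumptions): a C tail call such as
;;   int bar (int);
;;   int baz (int x) { return bar (x + 1); }
;; can use the sibcall patterns above, so the call is emitted as a plain
;; branch, e.g. `add r0, r0, #1' followed by `b bar', and no return
;; sequence is needed in baz.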
8589
8590 ;; Often the return insn is really a load from memory (an ldm including the PC), so give it the load1 type attribute.
8591 (define_insn "return"
8592 [(return)]
8593 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8594 "*
8595 {
8596 if (arm_ccfsm_state == 2)
8597 {
8598 arm_ccfsm_state += 2;
8599 return \"\";
8600 }
8601 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8602 }"
8603 [(set_attr "type" "load1")
8604 (set_attr "length" "12")
8605 (set_attr "predicable" "yes")]
8606 )
8607
8608 (define_insn "*cond_return"
8609 [(set (pc)
8610 (if_then_else (match_operator 0 "arm_comparison_operator"
8611 [(match_operand 1 "cc_register" "") (const_int 0)])
8612 (return)
8613 (pc)))]
8614 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8615 "*
8616 {
8617 if (arm_ccfsm_state == 2)
8618 {
8619 arm_ccfsm_state += 2;
8620 return \"\";
8621 }
8622 return output_return_instruction (operands[0], TRUE, FALSE);
8623 }"
8624 [(set_attr "conds" "use")
8625 (set_attr "length" "12")
8626 (set_attr "type" "load1")]
8627 )
8628
8629 (define_insn "*cond_return_inverted"
8630 [(set (pc)
8631 (if_then_else (match_operator 0 "arm_comparison_operator"
8632 [(match_operand 1 "cc_register" "") (const_int 0)])
8633 (pc)
8634 (return)))]
8635 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8636 "*
8637 {
8638 if (arm_ccfsm_state == 2)
8639 {
8640 arm_ccfsm_state += 2;
8641 return \"\";
8642 }
8643 return output_return_instruction (operands[0], TRUE, TRUE);
8644 }"
8645 [(set_attr "conds" "use")
8646 (set_attr "length" "12")
8647 (set_attr "type" "load1")]
8648 )
8649
8650 ;; Generate a sequence of instructions to determine if the processor is
8651 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8652 ;; mask.
8653
8654 (define_expand "return_addr_mask"
8655 [(set (match_dup 1)
8656 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8657 (const_int 0)))
8658 (set (match_operand:SI 0 "s_register_operand" "")
8659 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8660 (const_int -1)
8661 (const_int 67108860)))] ; 0x03fffffc
8662 "TARGET_ARM"
8663 "
8664 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8665 ")
8666
8667 (define_insn "*check_arch2"
8668 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8669 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8670 (const_int 0)))]
8671 "TARGET_ARM"
8672 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8673 [(set_attr "length" "8")
8674 (set_attr "conds" "set")]
8675 )
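
;; Informal worked reading of the two patterns above (a sketch, not extra
;; semantics): `teq pc, pc' compares equal only when the PC is a full
;; 32-bit value, so the EQ arm of return_addr_mask selects -1 and every
;; bit of the return address is significant.  On a 26-bit processor the
;; PSR bits folded into r15 make the comparison NE, and the mask
;; 0x03fffffc keeps just the 26-bit address field (bits 2 to 25).  The
;; preceding `teq r0, r0' simply guarantees that at least one flag is set
;; before the second comparison.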
8676
8677 ;; Call subroutine returning any type.
8678
8679 (define_expand "untyped_call"
8680 [(parallel [(call (match_operand 0 "" "")
8681 (const_int 0))
8682 (match_operand 1 "" "")
8683 (match_operand 2 "" "")])]
8684 "TARGET_EITHER"
8685 "
8686 {
8687 int i;
8688 rtx par = gen_rtx_PARALLEL (VOIDmode,
8689 rtvec_alloc (XVECLEN (operands[2], 0)));
8690 rtx addr = gen_reg_rtx (Pmode);
8691 rtx mem;
8692 int size = 0;
8693
8694 emit_move_insn (addr, XEXP (operands[1], 0));
8695 mem = change_address (operands[1], BLKmode, addr);
8696
8697 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8698 {
8699 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8700
8701 /* Default code only uses r0 as a return value, but we could
8702 be using anything up to 4 registers. */
8703 if (REGNO (src) == R0_REGNUM)
8704 src = gen_rtx_REG (TImode, R0_REGNUM);
8705
8706 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8707 GEN_INT (size));
8708 size += GET_MODE_SIZE (GET_MODE (src));
8709 }
8710
8711 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8712 const0_rtx));
8713
8714 size = 0;
8715
8716 for (i = 0; i < XVECLEN (par, 0); i++)
8717 {
8718 HOST_WIDE_INT offset = 0;
8719 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8720
8721 if (size != 0)
8722 emit_move_insn (addr, plus_constant (addr, size));
8723
8724 mem = change_address (mem, GET_MODE (reg), NULL);
8725 if (REGNO (reg) == R0_REGNUM)
8726 {
8727 /* On Thumb we have to use a write-back instruction.  */
8728 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8729 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8730 size = TARGET_ARM ? 16 : 0;
8731 }
8732 else
8733 {
8734 emit_move_insn (mem, reg);
8735 size = GET_MODE_SIZE (GET_MODE (reg));
8736 }
8737 }
8738
8739 /* The optimizer does not know that the call sets the function value
8740 registers we stored in the result block. We avoid problems by
8741 claiming that all hard registers are used and clobbered at this
8742 point. */
8743 emit_insn (gen_blockage ());
8744
8745 DONE;
8746 }"
8747 )
8748
8749 (define_expand "untyped_return"
8750 [(match_operand:BLK 0 "memory_operand" "")
8751 (match_operand 1 "" "")]
8752 "TARGET_EITHER"
8753 "
8754 {
8755 int i;
8756 rtx addr = gen_reg_rtx (Pmode);
8757 rtx mem;
8758 int size = 0;
8759
8760 emit_move_insn (addr, XEXP (operands[0], 0));
8761 mem = change_address (operands[0], BLKmode, addr);
8762
8763 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8764 {
8765 HOST_WIDE_INT offset = 0;
8766 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8767
8768 if (size != 0)
8769 emit_move_insn (addr, plus_constant (addr, size));
8770
8771 mem = change_address (mem, GET_MODE (reg), NULL);
8772 if (REGNO (reg) == R0_REGNUM)
8773 {
8774 /* On Thumb we have to use a write-back instruction.  */
8775 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8776 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8777 size = TARGET_ARM ? 16 : 0;
8778 }
8779 else
8780 {
8781 emit_move_insn (reg, mem);
8782 size = GET_MODE_SIZE (GET_MODE (reg));
8783 }
8784 }
8785
8786 /* Emit USE insns before the return. */
8787 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8788 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8789
8790 /* Construct the return. */
8791 expand_naked_return ();
8792
8793 DONE;
8794 }"
8795 )
8796
8797 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8798 ;; all of memory. This blocks insns from being moved across this point.
8799
8800 (define_insn "blockage"
8801 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8802 "TARGET_EITHER"
8803 ""
8804 [(set_attr "length" "0")
8805 (set_attr "type" "block")]
8806 )
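
;; Usage sketch: expanders that must keep surrounding memory accesses and
;; register sets in place simply emit this insn, exactly as untyped_call
;; above already does:
;;   emit_insn (gen_blockage ());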
8807
8808 (define_expand "casesi"
8809 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8810 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8811 (match_operand:SI 2 "const_int_operand" "") ; total range
8812 (match_operand:SI 3 "" "") ; table label
8813 (match_operand:SI 4 "" "")] ; Out of range label
8814 "TARGET_32BIT"
8815 "
8816 {
8817 rtx reg;
8818 if (operands[1] != const0_rtx)
8819 {
8820 reg = gen_reg_rtx (SImode);
8821
8822 emit_insn (gen_addsi3 (reg, operands[0],
8823 GEN_INT (-INTVAL (operands[1]))));
8824 operands[0] = reg;
8825 }
8826
8827 if (!const_ok_for_arm (INTVAL (operands[2])))
8828 operands[2] = force_reg (SImode, operands[2]);
8829
8830 if (TARGET_ARM)
8831 {
8832 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8833 operands[3], operands[4]));
8834 }
8835 else if (flag_pic)
8836 {
8837 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8838 operands[2], operands[3], operands[4]));
8839 }
8840 else
8841 {
8842 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8843 operands[3], operands[4]));
8844 }
8845 DONE;
8846 }"
8847 )
8848
8849 ;; The USE in this pattern is needed to tell flow analysis that this is
8850 ;; a CASESI insn. It has no other purpose.
8851 (define_insn "arm_casesi_internal"
8852 [(parallel [(set (pc)
8853 (if_then_else
8854 (leu (match_operand:SI 0 "s_register_operand" "r")
8855 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8856 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8857 (label_ref (match_operand 2 "" ""))))
8858 (label_ref (match_operand 3 "" ""))))
8859 (clobber (reg:CC CC_REGNUM))
8860 (use (label_ref (match_dup 2)))])]
8861 "TARGET_ARM"
8862 "*
8863 if (flag_pic)
8864 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8865 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8866 "
8867 [(set_attr "conds" "clob")
8868 (set_attr "length" "12")]
8869 )
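
;; Illustrative note (an informal reading of the template above): in ARM
;; state the PC reads as the address of the current instruction plus 8, so
;; `ldrls pc, [pc, %0, asl #2]' indexes a table of addresses that is placed
;; immediately after the following `b %l3'; indices above the bound fall
;; through to that branch instead.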
8870
8871 (define_expand "indirect_jump"
8872 [(set (pc)
8873 (match_operand:SI 0 "s_register_operand" ""))]
8874 "TARGET_EITHER"
8875 "
8876 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8877 address and use bx. */
8878 if (TARGET_THUMB2)
8879 {
8880 rtx tmp;
8881 tmp = gen_reg_rtx (SImode);
8882 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8883 operands[0] = tmp;
8884 }
8885 "
8886 )
8887
8888 ;; NB Never uses BX.
8889 (define_insn "*arm_indirect_jump"
8890 [(set (pc)
8891 (match_operand:SI 0 "s_register_operand" "r"))]
8892 "TARGET_ARM"
8893 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8894 [(set_attr "predicable" "yes")]
8895 )
8896
8897 (define_insn "*load_indirect_jump"
8898 [(set (pc)
8899 (match_operand:SI 0 "memory_operand" "m"))]
8900 "TARGET_ARM"
8901 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8902 [(set_attr "type" "load1")
8903 (set_attr "pool_range" "4096")
8904 (set_attr "neg_pool_range" "4084")
8905 (set_attr "predicable" "yes")]
8906 )
8907
8908 ;; NB Never uses BX.
8909 (define_insn "*thumb1_indirect_jump"
8910 [(set (pc)
8911 (match_operand:SI 0 "register_operand" "l*r"))]
8912 "TARGET_THUMB1"
8913 "mov\\tpc, %0"
8914 [(set_attr "conds" "clob")
8915 (set_attr "length" "2")]
8916 )
8917
8918 \f
8919 ;; Misc insns
8920
8921 (define_insn "nop"
8922 [(const_int 0)]
8923 "TARGET_EITHER"
8924 "*
8925 if (TARGET_UNIFIED_ASM)
8926 return \"nop\";
8927 if (TARGET_ARM)
8928 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8929 return \"mov\\tr8, r8\";
8930 "
8931 [(set (attr "length")
8932 (if_then_else (eq_attr "is_thumb" "yes")
8933 (const_int 2)
8934 (const_int 4)))]
8935 )
8936
8937 \f
8938 ;; Patterns to allow combination of arithmetic, cond code and shifts
8939
8940 (define_insn "*arith_shiftsi"
8941 [(set (match_operand:SI 0 "s_register_operand" "=r")
8942 (match_operator:SI 1 "shiftable_operator"
8943 [(match_operator:SI 3 "shift_operator"
8944 [(match_operand:SI 4 "s_register_operand" "r")
8945 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8946 (match_operand:SI 2 "s_register_operand" "r")]))]
8947 "TARGET_ARM"
8948 "%i1%?\\t%0, %2, %4%S3"
8949 [(set_attr "predicable" "yes")
8950 (set_attr "shift" "4")
8951 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8952 (const_string "alu_shift")
8953 (const_string "alu_shift_reg")))]
8954 )
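
;; Illustrative example only (the function and register assignment are
;; assumptions): C source such as
;;   int f (int a, int b) { return a + (b << 2); }
;; can match *arith_shiftsi above and be emitted as the single instruction
;;   add r0, r0, r1, lsl #2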
8955
8956 (define_split
8957 [(set (match_operand:SI 0 "s_register_operand" "")
8958 (match_operator:SI 1 "shiftable_operator"
8959 [(match_operator:SI 2 "shiftable_operator"
8960 [(match_operator:SI 3 "shift_operator"
8961 [(match_operand:SI 4 "s_register_operand" "")
8962 (match_operand:SI 5 "reg_or_int_operand" "")])
8963 (match_operand:SI 6 "s_register_operand" "")])
8964 (match_operand:SI 7 "arm_rhs_operand" "")]))
8965 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8966 "TARGET_ARM"
8967 [(set (match_dup 8)
8968 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8969 (match_dup 6)]))
8970 (set (match_dup 0)
8971 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8972 "")
8973
8974 (define_insn "*arith_shiftsi_compare0"
8975 [(set (reg:CC_NOOV CC_REGNUM)
8976 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8977 [(match_operator:SI 3 "shift_operator"
8978 [(match_operand:SI 4 "s_register_operand" "r")
8979 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8980 (match_operand:SI 2 "s_register_operand" "r")])
8981 (const_int 0)))
8982 (set (match_operand:SI 0 "s_register_operand" "=r")
8983 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8984 (match_dup 2)]))]
8985 "TARGET_ARM"
8986 "%i1%.\\t%0, %2, %4%S3"
8987 [(set_attr "conds" "set")
8988 (set_attr "shift" "4")
8989 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8990 (const_string "alu_shift")
8991 (const_string "alu_shift_reg")))]
8992 )
8993
8994 (define_insn "*arith_shiftsi_compare0_scratch"
8995 [(set (reg:CC_NOOV CC_REGNUM)
8996 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8997 [(match_operator:SI 3 "shift_operator"
8998 [(match_operand:SI 4 "s_register_operand" "r")
8999 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9000 (match_operand:SI 2 "s_register_operand" "r")])
9001 (const_int 0)))
9002 (clobber (match_scratch:SI 0 "=r"))]
9003 "TARGET_ARM"
9004 "%i1%.\\t%0, %2, %4%S3"
9005 [(set_attr "conds" "set")
9006 (set_attr "shift" "4")
9007 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9008 (const_string "alu_shift")
9009 (const_string "alu_shift_reg")))]
9010 )
9011
9012 (define_insn "*sub_shiftsi"
9013 [(set (match_operand:SI 0 "s_register_operand" "=r")
9014 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9015 (match_operator:SI 2 "shift_operator"
9016 [(match_operand:SI 3 "s_register_operand" "r")
9017 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9018 "TARGET_ARM"
9019 "sub%?\\t%0, %1, %3%S2"
9020 [(set_attr "predicable" "yes")
9021 (set_attr "shift" "3")
9022 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9023 (const_string "alu_shift")
9024 (const_string "alu_shift_reg")))]
9025 )
9026
9027 (define_insn "*sub_shiftsi_compare0"
9028 [(set (reg:CC_NOOV CC_REGNUM)
9029 (compare:CC_NOOV
9030 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9031 (match_operator:SI 2 "shift_operator"
9032 [(match_operand:SI 3 "s_register_operand" "r")
9033 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9034 (const_int 0)))
9035 (set (match_operand:SI 0 "s_register_operand" "=r")
9036 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9037 (match_dup 4)])))]
9038 "TARGET_ARM"
9039 "sub%.\\t%0, %1, %3%S2"
9040 [(set_attr "conds" "set")
9041 (set_attr "shift" "3")
9042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9043 (const_string "alu_shift")
9044 (const_string "alu_shift_reg")))]
9045 )
9046
9047 (define_insn "*sub_shiftsi_compare0_scratch"
9048 [(set (reg:CC_NOOV CC_REGNUM)
9049 (compare:CC_NOOV
9050 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9051 (match_operator:SI 2 "shift_operator"
9052 [(match_operand:SI 3 "s_register_operand" "r")
9053 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9054 (const_int 0)))
9055 (clobber (match_scratch:SI 0 "=r"))]
9056 "TARGET_ARM"
9057 "sub%.\\t%0, %1, %3%S2"
9058 [(set_attr "conds" "set")
9059 (set_attr "shift" "3")
9060 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9061 (const_string "alu_shift")
9062 (const_string "alu_shift_reg")))]
9063 )
9064
9065 \f
9066
9067 (define_insn "*and_scc"
9068 [(set (match_operand:SI 0 "s_register_operand" "=r")
9069 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9070 [(match_operand 3 "cc_register" "") (const_int 0)])
9071 (match_operand:SI 2 "s_register_operand" "r")))]
9072 "TARGET_ARM"
9073 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9074 [(set_attr "conds" "use")
9075 (set_attr "length" "8")]
9076 )
9077
9078 (define_insn "*ior_scc"
9079 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9080 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9081 [(match_operand 3 "cc_register" "") (const_int 0)])
9082 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9083 "TARGET_ARM"
9084 "@
9085 orr%d2\\t%0, %1, #1
9086 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9087 [(set_attr "conds" "use")
9088 (set_attr "length" "4,8")]
9089 )
9090
9091 (define_insn "*compare_scc"
9092 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9093 (match_operator:SI 1 "arm_comparison_operator"
9094 [(match_operand:SI 2 "s_register_operand" "r,r")
9095 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9096 (clobber (reg:CC CC_REGNUM))]
9097 "TARGET_ARM"
9098 "*
9099 if (operands[3] == const0_rtx)
9100 {
9101 if (GET_CODE (operands[1]) == LT)
9102 return \"mov\\t%0, %2, lsr #31\";
9103
9104 if (GET_CODE (operands[1]) == GE)
9105 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9106
9107 if (GET_CODE (operands[1]) == EQ)
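      /* Sketch of why this works (no new semantics assumed): RSBS
         computes 1 minus operand 2 and leaves the carry flag set only
         when there is no unsigned borrow, i.e. for the values 0 and 1,
         giving results 1 and 0 respectively; MOVCC then zeroes the
         result for every other value.  */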
9108 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9109 }
9110
9111 if (GET_CODE (operands[1]) == NE)
9112 {
9113 if (which_alternative == 1)
9114 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9115 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9116 }
9117 if (which_alternative == 1)
9118 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9119 else
9120 output_asm_insn (\"cmp\\t%2, %3\", operands);
9121 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9122 "
9123 [(set_attr "conds" "clob")
9124 (set_attr "length" "12")]
9125 )
9126
9127 (define_insn "*cond_move"
9128 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9129 (if_then_else:SI (match_operator 3 "equality_operator"
9130 [(match_operator 4 "arm_comparison_operator"
9131 [(match_operand 5 "cc_register" "") (const_int 0)])
9132 (const_int 0)])
9133 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9134 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9135 "TARGET_ARM"
9136 "*
9137 if (GET_CODE (operands[3]) == NE)
9138 {
9139 if (which_alternative != 1)
9140 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9141 if (which_alternative != 0)
9142 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9143 return \"\";
9144 }
9145 if (which_alternative != 0)
9146 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9147 if (which_alternative != 1)
9148 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9149 return \"\";
9150 "
9151 [(set_attr "conds" "use")
9152 (set_attr "length" "4,4,8")]
9153 )
9154
9155 (define_insn "*cond_arith"
9156 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9157 (match_operator:SI 5 "shiftable_operator"
9158 [(match_operator:SI 4 "arm_comparison_operator"
9159 [(match_operand:SI 2 "s_register_operand" "r,r")
9160 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9161 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9162 (clobber (reg:CC CC_REGNUM))]
9163 "TARGET_ARM"
9164 "*
9165 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9166 return \"%i5\\t%0, %1, %2, lsr #31\";
9167
9168 output_asm_insn (\"cmp\\t%2, %3\", operands);
9169 if (GET_CODE (operands[5]) == AND)
9170 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9171 else if (GET_CODE (operands[5]) == MINUS)
9172 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9173 else if (which_alternative != 0)
9174 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9175 return \"%i5%d4\\t%0, %1, #1\";
9176 "
9177 [(set_attr "conds" "clob")
9178 (set_attr "length" "12")]
9179 )
9180
9181 (define_insn "*cond_sub"
9182 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9183 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9184 (match_operator:SI 4 "arm_comparison_operator"
9185 [(match_operand:SI 2 "s_register_operand" "r,r")
9186 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9187 (clobber (reg:CC CC_REGNUM))]
9188 "TARGET_ARM"
9189 "*
9190 output_asm_insn (\"cmp\\t%2, %3\", operands);
9191 if (which_alternative != 0)
9192 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9193 return \"sub%d4\\t%0, %1, #1\";
9194 "
9195 [(set_attr "conds" "clob")
9196 (set_attr "length" "8,12")]
9197 )
9198
9199 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9200 (define_insn "*cmp_ite0"
9201 [(set (match_operand 6 "dominant_cc_register" "")
9202 (compare
9203 (if_then_else:SI
9204 (match_operator 4 "arm_comparison_operator"
9205 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9206 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9207 (match_operator:SI 5 "arm_comparison_operator"
9208 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9209 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9210 (const_int 0))
9211 (const_int 0)))]
9212 "TARGET_ARM"
9213 "*
9214 {
9215 static const char * const opcodes[4][2] =
9216 {
9217 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9218 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9219 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9220 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9221 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9222 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9223 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9224 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9225 };
9226 int swap =
9227 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9228
9229 return opcodes[which_alternative][swap];
9230 }"
9231 [(set_attr "conds" "set")
9232 (set_attr "length" "8")]
9233 )
9234
9235 (define_insn "*cmp_ite1"
9236 [(set (match_operand 6 "dominant_cc_register" "")
9237 (compare
9238 (if_then_else:SI
9239 (match_operator 4 "arm_comparison_operator"
9240 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9241 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9242 (match_operator:SI 5 "arm_comparison_operator"
9243 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9244 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9245 (const_int 1))
9246 (const_int 0)))]
9247 "TARGET_ARM"
9248 "*
9249 {
9250 static const char * const opcodes[4][2] =
9251 {
9252 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9253 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9254 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9255 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9256 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9257 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9258 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9259 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9260 };
9261 int swap =
9262 comparison_dominates_p (GET_CODE (operands[5]),
9263 reverse_condition (GET_CODE (operands[4])));
9264
9265 return opcodes[which_alternative][swap];
9266 }"
9267 [(set_attr "conds" "set")
9268 (set_attr "length" "8")]
9269 )
9270
9271 (define_insn "*cmp_and"
9272 [(set (match_operand 6 "dominant_cc_register" "")
9273 (compare
9274 (and:SI
9275 (match_operator 4 "arm_comparison_operator"
9276 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9277 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9278 (match_operator:SI 5 "arm_comparison_operator"
9279 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9280 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9281 (const_int 0)))]
9282 "TARGET_ARM"
9283 "*
9284 {
9285 static const char *const opcodes[4][2] =
9286 {
9287 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9288 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9289 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9290 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9291 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9292 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9293 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9294 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9295 };
9296 int swap =
9297 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9298
9299 return opcodes[which_alternative][swap];
9300 }"
9301 [(set_attr "conds" "set")
9302 (set_attr "predicable" "no")
9303 (set_attr "length" "8")]
9304 )
9305
9306 (define_insn "*cmp_ior"
9307 [(set (match_operand 6 "dominant_cc_register" "")
9308 (compare
9309 (ior:SI
9310 (match_operator 4 "arm_comparison_operator"
9311 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9312 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9313 (match_operator:SI 5 "arm_comparison_operator"
9314 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9315 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9316 (const_int 0)))]
9317 "TARGET_ARM"
9318 "*
9319 {
9320 static const char *const opcodes[4][2] =
9321 {
9322 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9323 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9324 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9325 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9326 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9327 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9328 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9329 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9330 };
9331 int swap =
9332 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9333
9334 return opcodes[which_alternative][swap];
9335 }
9336 "
9337 [(set_attr "conds" "set")
9338 (set_attr "length" "8")]
9339 )
9340
9341 (define_insn_and_split "*ior_scc_scc"
9342 [(set (match_operand:SI 0 "s_register_operand" "=r")
9343 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9344 [(match_operand:SI 1 "s_register_operand" "r")
9345 (match_operand:SI 2 "arm_add_operand" "rIL")])
9346 (match_operator:SI 6 "arm_comparison_operator"
9347 [(match_operand:SI 4 "s_register_operand" "r")
9348 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9349 (clobber (reg:CC CC_REGNUM))]
9350 "TARGET_ARM
9351 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9352 != CCmode)"
9353 "#"
9354 "TARGET_ARM && reload_completed"
9355 [(set (match_dup 7)
9356 (compare
9357 (ior:SI
9358 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9359 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9360 (const_int 0)))
9361 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9362 "operands[7]
9363 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9364 DOM_CC_X_OR_Y),
9365 CC_REGNUM);"
9366 [(set_attr "conds" "clob")
9367 (set_attr "length" "16")])
9368
9369 ; If the above pattern is followed by a CMP insn, then the compare is
9370 ; redundant, since we can rework the conditional instruction that follows.
9371 (define_insn_and_split "*ior_scc_scc_cmp"
9372 [(set (match_operand 0 "dominant_cc_register" "")
9373 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9374 [(match_operand:SI 1 "s_register_operand" "r")
9375 (match_operand:SI 2 "arm_add_operand" "rIL")])
9376 (match_operator:SI 6 "arm_comparison_operator"
9377 [(match_operand:SI 4 "s_register_operand" "r")
9378 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9379 (const_int 0)))
9380 (set (match_operand:SI 7 "s_register_operand" "=r")
9381 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9382 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9383 "TARGET_ARM"
9384 "#"
9385 "TARGET_ARM && reload_completed"
9386 [(set (match_dup 0)
9387 (compare
9388 (ior:SI
9389 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9390 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9391 (const_int 0)))
9392 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9393 ""
9394 [(set_attr "conds" "set")
9395 (set_attr "length" "16")])
9396
9397 (define_insn_and_split "*and_scc_scc"
9398 [(set (match_operand:SI 0 "s_register_operand" "=r")
9399 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9400 [(match_operand:SI 1 "s_register_operand" "r")
9401 (match_operand:SI 2 "arm_add_operand" "rIL")])
9402 (match_operator:SI 6 "arm_comparison_operator"
9403 [(match_operand:SI 4 "s_register_operand" "r")
9404 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9405 (clobber (reg:CC CC_REGNUM))]
9406 "TARGET_ARM
9407 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9408 != CCmode)"
9409 "#"
9410 "TARGET_ARM && reload_completed
9411 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9412 != CCmode)"
9413 [(set (match_dup 7)
9414 (compare
9415 (and:SI
9416 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9417 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9418 (const_int 0)))
9419 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9420 "operands[7]
9421 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9422 DOM_CC_X_AND_Y),
9423 CC_REGNUM);"
9424 [(set_attr "conds" "clob")
9425 (set_attr "length" "16")])
9426
9427 ; If the above pattern is followed by a CMP insn, then the compare is
9428 ; redundant, since we can rework the conditional instruction that follows.
9429 (define_insn_and_split "*and_scc_scc_cmp"
9430 [(set (match_operand 0 "dominant_cc_register" "")
9431 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9432 [(match_operand:SI 1 "s_register_operand" "r")
9433 (match_operand:SI 2 "arm_add_operand" "rIL")])
9434 (match_operator:SI 6 "arm_comparison_operator"
9435 [(match_operand:SI 4 "s_register_operand" "r")
9436 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9437 (const_int 0)))
9438 (set (match_operand:SI 7 "s_register_operand" "=r")
9439 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9440 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9441 "TARGET_ARM"
9442 "#"
9443 "TARGET_ARM && reload_completed"
9444 [(set (match_dup 0)
9445 (compare
9446 (and:SI
9447 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9448 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9449 (const_int 0)))
9450 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9451 ""
9452 [(set_attr "conds" "set")
9453 (set_attr "length" "16")])
9454
9455 ;; If there is no dominance in the comparison, then we can still save an
9456 ;; instruction in the AND case, since we know that the second compare
9457 ;; need only zero the value when it is false (if it is true, the value is
9458 ;; already correct).
9459 (define_insn_and_split "*and_scc_scc_nodom"
9460 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9461 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9462 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9463 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9464 (match_operator:SI 6 "arm_comparison_operator"
9465 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9466 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9467 (clobber (reg:CC CC_REGNUM))]
9468 "TARGET_ARM
9469 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9470 == CCmode)"
9471 "#"
9472 "TARGET_ARM && reload_completed"
9473 [(parallel [(set (match_dup 0)
9474 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9475 (clobber (reg:CC CC_REGNUM))])
9476 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9477 (set (match_dup 0)
9478 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9479 (match_dup 0)
9480 (const_int 0)))]
9481 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9482 operands[4], operands[5]),
9483 CC_REGNUM);
9484 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9485 operands[5]);"
9486 [(set_attr "conds" "clob")
9487 (set_attr "length" "20")])
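
;; Worked sketch of the split above (the C source, registers and exact
;; instructions are assumptions): for `(a < 0) && (b == 3)' the first test
;; can be materialised directly, e.g. `mov r0, rA, lsr #31'; the second
;; becomes `cmp rB, #3'; and a final `movne r0, #0' zeroes the result when
;; the second test fails.  Three instructions, with no dominance
;; requirement on the two comparisons.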
9488
9489 (define_split
9490 [(set (reg:CC_NOOV CC_REGNUM)
9491 (compare:CC_NOOV (ior:SI
9492 (and:SI (match_operand:SI 0 "s_register_operand" "")
9493 (const_int 1))
9494 (match_operator:SI 1 "comparison_operator"
9495 [(match_operand:SI 2 "s_register_operand" "")
9496 (match_operand:SI 3 "arm_add_operand" "")]))
9497 (const_int 0)))
9498 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9499 "TARGET_ARM"
9500 [(set (match_dup 4)
9501 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9502 (match_dup 0)))
9503 (set (reg:CC_NOOV CC_REGNUM)
9504 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9505 (const_int 0)))]
9506 "")
9507
9508 (define_split
9509 [(set (reg:CC_NOOV CC_REGNUM)
9510 (compare:CC_NOOV (ior:SI
9511 (match_operator:SI 1 "comparison_operator"
9512 [(match_operand:SI 2 "s_register_operand" "")
9513 (match_operand:SI 3 "arm_add_operand" "")])
9514 (and:SI (match_operand:SI 0 "s_register_operand" "")
9515 (const_int 1)))
9516 (const_int 0)))
9517 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9518 "TARGET_ARM"
9519 [(set (match_dup 4)
9520 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9521 (match_dup 0)))
9522 (set (reg:CC_NOOV CC_REGNUM)
9523 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9524 (const_int 0)))]
9525 "")
9526 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness.
9527
9528 (define_insn "*negscc"
9529 [(set (match_operand:SI 0 "s_register_operand" "=r")
9530 (neg:SI (match_operator 3 "arm_comparison_operator"
9531 [(match_operand:SI 1 "s_register_operand" "r")
9532 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9533 (clobber (reg:CC CC_REGNUM))]
9534 "TARGET_ARM"
9535 "*
9536 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9537 return \"mov\\t%0, %1, asr #31\";
9538
9539 if (GET_CODE (operands[3]) == NE)
9540 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9541
9542 output_asm_insn (\"cmp\\t%1, %2\", operands);
9543 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9544 return \"mvn%d3\\t%0, #0\";
9545 "
9546 [(set_attr "conds" "clob")
9547 (set_attr "length" "12")]
9548 )
9549
9550 (define_insn "movcond"
9551 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9552 (if_then_else:SI
9553 (match_operator 5 "arm_comparison_operator"
9554 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9555 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9556 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9557 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9558 (clobber (reg:CC CC_REGNUM))]
9559 "TARGET_ARM"
9560 "*
9561 if (GET_CODE (operands[5]) == LT
9562 && (operands[4] == const0_rtx))
9563 {
9564 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9565 {
9566 if (operands[2] == const0_rtx)
9567 return \"and\\t%0, %1, %3, asr #31\";
9568 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9569 }
9570 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9571 {
9572 if (operands[1] == const0_rtx)
9573 return \"bic\\t%0, %2, %3, asr #31\";
9574 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9575 }
9576 /* The only case that falls through to here is when both ops 1 & 2
9577 are constants. */
9578 }
9579
9580 if (GET_CODE (operands[5]) == GE
9581 && (operands[4] == const0_rtx))
9582 {
9583 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9584 {
9585 if (operands[2] == const0_rtx)
9586 return \"bic\\t%0, %1, %3, asr #31\";
9587 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9588 }
9589 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9590 {
9591 if (operands[1] == const0_rtx)
9592 return \"and\\t%0, %2, %3, asr #31\";
9593 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9594 }
9595 /* The only case that falls through to here is when both ops 1 & 2
9596 are constants. */
9597 }
9598 if (GET_CODE (operands[4]) == CONST_INT
9599 && !const_ok_for_arm (INTVAL (operands[4])))
9600 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9601 else
9602 output_asm_insn (\"cmp\\t%3, %4\", operands);
9603 if (which_alternative != 0)
9604 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9605 if (which_alternative != 1)
9606 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9607 return \"\";
9608 "
9609 [(set_attr "conds" "clob")
9610 (set_attr "length" "8,8,12")]
9611 )
9612
9613 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9614
9615 (define_insn "*ifcompare_plus_move"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9617 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9618 [(match_operand:SI 4 "s_register_operand" "r,r")
9619 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9620 (plus:SI
9621 (match_operand:SI 2 "s_register_operand" "r,r")
9622 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9623 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9624 (clobber (reg:CC CC_REGNUM))]
9625 "TARGET_ARM"
9626 "#"
9627 [(set_attr "conds" "clob")
9628 (set_attr "length" "8,12")]
9629 )
9630
9631 (define_insn "*if_plus_move"
9632 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9633 (if_then_else:SI
9634 (match_operator 4 "arm_comparison_operator"
9635 [(match_operand 5 "cc_register" "") (const_int 0)])
9636 (plus:SI
9637 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9638 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9639 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9640 "TARGET_ARM"
9641 "@
9642 add%d4\\t%0, %2, %3
9643 sub%d4\\t%0, %2, #%n3
9644 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9645 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9646 [(set_attr "conds" "use")
9647 (set_attr "length" "4,4,8,8")
9648 (set_attr "type" "*,*,*,*")]
9649 )
9650
9651 (define_insn "*ifcompare_move_plus"
9652 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9653 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9654 [(match_operand:SI 4 "s_register_operand" "r,r")
9655 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9656 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9657 (plus:SI
9658 (match_operand:SI 2 "s_register_operand" "r,r")
9659 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9660 (clobber (reg:CC CC_REGNUM))]
9661 "TARGET_ARM"
9662 "#"
9663 [(set_attr "conds" "clob")
9664 (set_attr "length" "8,12")]
9665 )
9666
9667 (define_insn "*if_move_plus"
9668 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9669 (if_then_else:SI
9670 (match_operator 4 "arm_comparison_operator"
9671 [(match_operand 5 "cc_register" "") (const_int 0)])
9672 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9673 (plus:SI
9674 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9675 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9676 "TARGET_ARM"
9677 "@
9678 add%D4\\t%0, %2, %3
9679 sub%D4\\t%0, %2, #%n3
9680 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9681 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9682 [(set_attr "conds" "use")
9683 (set_attr "length" "4,4,8,8")
9684 (set_attr "type" "*,*,*,*")]
9685 )
9686
9687 (define_insn "*ifcompare_arith_arith"
9688 [(set (match_operand:SI 0 "s_register_operand" "=r")
9689 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9690 [(match_operand:SI 5 "s_register_operand" "r")
9691 (match_operand:SI 6 "arm_add_operand" "rIL")])
9692 (match_operator:SI 8 "shiftable_operator"
9693 [(match_operand:SI 1 "s_register_operand" "r")
9694 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9695 (match_operator:SI 7 "shiftable_operator"
9696 [(match_operand:SI 3 "s_register_operand" "r")
9697 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9698 (clobber (reg:CC CC_REGNUM))]
9699 "TARGET_ARM"
9700 "#"
9701 [(set_attr "conds" "clob")
9702 (set_attr "length" "12")]
9703 )
9704
9705 (define_insn "*if_arith_arith"
9706 [(set (match_operand:SI 0 "s_register_operand" "=r")
9707 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9708 [(match_operand 8 "cc_register" "") (const_int 0)])
9709 (match_operator:SI 6 "shiftable_operator"
9710 [(match_operand:SI 1 "s_register_operand" "r")
9711 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9712 (match_operator:SI 7 "shiftable_operator"
9713 [(match_operand:SI 3 "s_register_operand" "r")
9714 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9715 "TARGET_ARM"
9716 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9717 [(set_attr "conds" "use")
9718 (set_attr "length" "8")]
9719 )
9720
9721 (define_insn "*ifcompare_arith_move"
9722 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9723 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9724 [(match_operand:SI 2 "s_register_operand" "r,r")
9725 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9726 (match_operator:SI 7 "shiftable_operator"
9727 [(match_operand:SI 4 "s_register_operand" "r,r")
9728 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9729 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9730 (clobber (reg:CC CC_REGNUM))]
9731 "TARGET_ARM"
9732 "*
9733 /* If we have an operation where (op x 0) is the identity operation,
9734 the conditional operator is LT or GE, we are comparing against zero,
9735 and everything is in registers, then we can do this in two instructions.  */
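  /* Illustrative example (hypothetical register numbers): for
     r0 = (r2 < 0 ? r4 + r5 : r4) the LT case below produces
       and r0, r5, r2, asr #31
       add r0, r4, r0
     because the arithmetic shift yields all ones exactly when r2 is
     negative.  */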
9736 if (operands[3] == const0_rtx
9737 && GET_CODE (operands[7]) != AND
9738 && GET_CODE (operands[5]) == REG
9739 && GET_CODE (operands[1]) == REG
9740 && REGNO (operands[1]) == REGNO (operands[4])
9741 && REGNO (operands[4]) != REGNO (operands[0]))
9742 {
9743 if (GET_CODE (operands[6]) == LT)
9744 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9745 else if (GET_CODE (operands[6]) == GE)
9746 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9747 }
9748 if (GET_CODE (operands[3]) == CONST_INT
9749 && !const_ok_for_arm (INTVAL (operands[3])))
9750 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9751 else
9752 output_asm_insn (\"cmp\\t%2, %3\", operands);
9753 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9754 if (which_alternative != 0)
9755 return \"mov%D6\\t%0, %1\";
9756 return \"\";
9757 "
9758 [(set_attr "conds" "clob")
9759 (set_attr "length" "8,12")]
9760 )
9761
9762 (define_insn "*if_arith_move"
9763 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9764 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9765 [(match_operand 6 "cc_register" "") (const_int 0)])
9766 (match_operator:SI 5 "shiftable_operator"
9767 [(match_operand:SI 2 "s_register_operand" "r,r")
9768 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9769 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9770 "TARGET_ARM"
9771 "@
9772 %I5%d4\\t%0, %2, %3
9773 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9774 [(set_attr "conds" "use")
9775 (set_attr "length" "4,8")
9776 (set_attr "type" "*,*")]
9777 )
9778
9779 (define_insn "*ifcompare_move_arith"
9780 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9781 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9782 [(match_operand:SI 4 "s_register_operand" "r,r")
9783 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9784 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9785 (match_operator:SI 7 "shiftable_operator"
9786 [(match_operand:SI 2 "s_register_operand" "r,r")
9787 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9788 (clobber (reg:CC CC_REGNUM))]
9789 "TARGET_ARM"
9790 "*
9791 /* If we have an operation where (op x 0) is the identity operation,
9792 the conditional operator is LT or GE, we are comparing against zero,
9793 and everything is in registers, then we can do this in two instructions.  */
9794 if (operands[5] == const0_rtx
9795 && GET_CODE (operands[7]) != AND
9796 && GET_CODE (operands[3]) == REG
9797 && GET_CODE (operands[1]) == REG
9798 && REGNO (operands[1]) == REGNO (operands[2])
9799 && REGNO (operands[2]) != REGNO (operands[0]))
9800 {
9801 if (GET_CODE (operands[6]) == GE)
9802 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9803 else if (GET_CODE (operands[6]) == LT)
9804 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9805 }
9806
9807 if (GET_CODE (operands[5]) == CONST_INT
9808 && !const_ok_for_arm (INTVAL (operands[5])))
9809 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9810 else
9811 output_asm_insn (\"cmp\\t%4, %5\", operands);
9812
9813 if (which_alternative != 0)
9814 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9815 return \"%I7%D6\\t%0, %2, %3\";
9816 "
9817 [(set_attr "conds" "clob")
9818 (set_attr "length" "8,12")]
9819 )
9820
9821 (define_insn "*if_move_arith"
9822 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9823 (if_then_else:SI
9824 (match_operator 4 "arm_comparison_operator"
9825 [(match_operand 6 "cc_register" "") (const_int 0)])
9826 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9827 (match_operator:SI 5 "shiftable_operator"
9828 [(match_operand:SI 2 "s_register_operand" "r,r")
9829 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9830 "TARGET_ARM"
9831 "@
9832 %I5%D4\\t%0, %2, %3
9833 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9834 [(set_attr "conds" "use")
9835 (set_attr "length" "4,8")
9836 (set_attr "type" "*,*")]
9837 )
9838
9839 (define_insn "*ifcompare_move_not"
9840 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9841 (if_then_else:SI
9842 (match_operator 5 "arm_comparison_operator"
9843 [(match_operand:SI 3 "s_register_operand" "r,r")
9844 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9845 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9846 (not:SI
9847 (match_operand:SI 2 "s_register_operand" "r,r"))))
9848 (clobber (reg:CC CC_REGNUM))]
9849 "TARGET_ARM"
9850 "#"
9851 [(set_attr "conds" "clob")
9852 (set_attr "length" "8,12")]
9853 )
9854
9855 (define_insn "*if_move_not"
9856 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9857 (if_then_else:SI
9858 (match_operator 4 "arm_comparison_operator"
9859 [(match_operand 3 "cc_register" "") (const_int 0)])
9860 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9861 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9862 "TARGET_ARM"
9863 "@
9864 mvn%D4\\t%0, %2
9865 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9866 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9867 [(set_attr "conds" "use")
9868 (set_attr "length" "4,8,8")]
9869 )
9870
9871 (define_insn "*ifcompare_not_move"
9872 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9873 (if_then_else:SI
9874 (match_operator 5 "arm_comparison_operator"
9875 [(match_operand:SI 3 "s_register_operand" "r,r")
9876 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9877 (not:SI
9878 (match_operand:SI 2 "s_register_operand" "r,r"))
9879 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9880 (clobber (reg:CC CC_REGNUM))]
9881 "TARGET_ARM"
9882 "#"
9883 [(set_attr "conds" "clob")
9884 (set_attr "length" "8,12")]
9885 )
9886
9887 (define_insn "*if_not_move"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9889 (if_then_else:SI
9890 (match_operator 4 "arm_comparison_operator"
9891 [(match_operand 3 "cc_register" "") (const_int 0)])
9892 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9893 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9894 "TARGET_ARM"
9895 "@
9896 mvn%d4\\t%0, %2
9897 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9898 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9899 [(set_attr "conds" "use")
9900 (set_attr "length" "4,8,8")]
9901 )
9902
9903 (define_insn "*ifcompare_shift_move"
9904 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9905 (if_then_else:SI
9906 (match_operator 6 "arm_comparison_operator"
9907 [(match_operand:SI 4 "s_register_operand" "r,r")
9908 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9909 (match_operator:SI 7 "shift_operator"
9910 [(match_operand:SI 2 "s_register_operand" "r,r")
9911 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9912 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9913 (clobber (reg:CC CC_REGNUM))]
9914 "TARGET_ARM"
9915 "#"
9916 [(set_attr "conds" "clob")
9917 (set_attr "length" "8,12")]
9918 )
9919
9920 (define_insn "*if_shift_move"
9921 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9922 (if_then_else:SI
9923 (match_operator 5 "arm_comparison_operator"
9924 [(match_operand 6 "cc_register" "") (const_int 0)])
9925 (match_operator:SI 4 "shift_operator"
9926 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9927 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9928 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9929 "TARGET_ARM"
9930 "@
9931 mov%d5\\t%0, %2%S4
9932 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9933 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9934 [(set_attr "conds" "use")
9935 (set_attr "shift" "2")
9936 (set_attr "length" "4,8,8")
9937 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9938 (const_string "alu_shift")
9939 (const_string "alu_shift_reg")))]
9940 )
9941
9942 (define_insn "*ifcompare_move_shift"
9943 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9944 (if_then_else:SI
9945 (match_operator 6 "arm_comparison_operator"
9946 [(match_operand:SI 4 "s_register_operand" "r,r")
9947 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9948 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9949 (match_operator:SI 7 "shift_operator"
9950 [(match_operand:SI 2 "s_register_operand" "r,r")
9951 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9952 (clobber (reg:CC CC_REGNUM))]
9953 "TARGET_ARM"
9954 "#"
9955 [(set_attr "conds" "clob")
9956 (set_attr "length" "8,12")]
9957 )
9958
9959 (define_insn "*if_move_shift"
9960 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9961 (if_then_else:SI
9962 (match_operator 5 "arm_comparison_operator"
9963 [(match_operand 6 "cc_register" "") (const_int 0)])
9964 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9965 (match_operator:SI 4 "shift_operator"
9966 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9967 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9968 "TARGET_ARM"
9969 "@
9970 mov%D5\\t%0, %2%S4
9971 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9972 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9973 [(set_attr "conds" "use")
9974 (set_attr "shift" "2")
9975 (set_attr "length" "4,8,8")
9976 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9977 (const_string "alu_shift")
9978 (const_string "alu_shift_reg")))]
9979 )
9980
9981 (define_insn "*ifcompare_shift_shift"
9982 [(set (match_operand:SI 0 "s_register_operand" "=r")
9983 (if_then_else:SI
9984 (match_operator 7 "arm_comparison_operator"
9985 [(match_operand:SI 5 "s_register_operand" "r")
9986 (match_operand:SI 6 "arm_add_operand" "rIL")])
9987 (match_operator:SI 8 "shift_operator"
9988 [(match_operand:SI 1 "s_register_operand" "r")
9989 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9990 (match_operator:SI 9 "shift_operator"
9991 [(match_operand:SI 3 "s_register_operand" "r")
9992 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9993 (clobber (reg:CC CC_REGNUM))]
9994 "TARGET_ARM"
9995 "#"
9996 [(set_attr "conds" "clob")
9997 (set_attr "length" "12")]
9998 )
9999
10000 (define_insn "*if_shift_shift"
10001 [(set (match_operand:SI 0 "s_register_operand" "=r")
10002 (if_then_else:SI
10003 (match_operator 5 "arm_comparison_operator"
10004 [(match_operand 8 "cc_register" "") (const_int 0)])
10005 (match_operator:SI 6 "shift_operator"
10006 [(match_operand:SI 1 "s_register_operand" "r")
10007 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10008 (match_operator:SI 7 "shift_operator"
10009 [(match_operand:SI 3 "s_register_operand" "r")
10010 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10011 "TARGET_ARM"
10012 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10013 [(set_attr "conds" "use")
10014 (set_attr "shift" "1")
10015 (set_attr "length" "8")
10016 (set (attr "type") (if_then_else
10017 (and (match_operand 2 "const_int_operand" "")
10018 (match_operand 4 "const_int_operand" ""))
10019 (const_string "alu_shift")
10020 (const_string "alu_shift_reg")))]
10021 )
10022
10023 (define_insn "*ifcompare_not_arith"
10024 [(set (match_operand:SI 0 "s_register_operand" "=r")
10025 (if_then_else:SI
10026 (match_operator 6 "arm_comparison_operator"
10027 [(match_operand:SI 4 "s_register_operand" "r")
10028 (match_operand:SI 5 "arm_add_operand" "rIL")])
10029 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10030 (match_operator:SI 7 "shiftable_operator"
10031 [(match_operand:SI 2 "s_register_operand" "r")
10032 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10033 (clobber (reg:CC CC_REGNUM))]
10034 "TARGET_ARM"
10035 "#"
10036 [(set_attr "conds" "clob")
10037 (set_attr "length" "12")]
10038 )
10039
10040 (define_insn "*if_not_arith"
10041 [(set (match_operand:SI 0 "s_register_operand" "=r")
10042 (if_then_else:SI
10043 (match_operator 5 "arm_comparison_operator"
10044 [(match_operand 4 "cc_register" "") (const_int 0)])
10045 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10046 (match_operator:SI 6 "shiftable_operator"
10047 [(match_operand:SI 2 "s_register_operand" "r")
10048 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10049 "TARGET_ARM"
10050 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10051 [(set_attr "conds" "use")
10052 (set_attr "length" "8")]
10053 )
10054
10055 (define_insn "*ifcompare_arith_not"
10056 [(set (match_operand:SI 0 "s_register_operand" "=r")
10057 (if_then_else:SI
10058 (match_operator 6 "arm_comparison_operator"
10059 [(match_operand:SI 4 "s_register_operand" "r")
10060 (match_operand:SI 5 "arm_add_operand" "rIL")])
10061 (match_operator:SI 7 "shiftable_operator"
10062 [(match_operand:SI 2 "s_register_operand" "r")
10063 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10064 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10065 (clobber (reg:CC CC_REGNUM))]
10066 "TARGET_ARM"
10067 "#"
10068 [(set_attr "conds" "clob")
10069 (set_attr "length" "12")]
10070 )
10071
10072 (define_insn "*if_arith_not"
10073 [(set (match_operand:SI 0 "s_register_operand" "=r")
10074 (if_then_else:SI
10075 (match_operator 5 "arm_comparison_operator"
10076 [(match_operand 4 "cc_register" "") (const_int 0)])
10077 (match_operator:SI 6 "shiftable_operator"
10078 [(match_operand:SI 2 "s_register_operand" "r")
10079 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10080 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10081 "TARGET_ARM"
10082 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10083 [(set_attr "conds" "use")
10084 (set_attr "length" "8")]
10085 )
10086
10087 (define_insn "*ifcompare_neg_move"
10088 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10089 (if_then_else:SI
10090 (match_operator 5 "arm_comparison_operator"
10091 [(match_operand:SI 3 "s_register_operand" "r,r")
10092 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10093 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10094 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10095 (clobber (reg:CC CC_REGNUM))]
10096 "TARGET_ARM"
10097 "#"
10098 [(set_attr "conds" "clob")
10099 (set_attr "length" "8,12")]
10100 )
10101
10102 (define_insn "*if_neg_move"
10103 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10104 (if_then_else:SI
10105 (match_operator 4 "arm_comparison_operator"
10106 [(match_operand 3 "cc_register" "") (const_int 0)])
10107 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10108 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10109 "TARGET_ARM"
10110 "@
10111 rsb%d4\\t%0, %2, #0
10112 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10113 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10114 [(set_attr "conds" "use")
10115 (set_attr "length" "4,8,8")]
10116 )
10117
10118 (define_insn "*ifcompare_move_neg"
10119 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10120 (if_then_else:SI
10121 (match_operator 5 "arm_comparison_operator"
10122 [(match_operand:SI 3 "s_register_operand" "r,r")
10123 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10124 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10125 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10126 (clobber (reg:CC CC_REGNUM))]
10127 "TARGET_ARM"
10128 "#"
10129 [(set_attr "conds" "clob")
10130 (set_attr "length" "8,12")]
10131 )
10132
10133 (define_insn "*if_move_neg"
10134 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10135 (if_then_else:SI
10136 (match_operator 4 "arm_comparison_operator"
10137 [(match_operand 3 "cc_register" "") (const_int 0)])
10138 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10139 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10140 "TARGET_ARM"
10141 "@
10142 rsb%D4\\t%0, %2, #0
10143 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10144 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10145 [(set_attr "conds" "use")
10146 (set_attr "length" "4,8,8")]
10147 )
10148
10149 (define_insn "*arith_adjacentmem"
10150 [(set (match_operand:SI 0 "s_register_operand" "=r")
10151 (match_operator:SI 1 "shiftable_operator"
10152 [(match_operand:SI 2 "memory_operand" "m")
10153 (match_operand:SI 3 "memory_operand" "m")]))
10154 (clobber (match_scratch:SI 4 "=r"))]
10155 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10156 "*
10157 {
10158 rtx ldm[3];
10159 rtx arith[4];
10160 rtx base_reg;
10161 HOST_WIDE_INT val1 = 0, val2 = 0;
10162
10163 if (REGNO (operands[0]) > REGNO (operands[4]))
10164 {
10165 ldm[1] = operands[4];
10166 ldm[2] = operands[0];
10167 }
10168 else
10169 {
10170 ldm[1] = operands[0];
10171 ldm[2] = operands[4];
10172 }
10173
10174 base_reg = XEXP (operands[2], 0);
10175
10176 if (!REG_P (base_reg))
10177 {
10178 val1 = INTVAL (XEXP (base_reg, 1));
10179 base_reg = XEXP (base_reg, 0);
10180 }
10181
10182 if (!REG_P (XEXP (operands[3], 0)))
10183 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10184
10185 arith[0] = operands[0];
10186 arith[3] = operands[1];
10187
10188 if (val1 < val2)
10189 {
10190 arith[1] = ldm[1];
10191 arith[2] = ldm[2];
10192 }
10193 else
10194 {
10195 arith[1] = ldm[2];
10196 arith[2] = ldm[1];
10197 }
10198
10199 ldm[0] = base_reg;
10200 if (val1 != 0 && val2 != 0)
10201 {
10202 rtx ops[3];
10203
10204 if (val1 == 4 || val2 == 4)
10205 /* Other val must be 8, since we know they are adjacent and neither
10206 is zero. */
10207 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10208 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10209 {
10210 ldm[0] = ops[0] = operands[4];
10211 ops[1] = base_reg;
10212 ops[2] = GEN_INT (val1);
10213 output_add_immediate (ops);
10214 if (val1 < val2)
10215 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10216 else
10217 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10218 }
10219 else
10220 {
10221 /* Offset is out of range for a single add, so use two ldr instructions. */
10222 ops[0] = ldm[1];
10223 ops[1] = base_reg;
10224 ops[2] = GEN_INT (val1);
10225 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10226 ops[0] = ldm[2];
10227 ops[2] = GEN_INT (val2);
10228 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10229 }
10230 }
10231 else if (val1 != 0)
10232 {
10233 if (val1 < val2)
10234 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10235 else
10236 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10237 }
10238 else
10239 {
10240 if (val1 < val2)
10241 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10242 else
10243 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10244 }
10245 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10246 return \"\";
10247 }"
10248 [(set_attr "length" "12")
10249 (set_attr "predicable" "yes")
10250 (set_attr "type" "load1")]
10251 )
10252
10253 ; This pattern is never tried by combine, so do it as a peephole
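;; As an illustrative sketch (assumed registers, not taken from the sources):
;; for a fragment like
;;   y = x; if (x == 0) ...
;; the separate "mov r3, r1" / "cmp r1, #0" pair can be rewritten by the
;; peephole below into a single parallel that both copies the value and sets
;; the flags, e.g. "subs r3, r1, #0".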
10254
10255 (define_peephole2
10256 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10257 (match_operand:SI 1 "arm_general_register_operand" ""))
10258 (set (reg:CC CC_REGNUM)
10259 (compare:CC (match_dup 1) (const_int 0)))]
10260 "TARGET_ARM"
10261 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10262 (set (match_dup 0) (match_dup 1))])]
10263 ""
10264 )
10265
10266 ; Peepholes to spot possible load- and store-multiples; if the ordering is
10267 ; reversed, check that the memory references aren't volatile.
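;; As a hedged illustration (assumed source, not from the original file): four
;; loads of consecutive words through one base register, e.g.
;;   int sum4 (int *p) { return p[0] + p[1] + p[2] + p[3]; }
;; may have its four ldr instructions collapsed by the first peephole below
;; into a single ldmia of the base, provided load_multiple_sequence accepts
;; the register ordering and none of the memory references is volatile.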
10268
10269 (define_peephole
10270 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10271 (match_operand:SI 4 "memory_operand" "m"))
10272 (set (match_operand:SI 1 "s_register_operand" "=rk")
10273 (match_operand:SI 5 "memory_operand" "m"))
10274 (set (match_operand:SI 2 "s_register_operand" "=rk")
10275 (match_operand:SI 6 "memory_operand" "m"))
10276 (set (match_operand:SI 3 "s_register_operand" "=rk")
10277 (match_operand:SI 7 "memory_operand" "m"))]
10278 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10279 "*
10280 return emit_ldm_seq (operands, 4);
10281 "
10282 )
10283
10284 (define_peephole
10285 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10286 (match_operand:SI 3 "memory_operand" "m"))
10287 (set (match_operand:SI 1 "s_register_operand" "=rk")
10288 (match_operand:SI 4 "memory_operand" "m"))
10289 (set (match_operand:SI 2 "s_register_operand" "=rk")
10290 (match_operand:SI 5 "memory_operand" "m"))]
10291 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10292 "*
10293 return emit_ldm_seq (operands, 3);
10294 "
10295 )
10296
10297 (define_peephole
10298 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10299 (match_operand:SI 2 "memory_operand" "m"))
10300 (set (match_operand:SI 1 "s_register_operand" "=rk")
10301 (match_operand:SI 3 "memory_operand" "m"))]
10302 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10303 "*
10304 return emit_ldm_seq (operands, 2);
10305 "
10306 )
10307
10308 (define_peephole
10309 [(set (match_operand:SI 4 "memory_operand" "=m")
10310 (match_operand:SI 0 "s_register_operand" "rk"))
10311 (set (match_operand:SI 5 "memory_operand" "=m")
10312 (match_operand:SI 1 "s_register_operand" "rk"))
10313 (set (match_operand:SI 6 "memory_operand" "=m")
10314 (match_operand:SI 2 "s_register_operand" "rk"))
10315 (set (match_operand:SI 7 "memory_operand" "=m")
10316 (match_operand:SI 3 "s_register_operand" "rk"))]
10317 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10318 "*
10319 return emit_stm_seq (operands, 4);
10320 "
10321 )
10322
10323 (define_peephole
10324 [(set (match_operand:SI 3 "memory_operand" "=m")
10325 (match_operand:SI 0 "s_register_operand" "rk"))
10326 (set (match_operand:SI 4 "memory_operand" "=m")
10327 (match_operand:SI 1 "s_register_operand" "rk"))
10328 (set (match_operand:SI 5 "memory_operand" "=m")
10329 (match_operand:SI 2 "s_register_operand" "rk"))]
10330 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10331 "*
10332 return emit_stm_seq (operands, 3);
10333 "
10334 )
10335
10336 (define_peephole
10337 [(set (match_operand:SI 2 "memory_operand" "=m")
10338 (match_operand:SI 0 "s_register_operand" "rk"))
10339 (set (match_operand:SI 3 "memory_operand" "=m")
10340 (match_operand:SI 1 "s_register_operand" "rk"))]
10341 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10342 "*
10343 return emit_stm_seq (operands, 2);
10344 "
10345 )
10346
10347 (define_split
10348 [(set (match_operand:SI 0 "s_register_operand" "")
10349 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10350 (const_int 0))
10351 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10352 [(match_operand:SI 3 "s_register_operand" "")
10353 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10354 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10355 "TARGET_ARM"
10356 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10357 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10358 (match_dup 5)))]
10359 ""
10360 )
10361
10362 ;; This split can be used because CC_Z mode implies that the following
10363 ;; branch will be an equality or an unsigned inequality, so the sign
10364 ;; extension is not needed.
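;; For example (an assumed source fragment): in
;;   int f (char *p) { return *p == 3; }
;; only equality matters, so rather than sign extending the loaded byte the
;; split below allows a zero-extending ldrb followed by a compare against the
;; constant shifted back down (here, cmp against #3).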
10365
10366 (define_split
10367 [(set (reg:CC_Z CC_REGNUM)
10368 (compare:CC_Z
10369 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10370 (const_int 24))
10371 (match_operand 1 "const_int_operand" "")))
10372 (clobber (match_scratch:SI 2 ""))]
10373 "TARGET_ARM
10374 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10375 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10376 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10377 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10378 "
10379 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10380 "
10381 )
10382 ;; ??? Check the patterns above for Thumb-2 usefulness
10383
10384 (define_expand "prologue"
10385 [(clobber (const_int 0))]
10386 "TARGET_EITHER"
10387 "if (TARGET_32BIT)
10388 arm_expand_prologue ();
10389 else
10390 thumb1_expand_prologue ();
10391 DONE;
10392 "
10393 )
10394
10395 (define_expand "epilogue"
10396 [(clobber (const_int 0))]
10397 "TARGET_EITHER"
10398 "
10399 if (crtl->calls_eh_return)
10400 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10401 if (TARGET_THUMB1)
10402 thumb1_expand_epilogue ();
10403 else if (USE_RETURN_INSN (FALSE))
10404 {
10405 emit_jump_insn (gen_return ());
10406 DONE;
10407 }
10408 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10409 gen_rtvec (1,
10410 gen_rtx_RETURN (VOIDmode)),
10411 VUNSPEC_EPILOGUE));
10412 DONE;
10413 "
10414 )
10415
10416 ;; Note - although unspec_volatiles USE all hard registers,
10417 ;; USEs are ignored after reload has completed.  Thus we need
10418 ;; to add an unspec of the link register to ensure that flow
10419 ;; does not think that it is unused by the sibcall branch that
10420 ;; will replace the standard function epilogue.
10421 (define_insn "sibcall_epilogue"
10422 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10423 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10424 "TARGET_32BIT"
10425 "*
10426 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10427 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10428 return arm_output_epilogue (next_nonnote_insn (insn));
10429 "
10430 ;; Length is absolute worst case
10431 [(set_attr "length" "44")
10432 (set_attr "type" "block")
10433 ;; We don't clobber the conditions, but the potential length of this
10434 ;; operation is sufficient to make conditionalizing the sequence
10435 ;; unlikely to be profitable.
10436 (set_attr "conds" "clob")]
10437 )
10438
10439 (define_insn "*epilogue_insns"
10440 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10441 "TARGET_EITHER"
10442 "*
10443 if (TARGET_32BIT)
10444 return arm_output_epilogue (NULL);
10445 else /* TARGET_THUMB1 */
10446 return thumb_unexpanded_epilogue ();
10447 "
10448 ; Length is absolute worst case
10449 [(set_attr "length" "44")
10450 (set_attr "type" "block")
10451 ;; We don't clobber the conditions, but the potential length of this
10452 ;; operation is sufficient to make conditionalizing the sequence
10453 ;; unlikely to be profitable.
10454 (set_attr "conds" "clob")]
10455 )
10456
10457 (define_expand "eh_epilogue"
10458 [(use (match_operand:SI 0 "register_operand" ""))
10459 (use (match_operand:SI 1 "register_operand" ""))
10460 (use (match_operand:SI 2 "register_operand" ""))]
10461 "TARGET_EITHER"
10462 "
10463 {
10464 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10465 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10466 {
10467 rtx ra = gen_rtx_REG (Pmode, 2);
10468
10469 emit_move_insn (ra, operands[2]);
10470 operands[2] = ra;
10471 }
10472 /* This is a hack -- we may have crystallized the function type too
10473 early. */
10474 cfun->machine->func_type = 0;
10475 }"
10476 )
10477
10478 ;; This split is only used during output to reduce the number of patterns
10479 ;; that need assembler instructions added to them.  We allowed the setting
10480 ;; of the conditions to be implicit during rtl generation so that
10481 ;; the conditional compare patterns would work.  However, this conflicts to
10482 ;; some extent with the conditional data operations, so we have to split them
10483 ;; up again here.
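;; As a rough illustration (assumed operands, not from the original sources):
;; a conditional select such as
;;   int f (int a, int b, int c, int d) { return a < b ? c : d; }
;; is carried as a single if_then_else with an implicit comparison before
;; reload; the splits below turn it back into an explicit compare followed by
;; conditionally executed moves, roughly
;;   cmp   r0, r1
;;   movlt r0, r2
;;   movge r0, r3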
10484
10485 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10486 ;; conditional execution sufficient?
10487
10488 (define_split
10489 [(set (match_operand:SI 0 "s_register_operand" "")
10490 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10491 [(match_operand 2 "" "") (match_operand 3 "" "")])
10492 (match_dup 0)
10493 (match_operand 4 "" "")))
10494 (clobber (reg:CC CC_REGNUM))]
10495 "TARGET_ARM && reload_completed"
10496 [(set (match_dup 5) (match_dup 6))
10497 (cond_exec (match_dup 7)
10498 (set (match_dup 0) (match_dup 4)))]
10499 "
10500 {
10501 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10502 operands[2], operands[3]);
10503 enum rtx_code rc = GET_CODE (operands[1]);
10504
10505 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10506 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10507 if (mode == CCFPmode || mode == CCFPEmode)
10508 rc = reverse_condition_maybe_unordered (rc);
10509 else
10510 rc = reverse_condition (rc);
10511
10512 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10513 }"
10514 )
10515
10516 (define_split
10517 [(set (match_operand:SI 0 "s_register_operand" "")
10518 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10519 [(match_operand 2 "" "") (match_operand 3 "" "")])
10520 (match_operand 4 "" "")
10521 (match_dup 0)))
10522 (clobber (reg:CC CC_REGNUM))]
10523 "TARGET_ARM && reload_completed"
10524 [(set (match_dup 5) (match_dup 6))
10525 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10526 (set (match_dup 0) (match_dup 4)))]
10527 "
10528 {
10529 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10530 operands[2], operands[3]);
10531
10532 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10533 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10534 }"
10535 )
10536
10537 (define_split
10538 [(set (match_operand:SI 0 "s_register_operand" "")
10539 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10540 [(match_operand 2 "" "") (match_operand 3 "" "")])
10541 (match_operand 4 "" "")
10542 (match_operand 5 "" "")))
10543 (clobber (reg:CC CC_REGNUM))]
10544 "TARGET_ARM && reload_completed"
10545 [(set (match_dup 6) (match_dup 7))
10546 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10547 (set (match_dup 0) (match_dup 4)))
10548 (cond_exec (match_dup 8)
10549 (set (match_dup 0) (match_dup 5)))]
10550 "
10551 {
10552 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10553 operands[2], operands[3]);
10554 enum rtx_code rc = GET_CODE (operands[1]);
10555
10556 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10557 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10558 if (mode == CCFPmode || mode == CCFPEmode)
10559 rc = reverse_condition_maybe_unordered (rc);
10560 else
10561 rc = reverse_condition (rc);
10562
10563 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10564 }"
10565 )
10566
10567 (define_split
10568 [(set (match_operand:SI 0 "s_register_operand" "")
10569 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10570 [(match_operand:SI 2 "s_register_operand" "")
10571 (match_operand:SI 3 "arm_add_operand" "")])
10572 (match_operand:SI 4 "arm_rhs_operand" "")
10573 (not:SI
10574 (match_operand:SI 5 "s_register_operand" ""))))
10575 (clobber (reg:CC CC_REGNUM))]
10576 "TARGET_ARM && reload_completed"
10577 [(set (match_dup 6) (match_dup 7))
10578 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10579 (set (match_dup 0) (match_dup 4)))
10580 (cond_exec (match_dup 8)
10581 (set (match_dup 0) (not:SI (match_dup 5))))]
10582 "
10583 {
10584 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10585 operands[2], operands[3]);
10586 enum rtx_code rc = GET_CODE (operands[1]);
10587
10588 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10589 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10590 if (mode == CCFPmode || mode == CCFPEmode)
10591 rc = reverse_condition_maybe_unordered (rc);
10592 else
10593 rc = reverse_condition (rc);
10594
10595 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10596 }"
10597 )
10598
10599 (define_insn "*cond_move_not"
10600 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10601 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10602 [(match_operand 3 "cc_register" "") (const_int 0)])
10603 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10604 (not:SI
10605 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10606 "TARGET_ARM"
10607 "@
10608 mvn%D4\\t%0, %2
10609 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10610 [(set_attr "conds" "use")
10611 (set_attr "length" "4,8")]
10612 )
10613
10614 ;; The next two patterns occur when an AND operation is followed by an
10615 ;; scc insn sequence.
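;; For instance (an assumed source fragment): a test of a single bit such as
;;   int f (int x) { return (x & 4) ? -1 : 0; }
;; can be represented as a one-bit sign_extract and matched by the first
;; pattern below, giving
;;   ands  r0, r0, #4
;;   mvnne r0, #0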
10616
10617 (define_insn "*sign_extract_onebit"
10618 [(set (match_operand:SI 0 "s_register_operand" "=r")
10619 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10620 (const_int 1)
10621 (match_operand:SI 2 "const_int_operand" "n")))
10622 (clobber (reg:CC CC_REGNUM))]
10623 "TARGET_ARM"
10624 "*
10625 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10626 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10627 return \"mvnne\\t%0, #0\";
10628 "
10629 [(set_attr "conds" "clob")
10630 (set_attr "length" "8")]
10631 )
10632
10633 (define_insn "*not_signextract_onebit"
10634 [(set (match_operand:SI 0 "s_register_operand" "=r")
10635 (not:SI
10636 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10637 (const_int 1)
10638 (match_operand:SI 2 "const_int_operand" "n"))))
10639 (clobber (reg:CC CC_REGNUM))]
10640 "TARGET_ARM"
10641 "*
10642 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10643 output_asm_insn (\"tst\\t%1, %2\", operands);
10644 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10645 return \"movne\\t%0, #0\";
10646 "
10647 [(set_attr "conds" "clob")
10648 (set_attr "length" "12")]
10649 )
10650 ;; ??? The above patterns need auditing for Thumb-2
10651
10652 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10653 ;; expressions. For simplicity, the first register is also in the unspec
10654 ;; part.
10655 (define_insn "*push_multi"
10656 [(match_parallel 2 "multi_register_push"
10657 [(set (match_operand:BLK 0 "memory_operand" "=m")
10658 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10659 UNSPEC_PUSH_MULT))])]
10660 "TARGET_32BIT"
10661 "*
10662 {
10663 int num_saves = XVECLEN (operands[2], 0);
10664
10665 /* For the StrongARM at least it is faster to
10666 use STR to store only a single register.
10667 In Thumb mode always use push, and the assembler will pick
10668 something appropriate. */
10669 if (num_saves == 1 && TARGET_ARM)
10670 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10671 else
10672 {
10673 int i;
10674 char pattern[100];
10675
10676 if (TARGET_ARM)
10677 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10678 else
10679 strcpy (pattern, \"push\\t{%1\");
10680
10681 for (i = 1; i < num_saves; i++)
10682 {
10683 strcat (pattern, \", %|\");
10684 strcat (pattern,
10685 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10686 }
10687
10688 strcat (pattern, \"}\");
10689 output_asm_insn (pattern, operands);
10690 }
10691
10692 return \"\";
10693 }"
10694 [(set_attr "type" "store4")]
10695 )
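;; As a brief illustration of the pattern above (assumed register set): saving
;; r4, r5 and lr in an ARM-mode prologue is printed as
;;   stmfd sp!, {r4, r5, lr}
;; while the same parallel in Thumb mode comes out as
;;   push  {r4, r5, lr}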
10696
10697 (define_insn "stack_tie"
10698 [(set (mem:BLK (scratch))
10699 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10700 (match_operand:SI 1 "s_register_operand" "rk")]
10701 UNSPEC_PRLG_STK))]
10702 ""
10703 ""
10704 [(set_attr "length" "0")]
10705 )
10706
10707 ;; Similarly for the floating point registers
10708 (define_insn "*push_fp_multi"
10709 [(match_parallel 2 "multi_register_push"
10710 [(set (match_operand:BLK 0 "memory_operand" "=m")
10711 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10712 UNSPEC_PUSH_MULT))])]
10713 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10714 "*
10715 {
10716 char pattern[100];
10717
10718 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10719 output_asm_insn (pattern, operands);
10720 return \"\";
10721 }"
10722 [(set_attr "type" "f_store")]
10723 )
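;; For example (assumed register and count): saving two FPA registers starting
;; at f4 through the pattern above prints as
;;   sfmfd f4, 2, [sp]!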
10724
10725 ;; Special patterns for dealing with the constant pool
10726
10727 (define_insn "align_4"
10728 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10729 "TARGET_EITHER"
10730 "*
10731 assemble_align (32);
10732 return \"\";
10733 "
10734 )
10735
10736 (define_insn "align_8"
10737 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10738 "TARGET_EITHER"
10739 "*
10740 assemble_align (64);
10741 return \"\";
10742 "
10743 )
10744
10745 (define_insn "consttable_end"
10746 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10747 "TARGET_EITHER"
10748 "*
10749 making_const_table = FALSE;
10750 return \"\";
10751 "
10752 )
10753
10754 (define_insn "consttable_1"
10755 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10756 "TARGET_THUMB1"
10757 "*
10758 making_const_table = TRUE;
10759 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10760 assemble_zeros (3);
10761 return \"\";
10762 "
10763 [(set_attr "length" "4")]
10764 )
10765
10766 (define_insn "consttable_2"
10767 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10768 "TARGET_THUMB1"
10769 "*
10770 making_const_table = TRUE;
10771 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10772 assemble_zeros (2);
10773 return \"\";
10774 "
10775 [(set_attr "length" "4")]
10776 )
10777
10778 (define_insn "consttable_4"
10779 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10780 "TARGET_EITHER"
10781 "*
10782 {
10783 making_const_table = TRUE;
10784 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10785 {
10786 case MODE_FLOAT:
10787 {
10788 REAL_VALUE_TYPE r;
10789 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10790 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10791 break;
10792 }
10793 default:
10794 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10795 break;
10796 }
10797 return \"\";
10798 }"
10799 [(set_attr "length" "4")]
10800 )
10801
10802 (define_insn "consttable_8"
10803 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10804 "TARGET_EITHER"
10805 "*
10806 {
10807 making_const_table = TRUE;
10808 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10809 {
10810 case MODE_FLOAT:
10811 {
10812 REAL_VALUE_TYPE r;
10813 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10814 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10815 break;
10816 }
10817 default:
10818 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10819 break;
10820 }
10821 return \"\";
10822 }"
10823 [(set_attr "length" "8")]
10824 )
10825
10826 (define_insn "consttable_16"
10827 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10828 "TARGET_EITHER"
10829 "*
10830 {
10831 making_const_table = TRUE;
10832 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10833 {
10834 case MODE_FLOAT:
10835 {
10836 REAL_VALUE_TYPE r;
10837 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10838 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10839 break;
10840 }
10841 default:
10842 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10843 break;
10844 }
10845 return \"\";
10846 }"
10847 [(set_attr "length" "16")]
10848 )
10849
10850 ;; Miscellaneous Thumb patterns
10851
10852 (define_expand "tablejump"
10853 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10854 (use (label_ref (match_operand 1 "" "")))])]
10855 "TARGET_THUMB1"
10856 "
10857 if (flag_pic)
10858 {
10859 /* Hopefully, CSE will eliminate this copy. */
10860 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10861 rtx reg2 = gen_reg_rtx (SImode);
10862
10863 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10864 operands[0] = reg2;
10865 }
10866 "
10867 )
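;; As an illustrative note (assumed source): a dense C switch is lowered to a
;; jump through a table of entries; with -fpic the expander above first adds
;; the address of the table's label to the loaded entry (the copy that the
;; comment hopes CSE will remove), presumably because under PIC the entries
;; are emitted relative to that label rather than as absolute addresses.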
10868
10869 ;; NB never uses BX.
10870 (define_insn "*thumb1_tablejump"
10871 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10872 (use (label_ref (match_operand 1 "" "")))]
10873 "TARGET_THUMB1"
10874 "mov\\t%|pc, %0"
10875 [(set_attr "length" "2")]
10876 )
10877
10878 ;; V5 instructions.
10879
10880 (define_insn "clzsi2"
10881 [(set (match_operand:SI 0 "s_register_operand" "=r")
10882 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10883 "TARGET_32BIT && arm_arch5"
10884 "clz%?\\t%0, %1"
10885 [(set_attr "predicable" "yes")
10886 (set_attr "insn" "clz")])
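;; For instance (an assumed use): __builtin_clz (x) expands through clzsi2
;; above to a single "clz r0, r0" on ARMv5 and later cores.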
10887
10888 ;; V5E instructions.
10889
10890 (define_insn "prefetch"
10891 [(prefetch (match_operand:SI 0 "address_operand" "p")
10892 (match_operand:SI 1 "" "")
10893 (match_operand:SI 2 "" ""))]
10894 "TARGET_32BIT && arm_arch5e"
10895 "pld\\t%a0")
10896
10897 ;; General predication pattern
10898
10899 (define_cond_exec
10900 [(match_operator 0 "arm_comparison_operator"
10901 [(match_operand 1 "cc_register" "")
10902 (const_int 0)])]
10903 "TARGET_32BIT"
10904 ""
10905 )
10906
10907 (define_insn "prologue_use"
10908 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10909 ""
10910 "%@ %0 needed for prologue"
10911 )
10912
10913
10914 ;; Patterns for exception handling
10915
10916 (define_expand "eh_return"
10917 [(use (match_operand 0 "general_operand" ""))]
10918 "TARGET_EITHER"
10919 "
10920 {
10921 if (TARGET_32BIT)
10922 emit_insn (gen_arm_eh_return (operands[0]));
10923 else
10924 emit_insn (gen_thumb_eh_return (operands[0]));
10925 DONE;
10926 }"
10927 )
10928
10929 ;; We can't expand this before we know where the link register is stored.
10930 (define_insn_and_split "arm_eh_return"
10931 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10932 VUNSPEC_EH_RETURN)
10933 (clobber (match_scratch:SI 1 "=&r"))]
10934 "TARGET_ARM"
10935 "#"
10936 "&& reload_completed"
10937 [(const_int 0)]
10938 "
10939 {
10940 arm_set_return_address (operands[0], operands[1]);
10941 DONE;
10942 }"
10943 )
10944
10945 (define_insn_and_split "thumb_eh_return"
10946 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10947 VUNSPEC_EH_RETURN)
10948 (clobber (match_scratch:SI 1 "=&l"))]
10949 "TARGET_THUMB1"
10950 "#"
10951 "&& reload_completed"
10952 [(const_int 0)]
10953 "
10954 {
10955 thumb_set_return_address (operands[0], operands[1]);
10956 DONE;
10957 }"
10958 )
10959
10960 \f
10961 ;; TLS support
10962
10963 (define_insn "load_tp_hard"
10964 [(set (match_operand:SI 0 "register_operand" "=r")
10965 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10966 "TARGET_HARD_TP"
10967 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10968 [(set_attr "predicable" "yes")]
10969 )
10970
10971 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10972 (define_insn "load_tp_soft"
10973 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10974 (clobber (reg:SI LR_REGNUM))
10975 (clobber (reg:SI IP_REGNUM))
10976 (clobber (reg:CC CC_REGNUM))]
10977 "TARGET_SOFT_TP"
10978 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10979 [(set_attr "conds" "clob")]
10980 )
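;; As an illustrative sketch (assumed thread-local variable and offset): with
;; a software thread pointer, a read such as
;;   __thread int counter;
;;   int get (void) { return counter; }
;; goes through the load_tp_soft pattern above, roughly
;;   bl  __aeabi_read_tp        @ thread pointer comes back in r0
;;   ldr r0, [r0, #<offset of counter>]
;; with lr, ip and the condition codes clobbered by the call, as the pattern
;; records.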
10981
10982 ;; Load the FPA co-processor patterns
10983 (include "fpa.md")
10984 ;; Load the Maverick co-processor patterns
10985 (include "cirrus.md")
10986 ;; Vector bits common to IWMMXT and Neon
10987 (include "vec-common.md")
10988 ;; Load the Intel Wireless Multimedia Extension patterns
10989 (include "iwmmxt.md")
10990 ;; Load the VFP co-processor patterns
10991 (include "vfp.md")
10992 ;; Thumb-2 patterns
10993 (include "thumb2.md")
10994 ;; Neon patterns
10995 (include "neon.md")
10996