1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2013 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7 ;; This file is part of GCC.
8
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
13
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instructions such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
38 ;; these instructions. Currently, this problem cannot happen because
39 ;; there are no separate Thumb1 patterns for individual instructions
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
42 ;; sbc or adc.
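;;
;; As an illustrative sketch only (register names are arbitrary): the
;; thumb_subdi3 pattern below emits the two-instruction sequence
;;     sub  rdlo, rdlo, rslo   @ assembled as SUBS; sets the carry flag
;;     sbc  rdhi, rdhi, rshi   @ consumes that carry flag
;; If the two halves were separate insns with no explicit flags
;; dependency, the scheduler could validly place another flag-setting
;; instruction between them and corrupt the borrow that SBC relies on.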
43
44 \f
45 ;;---------------------------------------------------------------------------
46 ;; Constants
47
48 ;; Register numbers -- All machine registers should be defined here
49 (define_constants
50 [(R0_REGNUM 0) ; First CORE register
51 (R1_REGNUM 1) ; Second CORE register
52 (IP_REGNUM 12) ; Scratch register
53 (SP_REGNUM 13) ; Stack pointer
54 (LR_REGNUM 14) ; Return address register
55 (PC_REGNUM 15) ; Program counter
56 (LAST_ARM_REGNUM 15) ;
57 (CC_REGNUM 100) ; Condition code pseudo register
58 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
59 ]
60 )
61 ;; 3rd operand to select_dominance_cc_mode
62 (define_constants
63 [(DOM_CC_X_AND_Y 0)
64 (DOM_CC_NX_OR_Y 1)
65 (DOM_CC_X_OR_Y 2)
66 ]
67 )
68 ;; conditional compare combination
69 (define_constants
70 [(CMP_CMP 0)
71 (CMN_CMP 1)
72 (CMP_CMN 2)
73 (CMN_CMN 3)
74 (NUM_OF_COND_CMP 4)
75 ]
76 )
77
78 \f
79 ;;---------------------------------------------------------------------------
80 ;; Attributes
81
82 ;; Processor type. This is created automatically from arm-cores.def.
83 (include "arm-tune.md")
84
85 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
86 ; generating ARM code. This is used to control the length of some insn
87 ; patterns that share the same RTL in both ARM and Thumb code.
88 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
89
90 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
91 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
92
93 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
94 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
95
96 ; We use this attribute to disable alternatives that can produce 32-bit
97 ; instructions inside an IT-block in Thumb2 state. ARMv8 deprecates IT blocks
98 ; that contain 32-bit instructions.
99 (define_attr "enabled_for_depr_it" "no,yes" (const_string "yes"))
100
101 ; This attribute is used to disable a predicated alternative when we have
102 ; arm_restrict_it.
103 (define_attr "predicable_short_it" "no,yes" (const_string "yes"))
104
105 ;; Operand number of an input operand that is shifted. Zero if the
106 ;; given instruction does not shift one of its input operands.
107 (define_attr "shift" "" (const_int 0))
108
109 ; Floating Point Unit. If we only have floating point emulation, then there
110 ; is no point in scheduling the floating point insns. (Well, for best
111 ; performance we should try to group them together).
112 (define_attr "fpu" "none,vfp"
113 (const (symbol_ref "arm_fpu_attr")))
114
115 (define_attr "predicated" "yes,no" (const_string "no"))
116
117 ; LENGTH of an instruction (in bytes)
118 (define_attr "length" ""
119 (const_int 4))
120
121 ; The architecture which supports the instruction (or alternative).
122 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
123 ; TARGET_32BIT, "t1" or "t2" to specify a particular Thumb mode, "v6"
124 ; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
125 ; arm_arch6. This attribute is used to compute the "enabled" attribute;
126 ; use "any" to enable an alternative in all cases.
127 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
128 (const_string "any"))
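; For example, the "*arm_addsi3" pattern further down in this file uses
;   (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
; so that its addw/subw alternatives are only enabled when compiling
; for Thumb-2.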
129
130 (define_attr "arch_enabled" "no,yes"
131 (cond [(eq_attr "arch" "any")
132 (const_string "yes")
133
134 (and (eq_attr "arch" "a")
135 (match_test "TARGET_ARM"))
136 (const_string "yes")
137
138 (and (eq_attr "arch" "t")
139 (match_test "TARGET_THUMB"))
140 (const_string "yes")
141
142 (and (eq_attr "arch" "t1")
143 (match_test "TARGET_THUMB1"))
144 (const_string "yes")
145
146 (and (eq_attr "arch" "t2")
147 (match_test "TARGET_THUMB2"))
148 (const_string "yes")
149
150 (and (eq_attr "arch" "32")
151 (match_test "TARGET_32BIT"))
152 (const_string "yes")
153
154 (and (eq_attr "arch" "v6")
155 (match_test "TARGET_32BIT && arm_arch6"))
156 (const_string "yes")
157
158 (and (eq_attr "arch" "nov6")
159 (match_test "TARGET_32BIT && !arm_arch6"))
160 (const_string "yes")
161
162 (and (eq_attr "arch" "avoid_neon_for_64bits")
163 (match_test "TARGET_NEON")
164 (not (match_test "TARGET_PREFER_NEON_64BITS")))
165 (const_string "yes")
166
167 (and (eq_attr "arch" "neon_for_64bits")
168 (match_test "TARGET_NEON")
169 (match_test "TARGET_PREFER_NEON_64BITS"))
170 (const_string "yes")
171
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")]
175
176 (const_string "no")))
177
178 (define_attr "opt" "any,speed,size"
179 (const_string "any"))
180
181 (define_attr "opt_enabled" "no,yes"
182 (cond [(eq_attr "opt" "any")
183 (const_string "yes")
184
185 (and (eq_attr "opt" "speed")
186 (match_test "optimize_function_for_speed_p (cfun)"))
187 (const_string "yes")
188
189 (and (eq_attr "opt" "size")
190 (match_test "optimize_function_for_size_p (cfun)"))
191 (const_string "yes")]
192 (const_string "no")))
193
194 ; Allows an insn to disable certain alternatives for reasons other than
195 ; arch support.
196 (define_attr "insn_enabled" "no,yes"
197 (const_string "yes"))
198
199 ; Enable all alternatives that are both arch_enabled and insn_enabled.
200 (define_attr "enabled" "no,yes"
201 (cond [(eq_attr "insn_enabled" "no")
202 (const_string "no")
203
204 (and (eq_attr "predicable_short_it" "no")
205 (and (eq_attr "predicated" "yes")
206 (match_test "arm_restrict_it")))
207 (const_string "no")
208
209 (and (eq_attr "enabled_for_depr_it" "no")
210 (match_test "arm_restrict_it"))
211 (const_string "no")
212
213 (eq_attr "arch_enabled" "no")
214 (const_string "no")
215
216 (eq_attr "opt_enabled" "no")
217 (const_string "no")]
218 (const_string "yes")))
219
220 ; POOL_RANGE is how far away from a constant pool entry this insn
221 ; can be placed. If the distance is zero, then this insn will never
222 ; reference the pool.
223 ; Note that for Thumb constant pools the PC value is rounded down to the
224 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
225 ; Thumb insns) should be set to <max_range> - 2.
226 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
227 ; before its address. It is set to <max_range> - (8 + <data_size>).
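; For instance (purely illustrative numbers): an insn whose addressing
; mode can reach 4096 bytes forward and that loads 4 bytes of data would
; get a Thumb pool_range of 4096 - 2 = 4094 and a neg_pool_range of
; 4096 - (8 + 4) = 4084.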
228 (define_attr "arm_pool_range" "" (const_int 0))
229 (define_attr "thumb2_pool_range" "" (const_int 0))
230 (define_attr "arm_neg_pool_range" "" (const_int 0))
231 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
232
233 (define_attr "pool_range" ""
234 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
235 (attr "arm_pool_range")))
236 (define_attr "neg_pool_range" ""
237 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
238 (attr "arm_neg_pool_range")))
239
240 ; An assembler sequence may clobber the condition codes without us knowing.
241 ; If such an insn references the pool, then we have no way of knowing how,
242 ; so use the most conservative value for pool_range.
243 (define_asm_attributes
244 [(set_attr "conds" "clob")
245 (set_attr "length" "4")
246 (set_attr "pool_range" "250")])
247
248 ;; The instruction used to implement a particular pattern. This
249 ;; information is used by pipeline descriptions to provide accurate
250 ;; scheduling information.
251
252 (define_attr "insn"
253 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,sat,other"
254 (const_string "other"))
255
256 ; The TYPE attribute is used to detect floating point instructions which, if
257 ; running on a co-processor, can run in parallel with other basic instructions.
258 ; If write-buffer scheduling is enabled then it can also be used in the
259 ; scheduling of writes.
260
261 ; Classification of each insn
262 ; Note: vfp.md has different meanings for some of these, and some further
263 ; types as well. See that file for details.
264 ; simple_alu_imm a simple alu instruction that doesn't hit memory or fp
265 ; regs or have a shifted source operand and has an immediate
266 ; operand. This currently only tracks very basic immediate
267 ; alu operations.
268 ; alu_reg any alu instruction that doesn't hit memory or fp
269 ; regs or have a shifted source operand
270 ; and does not have an immediate operand. This is
271 ; also the default
272 ; simple_alu_shift covers UXTH, UXTB, SXTH, SXTB
273 ; alu_shift any data instruction that doesn't hit memory or fp
274 ; regs, but has a source operand shifted by a constant
275 ; alu_shift_reg any data instruction that doesn't hit memory or fp
276 ; regs, but has a source operand shifted by a register value
277 ; mult a multiply instruction
278 ; block blockage insn, this blocks all functional units
279 ; float a floating point arithmetic operation (subject to expansion)
280 ; fdivd DFmode floating point division
281 ; fdivs SFmode floating point division
282 ; f_load[sd] A single/double load from memory. Used for VFP unit.
283 ; f_store[sd] A single/double store to memory. Used for VFP unit.
284 ; f_flag a transfer of co-processor flags to the CPSR
285 ; f_2_r transfer float to core (no memory needed)
286 ; r_2_f transfer core to float
287 ; f_cvt convert floating<->integral
288 ; branch a branch
289 ; call a subroutine call
290 ; load_byte load byte(s) from memory to arm registers
291 ; load1 load 1 word from memory to arm registers
292 ; load2 load 2 words from memory to arm registers
293 ; load3 load 3 words from memory to arm registers
294 ; load4 load 4 words from memory to arm registers
295 ; store1 store 1 word to memory from arm registers
296 ; store2 store 2 words
297 ; store3 store 3 words
298 ; store4 store 4 (or more) words
299 ;
300
301 (define_attr "type"
302 "simple_alu_imm,\
303 alu_reg,\
304 simple_alu_shift,\
305 alu_shift,\
306 alu_shift_reg,\
307 mult,\
308 block,\
309 float,\
310 fdivd,\
311 fdivs,\
312 fmuls,\
313 fmuld,\
314 fmacs,\
315 fmacd,\
316 ffmas,\
317 ffmad,\
318 f_rints,\
319 f_rintd,\
320 f_minmaxs,\
321 f_minmaxd,\
322 f_flag,\
323 f_loads,\
324 f_loadd,\
325 f_stores,\
326 f_stored,\
327 f_2_r,\
328 r_2_f,\
329 f_cvt,\
330 f_sels,\
331 f_seld,\
332 branch,\
333 call,\
334 load_byte,\
335 load1,\
336 load2,\
337 load3,\
338 load4,\
339 store1,\
340 store2,\
341 store3,\
342 store4,\
343 fconsts,\
344 fconstd,\
345 fadds,\
346 faddd,\
347 ffariths,\
348 ffarithd,\
349 fcmps,\
350 fcmpd,\
351 fcpys"
352 (if_then_else
353 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,\
354 umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
355 (const_string "mult")
356 (const_string "alu_reg")))
357
358 ; Is this an (integer side) multiply with a 64-bit result?
359 (define_attr "mul64" "no,yes"
360 (if_then_else
361 (eq_attr "insn"
362 "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
363 (const_string "yes")
364 (const_string "no")))
365
366 ; wtype for WMMX insn scheduling purposes.
367 (define_attr "wtype"
368 "none,wor,wxor,wand,wandn,wmov,tmcrr,tmrrc,wldr,wstr,tmcr,tmrc,wadd,wsub,wmul,wmac,wavg2,tinsr,textrm,wshufh,wcmpeq,wcmpgt,wmax,wmin,wpack,wunpckih,wunpckil,wunpckeh,wunpckel,wror,wsra,wsrl,wsll,wmadd,tmia,tmiaph,tmiaxy,tbcst,tmovmsk,wacc,waligni,walignr,tandc,textrc,torc,torvsc,wsad,wabs,wabsdiff,waddsubhx,wsubaddhx,wavg4,wmulw,wqmulm,wqmulwm,waddbhus,wqmiaxy,wmiaxy,wmiawxy,wmerge" (const_string "none"))
369
370 ; Load scheduling, set from the arm_ld_sched variable
371 ; initialized by arm_option_override()
372 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
373
374 ;; Classification of NEON instructions for scheduling purposes.
375 (define_attr "neon_type"
376 "neon_int_1,\
377 neon_int_2,\
378 neon_int_3,\
379 neon_int_4,\
380 neon_int_5,\
381 neon_vqneg_vqabs,\
382 neon_vmov,\
383 neon_vaba,\
384 neon_vsma,\
385 neon_vaba_qqq,\
386 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
387 neon_mul_qqq_8_16_32_ddd_32,\
388 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
389 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
390 neon_mla_qqq_8_16,\
391 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
392 neon_mla_qqq_32_qqd_32_scalar,\
393 neon_mul_ddd_16_scalar_32_16_long_scalar,\
394 neon_mul_qqd_32_scalar,\
395 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
396 neon_shift_1,\
397 neon_shift_2,\
398 neon_shift_3,\
399 neon_vshl_ddd,\
400 neon_vqshl_vrshl_vqrshl_qqq,\
401 neon_vsra_vrsra,\
402 neon_fp_vadd_ddd_vabs_dd,\
403 neon_fp_vadd_qqq_vabs_qq,\
404 neon_fp_vsum,\
405 neon_fp_vmul_ddd,\
406 neon_fp_vmul_qqd,\
407 neon_fp_vmla_ddd,\
408 neon_fp_vmla_qqq,\
409 neon_fp_vmla_ddd_scalar,\
410 neon_fp_vmla_qqq_scalar,\
411 neon_fp_vrecps_vrsqrts_ddd,\
412 neon_fp_vrecps_vrsqrts_qqq,\
413 neon_bp_simple,\
414 neon_bp_2cycle,\
415 neon_bp_3cycle,\
416 neon_ldr,\
417 neon_str,\
418 neon_vld1_1_2_regs,\
419 neon_vld1_3_4_regs,\
420 neon_vld2_2_regs_vld1_vld2_all_lanes,\
421 neon_vld2_4_regs,\
422 neon_vld3_vld4,\
423 neon_vst1_1_2_regs_vst2_2_regs,\
424 neon_vst1_3_4_regs,\
425 neon_vst2_4_regs_vst3_vst4,\
426 neon_vst3_vst4,\
427 neon_vld1_vld2_lane,\
428 neon_vld3_vld4_lane,\
429 neon_vst1_vst2_lane,\
430 neon_vst3_vst4_lane,\
431 neon_vld3_vld4_all_lanes,\
432 neon_mcr,\
433 neon_mcr_2_mcrr,\
434 neon_mrc,\
435 neon_mrrc,\
436 neon_ldm_2,\
437 neon_stm_2,\
438 none"
439 (const_string "none"))
440
441 ; condition codes: this one is used by final_prescan_insn to speed up
442 ; conditionalizing instructions. It saves having to scan the rtl to see if
443 ; it uses or alters the condition codes.
444 ;
445 ; USE means that the condition codes are used by the insn in the process of
446 ; outputting code; this means (at present) that we can't use the insn in
447 ; inlined branches.
448 ;
449 ; SET means that the purpose of the insn is to set the condition codes in a
450 ; well defined manner.
451 ;
452 ; CLOB means that the condition codes are altered in an undefined manner, if
453 ; they are altered at all
454 ;
455 ; UNCONDITIONAL means the instruction can not be conditionally executed and
456 ; that the instruction does not use or alter the condition codes.
457 ;
458 ; NOCOND means that the instruction does not use or alter the condition
459 ; codes but can be converted into a conditionally executed instruction.
460
461 (define_attr "conds" "use,set,clob,unconditional,nocond"
462 (if_then_else
463 (ior (eq_attr "is_thumb1" "yes")
464 (eq_attr "type" "call"))
465 (const_string "clob")
466 (if_then_else (eq_attr "neon_type" "none")
467 (const_string "nocond")
468 (const_string "unconditional"))))
469
470 ; Predicable means that the insn can be conditionally executed based on
471 ; an automatically added predicate (additional patterns are generated by
472 ; gen...). We default to 'no' because no Thumb patterns match this rule
473 ; and not all ARM patterns do.
474 (define_attr "predicable" "no,yes" (const_string "no"))
475
476 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
477 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
478 ; suffer blockages enough to warrant modelling this (and it can adversely
479 ; affect the schedule).
480 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
481
482 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
483 ; to stall the processor. Used with model_wbuf above.
484 (define_attr "write_conflict" "no,yes"
485 (if_then_else (eq_attr "type"
486 "block,call,load1")
487 (const_string "yes")
488 (const_string "no")))
489
490 ; Classify the insns into those that take one cycle and those that take more
491 ; than one on the main cpu execution unit.
492 (define_attr "core_cycles" "single,multi"
493 (if_then_else (eq_attr "type"
494 "simple_alu_imm,alu_reg,\
495 simple_alu_shift,alu_shift,\
496 float,fdivd,fdivs")
497 (const_string "single")
498 (const_string "multi")))
499
500 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
501 ;; distant label. Only applicable to Thumb code.
502 (define_attr "far_jump" "yes,no" (const_string "no"))
503
504
505 ;; The number of machine instructions this pattern expands to.
506 ;; Used for Thumb-2 conditional execution.
507 (define_attr "ce_count" "" (const_int 1))
508
509 ;;---------------------------------------------------------------------------
510 ;; Unspecs
511
512 (include "unspecs.md")
513
514 ;;---------------------------------------------------------------------------
515 ;; Mode iterators
516
517 (include "iterators.md")
518
519 ;;---------------------------------------------------------------------------
520 ;; Predicates
521
522 (include "predicates.md")
523 (include "constraints.md")
524
525 ;;---------------------------------------------------------------------------
526 ;; Pipeline descriptions
527
528 (define_attr "tune_cortexr4" "yes,no"
529 (const (if_then_else
530 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
531 (const_string "yes")
532 (const_string "no"))))
533
534 ;; True if the generic scheduling description should be used.
535
536 (define_attr "generic_sched" "yes,no"
537 (const (if_then_else
538 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexa53,cortexm4,marvell_pj4")
539 (eq_attr "tune_cortexr4" "yes"))
540 (const_string "no")
541 (const_string "yes"))))
542
543 (define_attr "generic_vfp" "yes,no"
544 (const (if_then_else
545 (and (eq_attr "fpu" "vfp")
546 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexa53,cortexm4,marvell_pj4")
547 (eq_attr "tune_cortexr4" "no"))
548 (const_string "yes")
549 (const_string "no"))))
550
551 (include "marvell-f-iwmmxt.md")
552 (include "arm-generic.md")
553 (include "arm926ejs.md")
554 (include "arm1020e.md")
555 (include "arm1026ejs.md")
556 (include "arm1136jfs.md")
557 (include "fa526.md")
558 (include "fa606te.md")
559 (include "fa626te.md")
560 (include "fmp626.md")
561 (include "fa726te.md")
562 (include "cortex-a5.md")
563 (include "cortex-a7.md")
564 (include "cortex-a8.md")
565 (include "cortex-a9.md")
566 (include "cortex-a15.md")
567 (include "cortex-a53.md")
568 (include "cortex-r4.md")
569 (include "cortex-r4f.md")
570 (include "cortex-m4.md")
571 (include "cortex-m4-fpu.md")
572 (include "vfp11.md")
573 (include "marvell-pj4.md")
574
575 \f
576 ;;---------------------------------------------------------------------------
577 ;; Insn patterns
578 ;;
579 ;; Addition insns.
580
581 ;; Note: For DImode insns, there is normally no reason why operands should
582 ;; not be in the same register; what we don't want is for something being
583 ;; written to partially overlap something that is an input.
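;; For example, if a DImode addition wrote its low result word into a
;; register that still holds the high word of one of its inputs, the
;; second (high-word) instruction would read a clobbered value; the "&"
;; early-clobber markers on the DImode patterns below guard against this.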
584
585 (define_expand "adddi3"
586 [(parallel
587 [(set (match_operand:DI 0 "s_register_operand" "")
588 (plus:DI (match_operand:DI 1 "s_register_operand" "")
589 (match_operand:DI 2 "arm_adddi_operand" "")))
590 (clobber (reg:CC CC_REGNUM))])]
591 "TARGET_EITHER"
592 "
593 if (TARGET_THUMB1)
594 {
595 if (!REG_P (operands[1]))
596 operands[1] = force_reg (DImode, operands[1]);
597 if (!REG_P (operands[2]))
598 operands[2] = force_reg (DImode, operands[2]);
599 }
600 "
601 )
602
603 (define_insn "*thumb1_adddi3"
604 [(set (match_operand:DI 0 "register_operand" "=l")
605 (plus:DI (match_operand:DI 1 "register_operand" "%0")
606 (match_operand:DI 2 "register_operand" "l")))
607 (clobber (reg:CC CC_REGNUM))
608 ]
609 "TARGET_THUMB1"
610 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
611 [(set_attr "length" "4")]
612 )
613
614 (define_insn_and_split "*arm_adddi3"
615 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
616 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
617 (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
618 (clobber (reg:CC CC_REGNUM))]
619 "TARGET_32BIT && !TARGET_NEON"
620 "#"
621 "TARGET_32BIT && reload_completed
622 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
623 [(parallel [(set (reg:CC_C CC_REGNUM)
624 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
625 (match_dup 1)))
626 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
627 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
628 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
629 "
630 {
631 operands[3] = gen_highpart (SImode, operands[0]);
632 operands[0] = gen_lowpart (SImode, operands[0]);
633 operands[4] = gen_highpart (SImode, operands[1]);
634 operands[1] = gen_lowpart (SImode, operands[1]);
635 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
636 operands[2] = gen_lowpart (SImode, operands[2]);
637 }"
638 [(set_attr "conds" "clob")
639 (set_attr "length" "8")]
640 )
641
642 (define_insn_and_split "*adddi_sesidi_di"
643 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
644 (plus:DI (sign_extend:DI
645 (match_operand:SI 2 "s_register_operand" "r,r"))
646 (match_operand:DI 1 "s_register_operand" "0,r")))
647 (clobber (reg:CC CC_REGNUM))]
648 "TARGET_32BIT"
649 "#"
650 "TARGET_32BIT && reload_completed"
651 [(parallel [(set (reg:CC_C CC_REGNUM)
652 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
653 (match_dup 1)))
654 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
655 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
656 (const_int 31))
657 (match_dup 4))
658 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
659 "
660 {
661 operands[3] = gen_highpart (SImode, operands[0]);
662 operands[0] = gen_lowpart (SImode, operands[0]);
663 operands[4] = gen_highpart (SImode, operands[1]);
664 operands[1] = gen_lowpart (SImode, operands[1]);
665 operands[2] = gen_lowpart (SImode, operands[2]);
666 }"
667 [(set_attr "conds" "clob")
668 (set_attr "length" "8")]
669 )
670
671 (define_insn_and_split "*adddi_zesidi_di"
672 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
673 (plus:DI (zero_extend:DI
674 (match_operand:SI 2 "s_register_operand" "r,r"))
675 (match_operand:DI 1 "s_register_operand" "0,r")))
676 (clobber (reg:CC CC_REGNUM))]
677 "TARGET_32BIT"
678 "#"
679 "TARGET_32BIT && reload_completed"
680 [(parallel [(set (reg:CC_C CC_REGNUM)
681 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
682 (match_dup 1)))
683 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
684 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
685 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
686 "
687 {
688 operands[3] = gen_highpart (SImode, operands[0]);
689 operands[0] = gen_lowpart (SImode, operands[0]);
690 operands[4] = gen_highpart (SImode, operands[1]);
691 operands[1] = gen_lowpart (SImode, operands[1]);
692 operands[2] = gen_lowpart (SImode, operands[2]);
693 }"
694 [(set_attr "conds" "clob")
695 (set_attr "length" "8")]
696 )
697
698 (define_expand "addsi3"
699 [(set (match_operand:SI 0 "s_register_operand" "")
700 (plus:SI (match_operand:SI 1 "s_register_operand" "")
701 (match_operand:SI 2 "reg_or_int_operand" "")))]
702 "TARGET_EITHER"
703 "
704 if (TARGET_32BIT && CONST_INT_P (operands[2]))
705 {
706 arm_split_constant (PLUS, SImode, NULL_RTX,
707 INTVAL (operands[2]), operands[0], operands[1],
708 optimize && can_create_pseudo_p ());
709 DONE;
710 }
711 "
712 )
713
714 ; If there is a scratch available, this will be faster than synthesizing the
715 ; addition.
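; For example (illustrative values and register names only): the constant
; 0xffff00ff is not a valid ARM immediate and neither is its negation,
; but its bitwise complement 0x0000ff00 is, so with a scratch register
; available the peephole below allows
;     mvn     r3, #0x0000ff00   @ r3 = 0xffff00ff
;     add     r0, r1, r3
; rather than a longer synthesized constant-addition sequence.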
716 (define_peephole2
717 [(match_scratch:SI 3 "r")
718 (set (match_operand:SI 0 "arm_general_register_operand" "")
719 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
720 (match_operand:SI 2 "const_int_operand" "")))]
721 "TARGET_32BIT &&
722 !(const_ok_for_arm (INTVAL (operands[2]))
723 || const_ok_for_arm (-INTVAL (operands[2])))
724 && const_ok_for_arm (~INTVAL (operands[2]))"
725 [(set (match_dup 3) (match_dup 2))
726 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
727 ""
728 )
729
730 ;; The r/r/k alternative is required when reloading the address
731 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
732 ;; put the duplicated register first, and not try the commutative version.
733 (define_insn_and_split "*arm_addsi3"
734 [(set (match_operand:SI 0 "s_register_operand" "=rk, r,k, r,r, k, r, k,k,r, k, r")
735 (plus:SI (match_operand:SI 1 "s_register_operand" "%0, rk,k, r,rk,k, rk,k,r,rk,k, rk")
736 (match_operand:SI 2 "reg_or_int_operand" "rk, rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
737 "TARGET_32BIT"
738 "@
739 add%?\\t%0, %0, %2
740 add%?\\t%0, %1, %2
741 add%?\\t%0, %1, %2
742 add%?\\t%0, %2, %1
743 addw%?\\t%0, %1, %2
744 addw%?\\t%0, %1, %2
745 sub%?\\t%0, %1, #%n2
746 sub%?\\t%0, %1, #%n2
747 sub%?\\t%0, %1, #%n2
748 subw%?\\t%0, %1, #%n2
749 subw%?\\t%0, %1, #%n2
750 #"
751 "TARGET_32BIT
752 && CONST_INT_P (operands[2])
753 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
754 && (reload_completed || !arm_eliminable_register (operands[1]))"
755 [(clobber (const_int 0))]
756 "
757 arm_split_constant (PLUS, SImode, curr_insn,
758 INTVAL (operands[2]), operands[0],
759 operands[1], 0);
760 DONE;
761 "
762 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,16")
763 (set_attr "predicable" "yes")
764 (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
765 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
766 (const_string "simple_alu_imm")
767 (const_string "alu_reg")))
768 ]
769 )
770
771 (define_insn_and_split "*thumb1_addsi3"
772 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
773 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
774 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
775 "TARGET_THUMB1"
776 "*
777 static const char * const asms[] =
778 {
779 \"add\\t%0, %0, %2\",
780 \"sub\\t%0, %0, #%n2\",
781 \"add\\t%0, %1, %2\",
782 \"add\\t%0, %0, %2\",
783 \"add\\t%0, %0, %2\",
784 \"add\\t%0, %1, %2\",
785 \"add\\t%0, %1, %2\",
786 \"#\",
787 \"#\",
788 \"#\"
789 };
790 if ((which_alternative == 2 || which_alternative == 6)
791 && CONST_INT_P (operands[2])
792 && INTVAL (operands[2]) < 0)
793 return \"sub\\t%0, %1, #%n2\";
794 return asms[which_alternative];
795 "
796 "&& reload_completed && CONST_INT_P (operands[2])
797 && ((operands[1] != stack_pointer_rtx
798 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
799 || (operands[1] == stack_pointer_rtx
800 && INTVAL (operands[2]) > 1020))"
801 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
802 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
803 {
804 HOST_WIDE_INT offset = INTVAL (operands[2]);
805 if (operands[1] == stack_pointer_rtx)
806 offset -= 1020;
807 else
808 {
809 if (offset > 255)
810 offset = 255;
811 else if (offset < -255)
812 offset = -255;
813 }
814 operands[3] = GEN_INT (offset);
815 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
816 }
817 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
818 )
819
820 ;; Reloading and elimination of the frame pointer can
821 ;; sometimes cause this optimization to be missed.
822 (define_peephole2
823 [(set (match_operand:SI 0 "arm_general_register_operand" "")
824 (match_operand:SI 1 "const_int_operand" ""))
825 (set (match_dup 0)
826 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
827 "TARGET_THUMB1
828 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
829 && (INTVAL (operands[1]) & 3) == 0"
830 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
831 ""
832 )
833
834 (define_insn "addsi3_compare0"
835 [(set (reg:CC_NOOV CC_REGNUM)
836 (compare:CC_NOOV
837 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
838 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
839 (const_int 0)))
840 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
841 (plus:SI (match_dup 1) (match_dup 2)))]
842 "TARGET_ARM"
843 "@
844 add%.\\t%0, %1, %2
845 sub%.\\t%0, %1, #%n2
846 add%.\\t%0, %1, %2"
847 [(set_attr "conds" "set")
848 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")]
849 )
850
851 (define_insn "*addsi3_compare0_scratch"
852 [(set (reg:CC_NOOV CC_REGNUM)
853 (compare:CC_NOOV
854 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
855 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
856 (const_int 0)))]
857 "TARGET_ARM"
858 "@
859 cmn%?\\t%0, %1
860 cmp%?\\t%0, #%n1
861 cmn%?\\t%0, %1"
862 [(set_attr "conds" "set")
863 (set_attr "predicable" "yes")
864 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")
865 ]
866 )
867
868 (define_insn "*compare_negsi_si"
869 [(set (reg:CC_Z CC_REGNUM)
870 (compare:CC_Z
871 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
872 (match_operand:SI 1 "s_register_operand" "r")))]
873 "TARGET_32BIT"
874 "cmn%?\\t%1, %0"
875 [(set_attr "conds" "set")
876 (set_attr "predicable" "yes")]
877 )
878
879 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
880 ;; addend is a constant.
881 (define_insn "cmpsi2_addneg"
882 [(set (reg:CC CC_REGNUM)
883 (compare:CC
884 (match_operand:SI 1 "s_register_operand" "r,r")
885 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
886 (set (match_operand:SI 0 "s_register_operand" "=r,r")
887 (plus:SI (match_dup 1)
888 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
889 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
890 "@
891 add%.\\t%0, %1, %3
892 sub%.\\t%0, %1, #%n3"
893 [(set_attr "conds" "set")]
894 )
895
896 ;; Convert the sequence
897 ;; sub rd, rn, #1
898 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
899 ;; bne dest
900 ;; into
901 ;; subs rd, rn, #1
902 ;; bcs dest ((unsigned)rn >= 1)
903 ;; similarly for the beq variant using bcc.
904 ;; This is a common looping idiom (while (n--))
905 (define_peephole2
906 [(set (match_operand:SI 0 "arm_general_register_operand" "")
907 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
908 (const_int -1)))
909 (set (match_operand 2 "cc_register" "")
910 (compare (match_dup 0) (const_int -1)))
911 (set (pc)
912 (if_then_else (match_operator 3 "equality_operator"
913 [(match_dup 2) (const_int 0)])
914 (match_operand 4 "" "")
915 (match_operand 5 "" "")))]
916 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
917 [(parallel[
918 (set (match_dup 2)
919 (compare:CC
920 (match_dup 1) (const_int 1)))
921 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
922 (set (pc)
923 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
924 (match_dup 4)
925 (match_dup 5)))]
926 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
927 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
928 ? GEU : LTU),
929 VOIDmode,
930 operands[2], const0_rtx);"
931 )
932
933 ;; The next four insns work because they compare the result with one of
934 ;; the operands, and we know that the use of the condition code is
935 ;; either GEU or LTU, so we can use the carry flag from the addition
936 ;; instead of doing the compare a second time.
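;; For example (sketch only): to branch when "r0 = r1 + r2" wraps around
;; (that is, when the unsigned result is below r1), a single
;;     adds    r0, r1, r2
;;     bcs     wrapped
;; replaces the longer
;;     add     r0, r1, r2
;;     cmp     r0, r1
;;     blo     wrapped
;; because the carry out of the addition already encodes the GEU/LTU
;; result.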
937 (define_insn "*addsi3_compare_op1"
938 [(set (reg:CC_C CC_REGNUM)
939 (compare:CC_C
940 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
941 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
942 (match_dup 1)))
943 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
944 (plus:SI (match_dup 1) (match_dup 2)))]
945 "TARGET_32BIT"
946 "@
947 add%.\\t%0, %1, %2
948 sub%.\\t%0, %1, #%n2
949 add%.\\t%0, %1, %2"
950 [(set_attr "conds" "set")
951 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
952 )
953
954 (define_insn "*addsi3_compare_op2"
955 [(set (reg:CC_C CC_REGNUM)
956 (compare:CC_C
957 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
958 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
959 (match_dup 2)))
960 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
961 (plus:SI (match_dup 1) (match_dup 2)))]
962 "TARGET_32BIT"
963 "@
964 add%.\\t%0, %1, %2
965 add%.\\t%0, %1, %2
966 sub%.\\t%0, %1, #%n2"
967 [(set_attr "conds" "set")
968 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
969 )
970
971 (define_insn "*compare_addsi2_op0"
972 [(set (reg:CC_C CC_REGNUM)
973 (compare:CC_C
974 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
975 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
976 (match_dup 0)))]
977 "TARGET_32BIT"
978 "@
979 cmn%?\\t%0, %1
980 cmp%?\\t%0, #%n1
981 cmn%?\\t%0, %1"
982 [(set_attr "conds" "set")
983 (set_attr "predicable" "yes")
984 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
985 )
986
987 (define_insn "*compare_addsi2_op1"
988 [(set (reg:CC_C CC_REGNUM)
989 (compare:CC_C
990 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
991 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
992 (match_dup 1)))]
993 "TARGET_32BIT"
994 "@
995 cmn%?\\t%0, %1
996 cmp%?\\t%0, #%n1
997 cmn%?\\t%0, %1"
998 [(set_attr "conds" "set")
999 (set_attr "predicable" "yes")
1000 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1001 )
1002
1003 (define_insn "*addsi3_carryin_<optab>"
1004 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1005 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r")
1006 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1007 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1008 "TARGET_32BIT"
1009 "@
1010 adc%?\\t%0, %1, %2
1011 sbc%?\\t%0, %1, #%B2"
1012 [(set_attr "conds" "use")
1013 (set_attr "predicable" "yes")]
1014 )
1015
1016 (define_insn "*addsi3_carryin_alt2_<optab>"
1017 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1018 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
1019 (match_operand:SI 1 "s_register_operand" "%r,r"))
1020 (match_operand:SI 2 "arm_rhs_operand" "rI,K")))]
1021 "TARGET_32BIT"
1022 "@
1023 adc%?\\t%0, %1, %2
1024 sbc%?\\t%0, %1, #%B2"
1025 [(set_attr "conds" "use")
1026 (set_attr "predicable" "yes")]
1027 )
1028
1029 (define_insn "*addsi3_carryin_shift_<optab>"
1030 [(set (match_operand:SI 0 "s_register_operand" "=r")
1031 (plus:SI (plus:SI
1032 (match_operator:SI 2 "shift_operator"
1033 [(match_operand:SI 3 "s_register_operand" "r")
1034 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1035 (match_operand:SI 1 "s_register_operand" "r"))
1036 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1037 "TARGET_32BIT"
1038 "adc%?\\t%0, %1, %3%S2"
1039 [(set_attr "conds" "use")
1040 (set_attr "predicable" "yes")
1041 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1042 (const_string "alu_shift")
1043 (const_string "alu_shift_reg")))]
1044 )
1045
1046 (define_insn "*addsi3_carryin_clobercc_<optab>"
1047 [(set (match_operand:SI 0 "s_register_operand" "=r")
1048 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1049 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1050 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1051 (clobber (reg:CC CC_REGNUM))]
1052 "TARGET_32BIT"
1053 "adc%.\\t%0, %1, %2"
1054 [(set_attr "conds" "set")]
1055 )
1056
1057 (define_insn "*subsi3_carryin"
1058 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1059 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I")
1060 (match_operand:SI 2 "s_register_operand" "r,r"))
1061 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1062 "TARGET_32BIT"
1063 "@
1064 sbc%?\\t%0, %1, %2
1065 rsc%?\\t%0, %2, %1"
1066 [(set_attr "conds" "use")
1067 (set_attr "arch" "*,a")
1068 (set_attr "predicable" "yes")]
1069 )
1070
1071 (define_insn "*subsi3_carryin_const"
1072 [(set (match_operand:SI 0 "s_register_operand" "=r")
1073 (minus:SI (plus:SI (match_operand:SI 1 "reg_or_int_operand" "r")
1074 (match_operand:SI 2 "arm_not_operand" "K"))
1075 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1076 "TARGET_32BIT"
1077 "sbc\\t%0, %1, #%B2"
1078 [(set_attr "conds" "use")]
1079 )
1080
1081 (define_insn "*subsi3_carryin_compare"
1082 [(set (reg:CC CC_REGNUM)
1083 (compare:CC (match_operand:SI 1 "s_register_operand" "r")
1084 (match_operand:SI 2 "s_register_operand" "r")))
1085 (set (match_operand:SI 0 "s_register_operand" "=r")
1086 (minus:SI (minus:SI (match_dup 1)
1087 (match_dup 2))
1088 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1089 "TARGET_32BIT"
1090 "sbcs\\t%0, %1, %2"
1091 [(set_attr "conds" "set")]
1092 )
1093
1094 (define_insn "*subsi3_carryin_compare_const"
1095 [(set (reg:CC CC_REGNUM)
1096 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1097 (match_operand:SI 2 "arm_not_operand" "K")))
1098 (set (match_operand:SI 0 "s_register_operand" "=r")
1099 (minus:SI (plus:SI (match_dup 1)
1100 (match_dup 2))
1101 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1102 "TARGET_32BIT"
1103 "sbcs\\t%0, %1, #%B2"
1104 [(set_attr "conds" "set")]
1105 )
1106
1107 (define_insn "*subsi3_carryin_shift"
1108 [(set (match_operand:SI 0 "s_register_operand" "=r")
1109 (minus:SI (minus:SI
1110 (match_operand:SI 1 "s_register_operand" "r")
1111 (match_operator:SI 2 "shift_operator"
1112 [(match_operand:SI 3 "s_register_operand" "r")
1113 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1114 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1115 "TARGET_32BIT"
1116 "sbc%?\\t%0, %1, %3%S2"
1117 [(set_attr "conds" "use")
1118 (set_attr "predicable" "yes")
1119 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1120 (const_string "alu_shift")
1121 (const_string "alu_shift_reg")))]
1122 )
1123
1124 (define_insn "*rsbsi3_carryin_shift"
1125 [(set (match_operand:SI 0 "s_register_operand" "=r")
1126 (minus:SI (minus:SI
1127 (match_operator:SI 2 "shift_operator"
1128 [(match_operand:SI 3 "s_register_operand" "r")
1129 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1130 (match_operand:SI 1 "s_register_operand" "r"))
1131 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1132 "TARGET_ARM"
1133 "rsc%?\\t%0, %1, %3%S2"
1134 [(set_attr "conds" "use")
1135 (set_attr "predicable" "yes")
1136 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1137 (const_string "alu_shift")
1138 (const_string "alu_shift_reg")))]
1139 )
1140
1141 ; Transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a constant.
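; For instance, with x = 4 and y = 3: (4 << 3) - 1 = 31, and the rewritten
; form gives ~(~(4 - 1) << 3) = ~(0xfffffffc << 3) = ~0xffffffe0 = 31 as
; well, typically needing just a constant load plus one MVN with a shifted
; operand.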
1142 (define_split
1143 [(set (match_operand:SI 0 "s_register_operand" "")
1144 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1145 (match_operand:SI 2 "s_register_operand" ""))
1146 (const_int -1)))
1147 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1148 "TARGET_32BIT"
1149 [(set (match_dup 3) (match_dup 1))
1150 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1151 "
1152 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1153 ")
1154
1155 (define_expand "addsf3"
1156 [(set (match_operand:SF 0 "s_register_operand" "")
1157 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1158 (match_operand:SF 2 "s_register_operand" "")))]
1159 "TARGET_32BIT && TARGET_HARD_FLOAT"
1160 "
1161 ")
1162
1163 (define_expand "adddf3"
1164 [(set (match_operand:DF 0 "s_register_operand" "")
1165 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1166 (match_operand:DF 2 "s_register_operand" "")))]
1167 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1168 "
1169 ")
1170
1171 (define_expand "subdi3"
1172 [(parallel
1173 [(set (match_operand:DI 0 "s_register_operand" "")
1174 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1175 (match_operand:DI 2 "s_register_operand" "")))
1176 (clobber (reg:CC CC_REGNUM))])]
1177 "TARGET_EITHER"
1178 "
1179 if (TARGET_THUMB1)
1180 {
1181 if (!REG_P (operands[1]))
1182 operands[1] = force_reg (DImode, operands[1]);
1183 if (!REG_P (operands[2]))
1184 operands[2] = force_reg (DImode, operands[2]);
1185 }
1186 "
1187 )
1188
1189 (define_insn_and_split "*arm_subdi3"
1190 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1191 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1192 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1193 (clobber (reg:CC CC_REGNUM))]
1194 "TARGET_32BIT && !TARGET_NEON"
1195 "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1196 "&& reload_completed"
1197 [(parallel [(set (reg:CC CC_REGNUM)
1198 (compare:CC (match_dup 1) (match_dup 2)))
1199 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1200 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1201 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1202 {
1203 operands[3] = gen_highpart (SImode, operands[0]);
1204 operands[0] = gen_lowpart (SImode, operands[0]);
1205 operands[4] = gen_highpart (SImode, operands[1]);
1206 operands[1] = gen_lowpart (SImode, operands[1]);
1207 operands[5] = gen_highpart (SImode, operands[2]);
1208 operands[2] = gen_lowpart (SImode, operands[2]);
1209 }
1210 [(set_attr "conds" "clob")
1211 (set_attr "length" "8")]
1212 )
1213
1214 (define_insn "*thumb_subdi3"
1215 [(set (match_operand:DI 0 "register_operand" "=l")
1216 (minus:DI (match_operand:DI 1 "register_operand" "0")
1217 (match_operand:DI 2 "register_operand" "l")))
1218 (clobber (reg:CC CC_REGNUM))]
1219 "TARGET_THUMB1"
1220 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1221 [(set_attr "length" "4")]
1222 )
1223
1224 (define_insn_and_split "*subdi_di_zesidi"
1225 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1226 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1227 (zero_extend:DI
1228 (match_operand:SI 2 "s_register_operand" "r,r"))))
1229 (clobber (reg:CC CC_REGNUM))]
1230 "TARGET_32BIT"
1231 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1232 "&& reload_completed"
1233 [(parallel [(set (reg:CC CC_REGNUM)
1234 (compare:CC (match_dup 1) (match_dup 2)))
1235 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1236 (set (match_dup 3) (minus:SI (plus:SI (match_dup 4) (match_dup 5))
1237 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1238 {
1239 operands[3] = gen_highpart (SImode, operands[0]);
1240 operands[0] = gen_lowpart (SImode, operands[0]);
1241 operands[4] = gen_highpart (SImode, operands[1]);
1242 operands[1] = gen_lowpart (SImode, operands[1]);
1243 operands[5] = GEN_INT (~0);
1244 }
1245 [(set_attr "conds" "clob")
1246 (set_attr "length" "8")]
1247 )
1248
1249 (define_insn_and_split "*subdi_di_sesidi"
1250 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1251 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1252 (sign_extend:DI
1253 (match_operand:SI 2 "s_register_operand" "r,r"))))
1254 (clobber (reg:CC CC_REGNUM))]
1255 "TARGET_32BIT"
1256 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1257 "&& reload_completed"
1258 [(parallel [(set (reg:CC CC_REGNUM)
1259 (compare:CC (match_dup 1) (match_dup 2)))
1260 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1261 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
1262 (ashiftrt:SI (match_dup 2)
1263 (const_int 31)))
1264 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1265 {
1266 operands[3] = gen_highpart (SImode, operands[0]);
1267 operands[0] = gen_lowpart (SImode, operands[0]);
1268 operands[4] = gen_highpart (SImode, operands[1]);
1269 operands[1] = gen_lowpart (SImode, operands[1]);
1270 }
1271 [(set_attr "conds" "clob")
1272 (set_attr "length" "8")]
1273 )
1274
1275 (define_insn_and_split "*subdi_zesidi_di"
1276 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1277 (minus:DI (zero_extend:DI
1278 (match_operand:SI 2 "s_register_operand" "r,r"))
1279 (match_operand:DI 1 "s_register_operand" "0,r")))
1280 (clobber (reg:CC CC_REGNUM))]
1281 "TARGET_ARM"
1282 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1283 ; is equivalent to:
1284 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
1285 "&& reload_completed"
1286 [(parallel [(set (reg:CC CC_REGNUM)
1287 (compare:CC (match_dup 2) (match_dup 1)))
1288 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1289 (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
1290 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1291 {
1292 operands[3] = gen_highpart (SImode, operands[0]);
1293 operands[0] = gen_lowpart (SImode, operands[0]);
1294 operands[4] = gen_highpart (SImode, operands[1]);
1295 operands[1] = gen_lowpart (SImode, operands[1]);
1296 }
1297 [(set_attr "conds" "clob")
1298 (set_attr "length" "8")]
1299 )
1300
1301 (define_insn_and_split "*subdi_sesidi_di"
1302 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1303 (minus:DI (sign_extend:DI
1304 (match_operand:SI 2 "s_register_operand" "r,r"))
1305 (match_operand:DI 1 "s_register_operand" "0,r")))
1306 (clobber (reg:CC CC_REGNUM))]
1307 "TARGET_ARM"
1308 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1309 ; is equivalent to:
1310 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
1311 "&& reload_completed"
1312 [(parallel [(set (reg:CC CC_REGNUM)
1313 (compare:CC (match_dup 2) (match_dup 1)))
1314 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1315 (set (match_dup 3) (minus:SI (minus:SI
1316 (ashiftrt:SI (match_dup 2)
1317 (const_int 31))
1318 (match_dup 4))
1319 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1320 {
1321 operands[3] = gen_highpart (SImode, operands[0]);
1322 operands[0] = gen_lowpart (SImode, operands[0]);
1323 operands[4] = gen_highpart (SImode, operands[1]);
1324 operands[1] = gen_lowpart (SImode, operands[1]);
1325 }
1326 [(set_attr "conds" "clob")
1327 (set_attr "length" "8")]
1328 )
1329
1330 (define_insn_and_split "*subdi_zesidi_zesidi"
1331 [(set (match_operand:DI 0 "s_register_operand" "=r")
1332 (minus:DI (zero_extend:DI
1333 (match_operand:SI 1 "s_register_operand" "r"))
1334 (zero_extend:DI
1335 (match_operand:SI 2 "s_register_operand" "r"))))
1336 (clobber (reg:CC CC_REGNUM))]
1337 "TARGET_32BIT"
1338 "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1339 "&& reload_completed"
1340 [(parallel [(set (reg:CC CC_REGNUM)
1341 (compare:CC (match_dup 1) (match_dup 2)))
1342 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1343 (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
1344 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1345 {
1346 operands[3] = gen_highpart (SImode, operands[0]);
1347 operands[0] = gen_lowpart (SImode, operands[0]);
1348 }
1349 [(set_attr "conds" "clob")
1350 (set_attr "length" "8")]
1351 )
1352
1353 (define_expand "subsi3"
1354 [(set (match_operand:SI 0 "s_register_operand" "")
1355 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1356 (match_operand:SI 2 "s_register_operand" "")))]
1357 "TARGET_EITHER"
1358 "
1359 if (CONST_INT_P (operands[1]))
1360 {
1361 if (TARGET_32BIT)
1362 {
1363 arm_split_constant (MINUS, SImode, NULL_RTX,
1364 INTVAL (operands[1]), operands[0],
1365 operands[2], optimize && can_create_pseudo_p ());
1366 DONE;
1367 }
1368 else /* TARGET_THUMB1 */
1369 operands[1] = force_reg (SImode, operands[1]);
1370 }
1371 "
1372 )
1373
1374 (define_insn "thumb1_subsi3_insn"
1375 [(set (match_operand:SI 0 "register_operand" "=l")
1376 (minus:SI (match_operand:SI 1 "register_operand" "l")
1377 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1378 "TARGET_THUMB1"
1379 "sub\\t%0, %1, %2"
1380 [(set_attr "length" "2")
1381 (set_attr "conds" "set")])
1382
1383 ; ??? Check Thumb-2 split length
1384 (define_insn_and_split "*arm_subsi3_insn"
1385 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,rk,r")
1386 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,r,k,?n")
1387 (match_operand:SI 2 "reg_or_int_operand" "r,I,r,r, r")))]
1388 "TARGET_32BIT"
1389 "@
1390 rsb%?\\t%0, %2, %1
1391 sub%?\\t%0, %1, %2
1392 sub%?\\t%0, %1, %2
1393 sub%?\\t%0, %1, %2
1394 #"
1395 "&& (CONST_INT_P (operands[1])
1396 && !const_ok_for_arm (INTVAL (operands[1])))"
1397 [(clobber (const_int 0))]
1398 "
1399 arm_split_constant (MINUS, SImode, curr_insn,
1400 INTVAL (operands[1]), operands[0], operands[2], 0);
1401 DONE;
1402 "
1403 [(set_attr "length" "4,4,4,4,16")
1404 (set_attr "predicable" "yes")
1405 (set_attr "type" "*,simple_alu_imm,*,*,*")]
1406 )
1407
1408 (define_peephole2
1409 [(match_scratch:SI 3 "r")
1410 (set (match_operand:SI 0 "arm_general_register_operand" "")
1411 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1412 (match_operand:SI 2 "arm_general_register_operand" "")))]
1413 "TARGET_32BIT
1414 && !const_ok_for_arm (INTVAL (operands[1]))
1415 && const_ok_for_arm (~INTVAL (operands[1]))"
1416 [(set (match_dup 3) (match_dup 1))
1417 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1418 ""
1419 )
1420
1421 (define_insn "*subsi3_compare0"
1422 [(set (reg:CC_NOOV CC_REGNUM)
1423 (compare:CC_NOOV
1424 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1425 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1426 (const_int 0)))
1427 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1428 (minus:SI (match_dup 1) (match_dup 2)))]
1429 "TARGET_32BIT"
1430 "@
1431 sub%.\\t%0, %1, %2
1432 sub%.\\t%0, %1, %2
1433 rsb%.\\t%0, %2, %1"
1434 [(set_attr "conds" "set")
1435 (set_attr "type" "simple_alu_imm,*,*")]
1436 )
1437
1438 (define_insn "subsi3_compare"
1439 [(set (reg:CC CC_REGNUM)
1440 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1441 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1442 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1443 (minus:SI (match_dup 1) (match_dup 2)))]
1444 "TARGET_32BIT"
1445 "@
1446 sub%.\\t%0, %1, %2
1447 sub%.\\t%0, %1, %2
1448 rsb%.\\t%0, %2, %1"
1449 [(set_attr "conds" "set")
1450 (set_attr "type" "simple_alu_imm,*,*")]
1451 )
1452
1453 (define_expand "subsf3"
1454 [(set (match_operand:SF 0 "s_register_operand" "")
1455 (minus:SF (match_operand:SF 1 "s_register_operand" "")
1456 (match_operand:SF 2 "s_register_operand" "")))]
1457 "TARGET_32BIT && TARGET_HARD_FLOAT"
1458 "
1459 ")
1460
1461 (define_expand "subdf3"
1462 [(set (match_operand:DF 0 "s_register_operand" "")
1463 (minus:DF (match_operand:DF 1 "s_register_operand" "")
1464 (match_operand:DF 2 "s_register_operand" "")))]
1465 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1466 "
1467 ")
1468
1469 \f
1470 ;; Multiplication insns
1471
1472 (define_expand "mulsi3"
1473 [(set (match_operand:SI 0 "s_register_operand" "")
1474 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1475 (match_operand:SI 1 "s_register_operand" "")))]
1476 "TARGET_EITHER"
1477 ""
1478 )
1479
1480 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1481 (define_insn "*arm_mulsi3"
1482 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1483 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1484 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1485 "TARGET_32BIT && !arm_arch6"
1486 "mul%?\\t%0, %2, %1"
1487 [(set_attr "insn" "mul")
1488 (set_attr "predicable" "yes")]
1489 )
1490
1491 (define_insn "*arm_mulsi3_v6"
1492 [(set (match_operand:SI 0 "s_register_operand" "=r")
1493 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1494 (match_operand:SI 2 "s_register_operand" "r")))]
1495 "TARGET_32BIT && arm_arch6"
1496 "mul%?\\t%0, %1, %2"
1497 [(set_attr "insn" "mul")
1498 (set_attr "predicable" "yes")]
1499 )
1500
1501 ; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1502 ; 1 and 2 are the same, because reload will make operand 0 match
1503 ; operand 1 without realizing that this conflicts with operand 2. We fix
1504 ; this by adding another alternative to match this case, and then `reload'
1505 ; it ourselves. This alternative must come first.
1506 (define_insn "*thumb_mulsi3"
1507 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1508 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1509 (match_operand:SI 2 "register_operand" "l,l,l")))]
1510 "TARGET_THUMB1 && !arm_arch6"
1511 "*
1512 if (which_alternative < 2)
1513 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1514 else
1515 return \"mul\\t%0, %2\";
1516 "
1517 [(set_attr "length" "4,4,2")
1518 (set_attr "insn" "mul")]
1519 )
1520
1521 (define_insn "*thumb_mulsi3_v6"
1522 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1523 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1524 (match_operand:SI 2 "register_operand" "l,0,0")))]
1525 "TARGET_THUMB1 && arm_arch6"
1526 "@
1527 mul\\t%0, %2
1528 mul\\t%0, %1
1529 mul\\t%0, %1"
1530 [(set_attr "length" "2")
1531 (set_attr "insn" "mul")]
1532 )
1533
1534 (define_insn "*mulsi3_compare0"
1535 [(set (reg:CC_NOOV CC_REGNUM)
1536 (compare:CC_NOOV (mult:SI
1537 (match_operand:SI 2 "s_register_operand" "r,r")
1538 (match_operand:SI 1 "s_register_operand" "%0,r"))
1539 (const_int 0)))
1540 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1541 (mult:SI (match_dup 2) (match_dup 1)))]
1542 "TARGET_ARM && !arm_arch6"
1543 "mul%.\\t%0, %2, %1"
1544 [(set_attr "conds" "set")
1545 (set_attr "insn" "muls")]
1546 )
1547
1548 (define_insn "*mulsi3_compare0_v6"
1549 [(set (reg:CC_NOOV CC_REGNUM)
1550 (compare:CC_NOOV (mult:SI
1551 (match_operand:SI 2 "s_register_operand" "r")
1552 (match_operand:SI 1 "s_register_operand" "r"))
1553 (const_int 0)))
1554 (set (match_operand:SI 0 "s_register_operand" "=r")
1555 (mult:SI (match_dup 2) (match_dup 1)))]
1556 "TARGET_ARM && arm_arch6 && optimize_size"
1557 "mul%.\\t%0, %2, %1"
1558 [(set_attr "conds" "set")
1559 (set_attr "insn" "muls")]
1560 )
1561
1562 (define_insn "*mulsi_compare0_scratch"
1563 [(set (reg:CC_NOOV CC_REGNUM)
1564 (compare:CC_NOOV (mult:SI
1565 (match_operand:SI 2 "s_register_operand" "r,r")
1566 (match_operand:SI 1 "s_register_operand" "%0,r"))
1567 (const_int 0)))
1568 (clobber (match_scratch:SI 0 "=&r,&r"))]
1569 "TARGET_ARM && !arm_arch6"
1570 "mul%.\\t%0, %2, %1"
1571 [(set_attr "conds" "set")
1572 (set_attr "insn" "muls")]
1573 )
1574
1575 (define_insn "*mulsi_compare0_scratch_v6"
1576 [(set (reg:CC_NOOV CC_REGNUM)
1577 (compare:CC_NOOV (mult:SI
1578 (match_operand:SI 2 "s_register_operand" "r")
1579 (match_operand:SI 1 "s_register_operand" "r"))
1580 (const_int 0)))
1581 (clobber (match_scratch:SI 0 "=r"))]
1582 "TARGET_ARM && arm_arch6 && optimize_size"
1583 "mul%.\\t%0, %2, %1"
1584 [(set_attr "conds" "set")
1585 (set_attr "insn" "muls")]
1586 )
1587
1588 ;; Unnamed templates to match the MLA instruction.
1589
1590 (define_insn "*mulsi3addsi"
1591 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1592 (plus:SI
1593 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1594 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1595 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1596 "TARGET_32BIT && !arm_arch6"
1597 "mla%?\\t%0, %2, %1, %3"
1598 [(set_attr "insn" "mla")
1599 (set_attr "predicable" "yes")]
1600 )
1601
1602 (define_insn "*mulsi3addsi_v6"
1603 [(set (match_operand:SI 0 "s_register_operand" "=r")
1604 (plus:SI
1605 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1606 (match_operand:SI 1 "s_register_operand" "r"))
1607 (match_operand:SI 3 "s_register_operand" "r")))]
1608 "TARGET_32BIT && arm_arch6"
1609 "mla%?\\t%0, %2, %1, %3"
1610 [(set_attr "insn" "mla")
1611 (set_attr "predicable" "yes")]
1612 )
1613
1614 (define_insn "*mulsi3addsi_compare0"
1615 [(set (reg:CC_NOOV CC_REGNUM)
1616 (compare:CC_NOOV
1617 (plus:SI (mult:SI
1618 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1619 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1620 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1621 (const_int 0)))
1622 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1623 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1624 (match_dup 3)))]
1625 "TARGET_ARM && arm_arch6"
1626 "mla%.\\t%0, %2, %1, %3"
1627 [(set_attr "conds" "set")
1628 (set_attr "insn" "mlas")]
1629 )
1630
1631 (define_insn "*mulsi3addsi_compare0_v6"
1632 [(set (reg:CC_NOOV CC_REGNUM)
1633 (compare:CC_NOOV
1634 (plus:SI (mult:SI
1635 (match_operand:SI 2 "s_register_operand" "r")
1636 (match_operand:SI 1 "s_register_operand" "r"))
1637 (match_operand:SI 3 "s_register_operand" "r"))
1638 (const_int 0)))
1639 (set (match_operand:SI 0 "s_register_operand" "=r")
1640 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1641 (match_dup 3)))]
1642 "TARGET_ARM && arm_arch6 && optimize_size"
1643 "mla%.\\t%0, %2, %1, %3"
1644 [(set_attr "conds" "set")
1645 (set_attr "insn" "mlas")]
1646 )
1647
1648 (define_insn "*mulsi3addsi_compare0_scratch"
1649 [(set (reg:CC_NOOV CC_REGNUM)
1650 (compare:CC_NOOV
1651 (plus:SI (mult:SI
1652 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1653 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1654 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1655 (const_int 0)))
1656 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1657 "TARGET_ARM && !arm_arch6"
1658 "mla%.\\t%0, %2, %1, %3"
1659 [(set_attr "conds" "set")
1660 (set_attr "insn" "mlas")]
1661 )
1662
1663 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1664 [(set (reg:CC_NOOV CC_REGNUM)
1665 (compare:CC_NOOV
1666 (plus:SI (mult:SI
1667 (match_operand:SI 2 "s_register_operand" "r")
1668 (match_operand:SI 1 "s_register_operand" "r"))
1669 (match_operand:SI 3 "s_register_operand" "r"))
1670 (const_int 0)))
1671 (clobber (match_scratch:SI 0 "=r"))]
1672 "TARGET_ARM && arm_arch6 && optimize_size"
1673 "mla%.\\t%0, %2, %1, %3"
1674 [(set_attr "conds" "set")
1675 (set_attr "insn" "mlas")]
1676 )
1677
1678 (define_insn "*mulsi3subsi"
1679 [(set (match_operand:SI 0 "s_register_operand" "=r")
1680 (minus:SI
1681 (match_operand:SI 3 "s_register_operand" "r")
1682 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1683 (match_operand:SI 1 "s_register_operand" "r"))))]
1684 "TARGET_32BIT && arm_arch_thumb2"
1685 "mls%?\\t%0, %2, %1, %3"
1686 [(set_attr "insn" "mla")
1687 (set_attr "predicable" "yes")]
1688 )
1689
1690 (define_expand "maddsidi4"
1691 [(set (match_operand:DI 0 "s_register_operand" "")
1692 (plus:DI
1693 (mult:DI
1694 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1695 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1696 (match_operand:DI 3 "s_register_operand" "")))]
1697 "TARGET_32BIT && arm_arch3m"
1698 "")
1699
1700 (define_insn "*mulsidi3adddi"
1701 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1702 (plus:DI
1703 (mult:DI
1704 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1705 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1706 (match_operand:DI 1 "s_register_operand" "0")))]
1707 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1708 "smlal%?\\t%Q0, %R0, %3, %2"
1709 [(set_attr "insn" "smlal")
1710 (set_attr "predicable" "yes")]
1711 )
1712
1713 (define_insn "*mulsidi3adddi_v6"
1714 [(set (match_operand:DI 0 "s_register_operand" "=r")
1715 (plus:DI
1716 (mult:DI
1717 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1718 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1719 (match_operand:DI 1 "s_register_operand" "0")))]
1720 "TARGET_32BIT && arm_arch6"
1721 "smlal%?\\t%Q0, %R0, %3, %2"
1722 [(set_attr "insn" "smlal")
1723 (set_attr "predicable" "yes")]
1724 )
1725
1726 ;; 32x32->64 widening multiply.
1727 ;; As with mulsi3, the only difference between the v3-5 and v6+
1728 ;; versions of these patterns is the requirement that the output not
1729 ;; overlap the inputs, but that still means we have to have a named
1730 ;; expander and two different starred insns.
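;;
;; A hedged illustration of the C-level idiom these expanders implement
;; (the function names are invented for the example):
;;
;;   long long smul64 (int a, int b)
;;   {
;;     return (long long) a * b;			/* SMULL */
;;   }
;;
;;   unsigned long long umul64 (unsigned int a, unsigned int b)
;;   {
;;     return (unsigned long long) a * b;		/* UMULL */
;;   }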
1731
1732 (define_expand "mulsidi3"
1733 [(set (match_operand:DI 0 "s_register_operand" "")
1734 (mult:DI
1735 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1736 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1737 "TARGET_32BIT && arm_arch3m"
1738 ""
1739 )
1740
1741 (define_insn "*mulsidi3_nov6"
1742 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1743 (mult:DI
1744 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1745 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1746 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1747 "smull%?\\t%Q0, %R0, %1, %2"
1748 [(set_attr "insn" "smull")
1749 (set_attr "predicable" "yes")]
1750 )
1751
1752 (define_insn "*mulsidi3_v6"
1753 [(set (match_operand:DI 0 "s_register_operand" "=r")
1754 (mult:DI
1755 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1756 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1757 "TARGET_32BIT && arm_arch6"
1758 "smull%?\\t%Q0, %R0, %1, %2"
1759 [(set_attr "insn" "smull")
1760 (set_attr "predicable" "yes")]
1761 )
1762
1763 (define_expand "umulsidi3"
1764 [(set (match_operand:DI 0 "s_register_operand" "")
1765 (mult:DI
1766 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1767 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1768 "TARGET_32BIT && arm_arch3m"
1769 ""
1770 )
1771
1772 (define_insn "*umulsidi3_nov6"
1773 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1774 (mult:DI
1775 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1776 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1777 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1778 "umull%?\\t%Q0, %R0, %1, %2"
1779 [(set_attr "insn" "umull")
1780 (set_attr "predicable" "yes")]
1781 )
1782
1783 (define_insn "*umulsidi3_v6"
1784 [(set (match_operand:DI 0 "s_register_operand" "=r")
1785 (mult:DI
1786 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1787 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1788 "TARGET_32BIT && arm_arch6"
1789 "umull%?\\t%Q0, %R0, %1, %2"
1790 [(set_attr "insn" "umull")
1791 (set_attr "predicable" "yes")]
1792 )
1793
1794 (define_expand "umaddsidi4"
1795 [(set (match_operand:DI 0 "s_register_operand" "")
1796 (plus:DI
1797 (mult:DI
1798 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1799 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1800 (match_operand:DI 3 "s_register_operand" "")))]
1801 "TARGET_32BIT && arm_arch3m"
1802 "")
1803
1804 (define_insn "*umulsidi3adddi"
1805 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1806 (plus:DI
1807 (mult:DI
1808 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1809 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1810 (match_operand:DI 1 "s_register_operand" "0")))]
1811 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1812 "umlal%?\\t%Q0, %R0, %3, %2"
1813 [(set_attr "insn" "umlal")
1814 (set_attr "predicable" "yes")]
1815 )
1816
1817 (define_insn "*umulsidi3adddi_v6"
1818 [(set (match_operand:DI 0 "s_register_operand" "=r")
1819 (plus:DI
1820 (mult:DI
1821 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1822 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1823 (match_operand:DI 1 "s_register_operand" "0")))]
1824 "TARGET_32BIT && arm_arch6"
1825 "umlal%?\\t%Q0, %R0, %3, %2"
1826 [(set_attr "insn" "umlal")
1827 (set_attr "predicable" "yes")]
1828 )
1829
1830 (define_expand "smulsi3_highpart"
1831 [(parallel
1832 [(set (match_operand:SI 0 "s_register_operand" "")
1833 (truncate:SI
1834 (lshiftrt:DI
1835 (mult:DI
1836 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1837 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1838 (const_int 32))))
1839 (clobber (match_scratch:SI 3 ""))])]
1840 "TARGET_32BIT && arm_arch3m"
1841 ""
1842 )
1843
1844 (define_insn "*smulsi3_highpart_nov6"
1845 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1846 (truncate:SI
1847 (lshiftrt:DI
1848 (mult:DI
1849 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1850 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1851 (const_int 32))))
1852 (clobber (match_scratch:SI 3 "=&r,&r"))]
1853 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1854 "smull%?\\t%3, %0, %2, %1"
1855 [(set_attr "insn" "smull")
1856 (set_attr "predicable" "yes")]
1857 )
1858
1859 (define_insn "*smulsi3_highpart_v6"
1860 [(set (match_operand:SI 0 "s_register_operand" "=r")
1861 (truncate:SI
1862 (lshiftrt:DI
1863 (mult:DI
1864 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1865 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1866 (const_int 32))))
1867 (clobber (match_scratch:SI 3 "=r"))]
1868 "TARGET_32BIT && arm_arch6"
1869 "smull%?\\t%3, %0, %2, %1"
1870 [(set_attr "insn" "smull")
1871 (set_attr "predicable" "yes")]
1872 )
1873
1874 (define_expand "umulsi3_highpart"
1875 [(parallel
1876 [(set (match_operand:SI 0 "s_register_operand" "")
1877 (truncate:SI
1878 (lshiftrt:DI
1879 (mult:DI
1880 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1881 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1882 (const_int 32))))
1883 (clobber (match_scratch:SI 3 ""))])]
1884 "TARGET_32BIT && arm_arch3m"
1885 ""
1886 )
1887
1888 (define_insn "*umulsi3_highpart_nov6"
1889 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1890 (truncate:SI
1891 (lshiftrt:DI
1892 (mult:DI
1893 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1894 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1895 (const_int 32))))
1896 (clobber (match_scratch:SI 3 "=&r,&r"))]
1897 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1898 "umull%?\\t%3, %0, %2, %1"
1899 [(set_attr "insn" "umull")
1900 (set_attr "predicable" "yes")]
1901 )
1902
1903 (define_insn "*umulsi3_highpart_v6"
1904 [(set (match_operand:SI 0 "s_register_operand" "=r")
1905 (truncate:SI
1906 (lshiftrt:DI
1907 (mult:DI
1908 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1909 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1910 (const_int 32))))
1911 (clobber (match_scratch:SI 3 "=r"))]
1912 "TARGET_32BIT && arm_arch6"
1913 "umull%?\\t%3, %0, %2, %1"
1914 [(set_attr "insn" "umull")
1915 (set_attr "predicable" "yes")]
1916 )
1917
1918 (define_insn "mulhisi3"
1919 [(set (match_operand:SI 0 "s_register_operand" "=r")
1920 (mult:SI (sign_extend:SI
1921 (match_operand:HI 1 "s_register_operand" "%r"))
1922 (sign_extend:SI
1923 (match_operand:HI 2 "s_register_operand" "r"))))]
1924 "TARGET_DSP_MULTIPLY"
1925 "smulbb%?\\t%0, %1, %2"
1926 [(set_attr "insn" "smulxy")
1927 (set_attr "predicable" "yes")]
1928 )
1929
1930 (define_insn "*mulhisi3tb"
1931 [(set (match_operand:SI 0 "s_register_operand" "=r")
1932 (mult:SI (ashiftrt:SI
1933 (match_operand:SI 1 "s_register_operand" "r")
1934 (const_int 16))
1935 (sign_extend:SI
1936 (match_operand:HI 2 "s_register_operand" "r"))))]
1937 "TARGET_DSP_MULTIPLY"
1938 "smultb%?\\t%0, %1, %2"
1939 [(set_attr "insn" "smulxy")
1940 (set_attr "predicable" "yes")]
1941 )
1942
1943 (define_insn "*mulhisi3bt"
1944 [(set (match_operand:SI 0 "s_register_operand" "=r")
1945 (mult:SI (sign_extend:SI
1946 (match_operand:HI 1 "s_register_operand" "r"))
1947 (ashiftrt:SI
1948 (match_operand:SI 2 "s_register_operand" "r")
1949 (const_int 16))))]
1950 "TARGET_DSP_MULTIPLY"
1951 "smulbt%?\\t%0, %1, %2"
1952 [(set_attr "insn" "smulxy")
1953 (set_attr "predicable" "yes")]
1954 )
1955
1956 (define_insn "*mulhisi3tt"
1957 [(set (match_operand:SI 0 "s_register_operand" "=r")
1958 (mult:SI (ashiftrt:SI
1959 (match_operand:SI 1 "s_register_operand" "r")
1960 (const_int 16))
1961 (ashiftrt:SI
1962 (match_operand:SI 2 "s_register_operand" "r")
1963 (const_int 16))))]
1964 "TARGET_DSP_MULTIPLY"
1965 "smultt%?\\t%0, %1, %2"
1966 [(set_attr "insn" "smulxy")
1967 (set_attr "predicable" "yes")]
1968 )
1969
1970 (define_insn "maddhisi4"
1971 [(set (match_operand:SI 0 "s_register_operand" "=r")
1972 (plus:SI (mult:SI (sign_extend:SI
1973 (match_operand:HI 1 "s_register_operand" "r"))
1974 (sign_extend:SI
1975 (match_operand:HI 2 "s_register_operand" "r")))
1976 (match_operand:SI 3 "s_register_operand" "r")))]
1977 "TARGET_DSP_MULTIPLY"
1978 "smlabb%?\\t%0, %1, %2, %3"
1979 [(set_attr "insn" "smlaxy")
1980 (set_attr "predicable" "yes")]
1981 )
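;; A sketch of the C-level idiom that maps onto maddhisi4 above (the function
;; name is invented; the exact code generated depends on optimization options):
;;
;;   int mac16 (short a, short b, int acc)
;;   {
;;     return acc + a * b;	/* SMLABB where TARGET_DSP_MULTIPLY holds */
;;   }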
1982
1983 ;; Note: there is no maddhisi4bt pattern because this (tb) form is the canonical one.
1984 (define_insn "*maddhisi4tb"
1985 [(set (match_operand:SI 0 "s_register_operand" "=r")
1986 (plus:SI (mult:SI (ashiftrt:SI
1987 (match_operand:SI 1 "s_register_operand" "r")
1988 (const_int 16))
1989 (sign_extend:SI
1990 (match_operand:HI 2 "s_register_operand" "r")))
1991 (match_operand:SI 3 "s_register_operand" "r")))]
1992 "TARGET_DSP_MULTIPLY"
1993 "smlatb%?\\t%0, %1, %2, %3"
1994 [(set_attr "insn" "smlaxy")
1995 (set_attr "predicable" "yes")]
1996 )
1997
1998 (define_insn "*maddhisi4tt"
1999 [(set (match_operand:SI 0 "s_register_operand" "=r")
2000 (plus:SI (mult:SI (ashiftrt:SI
2001 (match_operand:SI 1 "s_register_operand" "r")
2002 (const_int 16))
2003 (ashiftrt:SI
2004 (match_operand:SI 2 "s_register_operand" "r")
2005 (const_int 16)))
2006 (match_operand:SI 3 "s_register_operand" "r")))]
2007 "TARGET_DSP_MULTIPLY"
2008 "smlatt%?\\t%0, %1, %2, %3"
2009 [(set_attr "insn" "smlaxy")
2010 (set_attr "predicable" "yes")]
2011 )
2012
2013 (define_insn "maddhidi4"
2014 [(set (match_operand:DI 0 "s_register_operand" "=r")
2015 (plus:DI
2016 (mult:DI (sign_extend:DI
2017 (match_operand:HI 1 "s_register_operand" "r"))
2018 (sign_extend:DI
2019 (match_operand:HI 2 "s_register_operand" "r")))
2020 (match_operand:DI 3 "s_register_operand" "0")))]
2021 "TARGET_DSP_MULTIPLY"
2022 "smlalbb%?\\t%Q0, %R0, %1, %2"
2023 [(set_attr "insn" "smlalxy")
2024 (set_attr "predicable" "yes")])
2025
2026 ;; Note: there is no maddhidi4bt pattern because this (tb) form is the canonical one.
2027 (define_insn "*maddhidi4tb"
2028 [(set (match_operand:DI 0 "s_register_operand" "=r")
2029 (plus:DI
2030 (mult:DI (sign_extend:DI
2031 (ashiftrt:SI
2032 (match_operand:SI 1 "s_register_operand" "r")
2033 (const_int 16)))
2034 (sign_extend:DI
2035 (match_operand:HI 2 "s_register_operand" "r")))
2036 (match_operand:DI 3 "s_register_operand" "0")))]
2037 "TARGET_DSP_MULTIPLY"
2038 "smlaltb%?\\t%Q0, %R0, %1, %2"
2039 [(set_attr "insn" "smlalxy")
2040 (set_attr "predicable" "yes")])
2041
2042 (define_insn "*maddhidi4tt"
2043 [(set (match_operand:DI 0 "s_register_operand" "=r")
2044 (plus:DI
2045 (mult:DI (sign_extend:DI
2046 (ashiftrt:SI
2047 (match_operand:SI 1 "s_register_operand" "r")
2048 (const_int 16)))
2049 (sign_extend:DI
2050 (ashiftrt:SI
2051 (match_operand:SI 2 "s_register_operand" "r")
2052 (const_int 16))))
2053 (match_operand:DI 3 "s_register_operand" "0")))]
2054 "TARGET_DSP_MULTIPLY"
2055 "smlaltt%?\\t%Q0, %R0, %1, %2"
2056 [(set_attr "insn" "smlalxy")
2057 (set_attr "predicable" "yes")])
2058
2059 (define_expand "mulsf3"
2060 [(set (match_operand:SF 0 "s_register_operand" "")
2061 (mult:SF (match_operand:SF 1 "s_register_operand" "")
2062 (match_operand:SF 2 "s_register_operand" "")))]
2063 "TARGET_32BIT && TARGET_HARD_FLOAT"
2064 "
2065 ")
2066
2067 (define_expand "muldf3"
2068 [(set (match_operand:DF 0 "s_register_operand" "")
2069 (mult:DF (match_operand:DF 1 "s_register_operand" "")
2070 (match_operand:DF 2 "s_register_operand" "")))]
2071 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2072 "
2073 ")
2074 \f
2075 ;; Division insns
2076
2077 (define_expand "divsf3"
2078 [(set (match_operand:SF 0 "s_register_operand" "")
2079 (div:SF (match_operand:SF 1 "s_register_operand" "")
2080 (match_operand:SF 2 "s_register_operand" "")))]
2081 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
2082 "")
2083
2084 (define_expand "divdf3"
2085 [(set (match_operand:DF 0 "s_register_operand" "")
2086 (div:DF (match_operand:DF 1 "s_register_operand" "")
2087 (match_operand:DF 2 "s_register_operand" "")))]
2088 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2089 "")
2090 \f
2091 ;; Boolean and,ior,xor insns
2092
2093 ;; Split up double word logical operations
2094
2095 ;; Split up simple DImode logical operations. Simply perform the logical
2096 ;; operation on the upper and lower halves of the registers.
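;; For example (a sketch, with an invented function name), a 64-bit AND
;; such as
;;
;;   unsigned long long and64 (unsigned long long a, unsigned long long b)
;;   {
;;     return a & b;
;;   }
;;
;; becomes two SImode ANDs after the split: one on the low words and one on
;; the high words.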
2097 (define_split
2098 [(set (match_operand:DI 0 "s_register_operand" "")
2099 (match_operator:DI 6 "logical_binary_operator"
2100 [(match_operand:DI 1 "s_register_operand" "")
2101 (match_operand:DI 2 "s_register_operand" "")]))]
2102 "TARGET_32BIT && reload_completed
2103 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2104 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2105 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2106 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2107 "
2108 {
2109 operands[3] = gen_highpart (SImode, operands[0]);
2110 operands[0] = gen_lowpart (SImode, operands[0]);
2111 operands[4] = gen_highpart (SImode, operands[1]);
2112 operands[1] = gen_lowpart (SImode, operands[1]);
2113 operands[5] = gen_highpart (SImode, operands[2]);
2114 operands[2] = gen_lowpart (SImode, operands[2]);
2115 }"
2116 )
2117
2118 (define_split
2119 [(set (match_operand:DI 0 "s_register_operand" "")
2120 (match_operator:DI 6 "logical_binary_operator"
2121 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2122 (match_operand:DI 1 "s_register_operand" "")]))]
2123 "TARGET_32BIT && reload_completed"
2124 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2125 (set (match_dup 3) (match_op_dup:SI 6
2126 [(ashiftrt:SI (match_dup 2) (const_int 31))
2127 (match_dup 4)]))]
2128 "
2129 {
2130 operands[3] = gen_highpart (SImode, operands[0]);
2131 operands[0] = gen_lowpart (SImode, operands[0]);
2132 operands[4] = gen_highpart (SImode, operands[1]);
2133 operands[1] = gen_lowpart (SImode, operands[1]);
2134 operands[5] = gen_highpart (SImode, operands[2]);
2135 operands[2] = gen_lowpart (SImode, operands[2]);
2136 }"
2137 )
2138
2139 ;; The zero extend of operand 2 means we can just copy the high part of
2140 ;; operand1 into operand0.
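;; For example (a sketch; the function name is invented), in
;;
;;   unsigned long long f (unsigned long long a, unsigned int b)
;;   {
;;     return a | b;
;;   }
;;
;; the zero-extended operand contributes nothing to the high word, so the
;; split emits an ORR for the low words and a plain move for the high word.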
2141 (define_split
2142 [(set (match_operand:DI 0 "s_register_operand" "")
2143 (ior:DI
2144 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2145 (match_operand:DI 1 "s_register_operand" "")))]
2146 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2147 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2148 (set (match_dup 3) (match_dup 4))]
2149 "
2150 {
2151 operands[4] = gen_highpart (SImode, operands[1]);
2152 operands[3] = gen_highpart (SImode, operands[0]);
2153 operands[0] = gen_lowpart (SImode, operands[0]);
2154 operands[1] = gen_lowpart (SImode, operands[1]);
2155 }"
2156 )
2157
2158 ;; The zero extend of operand 2 means we can just copy the high part of
2159 ;; operand1 into operand0.
2160 (define_split
2161 [(set (match_operand:DI 0 "s_register_operand" "")
2162 (xor:DI
2163 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2164 (match_operand:DI 1 "s_register_operand" "")))]
2165 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2166 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2167 (set (match_dup 3) (match_dup 4))]
2168 "
2169 {
2170 operands[4] = gen_highpart (SImode, operands[1]);
2171 operands[3] = gen_highpart (SImode, operands[0]);
2172 operands[0] = gen_lowpart (SImode, operands[0]);
2173 operands[1] = gen_lowpart (SImode, operands[1]);
2174 }"
2175 )
2176
2177 (define_expand "anddi3"
2178 [(set (match_operand:DI 0 "s_register_operand" "")
2179 (and:DI (match_operand:DI 1 "s_register_operand" "")
2180 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2181 "TARGET_32BIT"
2182 ""
2183 )
2184
2185 (define_insn_and_split "*anddi3_insn"
2186 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,w,w ,?&r,?&r,?w,?w")
2187 (and:DI (match_operand:DI 1 "s_register_operand" "%0 ,r ,0,r ,w,0 ,0 ,r ,w ,0")
2188 (match_operand:DI 2 "arm_anddi_operand_neon" "r ,r ,De,De,w,DL,r ,r ,w ,DL")))]
2189 "TARGET_32BIT && !TARGET_IWMMXT"
2190 {
2191 switch (which_alternative)
2192 {
2193 case 0:
2194 case 1:
2195 case 2:
2196 case 3: /* fall through */
2197 return "#";
2198 case 4: /* fall through */
2199 case 8: return "vand\t%P0, %P1, %P2";
2200 case 5: /* fall through */
2201 case 9: return neon_output_logic_immediate ("vand", &operands[2],
2202 DImode, 1, VALID_NEON_QREG_MODE (DImode));
2203 case 6: return "#";
2204 case 7: return "#";
2205 default: gcc_unreachable ();
2206 }
2207 }
2208 "TARGET_32BIT && !TARGET_IWMMXT"
2209 [(set (match_dup 3) (match_dup 4))
2210 (set (match_dup 5) (match_dup 6))]
2211 "
2212 {
2213 operands[3] = gen_lowpart (SImode, operands[0]);
2214 operands[5] = gen_highpart (SImode, operands[0]);
2215
2216 operands[4] = simplify_gen_binary (AND, SImode,
2217 gen_lowpart (SImode, operands[1]),
2218 gen_lowpart (SImode, operands[2]));
2219 operands[6] = simplify_gen_binary (AND, SImode,
2220 gen_highpart (SImode, operands[1]),
2221 gen_highpart_mode (SImode, DImode, operands[2]));
2222
2223 }"
2224 [(set_attr "neon_type" "*,*,*,*,neon_int_1,neon_int_1,*,*,neon_int_1,neon_int_1")
2225 (set_attr "arch" "*,*,*,*,neon_for_64bits,neon_for_64bits,*,*,
2226 avoid_neon_for_64bits,avoid_neon_for_64bits")
2227 (set_attr "length" "8,8,8,8,*,*,8,8,*,*")
2228 (set (attr "insn_enabled") (if_then_else
2229 (lt (symbol_ref "which_alternative")
2230 (const_int 4))
2231 (if_then_else (match_test "!TARGET_NEON")
2232 (const_string "yes")
2233 (const_string "no"))
2234 (if_then_else (match_test "TARGET_NEON")
2235 (const_string "yes")
2236 (const_string "no"))))]
2237 )
2238
2239 (define_insn_and_split "*anddi_zesidi_di"
2240 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2241 (and:DI (zero_extend:DI
2242 (match_operand:SI 2 "s_register_operand" "r,r"))
2243 (match_operand:DI 1 "s_register_operand" "0,r")))]
2244 "TARGET_32BIT"
2245 "#"
2246 "TARGET_32BIT && reload_completed"
2247 ; The zero extend of operand 2 clears the high word of the output
2248 ; operand.
2249 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2250 (set (match_dup 3) (const_int 0))]
2251 "
2252 {
2253 operands[3] = gen_highpart (SImode, operands[0]);
2254 operands[0] = gen_lowpart (SImode, operands[0]);
2255 operands[1] = gen_lowpart (SImode, operands[1]);
2256 }"
2257 [(set_attr "length" "8")]
2258 )
2259
2260 (define_insn "*anddi_sesdi_di"
2261 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2262 (and:DI (sign_extend:DI
2263 (match_operand:SI 2 "s_register_operand" "r,r"))
2264 (match_operand:DI 1 "s_register_operand" "0,r")))]
2265 "TARGET_32BIT"
2266 "#"
2267 [(set_attr "length" "8")]
2268 )
2269
2270 (define_expand "andsi3"
2271 [(set (match_operand:SI 0 "s_register_operand" "")
2272 (and:SI (match_operand:SI 1 "s_register_operand" "")
2273 (match_operand:SI 2 "reg_or_int_operand" "")))]
2274 "TARGET_EITHER"
2275 "
2276 if (TARGET_32BIT)
2277 {
2278 if (CONST_INT_P (operands[2]))
2279 {
2280 if (INTVAL (operands[2]) == 255 && arm_arch6)
2281 {
2282 operands[1] = convert_to_mode (QImode, operands[1], 1);
2283 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2284 operands[1]));
2285 }
2286 else
2287 arm_split_constant (AND, SImode, NULL_RTX,
2288 INTVAL (operands[2]), operands[0],
2289 operands[1],
2290 optimize && can_create_pseudo_p ());
2291
2292 DONE;
2293 }
2294 }
2295 else /* TARGET_THUMB1 */
2296 {
2297 if (!CONST_INT_P (operands[2]))
2298 {
2299 rtx tmp = force_reg (SImode, operands[2]);
2300 if (rtx_equal_p (operands[0], operands[1]))
2301 operands[2] = tmp;
2302 else
2303 {
2304 operands[2] = operands[1];
2305 operands[1] = tmp;
2306 }
2307 }
2308 else
2309 {
2310 int i;
2311
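	  /* If the complement of the constant fits in eight bits we can
	     load it and use a Thumb-1 BIC instead of an AND.  */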
2312 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2313 {
2314 operands[2] = force_reg (SImode,
2315 GEN_INT (~INTVAL (operands[2])));
2316
2317 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2318
2319 DONE;
2320 }
2321
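	  /* Masks of the form (1 << i) - 1 can be implemented with a
	     zero-extract; masks whose complement has that form can be
	     implemented with a right/left shift pair that clears the
	     low bits.  */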
2322 for (i = 9; i <= 31; i++)
2323 {
2324 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2325 {
2326 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2327 const0_rtx));
2328 DONE;
2329 }
2330 else if ((((HOST_WIDE_INT) 1) << i) - 1
2331 == ~INTVAL (operands[2]))
2332 {
2333 rtx shift = GEN_INT (i);
2334 rtx reg = gen_reg_rtx (SImode);
2335
2336 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2337 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2338
2339 DONE;
2340 }
2341 }
2342
2343 operands[2] = force_reg (SImode, operands[2]);
2344 }
2345 }
2346 "
2347 )
2348
2349 ; ??? Check split length for Thumb-2
2350 (define_insn_and_split "*arm_andsi3_insn"
2351 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
2352 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r,r")
2353 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2354 "TARGET_32BIT"
2355 "@
2356 and%?\\t%0, %1, %2
2357 bic%?\\t%0, %1, #%B2
2358 and%?\\t%0, %1, %2
2359 #"
2360 "TARGET_32BIT
2361 && CONST_INT_P (operands[2])
2362 && !(const_ok_for_arm (INTVAL (operands[2]))
2363 || const_ok_for_arm (~INTVAL (operands[2])))"
2364 [(clobber (const_int 0))]
2365 "
2366 arm_split_constant (AND, SImode, curr_insn,
2367 INTVAL (operands[2]), operands[0], operands[1], 0);
2368 DONE;
2369 "
2370 [(set_attr "length" "4,4,4,16")
2371 (set_attr "predicable" "yes")
2372 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,simple_alu_imm")]
2373 )
2374
2375 (define_insn "*thumb1_andsi3_insn"
2376 [(set (match_operand:SI 0 "register_operand" "=l")
2377 (and:SI (match_operand:SI 1 "register_operand" "%0")
2378 (match_operand:SI 2 "register_operand" "l")))]
2379 "TARGET_THUMB1"
2380 "and\\t%0, %2"
2381 [(set_attr "length" "2")
2382 (set_attr "type" "simple_alu_imm")
2383 (set_attr "conds" "set")])
2384
2385 (define_insn "*andsi3_compare0"
2386 [(set (reg:CC_NOOV CC_REGNUM)
2387 (compare:CC_NOOV
2388 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2389 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2390 (const_int 0)))
2391 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2392 (and:SI (match_dup 1) (match_dup 2)))]
2393 "TARGET_32BIT"
2394 "@
2395 and%.\\t%0, %1, %2
2396 bic%.\\t%0, %1, #%B2
2397 and%.\\t%0, %1, %2"
2398 [(set_attr "conds" "set")
2399 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2400 )
2401
2402 (define_insn "*andsi3_compare0_scratch"
2403 [(set (reg:CC_NOOV CC_REGNUM)
2404 (compare:CC_NOOV
2405 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2406 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2407 (const_int 0)))
2408 (clobber (match_scratch:SI 2 "=X,r,X"))]
2409 "TARGET_32BIT"
2410 "@
2411 tst%?\\t%0, %1
2412 bic%.\\t%2, %0, #%B1
2413 tst%?\\t%0, %1"
2414 [(set_attr "conds" "set")
2415 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2416 )
2417
2418 (define_insn "*zeroextractsi_compare0_scratch"
2419 [(set (reg:CC_NOOV CC_REGNUM)
2420 (compare:CC_NOOV (zero_extract:SI
2421 (match_operand:SI 0 "s_register_operand" "r")
2422 (match_operand 1 "const_int_operand" "n")
2423 (match_operand 2 "const_int_operand" "n"))
2424 (const_int 0)))]
2425 "TARGET_32BIT
2426 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2427 && INTVAL (operands[1]) > 0
2428 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2429 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2430 "*
2431 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2432 << INTVAL (operands[2]));
2433 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2434 return \"\";
2435 "
2436 [(set_attr "conds" "set")
2437 (set_attr "predicable" "yes")
2438 (set_attr "type" "simple_alu_imm")]
2439 )
2440
2441 (define_insn_and_split "*ne_zeroextractsi"
2442 [(set (match_operand:SI 0 "s_register_operand" "=r")
2443 (ne:SI (zero_extract:SI
2444 (match_operand:SI 1 "s_register_operand" "r")
2445 (match_operand:SI 2 "const_int_operand" "n")
2446 (match_operand:SI 3 "const_int_operand" "n"))
2447 (const_int 0)))
2448 (clobber (reg:CC CC_REGNUM))]
2449 "TARGET_32BIT
2450 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2451 && INTVAL (operands[2]) > 0
2452 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2453 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2454 "#"
2455 "TARGET_32BIT
2456 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2457 && INTVAL (operands[2]) > 0
2458 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2459 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2460 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2461 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2462 (const_int 0)))
2463 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2464 (set (match_dup 0)
2465 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2466 (match_dup 0) (const_int 1)))]
2467 "
2468 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2469 << INTVAL (operands[3]));
2470 "
2471 [(set_attr "conds" "clob")
2472 (set (attr "length")
2473 (if_then_else (eq_attr "is_thumb" "yes")
2474 (const_int 12)
2475 (const_int 8)))]
2476 )
2477
2478 (define_insn_and_split "*ne_zeroextractsi_shifted"
2479 [(set (match_operand:SI 0 "s_register_operand" "=r")
2480 (ne:SI (zero_extract:SI
2481 (match_operand:SI 1 "s_register_operand" "r")
2482 (match_operand:SI 2 "const_int_operand" "n")
2483 (const_int 0))
2484 (const_int 0)))
2485 (clobber (reg:CC CC_REGNUM))]
2486 "TARGET_ARM"
2487 "#"
2488 "TARGET_ARM"
2489 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2490 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2491 (const_int 0)))
2492 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2493 (set (match_dup 0)
2494 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2495 (match_dup 0) (const_int 1)))]
2496 "
2497 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2498 "
2499 [(set_attr "conds" "clob")
2500 (set_attr "length" "8")]
2501 )
2502
2503 (define_insn_and_split "*ite_ne_zeroextractsi"
2504 [(set (match_operand:SI 0 "s_register_operand" "=r")
2505 (if_then_else:SI (ne (zero_extract:SI
2506 (match_operand:SI 1 "s_register_operand" "r")
2507 (match_operand:SI 2 "const_int_operand" "n")
2508 (match_operand:SI 3 "const_int_operand" "n"))
2509 (const_int 0))
2510 (match_operand:SI 4 "arm_not_operand" "rIK")
2511 (const_int 0)))
2512 (clobber (reg:CC CC_REGNUM))]
2513 "TARGET_ARM
2514 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2515 && INTVAL (operands[2]) > 0
2516 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2517 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2518 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2519 "#"
2520 "TARGET_ARM
2521 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2522 && INTVAL (operands[2]) > 0
2523 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2524 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2525 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2526 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2527 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2528 (const_int 0)))
2529 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2530 (set (match_dup 0)
2531 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2532 (match_dup 0) (match_dup 4)))]
2533 "
2534 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2535 << INTVAL (operands[3]));
2536 "
2537 [(set_attr "conds" "clob")
2538 (set_attr "length" "8")]
2539 )
2540
2541 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2542 [(set (match_operand:SI 0 "s_register_operand" "=r")
2543 (if_then_else:SI (ne (zero_extract:SI
2544 (match_operand:SI 1 "s_register_operand" "r")
2545 (match_operand:SI 2 "const_int_operand" "n")
2546 (const_int 0))
2547 (const_int 0))
2548 (match_operand:SI 3 "arm_not_operand" "rIK")
2549 (const_int 0)))
2550 (clobber (reg:CC CC_REGNUM))]
2551 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2552 "#"
2553 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2554 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2555 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2556 (const_int 0)))
2557 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2558 (set (match_dup 0)
2559 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2560 (match_dup 0) (match_dup 3)))]
2561 "
2562 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2563 "
2564 [(set_attr "conds" "clob")
2565 (set_attr "length" "8")]
2566 )
2567
2568 (define_split
2569 [(set (match_operand:SI 0 "s_register_operand" "")
2570 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2571 (match_operand:SI 2 "const_int_operand" "")
2572 (match_operand:SI 3 "const_int_operand" "")))
2573 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2574 "TARGET_THUMB1"
2575 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2576 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2577 "{
2578 HOST_WIDE_INT temp = INTVAL (operands[2]);
2579
2580 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2581 operands[3] = GEN_INT (32 - temp);
2582 }"
2583 )
2584
2585 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
2586 (define_split
2587 [(set (match_operand:SI 0 "s_register_operand" "")
2588 (match_operator:SI 1 "shiftable_operator"
2589 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2590 (match_operand:SI 3 "const_int_operand" "")
2591 (match_operand:SI 4 "const_int_operand" ""))
2592 (match_operand:SI 5 "s_register_operand" "")]))
2593 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2594 "TARGET_ARM"
2595 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2596 (set (match_dup 0)
2597 (match_op_dup 1
2598 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2599 (match_dup 5)]))]
2600 "{
2601 HOST_WIDE_INT temp = INTVAL (operands[3]);
2602
2603 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2604 operands[4] = GEN_INT (32 - temp);
2605 }"
2606 )
2607
2608 (define_split
2609 [(set (match_operand:SI 0 "s_register_operand" "")
2610 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2611 (match_operand:SI 2 "const_int_operand" "")
2612 (match_operand:SI 3 "const_int_operand" "")))]
2613 "TARGET_THUMB1"
2614 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2615 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2616 "{
2617 HOST_WIDE_INT temp = INTVAL (operands[2]);
2618
2619 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2620 operands[3] = GEN_INT (32 - temp);
2621 }"
2622 )
2623
2624 (define_split
2625 [(set (match_operand:SI 0 "s_register_operand" "")
2626 (match_operator:SI 1 "shiftable_operator"
2627 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2628 (match_operand:SI 3 "const_int_operand" "")
2629 (match_operand:SI 4 "const_int_operand" ""))
2630 (match_operand:SI 5 "s_register_operand" "")]))
2631 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2632 "TARGET_ARM"
2633 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2634 (set (match_dup 0)
2635 (match_op_dup 1
2636 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2637 (match_dup 5)]))]
2638 "{
2639 HOST_WIDE_INT temp = INTVAL (operands[3]);
2640
2641 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2642 operands[4] = GEN_INT (32 - temp);
2643 }"
2644 )
2645
2646 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2647 ;;; represented by the bitfield, then this will produce incorrect results.
2648 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2649 ;;; which have a real bit-field insert instruction, the truncation happens
2650 ;;; in the bit-field insert instruction itself. Since ARM does not have a
2651 ;;; bit-field insert instruction, we would have to emit code here to truncate
2652 ;;; the value before we insert. This loses some of the advantage of having
2653 ;;; this insv pattern, so this pattern needs to be re-evaluated.
2654
2655 (define_expand "insv"
2656 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2657 (match_operand 1 "general_operand" "")
2658 (match_operand 2 "general_operand" ""))
2659 (match_operand 3 "reg_or_int_operand" ""))]
2660 "TARGET_ARM || arm_arch_thumb2"
2661 "
2662 {
2663 int start_bit = INTVAL (operands[2]);
2664 int width = INTVAL (operands[1]);
2665 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2666 rtx target, subtarget;
2667
2668 if (arm_arch_thumb2)
2669 {
2670 if (unaligned_access && MEM_P (operands[0])
2671 && s_register_operand (operands[3], GET_MODE (operands[3]))
2672 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2673 {
2674 rtx base_addr;
2675
2676 if (BYTES_BIG_ENDIAN)
2677 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2678 - start_bit;
2679
2680 if (width == 32)
2681 {
2682 base_addr = adjust_address (operands[0], SImode,
2683 start_bit / BITS_PER_UNIT);
2684 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2685 }
2686 else
2687 {
2688 rtx tmp = gen_reg_rtx (HImode);
2689
2690 base_addr = adjust_address (operands[0], HImode,
2691 start_bit / BITS_PER_UNIT);
2692 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2693 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2694 }
2695 DONE;
2696 }
2697 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2698 {
2699 bool use_bfi = TRUE;
2700
2701 if (CONST_INT_P (operands[3]))
2702 {
2703 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2704
2705 if (val == 0)
2706 {
2707 emit_insn (gen_insv_zero (operands[0], operands[1],
2708 operands[2]));
2709 DONE;
2710 }
2711
2712 /* See if the set can be done with a single orr instruction. */
2713 if (val == mask && const_ok_for_arm (val << start_bit))
2714 use_bfi = FALSE;
2715 }
2716
2717 if (use_bfi)
2718 {
2719 if (!REG_P (operands[3]))
2720 operands[3] = force_reg (SImode, operands[3]);
2721
2722 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2723 operands[3]));
2724 DONE;
2725 }
2726 }
2727 else
2728 FAIL;
2729 }
2730
2731 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2732 FAIL;
2733
2734 target = copy_rtx (operands[0]);
2735 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2736 subreg as the final target. */
2737 if (GET_CODE (target) == SUBREG)
2738 {
2739 subtarget = gen_reg_rtx (SImode);
2740 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2741 < GET_MODE_SIZE (SImode))
2742 target = SUBREG_REG (target);
2743 }
2744 else
2745 subtarget = target;
2746
2747 if (CONST_INT_P (operands[3]))
2748 {
2749 /* Since we are inserting a known constant, we may be able to
2750 reduce the number of bits that we have to clear so that
2751 the mask becomes simple. */
2752 /* ??? This code does not check to see if the new mask is actually
2753 simpler. It may not be. */
2754 rtx op1 = gen_reg_rtx (SImode);
2755 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2756 start of this pattern. */
2757 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2758 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2759
2760 emit_insn (gen_andsi3 (op1, operands[0],
2761 gen_int_mode (~mask2, SImode)));
2762 emit_insn (gen_iorsi3 (subtarget, op1,
2763 gen_int_mode (op3_value << start_bit, SImode)));
2764 }
2765 else if (start_bit == 0
2766 && !(const_ok_for_arm (mask)
2767 || const_ok_for_arm (~mask)))
2768 {
2769 /* A trick: since we are setting the bottom bits in the word,
2770 we can shift operand[3] up, shift operand[0] down, OR them together
2771 and rotate the result back again. This takes 3 insns, and
2772 the third might be mergeable into another op. */
2773 /* The shift up copes with the possibility that operand[3] is
2774 wider than the bitfield. */
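	 /* A worked example (values invented for illustration): with
	    width == 12 and start_bit == 0, operands[3] << 20 places the new
	    field in the top 12 bits, operands[0] >> 12 discards the old low
	    bits, the OR combines the two, and rotating left by 12 puts the
	    field back at the bottom with the rest of the word unchanged.  */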
2775 rtx op0 = gen_reg_rtx (SImode);
2776 rtx op1 = gen_reg_rtx (SImode);
2777
2778 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2779 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2780 emit_insn (gen_iorsi3 (op1, op1, op0));
2781 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2782 }
2783 else if ((width + start_bit == 32)
2784 && !(const_ok_for_arm (mask)
2785 || const_ok_for_arm (~mask)))
2786 {
2787 /* Similar trick, but slightly less efficient. */
2788
2789 rtx op0 = gen_reg_rtx (SImode);
2790 rtx op1 = gen_reg_rtx (SImode);
2791
2792 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2793 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2794 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2795 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2796 }
2797 else
2798 {
2799 rtx op0 = gen_int_mode (mask, SImode);
2800 rtx op1 = gen_reg_rtx (SImode);
2801 rtx op2 = gen_reg_rtx (SImode);
2802
2803 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2804 {
2805 rtx tmp = gen_reg_rtx (SImode);
2806
2807 emit_insn (gen_movsi (tmp, op0));
2808 op0 = tmp;
2809 }
2810
2811 /* Mask out any bits in operand[3] that are not needed. */
2812 emit_insn (gen_andsi3 (op1, operands[3], op0));
2813
2814 if (CONST_INT_P (op0)
2815 && (const_ok_for_arm (mask << start_bit)
2816 || const_ok_for_arm (~(mask << start_bit))))
2817 {
2818 op0 = gen_int_mode (~(mask << start_bit), SImode);
2819 emit_insn (gen_andsi3 (op2, operands[0], op0));
2820 }
2821 else
2822 {
2823 if (CONST_INT_P (op0))
2824 {
2825 rtx tmp = gen_reg_rtx (SImode);
2826
2827 emit_insn (gen_movsi (tmp, op0));
2828 op0 = tmp;
2829 }
2830
2831 if (start_bit != 0)
2832 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2833
2834 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2835 }
2836
2837 if (start_bit != 0)
2838 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2839
2840 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2841 }
2842
2843 if (subtarget != target)
2844 {
2845 /* If TARGET is still a SUBREG, then it must be wider than a word,
2846 so we must be careful only to set the subword we were asked to. */
2847 if (GET_CODE (target) == SUBREG)
2848 emit_move_insn (target, subtarget);
2849 else
2850 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2851 }
2852
2853 DONE;
2854 }"
2855 )
2856
2857 (define_insn "insv_zero"
2858 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2859 (match_operand:SI 1 "const_int_operand" "M")
2860 (match_operand:SI 2 "const_int_operand" "M"))
2861 (const_int 0))]
2862 "arm_arch_thumb2"
2863 "bfc%?\t%0, %2, %1"
2864 [(set_attr "length" "4")
2865 (set_attr "predicable" "yes")]
2866 )
2867
2868 (define_insn "insv_t2"
2869 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2870 (match_operand:SI 1 "const_int_operand" "M")
2871 (match_operand:SI 2 "const_int_operand" "M"))
2872 (match_operand:SI 3 "s_register_operand" "r"))]
2873 "arm_arch_thumb2"
2874 "bfi%?\t%0, %3, %2, %1"
2875 [(set_attr "length" "4")
2876 (set_attr "predicable" "yes")]
2877 )
2878
2879 ; Constants for op 2 will never be given to these patterns.
2880 (define_insn_and_split "*anddi_notdi_di"
2881 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2882 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2883 (match_operand:DI 2 "s_register_operand" "r,0")))]
2884 "TARGET_32BIT"
2885 "#"
2886 "TARGET_32BIT && reload_completed
2887 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2888 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2889 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2890 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2891 "
2892 {
2893 operands[3] = gen_highpart (SImode, operands[0]);
2894 operands[0] = gen_lowpart (SImode, operands[0]);
2895 operands[4] = gen_highpart (SImode, operands[1]);
2896 operands[1] = gen_lowpart (SImode, operands[1]);
2897 operands[5] = gen_highpart (SImode, operands[2]);
2898 operands[2] = gen_lowpart (SImode, operands[2]);
2899 }"
2900 [(set_attr "length" "8")
2901 (set_attr "predicable" "yes")]
2902 )
2903
2904 (define_insn_and_split "*anddi_notzesidi_di"
2905 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2906 (and:DI (not:DI (zero_extend:DI
2907 (match_operand:SI 2 "s_register_operand" "r,r")))
2908 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2909 "TARGET_32BIT"
2910 "@
2911 bic%?\\t%Q0, %Q1, %2
2912 #"
2913 ; (not (zero_extend ...)) allows us to just copy the high word from
2914 ; operand1 to operand0.
2915 "TARGET_32BIT
2916 && reload_completed
2917 && operands[0] != operands[1]"
2918 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2919 (set (match_dup 3) (match_dup 4))]
2920 "
2921 {
2922 operands[3] = gen_highpart (SImode, operands[0]);
2923 operands[0] = gen_lowpart (SImode, operands[0]);
2924 operands[4] = gen_highpart (SImode, operands[1]);
2925 operands[1] = gen_lowpart (SImode, operands[1]);
2926 }"
2927 [(set_attr "length" "4,8")
2928 (set_attr "predicable" "yes")]
2929 )
2930
2931 (define_insn_and_split "*anddi_notsesidi_di"
2932 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2933 (and:DI (not:DI (sign_extend:DI
2934 (match_operand:SI 2 "s_register_operand" "r,r")))
2935 (match_operand:DI 1 "s_register_operand" "0,r")))]
2936 "TARGET_32BIT"
2937 "#"
2938 "TARGET_32BIT && reload_completed"
2939 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2940 (set (match_dup 3) (and:SI (not:SI
2941 (ashiftrt:SI (match_dup 2) (const_int 31)))
2942 (match_dup 4)))]
2943 "
2944 {
2945 operands[3] = gen_highpart (SImode, operands[0]);
2946 operands[0] = gen_lowpart (SImode, operands[0]);
2947 operands[4] = gen_highpart (SImode, operands[1]);
2948 operands[1] = gen_lowpart (SImode, operands[1]);
2949 }"
2950 [(set_attr "length" "8")
2951 (set_attr "predicable" "yes")]
2952 )
2953
2954 (define_insn "andsi_notsi_si"
2955 [(set (match_operand:SI 0 "s_register_operand" "=r")
2956 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2957 (match_operand:SI 1 "s_register_operand" "r")))]
2958 "TARGET_32BIT"
2959 "bic%?\\t%0, %1, %2"
2960 [(set_attr "predicable" "yes")]
2961 )
2962
2963 (define_insn "thumb1_bicsi3"
2964 [(set (match_operand:SI 0 "register_operand" "=l")
2965 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2966 (match_operand:SI 2 "register_operand" "0")))]
2967 "TARGET_THUMB1"
2968 "bic\\t%0, %1"
2969 [(set_attr "length" "2")
2970 (set_attr "conds" "set")])
2971
2972 (define_insn "andsi_not_shiftsi_si"
2973 [(set (match_operand:SI 0 "s_register_operand" "=r")
2974 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2975 [(match_operand:SI 2 "s_register_operand" "r")
2976 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2977 (match_operand:SI 1 "s_register_operand" "r")))]
2978 "TARGET_ARM"
2979 "bic%?\\t%0, %1, %2%S4"
2980 [(set_attr "predicable" "yes")
2981 (set_attr "shift" "2")
2982 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2983 (const_string "alu_shift")
2984 (const_string "alu_shift_reg")))]
2985 )
2986
2987 (define_insn "*andsi_notsi_si_compare0"
2988 [(set (reg:CC_NOOV CC_REGNUM)
2989 (compare:CC_NOOV
2990 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2991 (match_operand:SI 1 "s_register_operand" "r"))
2992 (const_int 0)))
2993 (set (match_operand:SI 0 "s_register_operand" "=r")
2994 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2995 "TARGET_32BIT"
2996 "bic%.\\t%0, %1, %2"
2997 [(set_attr "conds" "set")]
2998 )
2999
3000 (define_insn "*andsi_notsi_si_compare0_scratch"
3001 [(set (reg:CC_NOOV CC_REGNUM)
3002 (compare:CC_NOOV
3003 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3004 (match_operand:SI 1 "s_register_operand" "r"))
3005 (const_int 0)))
3006 (clobber (match_scratch:SI 0 "=r"))]
3007 "TARGET_32BIT"
3008 "bic%.\\t%0, %1, %2"
3009 [(set_attr "conds" "set")]
3010 )
3011
3012 (define_expand "iordi3"
3013 [(set (match_operand:DI 0 "s_register_operand" "")
3014 (ior:DI (match_operand:DI 1 "s_register_operand" "")
3015 (match_operand:DI 2 "neon_logic_op2" "")))]
3016 "TARGET_32BIT"
3017 ""
3018 )
3019
3020 (define_insn_and_split "*iordi3_insn"
3021 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
3022 (ior:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
3023 (match_operand:DI 2 "arm_iordi_operand_neon" "w ,Dl,r ,r ,Df,Df,w ,Dl")))]
3024 "TARGET_32BIT && !TARGET_IWMMXT"
3025 {
3026 switch (which_alternative)
3027 {
3028 case 0: /* fall through */
3029 case 6: return "vorr\t%P0, %P1, %P2";
3030 case 1: /* fall through */
3031 case 7: return neon_output_logic_immediate ("vorr", &operands[2],
3032 DImode, 0, VALID_NEON_QREG_MODE (DImode));
3033 case 2:
3034 case 3:
3035 case 4:
3036 case 5:
3037 return "#";
3038 default: gcc_unreachable ();
3039 }
3040 }
3041 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3042 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3043 [(set (match_dup 3) (match_dup 4))
3044 (set (match_dup 5) (match_dup 6))]
3045 "
3046 {
3047 operands[3] = gen_lowpart (SImode, operands[0]);
3048 operands[5] = gen_highpart (SImode, operands[0]);
3049
3050 operands[4] = simplify_gen_binary (IOR, SImode,
3051 gen_lowpart (SImode, operands[1]),
3052 gen_lowpart (SImode, operands[2]));
3053 operands[6] = simplify_gen_binary (IOR, SImode,
3054 gen_highpart (SImode, operands[1]),
3055 gen_highpart_mode (SImode, DImode, operands[2]));
3056
3057 }"
3058 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
3059 (set_attr "length" "*,*,8,8,8,8,*,*")
3060 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,avoid_neon_for_64bits,avoid_neon_for_64bits")]
3061 )
3062
3063 (define_insn "*iordi_zesidi_di"
3064 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3065 (ior:DI (zero_extend:DI
3066 (match_operand:SI 2 "s_register_operand" "r,r"))
3067 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3068 "TARGET_32BIT"
3069 "@
3070 orr%?\\t%Q0, %Q1, %2
3071 #"
3072 [(set_attr "length" "4,8")
3073 (set_attr "predicable" "yes")]
3074 )
3075
3076 (define_insn "*iordi_sesidi_di"
3077 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3078 (ior:DI (sign_extend:DI
3079 (match_operand:SI 2 "s_register_operand" "r,r"))
3080 (match_operand:DI 1 "s_register_operand" "0,r")))]
3081 "TARGET_32BIT"
3082 "#"
3083 [(set_attr "length" "8")
3084 (set_attr "predicable" "yes")]
3085 )
3086
3087 (define_expand "iorsi3"
3088 [(set (match_operand:SI 0 "s_register_operand" "")
3089 (ior:SI (match_operand:SI 1 "s_register_operand" "")
3090 (match_operand:SI 2 "reg_or_int_operand" "")))]
3091 "TARGET_EITHER"
3092 "
3093 if (CONST_INT_P (operands[2]))
3094 {
3095 if (TARGET_32BIT)
3096 {
3097 arm_split_constant (IOR, SImode, NULL_RTX,
3098 INTVAL (operands[2]), operands[0], operands[1],
3099 optimize && can_create_pseudo_p ());
3100 DONE;
3101 }
3102 else /* TARGET_THUMB1 */
3103 {
3104 rtx tmp = force_reg (SImode, operands[2]);
3105 if (rtx_equal_p (operands[0], operands[1]))
3106 operands[2] = tmp;
3107 else
3108 {
3109 operands[2] = operands[1];
3110 operands[1] = tmp;
3111 }
3112 }
3113 }
3114 "
3115 )
3116
3117 (define_insn_and_split "*iorsi3_insn"
3118 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
3119 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r,r")
3120 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
3121 "TARGET_32BIT"
3122 "@
3123 orr%?\\t%0, %1, %2
3124 orn%?\\t%0, %1, #%B2
3125 orr%?\\t%0, %1, %2
3126 #"
3127 "TARGET_32BIT
3128 && CONST_INT_P (operands[2])
3129 && !(const_ok_for_arm (INTVAL (operands[2]))
3130 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3131 [(clobber (const_int 0))]
3132 {
3133 arm_split_constant (IOR, SImode, curr_insn,
3134 INTVAL (operands[2]), operands[0], operands[1], 0);
3135 DONE;
3136 }
3137 [(set_attr "length" "4,4,4,16")
3138 (set_attr "arch" "32,t2,32,32")
3139 (set_attr "predicable" "yes")
3140 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,*")]
3141 )
3142
3143 (define_insn "*thumb1_iorsi3_insn"
3144 [(set (match_operand:SI 0 "register_operand" "=l")
3145 (ior:SI (match_operand:SI 1 "register_operand" "%0")
3146 (match_operand:SI 2 "register_operand" "l")))]
3147 "TARGET_THUMB1"
3148 "orr\\t%0, %2"
3149 [(set_attr "length" "2")
3150 (set_attr "conds" "set")])
3151
3152 (define_peephole2
3153 [(match_scratch:SI 3 "r")
3154 (set (match_operand:SI 0 "arm_general_register_operand" "")
3155 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3156 (match_operand:SI 2 "const_int_operand" "")))]
3157 "TARGET_ARM
3158 && !const_ok_for_arm (INTVAL (operands[2]))
3159 && const_ok_for_arm (~INTVAL (operands[2]))"
3160 [(set (match_dup 3) (match_dup 2))
3161 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3162 ""
3163 )
3164
3165 (define_insn "*iorsi3_compare0"
3166 [(set (reg:CC_NOOV CC_REGNUM)
3167 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3168 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3169 (const_int 0)))
3170 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3171 (ior:SI (match_dup 1) (match_dup 2)))]
3172 "TARGET_32BIT"
3173 "orr%.\\t%0, %1, %2"
3174 [(set_attr "conds" "set")
3175 (set_attr "type" "simple_alu_imm,*")]
3176 )
3177
3178 (define_insn "*iorsi3_compare0_scratch"
3179 [(set (reg:CC_NOOV CC_REGNUM)
3180 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3181 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3182 (const_int 0)))
3183 (clobber (match_scratch:SI 0 "=r,r"))]
3184 "TARGET_32BIT"
3185 "orr%.\\t%0, %1, %2"
3186 [(set_attr "conds" "set")
3187 (set_attr "type" "simple_alu_imm,*")]
3188 )
3189
3190 (define_expand "xordi3"
3191 [(set (match_operand:DI 0 "s_register_operand" "")
3192 (xor:DI (match_operand:DI 1 "s_register_operand" "")
3193 (match_operand:DI 2 "s_register_operand" "")))]
3194 "TARGET_32BIT"
3195 ""
3196 )
3197
3198 (define_insn "*xordi3_insn"
3199 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3200 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
3201 (match_operand:DI 2 "s_register_operand" "r,r")))]
3202 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
3203 "#"
3204 [(set_attr "length" "8")
3205 (set_attr "predicable" "yes")]
3206 )
3207
3208 (define_insn "*xordi_zesidi_di"
3209 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3210 (xor:DI (zero_extend:DI
3211 (match_operand:SI 2 "s_register_operand" "r,r"))
3212 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3213 "TARGET_32BIT"
3214 "@
3215 eor%?\\t%Q0, %Q1, %2
3216 #"
3217 [(set_attr "length" "4,8")
3218 (set_attr "predicable" "yes")]
3219 )
3220
3221 (define_insn "*xordi_sesidi_di"
3222 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3223 (xor:DI (sign_extend:DI
3224 (match_operand:SI 2 "s_register_operand" "r,r"))
3225 (match_operand:DI 1 "s_register_operand" "0,r")))]
3226 "TARGET_32BIT"
3227 "#"
3228 [(set_attr "length" "8")
3229 (set_attr "predicable" "yes")]
3230 )
3231
3232 (define_expand "xorsi3"
3233 [(set (match_operand:SI 0 "s_register_operand" "")
3234 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3235 (match_operand:SI 2 "reg_or_int_operand" "")))]
3236 "TARGET_EITHER"
3237 "if (CONST_INT_P (operands[2]))
3238 {
3239 if (TARGET_32BIT)
3240 {
3241 arm_split_constant (XOR, SImode, NULL_RTX,
3242 INTVAL (operands[2]), operands[0], operands[1],
3243 optimize && can_create_pseudo_p ());
3244 DONE;
3245 }
3246 else /* TARGET_THUMB1 */
3247 {
3248 rtx tmp = force_reg (SImode, operands[2]);
3249 if (rtx_equal_p (operands[0], operands[1]))
3250 operands[2] = tmp;
3251 else
3252 {
3253 operands[2] = operands[1];
3254 operands[1] = tmp;
3255 }
3256 }
3257 }"
3258 )
3259
3260 (define_insn_and_split "*arm_xorsi3"
3261 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3262 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
3263 (match_operand:SI 2 "reg_or_int_operand" "I,r,?n")))]
3264 "TARGET_32BIT"
3265 "@
3266 eor%?\\t%0, %1, %2
3267 eor%?\\t%0, %1, %2
3268 #"
3269 "TARGET_32BIT
3270 && CONST_INT_P (operands[2])
3271 && !const_ok_for_arm (INTVAL (operands[2]))"
3272 [(clobber (const_int 0))]
3273 {
3274 arm_split_constant (XOR, SImode, curr_insn,
3275 INTVAL (operands[2]), operands[0], operands[1], 0);
3276 DONE;
3277 }
3278 [(set_attr "length" "4,4,16")
3279 (set_attr "predicable" "yes")
3280 (set_attr "type" "simple_alu_imm,*,*")]
3281 )
3282
3283 (define_insn "*thumb1_xorsi3_insn"
3284 [(set (match_operand:SI 0 "register_operand" "=l")
3285 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3286 (match_operand:SI 2 "register_operand" "l")))]
3287 "TARGET_THUMB1"
3288 "eor\\t%0, %2"
3289 [(set_attr "length" "2")
3290 (set_attr "conds" "set")
3291 (set_attr "type" "simple_alu_imm")]
3292 )
3293
3294 (define_insn "*xorsi3_compare0"
3295 [(set (reg:CC_NOOV CC_REGNUM)
3296 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3297 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3298 (const_int 0)))
3299 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3300 (xor:SI (match_dup 1) (match_dup 2)))]
3301 "TARGET_32BIT"
3302 "eor%.\\t%0, %1, %2"
3303 [(set_attr "conds" "set")
3304 (set_attr "type" "simple_alu_imm,*")]
3305 )
3306
3307 (define_insn "*xorsi3_compare0_scratch"
3308 [(set (reg:CC_NOOV CC_REGNUM)
3309 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3310 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3311 (const_int 0)))]
3312 "TARGET_32BIT"
3313 "teq%?\\t%0, %1"
3314 [(set_attr "conds" "set")
3315 (set_attr "type" "simple_alu_imm, *")]
3316 )
3317
3318 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3319 ; (NOT D) we can sometimes merge the final NOT into one of the following
3320 ; insns.
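; For example, with A, B, C in r1, r2, r3 and D in the scratch register r4,
; this yields roughly:  orr r4, r1, r2; bic r4, r4, r3; mvn r0, r4
; and it is the trailing MVN that we hope to fold into a later instruction.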
3321
3322 (define_split
3323 [(set (match_operand:SI 0 "s_register_operand" "")
3324 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3325 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3326 (match_operand:SI 3 "arm_rhs_operand" "")))
3327 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3328 "TARGET_32BIT"
3329 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3330 (not:SI (match_dup 3))))
3331 (set (match_dup 0) (not:SI (match_dup 4)))]
3332 ""
3333 )
3334
3335 (define_insn_and_split "*andsi_iorsi3_notsi"
3336 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3337 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3338 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3339 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3340 "TARGET_32BIT"
3341 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3342 "&& reload_completed"
3343 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3344 (set (match_dup 0) (and:SI (not:SI (match_dup 3)) (match_dup 0)))]
3345 ""
3346 [(set_attr "length" "8")
3347 (set_attr "ce_count" "2")
3348 (set_attr "predicable" "yes")]
3349 )
3350
3351 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3352 ; insns are available?
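; Each of them rewrites, e.g.,
;   (op (zero_extract X W P) (op (lshiftrt Y (32-W)) Z))
; as
;   T = op (ashift X (32-W-P)) Y;   result = op (lshiftrt T (32-W)) Z
; so the extraction and the existing shift share one final right shift;
; this relies on AND, IOR and XOR distributing over the shift.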
3353 (define_split
3354 [(set (match_operand:SI 0 "s_register_operand" "")
3355 (match_operator:SI 1 "logical_binary_operator"
3356 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3357 (match_operand:SI 3 "const_int_operand" "")
3358 (match_operand:SI 4 "const_int_operand" ""))
3359 (match_operator:SI 9 "logical_binary_operator"
3360 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3361 (match_operand:SI 6 "const_int_operand" ""))
3362 (match_operand:SI 7 "s_register_operand" "")])]))
3363 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3364 "TARGET_32BIT
3365 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3366 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3367 [(set (match_dup 8)
3368 (match_op_dup 1
3369 [(ashift:SI (match_dup 2) (match_dup 4))
3370 (match_dup 5)]))
3371 (set (match_dup 0)
3372 (match_op_dup 1
3373 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3374 (match_dup 7)]))]
3375 "
3376 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3377 ")
3378
3379 (define_split
3380 [(set (match_operand:SI 0 "s_register_operand" "")
3381 (match_operator:SI 1 "logical_binary_operator"
3382 [(match_operator:SI 9 "logical_binary_operator"
3383 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3384 (match_operand:SI 6 "const_int_operand" ""))
3385 (match_operand:SI 7 "s_register_operand" "")])
3386 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3387 (match_operand:SI 3 "const_int_operand" "")
3388 (match_operand:SI 4 "const_int_operand" ""))]))
3389 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3390 "TARGET_32BIT
3391 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3392 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3393 [(set (match_dup 8)
3394 (match_op_dup 1
3395 [(ashift:SI (match_dup 2) (match_dup 4))
3396 (match_dup 5)]))
3397 (set (match_dup 0)
3398 (match_op_dup 1
3399 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3400 (match_dup 7)]))]
3401 "
3402 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3403 ")
3404
3405 (define_split
3406 [(set (match_operand:SI 0 "s_register_operand" "")
3407 (match_operator:SI 1 "logical_binary_operator"
3408 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3409 (match_operand:SI 3 "const_int_operand" "")
3410 (match_operand:SI 4 "const_int_operand" ""))
3411 (match_operator:SI 9 "logical_binary_operator"
3412 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3413 (match_operand:SI 6 "const_int_operand" ""))
3414 (match_operand:SI 7 "s_register_operand" "")])]))
3415 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3416 "TARGET_32BIT
3417 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3418 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3419 [(set (match_dup 8)
3420 (match_op_dup 1
3421 [(ashift:SI (match_dup 2) (match_dup 4))
3422 (match_dup 5)]))
3423 (set (match_dup 0)
3424 (match_op_dup 1
3425 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3426 (match_dup 7)]))]
3427 "
3428 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3429 ")
3430
3431 (define_split
3432 [(set (match_operand:SI 0 "s_register_operand" "")
3433 (match_operator:SI 1 "logical_binary_operator"
3434 [(match_operator:SI 9 "logical_binary_operator"
3435 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3436 (match_operand:SI 6 "const_int_operand" ""))
3437 (match_operand:SI 7 "s_register_operand" "")])
3438 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3439 (match_operand:SI 3 "const_int_operand" "")
3440 (match_operand:SI 4 "const_int_operand" ""))]))
3441 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3442 "TARGET_32BIT
3443 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3444 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3445 [(set (match_dup 8)
3446 (match_op_dup 1
3447 [(ashift:SI (match_dup 2) (match_dup 4))
3448 (match_dup 5)]))
3449 (set (match_dup 0)
3450 (match_op_dup 1
3451 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3452 (match_dup 7)]))]
3453 "
3454 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3455 ")
3456 \f
3457
3458 ;; Minimum and maximum insns
3459
3460 (define_expand "smaxsi3"
3461 [(parallel [
3462 (set (match_operand:SI 0 "s_register_operand" "")
3463 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3464 (match_operand:SI 2 "arm_rhs_operand" "")))
3465 (clobber (reg:CC CC_REGNUM))])]
3466 "TARGET_32BIT"
3467 "
3468 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3469 {
3470 /* No need for a clobber of the condition code register here. */
3471 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3472 gen_rtx_SMAX (SImode, operands[1],
3473 operands[2])));
3474 DONE;
3475 }
3476 ")
3477
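;; smax (x, 0), smax (x, -1) and smin (x, 0) need no comparison: "x, asr #31"
;; is zero when x is non-negative and all ones when x is negative, so a
;; single BIC, ORR or AND with that mask gives the result.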
3478 (define_insn "*smax_0"
3479 [(set (match_operand:SI 0 "s_register_operand" "=r")
3480 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3481 (const_int 0)))]
3482 "TARGET_32BIT"
3483 "bic%?\\t%0, %1, %1, asr #31"
3484 [(set_attr "predicable" "yes")]
3485 )
3486
3487 (define_insn "*smax_m1"
3488 [(set (match_operand:SI 0 "s_register_operand" "=r")
3489 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3490 (const_int -1)))]
3491 "TARGET_32BIT"
3492 "orr%?\\t%0, %1, %1, asr #31"
3493 [(set_attr "predicable" "yes")]
3494 )
3495
3496 (define_insn_and_split "*arm_smax_insn"
3497 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3498 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3499 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3500 (clobber (reg:CC CC_REGNUM))]
3501 "TARGET_ARM"
3502 "#"
3503 ; cmp\\t%1, %2\;movlt\\t%0, %2
3504 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2
3505 "TARGET_ARM"
3506 [(set (reg:CC CC_REGNUM)
3507 (compare:CC (match_dup 1) (match_dup 2)))
3508 (set (match_dup 0)
3509 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3510 (match_dup 1)
3511 (match_dup 2)))]
3512 ""
3513 [(set_attr "conds" "clob")
3514 (set_attr "length" "8,12")]
3515 )
3516
3517 (define_expand "sminsi3"
3518 [(parallel [
3519 (set (match_operand:SI 0 "s_register_operand" "")
3520 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3521 (match_operand:SI 2 "arm_rhs_operand" "")))
3522 (clobber (reg:CC CC_REGNUM))])]
3523 "TARGET_32BIT"
3524 "
3525 if (operands[2] == const0_rtx)
3526 {
3527 /* No need for a clobber of the condition code register here. */
3528 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3529 gen_rtx_SMIN (SImode, operands[1],
3530 operands[2])));
3531 DONE;
3532 }
3533 ")
3534
3535 (define_insn "*smin_0"
3536 [(set (match_operand:SI 0 "s_register_operand" "=r")
3537 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3538 (const_int 0)))]
3539 "TARGET_32BIT"
3540 "and%?\\t%0, %1, %1, asr #31"
3541 [(set_attr "predicable" "yes")]
3542 )
3543
3544 (define_insn_and_split "*arm_smin_insn"
3545 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3546 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3547 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3548 (clobber (reg:CC CC_REGNUM))]
3549 "TARGET_ARM"
3550 "#"
3551 ; cmp\\t%1, %2\;movge\\t%0, %2
3552 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2
3553 "TARGET_ARM"
3554 [(set (reg:CC CC_REGNUM)
3555 (compare:CC (match_dup 1) (match_dup 2)))
3556 (set (match_dup 0)
3557 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3558 (match_dup 1)
3559 (match_dup 2)))]
3560 ""
3561 [(set_attr "conds" "clob")
3562 (set_attr "length" "8,12")]
3563 )
3564
3565 (define_expand "umaxsi3"
3566 [(parallel [
3567 (set (match_operand:SI 0 "s_register_operand" "")
3568 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3569 (match_operand:SI 2 "arm_rhs_operand" "")))
3570 (clobber (reg:CC CC_REGNUM))])]
3571 "TARGET_32BIT"
3572 ""
3573 )
3574
3575 (define_insn_and_split "*arm_umaxsi3"
3576 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3577 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3578 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3579 (clobber (reg:CC CC_REGNUM))]
3580 "TARGET_ARM"
3581 "#"
3582 ; cmp\\t%1, %2\;movcc\\t%0, %2
3583 ; cmp\\t%1, %2\;movcs\\t%0, %1
3584 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2
3585 "TARGET_ARM"
3586 [(set (reg:CC CC_REGNUM)
3587 (compare:CC (match_dup 1) (match_dup 2)))
3588 (set (match_dup 0)
3589 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3590 (match_dup 1)
3591 (match_dup 2)))]
3592 ""
3593 [(set_attr "conds" "clob")
3594 (set_attr "length" "8,8,12")]
3595 )
3596
3597 (define_expand "uminsi3"
3598 [(parallel [
3599 (set (match_operand:SI 0 "s_register_operand" "")
3600 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3601 (match_operand:SI 2 "arm_rhs_operand" "")))
3602 (clobber (reg:CC CC_REGNUM))])]
3603 "TARGET_32BIT"
3604 ""
3605 )
3606
3607 (define_insn_and_split "*arm_uminsi3"
3608 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3609 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3610 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3611 (clobber (reg:CC CC_REGNUM))]
3612 "TARGET_ARM"
3613 "#"
3614 ; cmp\\t%1, %2\;movcs\\t%0, %2
3615 ; cmp\\t%1, %2\;movcc\\t%0, %1
3616 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2
3617 "TARGET_ARM"
3618 [(set (reg:CC CC_REGNUM)
3619 (compare:CC (match_dup 1) (match_dup 2)))
3620 (set (match_dup 0)
3621 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3622 (match_dup 1)
3623 (match_dup 2)))]
3624 ""
3625 [(set_attr "conds" "clob")
3626 (set_attr "length" "8,8,12")]
3627 )
3628
3629 (define_insn "*store_minmaxsi"
3630 [(set (match_operand:SI 0 "memory_operand" "=m")
3631 (match_operator:SI 3 "minmax_operator"
3632 [(match_operand:SI 1 "s_register_operand" "r")
3633 (match_operand:SI 2 "s_register_operand" "r")]))
3634 (clobber (reg:CC CC_REGNUM))]
3635 "TARGET_32BIT && optimize_insn_for_size_p()"
3636 "*
3637 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3638 operands[1], operands[2]);
3639 output_asm_insn (\"cmp\\t%1, %2\", operands);
3640 if (TARGET_THUMB2)
3641 output_asm_insn (\"ite\t%d3\", operands);
3642 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3643 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3644 return \"\";
3645 "
3646 [(set_attr "conds" "clob")
3647 (set (attr "length")
3648 (if_then_else (eq_attr "is_thumb" "yes")
3649 (const_int 14)
3650 (const_int 12)))
3651 (set_attr "type" "store1")]
3652 )
3653
3654 ; Reject the frame pointer in operand[1], since reloading this after
3655 ; it has been eliminated can cause carnage.
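; If the first alternative is used (%0 and %1 are the same register),
; operand 3 of the minmax is zero, and the operator is PLUS, IOR or XOR,
; then the "else" arm would merely recompute %1 op 0 into %0, so only one
; conditional instruction is emitted (an IT rather than an ITE on Thumb-2).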
3656 (define_insn "*minmax_arithsi"
3657 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3658 (match_operator:SI 4 "shiftable_operator"
3659 [(match_operator:SI 5 "minmax_operator"
3660 [(match_operand:SI 2 "s_register_operand" "r,r")
3661 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3662 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3663 (clobber (reg:CC CC_REGNUM))]
3664 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3665 "*
3666 {
3667 enum rtx_code code = GET_CODE (operands[4]);
3668 bool need_else;
3669
3670 if (which_alternative != 0 || operands[3] != const0_rtx
3671 || (code != PLUS && code != IOR && code != XOR))
3672 need_else = true;
3673 else
3674 need_else = false;
3675
3676 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3677 operands[2], operands[3]);
3678 output_asm_insn (\"cmp\\t%2, %3\", operands);
3679 if (TARGET_THUMB2)
3680 {
3681 if (need_else)
3682 output_asm_insn (\"ite\\t%d5\", operands);
3683 else
3684 output_asm_insn (\"it\\t%d5\", operands);
3685 }
3686 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3687 if (need_else)
3688 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3689 return \"\";
3690 }"
3691 [(set_attr "conds" "clob")
3692 (set (attr "length")
3693 (if_then_else (eq_attr "is_thumb" "yes")
3694 (const_int 14)
3695 (const_int 12)))]
3696 )
3697
3698 ; Reject the frame pointer in operand[1], since reloading this after
3699 ; it has been eliminated can cause carnage.
3700 (define_insn_and_split "*minmax_arithsi_non_canon"
3701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3702 (minus:SI
3703 (match_operand:SI 1 "s_register_operand" "0,?r")
3704 (match_operator:SI 4 "minmax_operator"
3705 [(match_operand:SI 2 "s_register_operand" "r,r")
3706 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
3707 (clobber (reg:CC CC_REGNUM))]
3708 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3709 "#"
3710 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3711 [(set (reg:CC CC_REGNUM)
3712 (compare:CC (match_dup 2) (match_dup 3)))
3713
3714 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3715 (set (match_dup 0)
3716 (minus:SI (match_dup 1)
3717 (match_dup 2))))
3718 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3719 (set (match_dup 0)
3720 (minus:SI (match_dup 1)
3721 (match_dup 3))))]
3722 {
3723 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3724 operands[2], operands[3]);
3725 enum rtx_code rc = minmax_code (operands[4]);
3726 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3727 operands[2], operands[3]);
3728
3729 if (mode == CCFPmode || mode == CCFPEmode)
3730 rc = reverse_condition_maybe_unordered (rc);
3731 else
3732 rc = reverse_condition (rc);
3733 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3734 }
3735 [(set_attr "conds" "clob")
3736 (set (attr "length")
3737 (if_then_else (eq_attr "is_thumb" "yes")
3738 (const_int 14)
3739 (const_int 12)))]
3740 )
3741
3742 (define_code_iterator SAT [smin smax])
3743 (define_code_iterator SATrev [smin smax])
3744 (define_code_attr SATlo [(smin "1") (smax "2")])
3745 (define_code_attr SAThi [(smin "2") (smax "1")])
3746
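;; SAT and SATrev both iterate over {smin, smax}; the <SAT:CODE> !=
;; <SATrev:CODE> condition keeps only the two mixed orderings, so these
;; patterns match both smin (smax (x, lo), hi) and smax (smin (x, hi), lo).
;; arm_sat_operator_match checks whether the two bounds describe an SSAT or
;; USAT range and, if so, supplies the immediate bit-width and signedness.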
3747 (define_insn "*satsi_<SAT:code>"
3748 [(set (match_operand:SI 0 "s_register_operand" "=r")
3749 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
3750 (match_operand:SI 1 "const_int_operand" "i"))
3751 (match_operand:SI 2 "const_int_operand" "i")))]
3752 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3753 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3754 {
3755 int mask;
3756 bool signed_sat;
3757 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3758 &mask, &signed_sat))
3759 gcc_unreachable ();
3760
3761 operands[1] = GEN_INT (mask);
3762 if (signed_sat)
3763 return "ssat%?\t%0, %1, %3";
3764 else
3765 return "usat%?\t%0, %1, %3";
3766 }
3767 [(set_attr "predicable" "yes")
3768 (set_attr "insn" "sat")])
3769
3770 (define_insn "*satsi_<SAT:code>_shift"
3771 [(set (match_operand:SI 0 "s_register_operand" "=r")
3772 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
3773 [(match_operand:SI 4 "s_register_operand" "r")
3774 (match_operand:SI 5 "const_int_operand" "i")])
3775 (match_operand:SI 1 "const_int_operand" "i"))
3776 (match_operand:SI 2 "const_int_operand" "i")))]
3777 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3778 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3779 {
3780 int mask;
3781 bool signed_sat;
3782 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3783 &mask, &signed_sat))
3784 gcc_unreachable ();
3785
3786 operands[1] = GEN_INT (mask);
3787 if (signed_sat)
3788 return "ssat%?\t%0, %1, %4%S3";
3789 else
3790 return "usat%?\t%0, %1, %4%S3";
3791 }
3792 [(set_attr "predicable" "yes")
3793 (set_attr "insn" "sat")
3794 (set_attr "shift" "3")
3795 (set_attr "type" "alu_shift")])
3796 \f
3797 ;; Shift and rotation insns
3798
3799 (define_expand "ashldi3"
3800 [(set (match_operand:DI 0 "s_register_operand" "")
3801 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3802 (match_operand:SI 2 "general_operand" "")))]
3803 "TARGET_32BIT"
3804 "
3805 if (TARGET_NEON)
3806 {
3807 /* Delay the decision whether to use NEON or core-regs until
3808 register allocation. */
3809 emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
3810 DONE;
3811 }
3812 else
3813 {
3814 /* Only the NEON case can handle in-memory shift counts. */
3815 if (!reg_or_int_operand (operands[2], SImode))
3816 operands[2] = force_reg (SImode, operands[2]);
3817 }
3818
3819 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3820 ; /* No special preparation statements; expand pattern as above. */
3821 else
3822 {
3823 rtx scratch1, scratch2;
3824
3825 if (CONST_INT_P (operands[2])
3826 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3827 {
3828 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3829 DONE;
3830 }
3831
3832 /* Ideally we would use iwmmxt here if we knew that operands[1]
3833 would end up living in an iwmmxt register anyway.  Otherwise it is
3834 cheaper to generate the alternative code below than to move the
3835 values into iwmmxt regs and back. */
3836
3837 /* If we're optimizing for size, we prefer the libgcc calls. */
3838 if (optimize_function_for_size_p (cfun))
3839 FAIL;
3840
3841 /* Expand operation using core-registers.
3842 'FAIL' would achieve the same thing, but this is a bit smarter. */
3843 scratch1 = gen_reg_rtx (SImode);
3844 scratch2 = gen_reg_rtx (SImode);
3845 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3846 operands[2], scratch1, scratch2);
3847 DONE;
3848 }
3849 "
3850 )
3851
3852 (define_insn_and_split "arm_ashldi3_1bit"
3853 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3854 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3855 (const_int 1)))
3856 (clobber (reg:CC CC_REGNUM))]
3857 "TARGET_32BIT"
3858 "#" ; "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3859 "&& reload_completed"
3860 [(parallel [(set (reg:CC CC_REGNUM)
3861 (compare:CC (ashift:SI (match_dup 1) (const_int 1))
3862 (const_int 0)))
3863 (set (match_dup 0) (ashift:SI (match_dup 1) (const_int 1)))])
3864 (set (match_dup 2) (plus:SI (plus:SI (match_dup 3) (match_dup 3))
3865 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
3866 {
3867 operands[2] = gen_highpart (SImode, operands[0]);
3868 operands[0] = gen_lowpart (SImode, operands[0]);
3869 operands[3] = gen_highpart (SImode, operands[1]);
3870 operands[1] = gen_lowpart (SImode, operands[1]);
3871 }
3872 [(set_attr "conds" "clob")
3873 (set_attr "length" "8")]
3874 )
3875
3876 (define_expand "ashlsi3"
3877 [(set (match_operand:SI 0 "s_register_operand" "")
3878 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3879 (match_operand:SI 2 "arm_rhs_operand" "")))]
3880 "TARGET_EITHER"
3881 "
3882 if (CONST_INT_P (operands[2])
3883 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3884 {
3885 emit_insn (gen_movsi (operands[0], const0_rtx));
3886 DONE;
3887 }
3888 "
3889 )
3890
3891 (define_insn "*thumb1_ashlsi3"
3892 [(set (match_operand:SI 0 "register_operand" "=l,l")
3893 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3894 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3895 "TARGET_THUMB1"
3896 "lsl\\t%0, %1, %2"
3897 [(set_attr "length" "2")
3898 (set_attr "conds" "set")])
3899
3900 (define_expand "ashrdi3"
3901 [(set (match_operand:DI 0 "s_register_operand" "")
3902 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3903 (match_operand:SI 2 "reg_or_int_operand" "")))]
3904 "TARGET_32BIT"
3905 "
3906 if (TARGET_NEON)
3907 {
3908 /* Delay the decision whether to use NEON or core-regs until
3909 register allocation. */
3910 emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
3911 DONE;
3912 }
3913
3914 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3915 ; /* No special preparation statements; expand pattern as above. */
3916 else
3917 {
3918 rtx scratch1, scratch2;
3919
3920 if (CONST_INT_P (operands[2])
3921 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3922 {
3923 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3924 DONE;
3925 }
3926
3927 /* Ideally we would use iwmmxt here if we knew that operands[1]
3928 would end up living in an iwmmxt register anyway.  Otherwise it is
3929 cheaper to generate the alternative code below than to move the
3930 values into iwmmxt regs and back. */
3931
3932 /* If we're optimizing for size, we prefer the libgcc calls. */
3933 if (optimize_function_for_size_p (cfun))
3934 FAIL;
3935
3936 /* Expand operation using core-registers.
3937 'FAIL' would achieve the same thing, but this is a bit smarter. */
3938 scratch1 = gen_reg_rtx (SImode);
3939 scratch2 = gen_reg_rtx (SImode);
3940 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3941 operands[2], scratch1, scratch2);
3942 DONE;
3943 }
3944 "
3945 )
3946
3947 (define_insn_and_split "arm_ashrdi3_1bit"
3948 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3949 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3950 (const_int 1)))
3951 (clobber (reg:CC CC_REGNUM))]
3952 "TARGET_32BIT"
3953 "#" ; "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3954 "&& reload_completed"
3955 [(parallel [(set (reg:CC CC_REGNUM)
3956 (compare:CC (ashiftrt:SI (match_dup 3) (const_int 1))
3957 (const_int 0)))
3958 (set (match_dup 2) (ashiftrt:SI (match_dup 3) (const_int 1)))])
3959 (set (match_dup 0) (unspec:SI [(match_dup 1)
3960 (reg:CC_C CC_REGNUM)]
3961 UNSPEC_RRX))]
3962 {
3963 operands[2] = gen_highpart (SImode, operands[0]);
3964 operands[0] = gen_lowpart (SImode, operands[0]);
3965 operands[3] = gen_highpart (SImode, operands[1]);
3966 operands[1] = gen_lowpart (SImode, operands[1]);
3967 }
3968 [(set_attr "conds" "clob")
3969 (set_attr "length" "8")]
3970 )
3971
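;; RRX shifts its operand right by one bit and shifts the carry flag into
;; bit 31; it provides the low-word half of the one-bit DImode right shifts
;; (arm_ashrdi3_1bit and arm_lshrdi3_1bit).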
3972 (define_insn "*rrx"
3973 [(set (match_operand:SI 0 "s_register_operand" "=r")
3974 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
3975 (reg:CC_C CC_REGNUM)]
3976 UNSPEC_RRX))]
3977 "TARGET_32BIT"
3978 "mov\\t%0, %1, rrx"
3979 [(set_attr "conds" "use")
3980 (set_attr "insn" "mov")
3981 (set_attr "type" "alu_shift")]
3982 )
3983
3984 (define_expand "ashrsi3"
3985 [(set (match_operand:SI 0 "s_register_operand" "")
3986 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3987 (match_operand:SI 2 "arm_rhs_operand" "")))]
3988 "TARGET_EITHER"
3989 "
3990 if (CONST_INT_P (operands[2])
3991 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3992 operands[2] = GEN_INT (31);
3993 "
3994 )
3995
3996 (define_insn "*thumb1_ashrsi3"
3997 [(set (match_operand:SI 0 "register_operand" "=l,l")
3998 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3999 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4000 "TARGET_THUMB1"
4001 "asr\\t%0, %1, %2"
4002 [(set_attr "length" "2")
4003 (set_attr "conds" "set")])
4004
4005 (define_expand "lshrdi3"
4006 [(set (match_operand:DI 0 "s_register_operand" "")
4007 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4008 (match_operand:SI 2 "reg_or_int_operand" "")))]
4009 "TARGET_32BIT"
4010 "
4011 if (TARGET_NEON)
4012 {
4013 /* Delay the decision whether to use NEON or core-regs until
4014 register allocation. */
4015 emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
4016 DONE;
4017 }
4018
4019 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4020 ; /* No special preparation statements; expand pattern as above. */
4021 else
4022 {
4023 rtx scratch1, scratch2;
4024
4025 if (CONST_INT_P (operands[2])
4026 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4027 {
4028 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
4029 DONE;
4030 }
4031
4032 /* Ideally we would use iwmmxt here if we knew that operands[1]
4033 would end up living in an iwmmxt register anyway.  Otherwise it is
4034 cheaper to generate the alternative code below than to move the
4035 values into iwmmxt regs and back. */
4036
4037 /* If we're optimizing for size, we prefer the libgcc calls. */
4038 if (optimize_function_for_size_p (cfun))
4039 FAIL;
4040
4041 /* Expand operation using core-registers.
4042 'FAIL' would achieve the same thing, but this is a bit smarter. */
4043 scratch1 = gen_reg_rtx (SImode);
4044 scratch2 = gen_reg_rtx (SImode);
4045 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4046 operands[2], scratch1, scratch2);
4047 DONE;
4048 }
4049 "
4050 )
4051
4052 (define_insn_and_split "arm_lshrdi3_1bit"
4053 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4054 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4055 (const_int 1)))
4056 (clobber (reg:CC CC_REGNUM))]
4057 "TARGET_32BIT"
4058 "#" ; "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
4059 "&& reload_completed"
4060 [(parallel [(set (reg:CC CC_REGNUM)
4061 (compare:CC (lshiftrt:SI (match_dup 3) (const_int 1))
4062 (const_int 0)))
4063 (set (match_dup 2) (lshiftrt:SI (match_dup 3) (const_int 1)))])
4064 (set (match_dup 0) (unspec:SI [(match_dup 1)
4065 (reg:CC_C CC_REGNUM)]
4066 UNSPEC_RRX))]
4067 {
4068 operands[2] = gen_highpart (SImode, operands[0]);
4069 operands[0] = gen_lowpart (SImode, operands[0]);
4070 operands[3] = gen_highpart (SImode, operands[1]);
4071 operands[1] = gen_lowpart (SImode, operands[1]);
4072 }
4073 [(set_attr "conds" "clob")
4074 (set_attr "length" "8")]
4075 )
4076
4077 (define_expand "lshrsi3"
4078 [(set (match_operand:SI 0 "s_register_operand" "")
4079 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4080 (match_operand:SI 2 "arm_rhs_operand" "")))]
4081 "TARGET_EITHER"
4082 "
4083 if (CONST_INT_P (operands[2])
4084 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4085 {
4086 emit_insn (gen_movsi (operands[0], const0_rtx));
4087 DONE;
4088 }
4089 "
4090 )
4091
4092 (define_insn "*thumb1_lshrsi3"
4093 [(set (match_operand:SI 0 "register_operand" "=l,l")
4094 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4095 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4096 "TARGET_THUMB1"
4097 "lsr\\t%0, %1, %2"
4098 [(set_attr "length" "2")
4099 (set_attr "conds" "set")])
4100
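;; There is no rotate-left instruction, so rotlsi3 is implemented as a
;; rotate right by (32 - n) % 32; for example, a rotate left by 5 becomes
;; ror #27.  For a register count, 32 - n is computed into a new register
;; first.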
4101 (define_expand "rotlsi3"
4102 [(set (match_operand:SI 0 "s_register_operand" "")
4103 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4104 (match_operand:SI 2 "reg_or_int_operand" "")))]
4105 "TARGET_32BIT"
4106 "
4107 if (CONST_INT_P (operands[2]))
4108 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4109 else
4110 {
4111 rtx reg = gen_reg_rtx (SImode);
4112 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4113 operands[2] = reg;
4114 }
4115 "
4116 )
4117
4118 (define_expand "rotrsi3"
4119 [(set (match_operand:SI 0 "s_register_operand" "")
4120 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4121 (match_operand:SI 2 "arm_rhs_operand" "")))]
4122 "TARGET_EITHER"
4123 "
4124 if (TARGET_32BIT)
4125 {
4126 if (CONST_INT_P (operands[2])
4127 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4128 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4129 }
4130 else /* TARGET_THUMB1 */
4131 {
4132 if (CONST_INT_P (operands [2]))
4133 operands [2] = force_reg (SImode, operands[2]);
4134 }
4135 "
4136 )
4137
4138 (define_insn "*thumb1_rotrsi3"
4139 [(set (match_operand:SI 0 "register_operand" "=l")
4140 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
4141 (match_operand:SI 2 "register_operand" "l")))]
4142 "TARGET_THUMB1"
4143 "ror\\t%0, %0, %2"
4144 [(set_attr "length" "2")]
4145 )
4146
4147 (define_insn "*arm_shiftsi3"
4148 [(set (match_operand:SI 0 "s_register_operand" "=r")
4149 (match_operator:SI 3 "shift_operator"
4150 [(match_operand:SI 1 "s_register_operand" "r")
4151 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
4152 "TARGET_32BIT"
4153 "* return arm_output_shift(operands, 0);"
4154 [(set_attr "predicable" "yes")
4155 (set_attr "shift" "1")
4156 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4157 (const_string "alu_shift")
4158 (const_string "alu_shift_reg")))]
4159 )
4160
4161 (define_insn "*shiftsi3_compare"
4162 [(set (reg:CC CC_REGNUM)
4163 (compare:CC (match_operator:SI 3 "shift_operator"
4164 [(match_operand:SI 1 "s_register_operand" "r")
4165 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4166 (const_int 0)))
4167 (set (match_operand:SI 0 "s_register_operand" "=r")
4168 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4169 "TARGET_32BIT"
4170 "* return arm_output_shift(operands, 1);"
4171 [(set_attr "conds" "set")
4172 (set_attr "shift" "1")
4173 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4174 (const_string "alu_shift")
4175 (const_string "alu_shift_reg")))]
4176 )
4177
4178 (define_insn "*shiftsi3_compare0"
4179 [(set (reg:CC_NOOV CC_REGNUM)
4180 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4181 [(match_operand:SI 1 "s_register_operand" "r")
4182 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4183 (const_int 0)))
4184 (set (match_operand:SI 0 "s_register_operand" "=r")
4185 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4186 "TARGET_32BIT"
4187 "* return arm_output_shift(operands, 1);"
4188 [(set_attr "conds" "set")
4189 (set_attr "shift" "1")
4190 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4191 (const_string "alu_shift")
4192 (const_string "alu_shift_reg")))]
4193 )
4194
4195 (define_insn "*shiftsi3_compare0_scratch"
4196 [(set (reg:CC_NOOV CC_REGNUM)
4197 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4198 [(match_operand:SI 1 "s_register_operand" "r")
4199 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4200 (const_int 0)))
4201 (clobber (match_scratch:SI 0 "=r"))]
4202 "TARGET_32BIT"
4203 "* return arm_output_shift(operands, 1);"
4204 [(set_attr "conds" "set")
4205 (set_attr "shift" "1")]
4206 )
4207
4208 (define_insn "*not_shiftsi"
4209 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4210 (not:SI (match_operator:SI 3 "shift_operator"
4211 [(match_operand:SI 1 "s_register_operand" "r,r")
4212 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4213 "TARGET_32BIT"
4214 "mvn%?\\t%0, %1%S3"
4215 [(set_attr "predicable" "yes")
4216 (set_attr "shift" "1")
4217 (set_attr "insn" "mvn")
4218 (set_attr "arch" "32,a")
4219 (set_attr "type" "alu_shift,alu_shift_reg")])
4220
4221 (define_insn "*not_shiftsi_compare0"
4222 [(set (reg:CC_NOOV CC_REGNUM)
4223 (compare:CC_NOOV
4224 (not:SI (match_operator:SI 3 "shift_operator"
4225 [(match_operand:SI 1 "s_register_operand" "r,r")
4226 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4227 (const_int 0)))
4228 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4229 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4230 "TARGET_32BIT"
4231 "mvn%.\\t%0, %1%S3"
4232 [(set_attr "conds" "set")
4233 (set_attr "shift" "1")
4234 (set_attr "insn" "mvn")
4235 (set_attr "arch" "32,a")
4236 (set_attr "type" "alu_shift,alu_shift_reg")])
4237
4238 (define_insn "*not_shiftsi_compare0_scratch"
4239 [(set (reg:CC_NOOV CC_REGNUM)
4240 (compare:CC_NOOV
4241 (not:SI (match_operator:SI 3 "shift_operator"
4242 [(match_operand:SI 1 "s_register_operand" "r,r")
4243 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4244 (const_int 0)))
4245 (clobber (match_scratch:SI 0 "=r,r"))]
4246 "TARGET_32BIT"
4247 "mvn%.\\t%0, %1%S3"
4248 [(set_attr "conds" "set")
4249 (set_attr "shift" "1")
4250 (set_attr "insn" "mvn")
4251 (set_attr "arch" "32,a")
4252 (set_attr "type" "alu_shift,alu_shift_reg")])
4253
4254 ;; We don't really have extzv, but defining this using shifts helps
4255 ;; to reduce register pressure later on.
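;; Extracting WIDTH bits starting at BITPOS becomes a left shift by
;; (32 - WIDTH - BITPOS) followed by a logical right shift by (32 - WIDTH);
;; when the field already ends at bit 31, only the right shift is needed.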
4256
4257 (define_expand "extzv"
4258 [(set (match_operand 0 "s_register_operand" "")
4259 (zero_extract (match_operand 1 "nonimmediate_operand" "")
4260 (match_operand 2 "const_int_operand" "")
4261 (match_operand 3 "const_int_operand" "")))]
4262 "TARGET_THUMB1 || arm_arch_thumb2"
4263 "
4264 {
4265 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4266 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4267
4268 if (arm_arch_thumb2)
4269 {
4270 HOST_WIDE_INT width = INTVAL (operands[2]);
4271 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4272
4273 if (unaligned_access && MEM_P (operands[1])
4274 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4275 {
4276 rtx base_addr;
4277
4278 if (BYTES_BIG_ENDIAN)
4279 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4280 - bitpos;
4281
4282 if (width == 32)
4283 {
4284 base_addr = adjust_address (operands[1], SImode,
4285 bitpos / BITS_PER_UNIT);
4286 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4287 }
4288 else
4289 {
4290 rtx dest = operands[0];
4291 rtx tmp = gen_reg_rtx (SImode);
4292
4293 /* We may get a paradoxical subreg here. Strip it off. */
4294 if (GET_CODE (dest) == SUBREG
4295 && GET_MODE (dest) == SImode
4296 && GET_MODE (SUBREG_REG (dest)) == HImode)
4297 dest = SUBREG_REG (dest);
4298
4299 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4300 FAIL;
4301
4302 base_addr = adjust_address (operands[1], HImode,
4303 bitpos / BITS_PER_UNIT);
4304 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4305 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4306 }
4307 DONE;
4308 }
4309 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4310 {
4311 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4312 operands[3]));
4313 DONE;
4314 }
4315 else
4316 FAIL;
4317 }
4318
4319 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4320 FAIL;
4321
4322 operands[3] = GEN_INT (rshift);
4323
4324 if (lshift == 0)
4325 {
4326 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4327 DONE;
4328 }
4329
4330 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4331 operands[3], gen_reg_rtx (SImode)));
4332 DONE;
4333 }"
4334 )
4335
4336 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4337
4338 (define_expand "extzv_t1"
4339 [(set (match_operand:SI 4 "s_register_operand" "")
4340 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
4341 (match_operand:SI 2 "const_int_operand" "")))
4342 (set (match_operand:SI 0 "s_register_operand" "")
4343 (lshiftrt:SI (match_dup 4)
4344 (match_operand:SI 3 "const_int_operand" "")))]
4345 "TARGET_THUMB1"
4346 "")
4347
4348 (define_expand "extv"
4349 [(set (match_operand 0 "s_register_operand" "")
4350 (sign_extract (match_operand 1 "nonimmediate_operand" "")
4351 (match_operand 2 "const_int_operand" "")
4352 (match_operand 3 "const_int_operand" "")))]
4353 "arm_arch_thumb2"
4354 {
4355 HOST_WIDE_INT width = INTVAL (operands[2]);
4356 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4357
4358 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4359 && (bitpos % BITS_PER_UNIT) == 0)
4360 {
4361 rtx base_addr;
4362
4363 if (BYTES_BIG_ENDIAN)
4364 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4365
4366 if (width == 32)
4367 {
4368 base_addr = adjust_address (operands[1], SImode,
4369 bitpos / BITS_PER_UNIT);
4370 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4371 }
4372 else
4373 {
4374 rtx dest = operands[0];
4375 rtx tmp = gen_reg_rtx (SImode);
4376
4377 /* We may get a paradoxical subreg here. Strip it off. */
4378 if (GET_CODE (dest) == SUBREG
4379 && GET_MODE (dest) == SImode
4380 && GET_MODE (SUBREG_REG (dest)) == HImode)
4381 dest = SUBREG_REG (dest);
4382
4383 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4384 FAIL;
4385
4386 base_addr = adjust_address (operands[1], HImode,
4387 bitpos / BITS_PER_UNIT);
4388 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4389 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4390 }
4391
4392 DONE;
4393 }
4394 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4395 FAIL;
4396 else if (GET_MODE (operands[0]) == SImode
4397 && GET_MODE (operands[1]) == SImode)
4398 {
4399 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4400 operands[3]));
4401 DONE;
4402 }
4403
4404 FAIL;
4405 })
4406
4407 ; Helper to expand register forms of extv with the proper modes.
4408
4409 (define_expand "extv_regsi"
4410 [(set (match_operand:SI 0 "s_register_operand" "")
4411 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4412 (match_operand 2 "const_int_operand" "")
4413 (match_operand 3 "const_int_operand" "")))]
4414 ""
4415 {
4416 })
4417
4418 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4419
4420 (define_insn "unaligned_loadsi"
4421 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4422 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4423 UNSPEC_UNALIGNED_LOAD))]
4424 "unaligned_access && TARGET_32BIT"
4425 "ldr%?\t%0, %1\t@ unaligned"
4426 [(set_attr "arch" "t2,any")
4427 (set_attr "length" "2,4")
4428 (set_attr "predicable" "yes")
4429 (set_attr "type" "load1")])
4430
4431 (define_insn "unaligned_loadhis"
4432 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4433 (sign_extend:SI
4434 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4435 UNSPEC_UNALIGNED_LOAD)))]
4436 "unaligned_access && TARGET_32BIT"
4437 "ldr%(sh%)\t%0, %1\t@ unaligned"
4438 [(set_attr "arch" "t2,any")
4439 (set_attr "length" "2,4")
4440 (set_attr "predicable" "yes")
4441 (set_attr "type" "load_byte")])
4442
4443 (define_insn "unaligned_loadhiu"
4444 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4445 (zero_extend:SI
4446 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4447 UNSPEC_UNALIGNED_LOAD)))]
4448 "unaligned_access && TARGET_32BIT"
4449 "ldr%(h%)\t%0, %1\t@ unaligned"
4450 [(set_attr "arch" "t2,any")
4451 (set_attr "length" "2,4")
4452 (set_attr "predicable" "yes")
4453 (set_attr "type" "load_byte")])
4454
4455 (define_insn "unaligned_storesi"
4456 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4457 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4458 UNSPEC_UNALIGNED_STORE))]
4459 "unaligned_access && TARGET_32BIT"
4460 "str%?\t%1, %0\t@ unaligned"
4461 [(set_attr "arch" "t2,any")
4462 (set_attr "length" "2,4")
4463 (set_attr "predicable" "yes")
4464 (set_attr "type" "store1")])
4465
4466 (define_insn "unaligned_storehi"
4467 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4468 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4469 UNSPEC_UNALIGNED_STORE))]
4470 "unaligned_access && TARGET_32BIT"
4471 "str%(h%)\t%1, %0\t@ unaligned"
4472 [(set_attr "arch" "t2,any")
4473 (set_attr "length" "2,4")
4474 (set_attr "predicable" "yes")
4475 (set_attr "type" "store1")])
4476
4477 ;; Unaligned double-word load and store.
4478 ;; Split after reload into two unaligned single-word accesses.
4479 ;; Splitting only after reload prevents lower_subreg from splitting some
4480 ;; other aligned double-word accesses too early.  Used for internal memcpy.
4481
4482 (define_insn_and_split "unaligned_loaddi"
4483 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
4484 (unspec:DI [(match_operand:DI 1 "memory_operand" "o,o")]
4485 UNSPEC_UNALIGNED_LOAD))]
4486 "unaligned_access && TARGET_32BIT"
4487 "#"
4488 "&& reload_completed"
4489 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_LOAD))
4490 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_LOAD))]
4491 {
4492 operands[2] = gen_highpart (SImode, operands[0]);
4493 operands[0] = gen_lowpart (SImode, operands[0]);
4494 operands[3] = gen_highpart (SImode, operands[1]);
4495 operands[1] = gen_lowpart (SImode, operands[1]);
4496
4497 /* If the first destination register overlaps with the base address,
4498 swap the order in which the loads are emitted. */
4499 if (reg_overlap_mentioned_p (operands[0], operands[1]))
4500 {
4501 rtx tmp = operands[1];
4502 operands[1] = operands[3];
4503 operands[3] = tmp;
4504 tmp = operands[0];
4505 operands[0] = operands[2];
4506 operands[2] = tmp;
4507 }
4508 }
4509 [(set_attr "arch" "t2,any")
4510 (set_attr "length" "4,8")
4511 (set_attr "predicable" "yes")
4512 (set_attr "type" "load2")])
4513
4514 (define_insn_and_split "unaligned_storedi"
4515 [(set (match_operand:DI 0 "memory_operand" "=o,o")
4516 (unspec:DI [(match_operand:DI 1 "s_register_operand" "l,r")]
4517 UNSPEC_UNALIGNED_STORE))]
4518 "unaligned_access && TARGET_32BIT"
4519 "#"
4520 "&& reload_completed"
4521 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_STORE))
4522 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_STORE))]
4523 {
4524 operands[2] = gen_highpart (SImode, operands[0]);
4525 operands[0] = gen_lowpart (SImode, operands[0]);
4526 operands[3] = gen_highpart (SImode, operands[1]);
4527 operands[1] = gen_lowpart (SImode, operands[1]);
4528 }
4529 [(set_attr "arch" "t2,any")
4530 (set_attr "length" "4,8")
4531 (set_attr "predicable" "yes")
4532 (set_attr "type" "store2")])
4533
4534
4535 (define_insn "*extv_reg"
4536 [(set (match_operand:SI 0 "s_register_operand" "=r")
4537 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4538 (match_operand:SI 2 "const_int_operand" "M")
4539 (match_operand:SI 3 "const_int_operand" "M")))]
4540 "arm_arch_thumb2"
4541 "sbfx%?\t%0, %1, %3, %2"
4542 [(set_attr "length" "4")
4543 (set_attr "predicable" "yes")]
4544 )
4545
4546 (define_insn "extzv_t2"
4547 [(set (match_operand:SI 0 "s_register_operand" "=r")
4548 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4549 (match_operand:SI 2 "const_int_operand" "M")
4550 (match_operand:SI 3 "const_int_operand" "M")))]
4551 "arm_arch_thumb2"
4552 "ubfx%?\t%0, %1, %3, %2"
4553 [(set_attr "length" "4")
4554 (set_attr "predicable" "yes")]
4555 )
4556
4557
4558 ;; Division instructions
4559 (define_insn "divsi3"
4560 [(set (match_operand:SI 0 "s_register_operand" "=r")
4561 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4562 (match_operand:SI 2 "s_register_operand" "r")))]
4563 "TARGET_IDIV"
4564 "sdiv%?\t%0, %1, %2"
4565 [(set_attr "predicable" "yes")
4566 (set_attr "insn" "sdiv")]
4567 )
4568
4569 (define_insn "udivsi3"
4570 [(set (match_operand:SI 0 "s_register_operand" "=r")
4571 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4572 (match_operand:SI 2 "s_register_operand" "r")))]
4573 "TARGET_IDIV"
4574 "udiv%?\t%0, %1, %2"
4575 [(set_attr "predicable" "yes")
4576 (set_attr "insn" "udiv")]
4577 )
4578
4579 \f
4580 ;; Unary arithmetic insns
4581
4582 (define_expand "negdi2"
4583 [(parallel
4584 [(set (match_operand:DI 0 "s_register_operand" "")
4585 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4586 (clobber (reg:CC CC_REGNUM))])]
4587 "TARGET_EITHER"
4588 {
4589 if (TARGET_NEON)
4590 {
4591 emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4592 DONE;
4593 }
4594 }
4595 )
4596
4597 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4598 ;; The first alternative allows the common case of a *full* overlap.
4599 (define_insn_and_split "*arm_negdi2"
4600 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4601 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4602 (clobber (reg:CC CC_REGNUM))]
4603 "TARGET_ARM"
4604 "#" ; "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4605 "&& reload_completed"
4606 [(parallel [(set (reg:CC CC_REGNUM)
4607 (compare:CC (const_int 0) (match_dup 1)))
4608 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4609 (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
4610 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4611 {
4612 operands[2] = gen_highpart (SImode, operands[0]);
4613 operands[0] = gen_lowpart (SImode, operands[0]);
4614 operands[3] = gen_highpart (SImode, operands[1]);
4615 operands[1] = gen_lowpart (SImode, operands[1]);
4616 }
4617 [(set_attr "conds" "clob")
4618 (set_attr "length" "8")]
4619 )
4620
4621 (define_insn "*thumb1_negdi2"
4622 [(set (match_operand:DI 0 "register_operand" "=&l")
4623 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4624 (clobber (reg:CC CC_REGNUM))]
4625 "TARGET_THUMB1"
4626 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4627 [(set_attr "length" "6")]
4628 )
4629
4630 (define_expand "negsi2"
4631 [(set (match_operand:SI 0 "s_register_operand" "")
4632 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4633 "TARGET_EITHER"
4634 ""
4635 )
4636
4637 (define_insn "*arm_negsi2"
4638 [(set (match_operand:SI 0 "s_register_operand" "=r")
4639 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4640 "TARGET_32BIT"
4641 "rsb%?\\t%0, %1, #0"
4642 [(set_attr "predicable" "yes")]
4643 )
4644
4645 (define_insn "*thumb1_negsi2"
4646 [(set (match_operand:SI 0 "register_operand" "=l")
4647 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4648 "TARGET_THUMB1"
4649 "neg\\t%0, %1"
4650 [(set_attr "length" "2")]
4651 )
4652
4653 (define_expand "negsf2"
4654 [(set (match_operand:SF 0 "s_register_operand" "")
4655 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4656 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4657 ""
4658 )
4659
4660 (define_expand "negdf2"
4661 [(set (match_operand:DF 0 "s_register_operand" "")
4662 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4663 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4664 "")
4665
4666 ;; Negate an extended 32-bit value.
4667 (define_insn_and_split "*negdi_extendsidi"
4668 [(set (match_operand:DI 0 "s_register_operand" "=r,&r,l,&l")
4669 (neg:DI (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r,0,l"))))
4670 (clobber (reg:CC CC_REGNUM))]
4671 "TARGET_32BIT"
4672 "#" ; rsb\\t%Q0, %1, #0\;asr\\t%R0, %Q0, #31
4673 "&& reload_completed"
4674 [(const_int 0)]
4675 {
4676 operands[2] = gen_highpart (SImode, operands[0]);
4677 operands[0] = gen_lowpart (SImode, operands[0]);
4678 rtx tmp = gen_rtx_SET (VOIDmode,
4679 operands[0],
4680 gen_rtx_MINUS (SImode,
4681 const0_rtx,
4682 operands[1]));
4683 if (TARGET_ARM)
4684 {
4685 emit_insn (tmp);
4686 }
4687 else
4688 {
4689 /* Set the flags, to emit the short encoding in Thumb2. */
4690 rtx flags = gen_rtx_SET (VOIDmode,
4691 gen_rtx_REG (CCmode, CC_REGNUM),
4692 gen_rtx_COMPARE (CCmode,
4693 const0_rtx,
4694 operands[1]));
4695 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4696 gen_rtvec (2,
4697 flags,
4698 tmp)));
4699 }
4700 emit_insn (gen_rtx_SET (VOIDmode,
4701 operands[2],
4702 gen_rtx_ASHIFTRT (SImode,
4703 operands[0],
4704 GEN_INT (31))));
4705 DONE;
4706 }
4707 [(set_attr "length" "8,8,4,4")
4708 (set_attr "arch" "a,a,t2,t2")]
4709 )
4710
4711 (define_insn_and_split "*negdi_zero_extendsidi"
4712 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4713 (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
4714 (clobber (reg:CC CC_REGNUM))]
4715 "TARGET_32BIT"
4716 "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
4717 ;; Don't care what register is input to sbc,
4718 ;; since we just need to propagate the carry.
4719 "&& reload_completed"
4720 [(parallel [(set (reg:CC CC_REGNUM)
4721 (compare:CC (const_int 0) (match_dup 1)))
4722 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4723 (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
4724 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4725 {
4726 operands[2] = gen_highpart (SImode, operands[0]);
4727 operands[0] = gen_lowpart (SImode, operands[0]);
4728 }
4729 [(set_attr "conds" "clob")
4730 (set_attr "length" "8")] ;; length in thumb is 4
4731 )
4732
4733 ;; abssi2 doesn't really clobber the condition codes if a different register
4734 ;; is being set. To keep things simple, assume during rtl manipulations that
4735 ;; it does, but tell the final scan operator the truth. Similarly for
4736 ;; (neg (abs...))
4737
4738 (define_expand "abssi2"
4739 [(parallel
4740 [(set (match_operand:SI 0 "s_register_operand" "")
4741 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4742 (clobber (match_dup 2))])]
4743 "TARGET_EITHER"
4744 "
4745 if (TARGET_THUMB1)
4746 operands[2] = gen_rtx_SCRATCH (SImode);
4747 else
4748 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4749 ")
4750
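;; When source and destination differ (second alternative), the split uses
;; the branch-free identity abs (x) = (x ^ (x >> 31)) - (x >> 31), with an
;; arithmetic shift; when they are the same register it uses a compare and a
;; conditional RSB instead.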
4751 (define_insn_and_split "*arm_abssi2"
4752 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4753 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4754 (clobber (reg:CC CC_REGNUM))]
4755 "TARGET_ARM"
4756 "#"
4757 "&& reload_completed"
4758 [(const_int 0)]
4759 {
4760 /* if (which_alternative == 0) */
4761 if (REGNO(operands[0]) == REGNO(operands[1]))
4762 {
4763 /* Emit the pattern:
4764 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4765 [(set (reg:CC CC_REGNUM)
4766 (compare:CC (match_dup 0) (const_int 0)))
4767 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4768 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4769 */
4770 emit_insn (gen_rtx_SET (VOIDmode,
4771 gen_rtx_REG (CCmode, CC_REGNUM),
4772 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4773 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4774 (gen_rtx_LT (SImode,
4775 gen_rtx_REG (CCmode, CC_REGNUM),
4776 const0_rtx)),
4777 (gen_rtx_SET (VOIDmode,
4778 operands[0],
4779 (gen_rtx_MINUS (SImode,
4780 const0_rtx,
4781 operands[1]))))));
4782 DONE;
4783 }
4784 else
4785 {
4786 /* Emit the pattern:
4787 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4788 [(set (match_dup 0)
4789 (xor:SI (match_dup 1)
4790 (ashiftrt:SI (match_dup 1) (const_int 31))))
4791 (set (match_dup 0)
4792 (minus:SI (match_dup 0)
4793 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4794 */
4795 emit_insn (gen_rtx_SET (VOIDmode,
4796 operands[0],
4797 gen_rtx_XOR (SImode,
4798 gen_rtx_ASHIFTRT (SImode,
4799 operands[1],
4800 GEN_INT (31)),
4801 operands[1])));
4802 emit_insn (gen_rtx_SET (VOIDmode,
4803 operands[0],
4804 gen_rtx_MINUS (SImode,
4805 operands[0],
4806 gen_rtx_ASHIFTRT (SImode,
4807 operands[1],
4808 GEN_INT (31)))));
4809 DONE;
4810 }
4811 }
4812 [(set_attr "conds" "clob,*")
4813 (set_attr "shift" "1")
4814 (set_attr "predicable" "no, yes")
4815 (set_attr "length" "8")]
4816 )
4817
4818 (define_insn_and_split "*thumb1_abssi2"
4819 [(set (match_operand:SI 0 "s_register_operand" "=l")
4820 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4821 (clobber (match_scratch:SI 2 "=&l"))]
4822 "TARGET_THUMB1"
4823 "#"
4824 "TARGET_THUMB1 && reload_completed"
4825 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4826 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4827 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4828 ""
4829 [(set_attr "length" "6")]
4830 )
4831
4832 (define_insn_and_split "*arm_neg_abssi2"
4833 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4834 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4835 (clobber (reg:CC CC_REGNUM))]
4836 "TARGET_ARM"
4837 "#"
4838 "&& reload_completed"
4839 [(const_int 0)]
4840 {
4841 /* if (which_alternative == 0) */
4842 if (REGNO (operands[0]) == REGNO (operands[1]))
4843 {
4844 /* Emit the pattern:
4845 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4846 */
4847 emit_insn (gen_rtx_SET (VOIDmode,
4848 gen_rtx_REG (CCmode, CC_REGNUM),
4849 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4850 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4851 gen_rtx_GT (SImode,
4852 gen_rtx_REG (CCmode, CC_REGNUM),
4853 const0_rtx),
4854 gen_rtx_SET (VOIDmode,
4855 operands[0],
4856 (gen_rtx_MINUS (SImode,
4857 const0_rtx,
4858 operands[1])))));
4859 }
4860 else
4861 {
4862 /* Emit the pattern:
4863 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4864 */
4865 emit_insn (gen_rtx_SET (VOIDmode,
4866 operands[0],
4867 gen_rtx_XOR (SImode,
4868 gen_rtx_ASHIFTRT (SImode,
4869 operands[1],
4870 GEN_INT (31)),
4871 operands[1])));
4872 emit_insn (gen_rtx_SET (VOIDmode,
4873 operands[0],
4874 gen_rtx_MINUS (SImode,
4875 gen_rtx_ASHIFTRT (SImode,
4876 operands[1],
4877 GEN_INT (31)),
4878 operands[0])));
4879 }
4880 DONE;
4881 }
4882 [(set_attr "conds" "clob,*")
4883 (set_attr "shift" "1")
4884 (set_attr "predicable" "no, yes")
4885 (set_attr "length" "8")]
4886 )
4887
4888 (define_insn_and_split "*thumb1_neg_abssi2"
4889 [(set (match_operand:SI 0 "s_register_operand" "=l")
4890 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4891 (clobber (match_scratch:SI 2 "=&l"))]
4892 "TARGET_THUMB1"
4893 "#"
4894 "TARGET_THUMB1 && reload_completed"
4895 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4896 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4897 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4898 ""
4899 [(set_attr "length" "6")]
4900 )
4901
4902 (define_expand "abssf2"
4903 [(set (match_operand:SF 0 "s_register_operand" "")
4904 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4905 "TARGET_32BIT && TARGET_HARD_FLOAT"
4906 "")
4907
4908 (define_expand "absdf2"
4909 [(set (match_operand:DF 0 "s_register_operand" "")
4910 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4911 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4912 "")
4913
4914 (define_expand "sqrtsf2"
4915 [(set (match_operand:SF 0 "s_register_operand" "")
4916 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4917 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4918 "")
4919
4920 (define_expand "sqrtdf2"
4921 [(set (match_operand:DF 0 "s_register_operand" "")
4922 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4923 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4924 "")
4925
4926 (define_insn_and_split "one_cmpldi2"
4927 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
4928 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
4929 "TARGET_32BIT"
4930 "@
4931 vmvn\t%P0, %P1
4932 #
4933 #
4934 vmvn\t%P0, %P1"
4935 "TARGET_32BIT && reload_completed
4936 && arm_general_register_operand (operands[0], DImode)"
4937 [(set (match_dup 0) (not:SI (match_dup 1)))
4938 (set (match_dup 2) (not:SI (match_dup 3)))]
4939 "
4940 {
4941 operands[2] = gen_highpart (SImode, operands[0]);
4942 operands[0] = gen_lowpart (SImode, operands[0]);
4943 operands[3] = gen_highpart (SImode, operands[1]);
4944 operands[1] = gen_lowpart (SImode, operands[1]);
4945 }"
4946 [(set_attr "length" "*,8,8,*")
4947 (set_attr "predicable" "no,yes,yes,no")
4948 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
4949 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")]
4950 )
4951
4952 (define_expand "one_cmplsi2"
4953 [(set (match_operand:SI 0 "s_register_operand" "")
4954 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
4955 "TARGET_EITHER"
4956 ""
4957 )
4958
4959 (define_insn "*arm_one_cmplsi2"
4960 [(set (match_operand:SI 0 "s_register_operand" "=r")
4961 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
4962 "TARGET_32BIT"
4963 "mvn%?\\t%0, %1"
4964 [(set_attr "predicable" "yes")
4965 (set_attr "insn" "mvn")]
4966 )
4967
4968 (define_insn "*thumb1_one_cmplsi2"
4969 [(set (match_operand:SI 0 "register_operand" "=l")
4970 (not:SI (match_operand:SI 1 "register_operand" "l")))]
4971 "TARGET_THUMB1"
4972 "mvn\\t%0, %1"
4973 [(set_attr "length" "2")
4974 (set_attr "insn" "mvn")]
4975 )
4976
4977 (define_insn "*notsi_compare0"
4978 [(set (reg:CC_NOOV CC_REGNUM)
4979 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4980 (const_int 0)))
4981 (set (match_operand:SI 0 "s_register_operand" "=r")
4982 (not:SI (match_dup 1)))]
4983 "TARGET_32BIT"
4984 "mvn%.\\t%0, %1"
4985 [(set_attr "conds" "set")
4986 (set_attr "insn" "mvn")]
4987 )
4988
4989 (define_insn "*notsi_compare0_scratch"
4990 [(set (reg:CC_NOOV CC_REGNUM)
4991 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4992 (const_int 0)))
4993 (clobber (match_scratch:SI 0 "=r"))]
4994 "TARGET_32BIT"
4995 "mvn%.\\t%0, %1"
4996 [(set_attr "conds" "set")
4997 (set_attr "insn" "mvn")]
4998 )
4999 \f
5000 ;; Fixed <--> Floating conversion insns
5001
5002 (define_expand "floatsihf2"
5003 [(set (match_operand:HF 0 "general_operand" "")
5004 (float:HF (match_operand:SI 1 "general_operand" "")))]
5005 "TARGET_EITHER"
5006 "
5007 {
5008 rtx op1 = gen_reg_rtx (SFmode);
5009 expand_float (op1, operands[1], 0);
5010 op1 = convert_to_mode (HFmode, op1, 0);
5011 emit_move_insn (operands[0], op1);
5012 DONE;
5013 }"
5014 )
5015
5016 (define_expand "floatdihf2"
5017 [(set (match_operand:HF 0 "general_operand" "")
5018 (float:HF (match_operand:DI 1 "general_operand" "")))]
5019 "TARGET_EITHER"
5020 "
5021 {
5022 rtx op1 = gen_reg_rtx (SFmode);
5023 expand_float (op1, operands[1], 0);
5024 op1 = convert_to_mode (HFmode, op1, 0);
5025 emit_move_insn (operands[0], op1);
5026 DONE;
5027 }"
5028 )
5029
5030 (define_expand "floatsisf2"
5031 [(set (match_operand:SF 0 "s_register_operand" "")
5032 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
5033 "TARGET_32BIT && TARGET_HARD_FLOAT"
5034 "
5035 ")
5036
5037 (define_expand "floatsidf2"
5038 [(set (match_operand:DF 0 "s_register_operand" "")
5039 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
5040 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5041 "
5042 ")
5043
5044 (define_expand "fix_trunchfsi2"
5045 [(set (match_operand:SI 0 "general_operand" "")
5046 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5047 "TARGET_EITHER"
5048 "
5049 {
5050 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5051 expand_fix (operands[0], op1, 0);
5052 DONE;
5053 }"
5054 )
5055
5056 (define_expand "fix_trunchfdi2"
5057 [(set (match_operand:DI 0 "general_operand" "")
5058 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5059 "TARGET_EITHER"
5060 "
5061 {
5062 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5063 expand_fix (operands[0], op1, 0);
5064 DONE;
5065 }"
5066 )
5067
5068 (define_expand "fix_truncsfsi2"
5069 [(set (match_operand:SI 0 "s_register_operand" "")
5070 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
5071 "TARGET_32BIT && TARGET_HARD_FLOAT"
5072 "
5073 ")
5074
5075 (define_expand "fix_truncdfsi2"
5076 [(set (match_operand:SI 0 "s_register_operand" "")
5077 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
5078 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5079 "
5080 ")
5081
5082 ;; Truncation insns
5083
5084 (define_expand "truncdfsf2"
5085 [(set (match_operand:SF 0 "s_register_operand" "")
5086 (float_truncate:SF
5087 (match_operand:DF 1 "s_register_operand" "")))]
5088 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5089 ""
5090 )
5091
5092 /* DFmode -> HFmode conversions have to go through SFmode. */
5093 (define_expand "truncdfhf2"
5094 [(set (match_operand:HF 0 "general_operand" "")
5095 (float_truncate:HF
5096 (match_operand:DF 1 "general_operand" "")))]
5097 "TARGET_EITHER"
5098 "
5099 {
5100 rtx op1;
5101 op1 = convert_to_mode (SFmode, operands[1], 0);
5102 op1 = convert_to_mode (HFmode, op1, 0);
5103 emit_move_insn (operands[0], op1);
5104 DONE;
5105 }"
5106 )
5107 \f
5108 ;; Zero and sign extension instructions.
5109
5110 (define_insn "zero_extend<mode>di2"
5111 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
5112 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
5113 "<qhs_zextenddi_cstr>")))]
5114 "TARGET_32BIT <qhs_zextenddi_cond>"
5115 "#"
5116 [(set_attr "length" "8,4,8,8")
5117 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")
5118 (set_attr "ce_count" "2")
5119 (set_attr "predicable" "yes")]
5120 )
5121
5122 (define_insn "extend<mode>di2"
5123 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
5124 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
5125 "<qhs_extenddi_cstr>")))]
5126 "TARGET_32BIT <qhs_sextenddi_cond>"
5127 "#"
5128 [(set_attr "length" "8,4,8,8,8")
5129 (set_attr "ce_count" "2")
5130 (set_attr "shift" "1")
5131 (set_attr "predicable" "yes")
5132 (set_attr "arch" "neon_for_64bits,*,a,t,avoid_neon_for_64bits")]
5133 )
5134
5135 ;; Splits for all extensions to DImode
5136 (define_split
5137 [(set (match_operand:DI 0 "s_register_operand" "")
5138 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5139 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5140 [(set (match_dup 0) (match_dup 1))]
5141 {
5142 rtx lo_part = gen_lowpart (SImode, operands[0]);
5143 enum machine_mode src_mode = GET_MODE (operands[1]);
5144
5145 if (REG_P (operands[0])
5146 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5147 emit_clobber (operands[0]);
5148 if (!REG_P (lo_part) || src_mode != SImode
5149 || !rtx_equal_p (lo_part, operands[1]))
5150 {
5151 if (src_mode == SImode)
5152 emit_move_insn (lo_part, operands[1]);
5153 else
5154 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5155 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5156 operands[1] = lo_part;
5157 }
5158 operands[0] = gen_highpart (SImode, operands[0]);
5159 operands[1] = const0_rtx;
5160 })
5161
5162 (define_split
5163 [(set (match_operand:DI 0 "s_register_operand" "")
5164 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5165 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5166 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5167 {
5168 rtx lo_part = gen_lowpart (SImode, operands[0]);
5169 enum machine_mode src_mode = GET_MODE (operands[1]);
5170
5171 if (REG_P (operands[0])
5172 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5173 emit_clobber (operands[0]);
5174
5175 if (!REG_P (lo_part) || src_mode != SImode
5176 || !rtx_equal_p (lo_part, operands[1]))
5177 {
5178 if (src_mode == SImode)
5179 emit_move_insn (lo_part, operands[1]);
5180 else
5181 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5182 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5183 operands[1] = lo_part;
5184 }
5185 operands[0] = gen_highpart (SImode, operands[0]);
5186 })
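;; A hedged example of the two DImode extension splits above (hypothetical
;; registers): sign-extending a QImode value in r2 into the register pair
;; r0 (low)/r1 (high) on an ARMv6 core becomes roughly
;;	sxtb	r0, r2		@ low word = sign-extended byte
;;	asr	r1, r0, #31	@ high word = copies of the sign bit
;; while a zero extension simply clears the high word instead.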
5187
5188 (define_expand "zero_extendhisi2"
5189 [(set (match_operand:SI 0 "s_register_operand" "")
5190 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5191 "TARGET_EITHER"
5192 {
5193 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5194 {
5195 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5196 DONE;
5197 }
5198 if (!arm_arch6 && !MEM_P (operands[1]))
5199 {
5200 rtx t = gen_lowpart (SImode, operands[1]);
5201 rtx tmp = gen_reg_rtx (SImode);
5202 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5203 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5204 DONE;
5205 }
5206 })
5207
5208 (define_split
5209 [(set (match_operand:SI 0 "s_register_operand" "")
5210 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5211 "!TARGET_THUMB2 && !arm_arch6"
5212 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5213 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5214 {
5215 operands[2] = gen_lowpart (SImode, operands[1]);
5216 })
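;; For example (sketch only): on a pre-v6 core without uxth, zero-extending
;; a halfword in r1 becomes a shift pair that discards the top sixteen bits,
;;	mov	r0, r1, lsl #16
;;	mov	r0, r0, lsr #16
;; (lsl/lsr register forms on Thumb-1).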
5217
5218 (define_insn "*thumb1_zero_extendhisi2"
5219 [(set (match_operand:SI 0 "register_operand" "=l,l")
5220 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
5221 "TARGET_THUMB1"
5222 {
5223 rtx mem;
5224
5225 if (which_alternative == 0 && arm_arch6)
5226 return "uxth\t%0, %1";
5227 if (which_alternative == 0)
5228 return "#";
5229
5230 mem = XEXP (operands[1], 0);
5231
5232 if (GET_CODE (mem) == CONST)
5233 mem = XEXP (mem, 0);
5234
5235 if (GET_CODE (mem) == PLUS)
5236 {
5237 rtx a = XEXP (mem, 0);
5238
5239 /* This can happen due to bugs in reload. */
5240 if (REG_P (a) && REGNO (a) == SP_REGNUM)
5241 {
5242 rtx ops[2];
5243 ops[0] = operands[0];
5244 ops[1] = a;
5245
5246 output_asm_insn ("mov\t%0, %1", ops);
5247
5248 XEXP (mem, 0) = operands[0];
5249 }
5250 }
5251
5252 return "ldrh\t%0, %1";
5253 }
5254 [(set_attr_alternative "length"
5255 [(if_then_else (eq_attr "is_arch6" "yes")
5256 (const_int 2) (const_int 4))
5257 (const_int 4)])
5258 (set_attr "type" "simple_alu_shift, load_byte")]
5259 )
5260
5261 (define_insn "*arm_zero_extendhisi2"
5262 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5263 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5264 "TARGET_ARM && arm_arch4 && !arm_arch6"
5265 "@
5266 #
5267 ldr%(h%)\\t%0, %1"
5268 [(set_attr "type" "alu_shift,load_byte")
5269 (set_attr "predicable" "yes")]
5270 )
5271
5272 (define_insn "*arm_zero_extendhisi2_v6"
5273 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5274 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5275 "TARGET_ARM && arm_arch6"
5276 "@
5277 uxth%?\\t%0, %1
5278 ldr%(h%)\\t%0, %1"
5279 [(set_attr "predicable" "yes")
5280 (set_attr "type" "simple_alu_shift,load_byte")]
5281 )
5282
5283 (define_insn "*arm_zero_extendhisi2addsi"
5284 [(set (match_operand:SI 0 "s_register_operand" "=r")
5285 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5286 (match_operand:SI 2 "s_register_operand" "r")))]
5287 "TARGET_INT_SIMD"
5288 "uxtah%?\\t%0, %2, %1"
5289 [(set_attr "type" "alu_shift")
5290 (set_attr "predicable" "yes")]
5291 )
5292
5293 (define_expand "zero_extendqisi2"
5294 [(set (match_operand:SI 0 "s_register_operand" "")
5295 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
5296 "TARGET_EITHER"
5297 {
5298 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5299 {
5300 emit_insn (gen_andsi3 (operands[0],
5301 gen_lowpart (SImode, operands[1]),
5302 GEN_INT (255)));
5303 DONE;
5304 }
5305 if (!arm_arch6 && !MEM_P (operands[1]))
5306 {
5307 rtx t = gen_lowpart (SImode, operands[1]);
5308 rtx tmp = gen_reg_rtx (SImode);
5309 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5310 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5311 DONE;
5312 }
5313 })
5314
5315 (define_split
5316 [(set (match_operand:SI 0 "s_register_operand" "")
5317 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5318 "!arm_arch6"
5319 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5320 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5321 {
5322 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5323 if (TARGET_ARM)
5324 {
5325 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5326 DONE;
5327 }
5328 })
5329
5330 (define_insn "*thumb1_zero_extendqisi2"
5331 [(set (match_operand:SI 0 "register_operand" "=l,l")
5332 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5333 "TARGET_THUMB1 && !arm_arch6"
5334 "@
5335 #
5336 ldrb\\t%0, %1"
5337 [(set_attr "length" "4,2")
5338 (set_attr "type" "alu_shift,load_byte")
5339 (set_attr "pool_range" "*,32")]
5340 )
5341
5342 (define_insn "*thumb1_zero_extendqisi2_v6"
5343 [(set (match_operand:SI 0 "register_operand" "=l,l")
5344 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5345 "TARGET_THUMB1 && arm_arch6"
5346 "@
5347 uxtb\\t%0, %1
5348 ldrb\\t%0, %1"
5349 [(set_attr "length" "2")
5350 (set_attr "type" "simple_alu_shift,load_byte")]
5351 )
5352
5353 (define_insn "*arm_zero_extendqisi2"
5354 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5355 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5356 "TARGET_ARM && !arm_arch6"
5357 "@
5358 #
5359 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5360 [(set_attr "length" "8,4")
5361 (set_attr "type" "alu_shift,load_byte")
5362 (set_attr "predicable" "yes")]
5363 )
5364
5365 (define_insn "*arm_zero_extendqisi2_v6"
5366 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5367 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5368 "TARGET_ARM && arm_arch6"
5369 "@
5370 uxtb%(%)\\t%0, %1
5371 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5372 [(set_attr "type" "simple_alu_shift,load_byte")
5373 (set_attr "predicable" "yes")]
5374 )
5375
5376 (define_insn "*arm_zero_extendqisi2addsi"
5377 [(set (match_operand:SI 0 "s_register_operand" "=r")
5378 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5379 (match_operand:SI 2 "s_register_operand" "r")))]
5380 "TARGET_INT_SIMD"
5381 "uxtab%?\\t%0, %2, %1"
5382 [(set_attr "predicable" "yes")
5383 (set_attr "insn" "xtab")
5384 (set_attr "type" "alu_shift")]
5385 )
5386
5387 (define_split
5388 [(set (match_operand:SI 0 "s_register_operand" "")
5389 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5390 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5391 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5392 [(set (match_dup 2) (match_dup 1))
5393 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5394 ""
5395 )
5396
5397 (define_split
5398 [(set (match_operand:SI 0 "s_register_operand" "")
5399 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5400 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5401 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5402 [(set (match_dup 2) (match_dup 1))
5403 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5404 ""
5405 )
5406
5407
5408 (define_split
5409 [(set (match_operand:SI 0 "s_register_operand" "")
5410 (ior_xor:SI (and:SI (ashift:SI
5411 (match_operand:SI 1 "s_register_operand" "")
5412 (match_operand:SI 2 "const_int_operand" ""))
5413 (match_operand:SI 3 "const_int_operand" ""))
5414 (zero_extend:SI
5415 (match_operator 5 "subreg_lowpart_operator"
5416 [(match_operand:SI 4 "s_register_operand" "")]))))]
5417 "TARGET_32BIT
5418 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
5419 == (GET_MODE_MASK (GET_MODE (operands[5]))
5420 & (GET_MODE_MASK (GET_MODE (operands[5]))
5421 << (INTVAL (operands[2])))))"
5422 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
5423 (match_dup 4)))
5424 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5425 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5426 )
5427
5428 (define_insn "*compareqi_eq0"
5429 [(set (reg:CC_Z CC_REGNUM)
5430 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5431 (const_int 0)))]
5432 "TARGET_32BIT"
5433 "tst%?\\t%0, #255"
5434 [(set_attr "conds" "set")
5435 (set_attr "predicable" "yes")]
5436 )
5437
5438 (define_expand "extendhisi2"
5439 [(set (match_operand:SI 0 "s_register_operand" "")
5440 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5441 "TARGET_EITHER"
5442 {
5443 if (TARGET_THUMB1)
5444 {
5445 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5446 DONE;
5447 }
5448 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5449 {
5450 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5451 DONE;
5452 }
5453
5454 if (!arm_arch6 && !MEM_P (operands[1]))
5455 {
5456 rtx t = gen_lowpart (SImode, operands[1]);
5457 rtx tmp = gen_reg_rtx (SImode);
5458 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5459 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5460 DONE;
5461 }
5462 })
5463
5464 (define_split
5465 [(parallel
5466 [(set (match_operand:SI 0 "register_operand" "")
5467 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5468 (clobber (match_scratch:SI 2 ""))])]
5469 "!arm_arch6"
5470 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5471 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5472 {
5473 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5474 })
5475
5476 ;; We used to have an early-clobber on the scratch register here.
5477 ;; However, there's a bug somewhere in reload which means that this
5478 ;; can be partially ignored during spill allocation if the memory
5479 ;; address also needs reloading; this causes us to die later on when
5480 ;; we try to verify the operands. Fortunately, we don't really need
5481 ;; the early-clobber: we can always use operand 0 if operand 2
5482 ;; overlaps the address.
5483 (define_insn "thumb1_extendhisi2"
5484 [(set (match_operand:SI 0 "register_operand" "=l,l")
5485 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
5486 (clobber (match_scratch:SI 2 "=X,l"))]
5487 "TARGET_THUMB1"
5488 "*
5489 {
5490 rtx ops[4];
5491 rtx mem;
5492
5493 if (which_alternative == 0 && !arm_arch6)
5494 return \"#\";
5495 if (which_alternative == 0)
5496 return \"sxth\\t%0, %1\";
5497
5498 mem = XEXP (operands[1], 0);
5499
5500 /* This code used to try to use 'V', and fix the address only if it was
5501 offsettable, but this fails for e.g. REG+48 because 48 is outside the
5502 range of QImode offsets, and offsettable_address_p does a QImode
5503 address check. */
5504
5505 if (GET_CODE (mem) == CONST)
5506 mem = XEXP (mem, 0);
5507
5508 if (GET_CODE (mem) == LABEL_REF)
5509 return \"ldr\\t%0, %1\";
5510
5511 if (GET_CODE (mem) == PLUS)
5512 {
5513 rtx a = XEXP (mem, 0);
5514 rtx b = XEXP (mem, 1);
5515
5516 if (GET_CODE (a) == LABEL_REF
5517 && CONST_INT_P (b))
5518 return \"ldr\\t%0, %1\";
5519
5520 if (REG_P (b))
5521 return \"ldrsh\\t%0, %1\";
5522
5523 ops[1] = a;
5524 ops[2] = b;
5525 }
5526 else
5527 {
5528 ops[1] = mem;
5529 ops[2] = const0_rtx;
5530 }
5531
5532 gcc_assert (REG_P (ops[1]));
5533
5534 ops[0] = operands[0];
5535 if (reg_mentioned_p (operands[2], ops[1]))
5536 ops[3] = ops[0];
5537 else
5538 ops[3] = operands[2];
5539 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
5540 return \"\";
5541 }"
5542 [(set_attr_alternative "length"
5543 [(if_then_else (eq_attr "is_arch6" "yes")
5544 (const_int 2) (const_int 4))
5545 (const_int 4)])
5546 (set_attr "type" "simple_alu_shift,load_byte")
5547 (set_attr "pool_range" "*,1018")]
5548 )
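;; Illustrative only: Thumb-1 ldrsh has no immediate-offset form, so a load
;; of a halfword at, say, [r1 + 8] is synthesised through the scratch:
;;	mov	r3, #8
;;	ldrsh	r0, [r1, r3]
;; with the destination itself reused as the offset register when the
;; scratch would clash with the address (see the template above).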
5549
5550 ;; This pattern will only be used when ldrsh is not available
5551 (define_expand "extendhisi2_mem"
5552 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5553 (set (match_dup 3)
5554 (zero_extend:SI (match_dup 7)))
5555 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5556 (set (match_operand:SI 0 "" "")
5557 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5558 "TARGET_ARM"
5559 "
5560 {
5561 rtx mem1, mem2;
5562 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5563
5564 mem1 = change_address (operands[1], QImode, addr);
5565 mem2 = change_address (operands[1], QImode,
5566 plus_constant (Pmode, addr, 1));
5567 operands[0] = gen_lowpart (SImode, operands[0]);
5568 operands[1] = mem1;
5569 operands[2] = gen_reg_rtx (SImode);
5570 operands[3] = gen_reg_rtx (SImode);
5571 operands[6] = gen_reg_rtx (SImode);
5572 operands[7] = mem2;
5573
5574 if (BYTES_BIG_ENDIAN)
5575 {
5576 operands[4] = operands[2];
5577 operands[5] = operands[3];
5578 }
5579 else
5580 {
5581 operands[4] = operands[3];
5582 operands[5] = operands[2];
5583 }
5584 }"
5585 )
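;; A sketch of the little-endian expansion above (pre-ARMv4, no ldrsh;
;; register names are hypothetical):
;;	ldrb	r2, [r4]		@ low byte, zero-extended
;;	ldrb	r3, [r4, #1]		@ high byte, zero-extended
;;	mov	r5, r3, lsl #24
;;	orr	r0, r2, r5, asr #16	@ sign-extended halfword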
5586
5587 (define_split
5588 [(set (match_operand:SI 0 "register_operand" "")
5589 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5590 "!arm_arch6"
5591 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5592 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5593 {
5594 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5595 })
5596
5597 (define_insn "*arm_extendhisi2"
5598 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5599 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5600 "TARGET_ARM && arm_arch4 && !arm_arch6"
5601 "@
5602 #
5603 ldr%(sh%)\\t%0, %1"
5604 [(set_attr "length" "8,4")
5605 (set_attr "type" "alu_shift,load_byte")
5606 (set_attr "predicable" "yes")
5607 (set_attr "pool_range" "*,256")
5608 (set_attr "neg_pool_range" "*,244")]
5609 )
5610
5611 ;; ??? Check Thumb-2 pool range
5612 (define_insn "*arm_extendhisi2_v6"
5613 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5614 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5615 "TARGET_32BIT && arm_arch6"
5616 "@
5617 sxth%?\\t%0, %1
5618 ldr%(sh%)\\t%0, %1"
5619 [(set_attr "type" "simple_alu_shift,load_byte")
5620 (set_attr "predicable" "yes")
5621 (set_attr "pool_range" "*,256")
5622 (set_attr "neg_pool_range" "*,244")]
5623 )
5624
5625 (define_insn "*arm_extendhisi2addsi"
5626 [(set (match_operand:SI 0 "s_register_operand" "=r")
5627 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5628 (match_operand:SI 2 "s_register_operand" "r")))]
5629 "TARGET_INT_SIMD"
5630 "sxtah%?\\t%0, %2, %1"
5631 )
5632
5633 (define_expand "extendqihi2"
5634 [(set (match_dup 2)
5635 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5636 (const_int 24)))
5637 (set (match_operand:HI 0 "s_register_operand" "")
5638 (ashiftrt:SI (match_dup 2)
5639 (const_int 24)))]
5640 "TARGET_ARM"
5641 "
5642 {
5643 if (arm_arch4 && MEM_P (operands[1]))
5644 {
5645 emit_insn (gen_rtx_SET (VOIDmode,
5646 operands[0],
5647 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5648 DONE;
5649 }
5650 if (!s_register_operand (operands[1], QImode))
5651 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5652 operands[0] = gen_lowpart (SImode, operands[0]);
5653 operands[1] = gen_lowpart (SImode, operands[1]);
5654 operands[2] = gen_reg_rtx (SImode);
5655 }"
5656 )
5657
5658 (define_insn "*arm_extendqihi_insn"
5659 [(set (match_operand:HI 0 "s_register_operand" "=r")
5660 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5661 "TARGET_ARM && arm_arch4"
5662 "ldr%(sb%)\\t%0, %1"
5663 [(set_attr "type" "load_byte")
5664 (set_attr "predicable" "yes")
5665 (set_attr "pool_range" "256")
5666 (set_attr "neg_pool_range" "244")]
5667 )
5668
5669 (define_expand "extendqisi2"
5670 [(set (match_operand:SI 0 "s_register_operand" "")
5671 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
5672 "TARGET_EITHER"
5673 {
5674 if (!arm_arch4 && MEM_P (operands[1]))
5675 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5676
5677 if (!arm_arch6 && !MEM_P (operands[1]))
5678 {
5679 rtx t = gen_lowpart (SImode, operands[1]);
5680 rtx tmp = gen_reg_rtx (SImode);
5681 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5682 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5683 DONE;
5684 }
5685 })
5686
5687 (define_split
5688 [(set (match_operand:SI 0 "register_operand" "")
5689 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5690 "!arm_arch6"
5691 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5692 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5693 {
5694 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5695 })
5696
5697 (define_insn "*arm_extendqisi"
5698 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5699 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5700 "TARGET_ARM && arm_arch4 && !arm_arch6"
5701 "@
5702 #
5703 ldr%(sb%)\\t%0, %1"
5704 [(set_attr "length" "8,4")
5705 (set_attr "type" "alu_shift,load_byte")
5706 (set_attr "predicable" "yes")
5707 (set_attr "pool_range" "*,256")
5708 (set_attr "neg_pool_range" "*,244")]
5709 )
5710
5711 (define_insn "*arm_extendqisi_v6"
5712 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5713 (sign_extend:SI
5714 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5715 "TARGET_ARM && arm_arch6"
5716 "@
5717 sxtb%?\\t%0, %1
5718 ldr%(sb%)\\t%0, %1"
5719 [(set_attr "type" "simple_alu_shift,load_byte")
5720 (set_attr "predicable" "yes")
5721 (set_attr "pool_range" "*,256")
5722 (set_attr "neg_pool_range" "*,244")]
5723 )
5724
5725 (define_insn "*arm_extendqisi2addsi"
5726 [(set (match_operand:SI 0 "s_register_operand" "=r")
5727 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5728 (match_operand:SI 2 "s_register_operand" "r")))]
5729 "TARGET_INT_SIMD"
5730 "sxtab%?\\t%0, %2, %1"
5731 [(set_attr "type" "alu_shift")
5732 (set_attr "insn" "xtab")
5733 (set_attr "predicable" "yes")]
5734 )
5735
5736 (define_split
5737 [(set (match_operand:SI 0 "register_operand" "")
5738 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5739 "TARGET_THUMB1 && reload_completed"
5740 [(set (match_dup 0) (match_dup 2))
5741 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5742 {
5743 rtx addr = XEXP (operands[1], 0);
5744
5745 if (GET_CODE (addr) == CONST)
5746 addr = XEXP (addr, 0);
5747
5748 if (GET_CODE (addr) == PLUS
5749 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5750 /* No split necessary. */
5751 FAIL;
5752
5753 if (GET_CODE (addr) == PLUS
5754 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5755 FAIL;
5756
5757 if (reg_overlap_mentioned_p (operands[0], addr))
5758 {
5759 rtx t = gen_lowpart (QImode, operands[0]);
5760 emit_move_insn (t, operands[1]);
5761 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5762 DONE;
5763 }
5764
5765 if (REG_P (addr))
5766 {
5767 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5768 operands[2] = const0_rtx;
5769 }
5770 else if (GET_CODE (addr) != PLUS)
5771 FAIL;
5772 else if (REG_P (XEXP (addr, 0)))
5773 {
5774 operands[2] = XEXP (addr, 1);
5775 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5776 }
5777 else
5778 {
5779 operands[2] = XEXP (addr, 0);
5780 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5781 }
5782
5783 operands[3] = change_address (operands[1], QImode, addr);
5784 })
5785
5786 (define_peephole2
5787 [(set (match_operand:SI 0 "register_operand" "")
5788 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5789 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5790 (set (match_operand:SI 3 "register_operand" "")
5791 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5792 "TARGET_THUMB1
5793 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5794 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5795 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5796 && (peep2_reg_dead_p (3, operands[0])
5797 || rtx_equal_p (operands[0], operands[3]))
5798 && (peep2_reg_dead_p (3, operands[2])
5799 || rtx_equal_p (operands[2], operands[3]))"
5800 [(set (match_dup 2) (match_dup 1))
5801 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5802 {
5803 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5804 operands[4] = change_address (operands[4], QImode, addr);
5805 })
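;; A hedged example of the peephole above (Thumb-1, illustrative registers):
;; the three-insn sequence
;;	add	r0, r0, #4
;;	mov	r2, #0
;;	ldrsb	r3, [r0, r2]
;; becomes, when r0 and r2 are dead afterwards (or equal to r3),
;;	mov	r2, #4
;;	ldrsb	r3, [r0, r2]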
5806
5807 (define_insn "thumb1_extendqisi2"
5808 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5809 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5810 "TARGET_THUMB1"
5811 {
5812 rtx addr;
5813
5814 if (which_alternative == 0 && arm_arch6)
5815 return "sxtb\\t%0, %1";
5816 if (which_alternative == 0)
5817 return "#";
5818
5819 addr = XEXP (operands[1], 0);
5820 if (GET_CODE (addr) == PLUS
5821 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5822 return "ldrsb\\t%0, %1";
5823
5824 return "#";
5825 }
5826 [(set_attr_alternative "length"
5827 [(if_then_else (eq_attr "is_arch6" "yes")
5828 (const_int 2) (const_int 4))
5829 (const_int 2)
5830 (if_then_else (eq_attr "is_arch6" "yes")
5831 (const_int 4) (const_int 6))])
5832 (set_attr "type" "simple_alu_shift,load_byte,load_byte")]
5833 )
5834
5835 (define_expand "extendsfdf2"
5836 [(set (match_operand:DF 0 "s_register_operand" "")
5837 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5838 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5839 ""
5840 )
5841
5842 /* HFmode -> DFmode conversions have to go through SFmode. */
5843 (define_expand "extendhfdf2"
5844 [(set (match_operand:DF 0 "general_operand" "")
5845 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5846 "TARGET_EITHER"
5847 "
5848 {
5849 rtx op1;
5850 op1 = convert_to_mode (SFmode, operands[1], 0);
5851 op1 = convert_to_mode (DFmode, op1, 0);
5852 emit_insn (gen_movdf (operands[0], op1));
5853 DONE;
5854 }"
5855 )
5856 \f
5857 ;; Move insns (including loads and stores)
5858
5859 ;; XXX Just some ideas about movti.
5860 ;; I don't think these are a good idea on the ARM; there just aren't enough
5861 ;; registers.
5862 ;;(define_expand "loadti"
5863 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5864 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5865 ;; "" "")
5866
5867 ;;(define_expand "storeti"
5868 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5869 ;; (match_operand:TI 1 "s_register_operand" ""))]
5870 ;; "" "")
5871
5872 ;;(define_expand "movti"
5873 ;; [(set (match_operand:TI 0 "general_operand" "")
5874 ;; (match_operand:TI 1 "general_operand" ""))]
5875 ;; ""
5876 ;; "
5877 ;;{
5878 ;; rtx insn;
5879 ;;
5880 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5881 ;; operands[1] = copy_to_reg (operands[1]);
5882 ;; if (MEM_P (operands[0]))
5883 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5884 ;; else if (MEM_P (operands[1]))
5885 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5886 ;; else
5887 ;; FAIL;
5888 ;;
5889 ;; emit_insn (insn);
5890 ;; DONE;
5891 ;;}")
5892
5893 ;; Recognize garbage generated above.
5894
5895 ;;(define_insn ""
5896 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5897 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5898 ;; ""
5899 ;; "*
5900 ;; {
5901 ;; register mem = (which_alternative < 3);
5902 ;; register const char *template;
5903 ;;
5904 ;; operands[mem] = XEXP (operands[mem], 0);
5905 ;; switch (which_alternative)
5906 ;; {
5907 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5908 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5909 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5910 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5911 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5912 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5913 ;; }
5914 ;; output_asm_insn (template, operands);
5915 ;; return \"\";
5916 ;; }")
5917
5918 (define_expand "movdi"
5919 [(set (match_operand:DI 0 "general_operand" "")
5920 (match_operand:DI 1 "general_operand" ""))]
5921 "TARGET_EITHER"
5922 "
5923 if (can_create_pseudo_p ())
5924 {
5925 if (!REG_P (operands[0]))
5926 operands[1] = force_reg (DImode, operands[1]);
5927 }
5928 "
5929 )
5930
5931 (define_insn "*arm_movdi"
5932 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, q, m")
5933 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,q"))]
5934 "TARGET_32BIT
5935 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5936 && !TARGET_IWMMXT
5937 && ( register_operand (operands[0], DImode)
5938 || register_operand (operands[1], DImode))"
5939 "*
5940 switch (which_alternative)
5941 {
5942 case 0:
5943 case 1:
5944 case 2:
5945 return \"#\";
5946 default:
5947 return output_move_double (operands, true, NULL);
5948 }
5949 "
5950 [(set_attr "length" "8,12,16,8,8")
5951 (set_attr "type" "*,*,*,load2,store2")
5952 (set_attr "arm_pool_range" "*,*,*,1020,*")
5953 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5954 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5955 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5956 )
5957
5958 (define_split
5959 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5960 (match_operand:ANY64 1 "const_double_operand" ""))]
5961 "TARGET_32BIT
5962 && reload_completed
5963 && (arm_const_double_inline_cost (operands[1])
5964 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5965 [(const_int 0)]
5966 "
5967 arm_split_constant (SET, SImode, curr_insn,
5968 INTVAL (gen_lowpart (SImode, operands[1])),
5969 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5970 arm_split_constant (SET, SImode, curr_insn,
5971 INTVAL (gen_highpart_mode (SImode,
5972 GET_MODE (operands[0]),
5973 operands[1])),
5974 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5975 DONE;
5976 "
5977 )
5978
5979 ; If optimizing for size, or if we have load delay slots, then
5980 ; we want to split the constant into two separate operations.
5981 ; In both cases this may split a trivial part into a single data op,
5982 ; leaving a single complex constant to load.  We can also get longer
5983 ; offsets in an LDR, which gives us a better chance of sharing the pool
5984 ; entries.  Finally, we can normally do a better job of scheduling
5985 ; LDR instructions than we can with LDM.
5986 ; This pattern will only match if the one above did not.
5987 (define_split
5988 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5989 (match_operand:ANY64 1 "const_double_operand" ""))]
5990 "TARGET_ARM && reload_completed
5991 && arm_const_double_by_parts (operands[1])"
5992 [(set (match_dup 0) (match_dup 1))
5993 (set (match_dup 2) (match_dup 3))]
5994 "
5995 operands[2] = gen_highpart (SImode, operands[0]);
5996 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5997 operands[1]);
5998 operands[0] = gen_lowpart (SImode, operands[0]);
5999 operands[1] = gen_lowpart (SImode, operands[1]);
6000 "
6001 )
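;; For instance (sketch only), the 64-bit constant 0x0000000100000005 would
;; be rebuilt by parts on a little-endian target as roughly
;;	mov	rlo, #5
;;	mov	rhi, #1
;; where rlo/rhi stand for the low and high words of the destination.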
6002
6003 (define_split
6004 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6005 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6006 "TARGET_EITHER && reload_completed"
6007 [(set (match_dup 0) (match_dup 1))
6008 (set (match_dup 2) (match_dup 3))]
6009 "
6010 operands[2] = gen_highpart (SImode, operands[0]);
6011 operands[3] = gen_highpart (SImode, operands[1]);
6012 operands[0] = gen_lowpart (SImode, operands[0]);
6013 operands[1] = gen_lowpart (SImode, operands[1]);
6014
6015 /* Handle a partial overlap. */
6016 if (rtx_equal_p (operands[0], operands[3]))
6017 {
6018 rtx tmp0 = operands[0];
6019 rtx tmp1 = operands[1];
6020
6021 operands[0] = operands[2];
6022 operands[1] = operands[3];
6023 operands[2] = tmp0;
6024 operands[3] = tmp1;
6025 }
6026 "
6027 )
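;; Example of the partial-overlap case handled above (registers are
;; illustrative): copying a DI value in r0 (low)/r1 (high) into
;; r1 (low)/r2 (high) must move the high word first,
;;	mov	r2, r1
;;	mov	r1, r0
;; otherwise the first move would clobber the source high word.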
6028
6029 ;; We can't actually do base+index doubleword loads if the index and
6030 ;; destination overlap.  Split here so that we at least have a chance to
6031 ;; schedule.
6032 (define_split
6033 [(set (match_operand:DI 0 "s_register_operand" "")
6034 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6035 (match_operand:SI 2 "s_register_operand" ""))))]
6036 "TARGET_LDRD
6037 && reg_overlap_mentioned_p (operands[0], operands[1])
6038 && reg_overlap_mentioned_p (operands[0], operands[2])"
6039 [(set (match_dup 4)
6040 (plus:SI (match_dup 1)
6041 (match_dup 2)))
6042 (set (match_dup 0)
6043 (mem:DI (match_dup 4)))]
6044 "
6045 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6046 "
6047 )
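;; A sketch with illustrative registers: a DI load into r0/r1 from [r0 + r1]
;; cannot use "ldrd r0, r1, [r0, r1]" directly, so the split above emits
;;	add	r0, r0, r1
;;	ldrd	r0, r1, [r0]
;; computing the address into the low destination register first.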
6048
6049 ;;; ??? This should have alternatives for constants.
6050 ;;; ??? This was originally identical to the movdf_insn pattern.
6051 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
6052 ;;; thumb_reorg with a memory reference.
6053 (define_insn "*thumb1_movdi_insn"
6054 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
6055 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
6056 "TARGET_THUMB1
6057 && ( register_operand (operands[0], DImode)
6058 || register_operand (operands[1], DImode))"
6059 "*
6060 {
6061 switch (which_alternative)
6062 {
6063 default:
6064 case 0:
6065 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6066 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6067 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6068 case 1:
6069 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
6070 case 2:
6071 operands[1] = GEN_INT (- INTVAL (operands[1]));
6072 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
6073 case 3:
6074 return \"ldmia\\t%1, {%0, %H0}\";
6075 case 4:
6076 return \"stmia\\t%0, {%1, %H1}\";
6077 case 5:
6078 return thumb_load_double_from_address (operands);
6079 case 6:
6080 operands[2] = gen_rtx_MEM (SImode,
6081 plus_constant (Pmode, XEXP (operands[0], 0), 4));
6082 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6083 return \"\";
6084 case 7:
6085 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6086 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6087 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6088 }
6089 }"
6090 [(set_attr "length" "4,4,6,2,2,6,4,4")
6091 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
6092 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
6093 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
6094 )
6095
6096 (define_expand "movsi"
6097 [(set (match_operand:SI 0 "general_operand" "")
6098 (match_operand:SI 1 "general_operand" ""))]
6099 "TARGET_EITHER"
6100 "
6101 {
6102 rtx base, offset, tmp;
6103
6104 if (TARGET_32BIT)
6105 {
6106 /* Everything except mem = const or mem = mem can be done easily. */
6107 if (MEM_P (operands[0]))
6108 operands[1] = force_reg (SImode, operands[1]);
6109 if (arm_general_register_operand (operands[0], SImode)
6110 && CONST_INT_P (operands[1])
6111 && !(const_ok_for_arm (INTVAL (operands[1]))
6112 || const_ok_for_arm (~INTVAL (operands[1]))))
6113 {
6114 arm_split_constant (SET, SImode, NULL_RTX,
6115 INTVAL (operands[1]), operands[0], NULL_RTX,
6116 optimize && can_create_pseudo_p ());
6117 DONE;
6118 }
6119 }
6120 else /* TARGET_THUMB1... */
6121 {
6122 if (can_create_pseudo_p ())
6123 {
6124 if (!REG_P (operands[0]))
6125 operands[1] = force_reg (SImode, operands[1]);
6126 }
6127 }
6128
6129 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
6130 {
6131 split_const (operands[1], &base, &offset);
6132 if (GET_CODE (base) == SYMBOL_REF
6133 && !offset_within_block_p (base, INTVAL (offset)))
6134 {
6135 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6136 emit_move_insn (tmp, base);
6137 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6138 DONE;
6139 }
6140 }
6141
6142 /* Recognize the case where operand[1] is a reference to thread-local
6143 data and load its address to a register. */
6144 if (arm_tls_referenced_p (operands[1]))
6145 {
6146 rtx tmp = operands[1];
6147 rtx addend = NULL;
6148
6149 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
6150 {
6151 addend = XEXP (XEXP (tmp, 0), 1);
6152 tmp = XEXP (XEXP (tmp, 0), 0);
6153 }
6154
6155 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
6156 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
6157
6158 tmp = legitimize_tls_address (tmp,
6159 !can_create_pseudo_p () ? operands[0] : 0);
6160 if (addend)
6161 {
6162 tmp = gen_rtx_PLUS (SImode, tmp, addend);
6163 tmp = force_operand (tmp, operands[0]);
6164 }
6165 operands[1] = tmp;
6166 }
6167 else if (flag_pic
6168 && (CONSTANT_P (operands[1])
6169 || symbol_mentioned_p (operands[1])
6170 || label_mentioned_p (operands[1])))
6171 operands[1] = legitimize_pic_address (operands[1], SImode,
6172 (!can_create_pseudo_p ()
6173 ? operands[0]
6174 : 0));
6175 }
6176 "
6177 )
6178
6179 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6180 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6181 ;; so this does not matter.
6182 (define_insn "*arm_movt"
6183 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
6184 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
6185 (match_operand:SI 2 "general_operand" "i")))]
6186 "arm_arch_thumb2"
6187 "movt%?\t%0, #:upper16:%c2"
6188 [(set_attr "predicable" "yes")
6189 (set_attr "length" "4")]
6190 )
6191
6192 (define_insn "*arm_movsi_insn"
6193 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6194 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6195 "TARGET_ARM && ! TARGET_IWMMXT
6196 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6197 && ( register_operand (operands[0], SImode)
6198 || register_operand (operands[1], SImode))"
6199 "@
6200 mov%?\\t%0, %1
6201 mov%?\\t%0, %1
6202 mvn%?\\t%0, #%B1
6203 movw%?\\t%0, %1
6204 ldr%?\\t%0, %1
6205 str%?\\t%1, %0"
6206 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,simple_alu_imm,load1,store1")
6207 (set_attr "insn" "mov,mov,mvn,mov,*,*")
6208 (set_attr "predicable" "yes")
6209 (set_attr "pool_range" "*,*,*,*,4096,*")
6210 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6211 )
6212
6213 (define_split
6214 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6215 (match_operand:SI 1 "const_int_operand" ""))]
6216 "TARGET_32BIT
6217 && (!(const_ok_for_arm (INTVAL (operands[1]))
6218 || const_ok_for_arm (~INTVAL (operands[1]))))"
6219 [(clobber (const_int 0))]
6220 "
6221 arm_split_constant (SET, SImode, NULL_RTX,
6222 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6223 DONE;
6224 "
6225 )
6226
6227 ;; Split symbol_refs at a later stage (after cprop), instead of generating
6228 ;; the movt/movw pair directly at expand time.  Otherwise the corresponding
6229 ;; high and lo_sum would be merged back into a memory load by cprop.  However,
6230 ;; if the default is to prefer movt/movw rather than a load from the constant
6231 ;; pool, performance is better.
6232 (define_split
6233 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6234 (match_operand:SI 1 "general_operand" ""))]
6235 "TARGET_32BIT
6236 && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6237 && !flag_pic && !target_word_relocations
6238 && !arm_tls_referenced_p (operands[1])"
6239 [(clobber (const_int 0))]
6240 {
6241 arm_emit_movpair (operands[0], operands[1]);
6242 DONE;
6243 })
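;; A hedged sketch of what arm_emit_movpair produces for a symbol (assuming
;; the usual movw/movt pair; the symbol name is a placeholder):
;;	movw	r0, #:lower16:some_symbol
;;	movt	r0, #:upper16:some_symbol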
6244
6245 (define_insn "*thumb1_movsi_insn"
6246 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
6247 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
6248 "TARGET_THUMB1
6249 && ( register_operand (operands[0], SImode)
6250 || register_operand (operands[1], SImode))"
6251 "@
6252 mov %0, %1
6253 mov %0, %1
6254 #
6255 #
6256 ldmia\\t%1, {%0}
6257 stmia\\t%0, {%1}
6258 ldr\\t%0, %1
6259 str\\t%1, %0
6260 mov\\t%0, %1"
6261 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
6262 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
6263 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
6264 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
6265
6266 (define_split
6267 [(set (match_operand:SI 0 "register_operand" "")
6268 (match_operand:SI 1 "const_int_operand" ""))]
6269 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
6270 [(set (match_dup 2) (match_dup 1))
6271 (set (match_dup 0) (neg:SI (match_dup 2)))]
6272 "
6273 {
6274 operands[1] = GEN_INT (- INTVAL (operands[1]));
6275 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6276 }"
6277 )
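;; Illustrative example of the J-constraint split above: loading -200 into
;; r0 on Thumb-1 becomes roughly
;;	mov	r0, #200
;;	neg	r0, r0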
6278
6279 (define_split
6280 [(set (match_operand:SI 0 "register_operand" "")
6281 (match_operand:SI 1 "const_int_operand" ""))]
6282 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
6283 [(set (match_dup 2) (match_dup 1))
6284 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
6285 "
6286 {
6287 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
6288 unsigned HOST_WIDE_INT mask = 0xff;
6289 int i;
6290
6291 for (i = 0; i < 25; i++)
6292 if ((val & (mask << i)) == val)
6293 break;
6294
6295 /* Don't split if the shift is zero. */
6296 if (i == 0)
6297 FAIL;
6298
6299 operands[1] = GEN_INT (val >> i);
6300 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6301 operands[3] = GEN_INT (i);
6302 }"
6303 )
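;; Illustrative example of the K-constraint split above: the constant 0x1f00
;; (an 8-bit value shifted left) would be loaded roughly as
;;	mov	r0, #0x1f
;;	lsl	r0, r0, #8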
6304
6305 ;; For Thumb-1, split an immediate move of [256-510] into a mov of [1-255] followed by an add of #255
6306 (define_split
6307 [(set (match_operand:SI 0 "register_operand" "")
6308 (match_operand:SI 1 "const_int_operand" ""))]
6309 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
6310 [(set (match_dup 2) (match_dup 1))
6311 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
6312 "
6313 {
6314 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
6315 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6316 operands[3] = GEN_INT (255);
6317 }"
6318 )
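;; Illustrative example of the split above: loading 300 becomes roughly
;;	mov	r0, #45
;;	add	r0, #255
;; since 300 - 255 = 45 and both pieces fit the Thumb-1 8-bit immediate.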
6319
6320 ;; When generating pic, we need to load the symbol offset into a register.
6321 ;; So that the optimizer does not confuse this with a normal symbol load
6322 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6323 ;; since that is the only type of relocation we can use.
6324
6325 ;; Wrap calculation of the whole PIC address in a single pattern for the
6326 ;; benefit of optimizers, particularly PRE and HOIST.  Calculation of
6327 ;; a PIC address involves two loads from memory, so we want to CSE it
6328 ;; as often as possible.
6329 ;; This pattern will be split into one of the pic_load_addr_* patterns
6330 ;; and a move after GCSE optimizations.
6331 ;;
6332 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
6333 (define_expand "calculate_pic_address"
6334 [(set (match_operand:SI 0 "register_operand" "")
6335 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6336 (unspec:SI [(match_operand:SI 2 "" "")]
6337 UNSPEC_PIC_SYM))))]
6338 "flag_pic"
6339 )
6340
6341 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6342 (define_split
6343 [(set (match_operand:SI 0 "register_operand" "")
6344 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6345 (unspec:SI [(match_operand:SI 2 "" "")]
6346 UNSPEC_PIC_SYM))))]
6347 "flag_pic"
6348 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6349 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6350 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6351 )
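;; A hedged sketch of the resulting sequence (hypothetical registers and
;; label, with r9 standing in for the PIC base register):
;;	ldr	r3, .Lpool_entry	@ symbol offset, from the constant pool
;;	ldr	r0, [r9, r3]		@ load through the PIC base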
6352
6353 ;; operand1 is the memory address to go into
6354 ;; pic_load_addr_32bit.
6355 ;; operand2 is the PIC label to be emitted
6356 ;; from pic_add_dot_plus_eight.
6357 ;; We do this to allow hoisting of the entire insn.
6358 (define_insn_and_split "pic_load_addr_unified"
6359 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6360 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6361 (match_operand:SI 2 "" "")]
6362 UNSPEC_PIC_UNIFIED))]
6363 "flag_pic"
6364 "#"
6365 "&& reload_completed"
6366 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6367 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6368 (match_dup 2)] UNSPEC_PIC_BASE))]
6369 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6370 [(set_attr "type" "load1,load1,load1")
6371 (set_attr "pool_range" "4096,4094,1022")
6372 (set_attr "neg_pool_range" "4084,0,0")
6373 (set_attr "arch" "a,t2,t1")
6374 (set_attr "length" "8,6,4")]
6375 )
6376
6377 ;; The rather odd constraints on the following are to force reload to leave
6378 ;; the insn alone, and to force the minipool generation pass to then move
6379 ;; the GOT symbol to memory.
6380
6381 (define_insn "pic_load_addr_32bit"
6382 [(set (match_operand:SI 0 "s_register_operand" "=r")
6383 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6384 "TARGET_32BIT && flag_pic"
6385 "ldr%?\\t%0, %1"
6386 [(set_attr "type" "load1")
6387 (set (attr "pool_range")
6388 (if_then_else (eq_attr "is_thumb" "no")
6389 (const_int 4096)
6390 (const_int 4094)))
6391 (set (attr "neg_pool_range")
6392 (if_then_else (eq_attr "is_thumb" "no")
6393 (const_int 4084)
6394 (const_int 0)))]
6395 )
6396
6397 (define_insn "pic_load_addr_thumb1"
6398 [(set (match_operand:SI 0 "s_register_operand" "=l")
6399 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6400 "TARGET_THUMB1 && flag_pic"
6401 "ldr\\t%0, %1"
6402 [(set_attr "type" "load1")
6403 (set (attr "pool_range") (const_int 1018))]
6404 )
6405
6406 (define_insn "pic_add_dot_plus_four"
6407 [(set (match_operand:SI 0 "register_operand" "=r")
6408 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6409 (const_int 4)
6410 (match_operand 2 "" "")]
6411 UNSPEC_PIC_BASE))]
6412 "TARGET_THUMB"
6413 "*
6414 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6415 INTVAL (operands[2]));
6416 return \"add\\t%0, %|pc\";
6417 "
6418 [(set_attr "length" "2")]
6419 )
6420
6421 (define_insn "pic_add_dot_plus_eight"
6422 [(set (match_operand:SI 0 "register_operand" "=r")
6423 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6424 (const_int 8)
6425 (match_operand 2 "" "")]
6426 UNSPEC_PIC_BASE))]
6427 "TARGET_ARM"
6428 "*
6429 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6430 INTVAL (operands[2]));
6431 return \"add%?\\t%0, %|pc, %1\";
6432 "
6433 [(set_attr "predicable" "yes")]
6434 )
6435
6436 (define_insn "tls_load_dot_plus_eight"
6437 [(set (match_operand:SI 0 "register_operand" "=r")
6438 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6439 (const_int 8)
6440 (match_operand 2 "" "")]
6441 UNSPEC_PIC_BASE)))]
6442 "TARGET_ARM"
6443 "*
6444 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6445 INTVAL (operands[2]));
6446 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6447 "
6448 [(set_attr "predicable" "yes")]
6449 )
6450
6451 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6452 ;; followed by a load. These sequences can be crunched down to
6453 ;; tls_load_dot_plus_eight by a peephole.
6454
6455 (define_peephole2
6456 [(set (match_operand:SI 0 "register_operand" "")
6457 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6458 (const_int 8)
6459 (match_operand 1 "" "")]
6460 UNSPEC_PIC_BASE))
6461 (set (match_operand:SI 2 "arm_general_register_operand" "")
6462 (mem:SI (match_dup 0)))]
6463 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6464 [(set (match_dup 2)
6465 (mem:SI (unspec:SI [(match_dup 3)
6466 (const_int 8)
6467 (match_dup 1)]
6468 UNSPEC_PIC_BASE)))]
6469 ""
6470 )
6471
6472 (define_insn "pic_offset_arm"
6473 [(set (match_operand:SI 0 "register_operand" "=r")
6474 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6475 (unspec:SI [(match_operand:SI 2 "" "X")]
6476 UNSPEC_PIC_OFFSET))))]
6477 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6478 "ldr%?\\t%0, [%1,%2]"
6479 [(set_attr "type" "load1")]
6480 )
6481
6482 (define_expand "builtin_setjmp_receiver"
6483 [(label_ref (match_operand 0 "" ""))]
6484 "flag_pic"
6485 "
6486 {
6487 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6488 register. */
6489 if (arm_pic_register != INVALID_REGNUM)
6490 arm_load_pic_register (1UL << 3);
6491 DONE;
6492 }")
6493
6494 ;; If copying one reg to another we can set the condition codes according to
6495 ;; its value.  Such a move is common after a return from a subroutine when the
6496 ;; result is being tested against zero.
6497
6498 (define_insn "*movsi_compare0"
6499 [(set (reg:CC CC_REGNUM)
6500 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6501 (const_int 0)))
6502 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6503 (match_dup 1))]
6504 "TARGET_32BIT"
6505 "@
6506 cmp%?\\t%0, #0
6507 sub%.\\t%0, %1, #0"
6508 [(set_attr "conds" "set")
6509 (set_attr "type" "simple_alu_imm,simple_alu_imm")]
6510 )
6511
6512 ;; Subroutine to store a half word from a register into memory.
6513 ;; Operand 0 is the source register (HImode)
6514 ;; Operand 1 is the destination address in a register (SImode)
6515
6516 ;; In both this routine and the next, we must be careful not to spill
6517 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6518 ;; can generate unrecognizable rtl.
6519
6520 (define_expand "storehi"
6521 [;; store the low byte
6522 (set (match_operand 1 "" "") (match_dup 3))
6523 ;; extract the high byte
6524 (set (match_dup 2)
6525 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6526 ;; store the high byte
6527 (set (match_dup 4) (match_dup 5))]
6528 "TARGET_ARM"
6529 "
6530 {
6531 rtx op1 = operands[1];
6532 rtx addr = XEXP (op1, 0);
6533 enum rtx_code code = GET_CODE (addr);
6534
6535 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6536 || code == MINUS)
6537 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6538
6539 operands[4] = adjust_address (op1, QImode, 1);
6540 operands[1] = adjust_address (operands[1], QImode, 0);
6541 operands[3] = gen_lowpart (QImode, operands[0]);
6542 operands[0] = gen_lowpart (SImode, operands[0]);
6543 operands[2] = gen_reg_rtx (SImode);
6544 operands[5] = gen_lowpart (QImode, operands[2]);
6545 }"
6546 )
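;; A sketch of the expansion above (little-endian, illustrative registers):
;; storing the halfword in r1 to the address in r0 becomes roughly
;;	strb	r1, [r0]		@ low byte
;;	mov	r2, r1, asr #8
;;	strb	r2, [r0, #1]		@ high byte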
6547
6548 (define_expand "storehi_bigend"
6549 [(set (match_dup 4) (match_dup 3))
6550 (set (match_dup 2)
6551 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6552 (set (match_operand 1 "" "") (match_dup 5))]
6553 "TARGET_ARM"
6554 "
6555 {
6556 rtx op1 = operands[1];
6557 rtx addr = XEXP (op1, 0);
6558 enum rtx_code code = GET_CODE (addr);
6559
6560 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6561 || code == MINUS)
6562 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6563
6564 operands[4] = adjust_address (op1, QImode, 1);
6565 operands[1] = adjust_address (operands[1], QImode, 0);
6566 operands[3] = gen_lowpart (QImode, operands[0]);
6567 operands[0] = gen_lowpart (SImode, operands[0]);
6568 operands[2] = gen_reg_rtx (SImode);
6569 operands[5] = gen_lowpart (QImode, operands[2]);
6570 }"
6571 )
6572
6573 ;; Subroutine to store a half word integer constant into memory.
6574 (define_expand "storeinthi"
6575 [(set (match_operand 0 "" "")
6576 (match_operand 1 "" ""))
6577 (set (match_dup 3) (match_dup 2))]
6578 "TARGET_ARM"
6579 "
6580 {
6581 HOST_WIDE_INT value = INTVAL (operands[1]);
6582 rtx addr = XEXP (operands[0], 0);
6583 rtx op0 = operands[0];
6584 enum rtx_code code = GET_CODE (addr);
6585
6586 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6587 || code == MINUS)
6588 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6589
6590 operands[1] = gen_reg_rtx (SImode);
6591 if (BYTES_BIG_ENDIAN)
6592 {
6593 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6594 if ((value & 255) == ((value >> 8) & 255))
6595 operands[2] = operands[1];
6596 else
6597 {
6598 operands[2] = gen_reg_rtx (SImode);
6599 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6600 }
6601 }
6602 else
6603 {
6604 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6605 if ((value & 255) == ((value >> 8) & 255))
6606 operands[2] = operands[1];
6607 else
6608 {
6609 operands[2] = gen_reg_rtx (SImode);
6610 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6611 }
6612 }
6613
6614 operands[3] = adjust_address (op0, QImode, 1);
6615 operands[0] = adjust_address (operands[0], QImode, 0);
6616 operands[2] = gen_lowpart (QImode, operands[2]);
6617 operands[1] = gen_lowpart (QImode, operands[1]);
6618 }"
6619 )
6620
6621 (define_expand "storehi_single_op"
6622 [(set (match_operand:HI 0 "memory_operand" "")
6623 (match_operand:HI 1 "general_operand" ""))]
6624 "TARGET_32BIT && arm_arch4"
6625 "
6626 if (!s_register_operand (operands[1], HImode))
6627 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6628 "
6629 )
6630
6631 (define_expand "movhi"
6632 [(set (match_operand:HI 0 "general_operand" "")
6633 (match_operand:HI 1 "general_operand" ""))]
6634 "TARGET_EITHER"
6635 "
6636 if (TARGET_ARM)
6637 {
6638 if (can_create_pseudo_p ())
6639 {
6640 if (MEM_P (operands[0]))
6641 {
6642 if (arm_arch4)
6643 {
6644 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6645 DONE;
6646 }
6647 if (CONST_INT_P (operands[1]))
6648 emit_insn (gen_storeinthi (operands[0], operands[1]));
6649 else
6650 {
6651 if (MEM_P (operands[1]))
6652 operands[1] = force_reg (HImode, operands[1]);
6653 if (BYTES_BIG_ENDIAN)
6654 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6655 else
6656 emit_insn (gen_storehi (operands[1], operands[0]));
6657 }
6658 DONE;
6659 }
6660 /* Sign extend a constant, and keep it in an SImode reg. */
6661 else if (CONST_INT_P (operands[1]))
6662 {
6663 rtx reg = gen_reg_rtx (SImode);
6664 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6665
6666 /* If the constant is already valid, leave it alone. */
6667 if (!const_ok_for_arm (val))
6668 {
6669 /* If setting all the top bits will make the constant
6670 loadable in a single instruction, then set them.
6671 Otherwise, sign extend the number. */
6672
6673 if (const_ok_for_arm (~(val | ~0xffff)))
6674 val |= ~0xffff;
6675 else if (val & 0x8000)
6676 val |= ~0xffff;
6677 }
6678
6679 emit_insn (gen_movsi (reg, GEN_INT (val)));
6680 operands[1] = gen_lowpart (HImode, reg);
6681 }
6682 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6683 && MEM_P (operands[1]))
6684 {
6685 rtx reg = gen_reg_rtx (SImode);
6686
6687 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6688 operands[1] = gen_lowpart (HImode, reg);
6689 }
6690 else if (!arm_arch4)
6691 {
6692 if (MEM_P (operands[1]))
6693 {
6694 rtx base;
6695 rtx offset = const0_rtx;
6696 rtx reg = gen_reg_rtx (SImode);
6697
6698 if ((REG_P (base = XEXP (operands[1], 0))
6699 || (GET_CODE (base) == PLUS
6700 && (CONST_INT_P (offset = XEXP (base, 1)))
6701 && ((INTVAL(offset) & 1) != 1)
6702 && REG_P (base = XEXP (base, 0))))
6703 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6704 {
6705 rtx new_rtx;
6706
6707 new_rtx = widen_memory_access (operands[1], SImode,
6708 ((INTVAL (offset) & ~3)
6709 - INTVAL (offset)));
6710 emit_insn (gen_movsi (reg, new_rtx));
6711 if (((INTVAL (offset) & 2) != 0)
6712 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6713 {
6714 rtx reg2 = gen_reg_rtx (SImode);
6715
6716 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6717 reg = reg2;
6718 }
6719 }
6720 else
6721 emit_insn (gen_movhi_bytes (reg, operands[1]));
6722
6723 operands[1] = gen_lowpart (HImode, reg);
6724 }
6725 }
6726 }
6727 /* Handle loading a large integer during reload. */
6728 else if (CONST_INT_P (operands[1])
6729 && !const_ok_for_arm (INTVAL (operands[1]))
6730 && !const_ok_for_arm (~INTVAL (operands[1])))
6731 {
6732 /* Writing a constant to memory needs a scratch, which should
6733 be handled with SECONDARY_RELOADs. */
6734 gcc_assert (REG_P (operands[0]));
6735
6736 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6737 emit_insn (gen_movsi (operands[0], operands[1]));
6738 DONE;
6739 }
6740 }
6741 else if (TARGET_THUMB2)
6742 {
6743 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6744 if (can_create_pseudo_p ())
6745 {
6746 if (!REG_P (operands[0]))
6747 operands[1] = force_reg (HImode, operands[1]);
6748 /* Zero extend a constant, and keep it in an SImode reg. */
6749 else if (CONST_INT_P (operands[1]))
6750 {
6751 rtx reg = gen_reg_rtx (SImode);
6752 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6753
6754 emit_insn (gen_movsi (reg, GEN_INT (val)));
6755 operands[1] = gen_lowpart (HImode, reg);
6756 }
6757 }
6758 }
6759 else /* TARGET_THUMB1 */
6760 {
6761 if (can_create_pseudo_p ())
6762 {
6763 if (CONST_INT_P (operands[1]))
6764 {
6765 rtx reg = gen_reg_rtx (SImode);
6766
6767 emit_insn (gen_movsi (reg, operands[1]));
6768 operands[1] = gen_lowpart (HImode, reg);
6769 }
6770
6771 /* ??? We shouldn't really get invalid addresses here, but this can
6772 happen if we are passed an SP (never OK for HImode/QImode) or
6773 virtual register (also rejected as illegitimate for HImode/QImode)
6774 relative address. */
6775 /* ??? This should perhaps be fixed elsewhere, for instance, in
6776 fixup_stack_1, by checking for other kinds of invalid addresses,
6777 e.g. a bare reference to a virtual register. This may confuse the
6778 alpha though, which must handle this case differently. */
6779 if (MEM_P (operands[0])
6780 && !memory_address_p (GET_MODE (operands[0]),
6781 XEXP (operands[0], 0)))
6782 operands[0]
6783 = replace_equiv_address (operands[0],
6784 copy_to_reg (XEXP (operands[0], 0)));
6785
6786 if (MEM_P (operands[1])
6787 && !memory_address_p (GET_MODE (operands[1]),
6788 XEXP (operands[1], 0)))
6789 operands[1]
6790 = replace_equiv_address (operands[1],
6791 copy_to_reg (XEXP (operands[1], 0)));
6792
6793 if (MEM_P (operands[1]) && optimize > 0)
6794 {
6795 rtx reg = gen_reg_rtx (SImode);
6796
6797 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6798 operands[1] = gen_lowpart (HImode, reg);
6799 }
6800
6801 if (MEM_P (operands[0]))
6802 operands[1] = force_reg (HImode, operands[1]);
6803 }
6804 else if (CONST_INT_P (operands[1])
6805 && !satisfies_constraint_I (operands[1]))
6806 {
6807 /* Handle loading a large integer during reload. */
6808
6809 /* Writing a constant to memory needs a scratch, which should
6810 be handled with SECONDARY_RELOADs. */
6811 gcc_assert (REG_P (operands[0]));
6812
6813 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6814 emit_insn (gen_movsi (operands[0], operands[1]));
6815 DONE;
6816 }
6817 }
6818 "
6819 )
6820
6821 (define_insn "*thumb1_movhi_insn"
6822 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6823 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6824 "TARGET_THUMB1
6825 && ( register_operand (operands[0], HImode)
6826 || register_operand (operands[1], HImode))"
6827 "*
6828 switch (which_alternative)
6829 {
6830 case 0: return \"add %0, %1, #0\";
6831 case 2: return \"strh %1, %0\";
6832 case 3: return \"mov %0, %1\";
6833 case 4: return \"mov %0, %1\";
6834 case 5: return \"mov %0, %1\";
6835 default: gcc_unreachable ();
6836 case 1:
6837 /* The stack pointer can end up being taken as an index register.
6838 Catch this case here and deal with it. */
6839 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6840 && REG_P (XEXP (XEXP (operands[1], 0), 0))
6841 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6842 {
6843 rtx ops[2];
6844 ops[0] = operands[0];
6845 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6846
6847 output_asm_insn (\"mov %0, %1\", ops);
6848
6849 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6850
6851 }
6852 return \"ldrh %0, %1\";
6853 }"
6854 [(set_attr "length" "2,4,2,2,2,2")
6855 (set_attr "type" "*,load1,store1,*,*,*")
6856 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6857
6858
6859 (define_expand "movhi_bytes"
6860 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6861 (set (match_dup 3)
6862 (zero_extend:SI (match_dup 6)))
6863 (set (match_operand:SI 0 "" "")
6864 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6865 "TARGET_ARM"
6866 "
6867 {
6868 rtx mem1, mem2;
6869 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6870
6871 mem1 = change_address (operands[1], QImode, addr);
6872 mem2 = change_address (operands[1], QImode,
6873 plus_constant (Pmode, addr, 1));
6874 operands[0] = gen_lowpart (SImode, operands[0]);
6875 operands[1] = mem1;
6876 operands[2] = gen_reg_rtx (SImode);
6877 operands[3] = gen_reg_rtx (SImode);
6878 operands[6] = mem2;
6879
6880 if (BYTES_BIG_ENDIAN)
6881 {
6882 operands[4] = operands[2];
6883 operands[5] = operands[3];
6884 }
6885 else
6886 {
6887 operands[4] = operands[3];
6888 operands[5] = operands[2];
6889 }
6890 }"
6891 )
6892
6893 (define_expand "movhi_bigend"
6894 [(set (match_dup 2)
6895 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6896 (const_int 16)))
6897 (set (match_dup 3)
6898 (ashiftrt:SI (match_dup 2) (const_int 16)))
6899 (set (match_operand:HI 0 "s_register_operand" "")
6900 (match_dup 4))]
6901 "TARGET_ARM"
6902 "
6903 operands[2] = gen_reg_rtx (SImode);
6904 operands[3] = gen_reg_rtx (SImode);
6905 operands[4] = gen_lowpart (HImode, operands[3]);
6906 "
6907 )
6908
6909 ;; Pattern to recognize the insn generated by the default case above
6910 (define_insn "*movhi_insn_arch4"
6911 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6912 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6913 "TARGET_ARM
6914 && arm_arch4
6915 && (register_operand (operands[0], HImode)
6916 || register_operand (operands[1], HImode))"
6917 "@
6918 mov%?\\t%0, %1\\t%@ movhi
6919 mvn%?\\t%0, #%B1\\t%@ movhi
6920 str%(h%)\\t%1, %0\\t%@ movhi
6921 ldr%(h%)\\t%0, %1\\t%@ movhi"
6922 [(set_attr "predicable" "yes")
6923 (set_attr "insn" "mov,mvn,*,*")
6924 (set_attr "pool_range" "*,*,*,256")
6925 (set_attr "neg_pool_range" "*,*,*,244")
6926 (set_attr_alternative "type"
6927 [(if_then_else (match_operand 1 "const_int_operand" "")
6928 (const_string "simple_alu_imm" )
6929 (const_string "*"))
6930 (const_string "simple_alu_imm")
6931 (const_string "store1")
6932 (const_string "load1")])]
6933 )
6934
6935 (define_insn "*movhi_bytes"
6936 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6937 (match_operand:HI 1 "arm_rhs_operand" "I,r,K"))]
6938 "TARGET_ARM"
6939 "@
6940 mov%?\\t%0, %1\\t%@ movhi
6941 mov%?\\t%0, %1\\t%@ movhi
6942 mvn%?\\t%0, #%B1\\t%@ movhi"
6943 [(set_attr "predicable" "yes")
6944 (set_attr "insn" "mov, mov,mvn")
6945 (set_attr "type" "simple_alu_imm,*,simple_alu_imm")]
6946 )
6947
6948 (define_expand "thumb_movhi_clobber"
6949 [(set (match_operand:HI 0 "memory_operand" "")
6950 (match_operand:HI 1 "register_operand" ""))
6951 (clobber (match_operand:DI 2 "register_operand" ""))]
6952 "TARGET_THUMB1"
6953 "
6954 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6955 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6956 {
6957 emit_insn (gen_movhi (operands[0], operands[1]));
6958 DONE;
6959 }
6960 /* XXX Fixme, need to handle other cases here as well. */
6961 gcc_unreachable ();
6962 "
6963 )
6964
6965 ;; We use a DImode scratch because we may occasionally need an additional
6966 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6967 ;; to take any notice of the "o" constraints on the reload_memory_operand.
6968 (define_expand "reload_outhi"
6969 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6970 (match_operand:HI 1 "s_register_operand" "r")
6971 (match_operand:DI 2 "s_register_operand" "=&l")])]
6972 "TARGET_EITHER"
6973 "if (TARGET_ARM)
6974 arm_reload_out_hi (operands);
6975 else
6976 thumb_reload_out_hi (operands);
6977 DONE;
6978 "
6979 )
6980
6981 (define_expand "reload_inhi"
6982 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6983 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6984 (match_operand:DI 2 "s_register_operand" "=&r")])]
6985 "TARGET_EITHER"
6986 "
6987 if (TARGET_ARM)
6988 arm_reload_in_hi (operands);
6989 else
6990 thumb_reload_out_hi (operands);
6991 DONE;
6992 ")
6993
6994 (define_expand "movqi"
6995 [(set (match_operand:QI 0 "general_operand" "")
6996 (match_operand:QI 1 "general_operand" ""))]
6997 "TARGET_EITHER"
6998 "
6999 /* Everything except mem = const or mem = mem can be done easily */
7000
7001 if (can_create_pseudo_p ())
7002 {
7003 if (CONST_INT_P (operands[1]))
7004 {
7005 rtx reg = gen_reg_rtx (SImode);
7006
7007 /* For thumb we want an unsigned immediate, since we are then more likely
7008 to be able to use a movs insn. */
7009 if (TARGET_THUMB)
7010 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7011
7012 emit_insn (gen_movsi (reg, operands[1]));
7013 operands[1] = gen_lowpart (QImode, reg);
7014 }
7015
7016 if (TARGET_THUMB)
7017 {
7018 /* ??? We shouldn't really get invalid addresses here, but this can
7019 happen if we are passed an SP (never OK for HImode/QImode) or
7020 virtual register (also rejected as illegitimate for HImode/QImode)
7021 relative address. */
7022 /* ??? This should perhaps be fixed elsewhere, for instance, in
7023 fixup_stack_1, by checking for other kinds of invalid addresses,
7024 e.g. a bare reference to a virtual register. This may confuse the
7025 alpha though, which must handle this case differently. */
7026 if (MEM_P (operands[0])
7027 && !memory_address_p (GET_MODE (operands[0]),
7028 XEXP (operands[0], 0)))
7029 operands[0]
7030 = replace_equiv_address (operands[0],
7031 copy_to_reg (XEXP (operands[0], 0)));
7032 if (MEM_P (operands[1])
7033 && !memory_address_p (GET_MODE (operands[1]),
7034 XEXP (operands[1], 0)))
7035 operands[1]
7036 = replace_equiv_address (operands[1],
7037 copy_to_reg (XEXP (operands[1], 0)));
7038 }
7039
7040 if (MEM_P (operands[1]) && optimize > 0)
7041 {
7042 rtx reg = gen_reg_rtx (SImode);
7043
7044 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7045 operands[1] = gen_lowpart (QImode, reg);
7046 }
7047
7048 if (MEM_P (operands[0]))
7049 operands[1] = force_reg (QImode, operands[1]);
7050 }
7051 else if (TARGET_THUMB
7052 && CONST_INT_P (operands[1])
7053 && !satisfies_constraint_I (operands[1]))
7054 {
7055 /* Handle loading a large integer during reload. */
7056
7057 /* Writing a constant to memory needs a scratch, which should
7058 be handled with SECONDARY_RELOADs. */
7059 gcc_assert (REG_P (operands[0]));
7060
7061 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7062 emit_insn (gen_movsi (operands[0], operands[1]));
7063 DONE;
7064 }
7065 "
7066 )
7067
7068
7069 (define_insn "*arm_movqi_insn"
7070 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,Uu,r,m")
7071 (match_operand:QI 1 "general_operand" "r,I,K,Uu,l,m,r"))]
7072 "TARGET_32BIT
7073 && ( register_operand (operands[0], QImode)
7074 || register_operand (operands[1], QImode))"
7075 "@
7076 mov%?\\t%0, %1
7077 mov%?\\t%0, %1
7078 mvn%?\\t%0, #%B1
7079 ldr%(b%)\\t%0, %1
7080 str%(b%)\\t%1, %0
7081 ldr%(b%)\\t%0, %1
7082 str%(b%)\\t%1, %0"
7083 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,load1, store1, load1, store1")
7084 (set_attr "insn" "mov,mov,mvn,*,*,*,*")
7085 (set_attr "predicable" "yes")
7086 (set_attr "arch" "any,any,any,t2,t2,any,any")
7087 (set_attr "length" "4,4,4,2,2,4,4")]
7088 )
7089
7090 (define_insn "*thumb1_movqi_insn"
7091 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7092 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
7093 "TARGET_THUMB1
7094 && ( register_operand (operands[0], QImode)
7095 || register_operand (operands[1], QImode))"
7096 "@
7097 add\\t%0, %1, #0
7098 ldrb\\t%0, %1
7099 strb\\t%1, %0
7100 mov\\t%0, %1
7101 mov\\t%0, %1
7102 mov\\t%0, %1"
7103 [(set_attr "length" "2")
7104 (set_attr "type" "simple_alu_imm,load1,store1,*,*,simple_alu_imm")
7105 (set_attr "insn" "*,*,*,mov,mov,mov")
7106 (set_attr "pool_range" "*,32,*,*,*,*")
7107 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7108
7109 ;; HFmode moves
7110 (define_expand "movhf"
7111 [(set (match_operand:HF 0 "general_operand" "")
7112 (match_operand:HF 1 "general_operand" ""))]
7113 "TARGET_EITHER"
7114 "
7115 if (TARGET_32BIT)
7116 {
7117 if (MEM_P (operands[0]))
7118 operands[1] = force_reg (HFmode, operands[1]);
7119 }
7120 else /* TARGET_THUMB1 */
7121 {
7122 if (can_create_pseudo_p ())
7123 {
7124 if (!REG_P (operands[0]))
7125 operands[1] = force_reg (HFmode, operands[1]);
7126 }
7127 }
7128 "
7129 )
7130
7131 (define_insn "*arm32_movhf"
7132 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7133 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7134 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
7135 && ( s_register_operand (operands[0], HFmode)
7136 || s_register_operand (operands[1], HFmode))"
7137 "*
7138 switch (which_alternative)
7139 {
7140 case 0: /* ARM register from memory */
7141 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
7142 case 1: /* memory from ARM register */
7143 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
7144 case 2: /* ARM register from ARM register */
7145 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7146 case 3: /* ARM register from constant */
7147 {
7148 REAL_VALUE_TYPE r;
7149 long bits;
7150 rtx ops[4];
7151
7152 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7153 bits = real_to_target (NULL, &r, HFmode);
7154 ops[0] = operands[0];
7155 ops[1] = GEN_INT (bits);
7156 ops[2] = GEN_INT (bits & 0xff00);
7157 ops[3] = GEN_INT (bits & 0x00ff);
7158
7159 if (arm_arch_thumb2)
7160 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7161 else
7162 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7163 return \"\";
7164 }
7165 default:
7166 gcc_unreachable ();
7167 }
7168 "
7169 [(set_attr "conds" "unconditional")
7170 (set_attr "type" "load1,store1,*,*")
7171 (set_attr "insn" "*,*,mov,mov")
7172 (set_attr "length" "4,4,4,8")
7173 (set_attr "predicable" "yes")]
7174 )
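;; For example (illustrative): the __fp16 constant 1.0 has the bit pattern
;; 0x3c00, so alternative 3 above moves the value 0x3c00 into the register,
;; either with a single movw when that instruction is available, or with a
;; mov of the high byte followed by an orr of the (here zero) low byte.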
7175
7176 (define_insn "*thumb1_movhf"
7177 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
7178 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
7179 "TARGET_THUMB1
7180 && ( s_register_operand (operands[0], HFmode)
7181 || s_register_operand (operands[1], HFmode))"
7182 "*
7183 switch (which_alternative)
7184 {
7185 case 1:
7186 {
7187 rtx addr;
7188 gcc_assert (MEM_P (operands[1]));
7189 addr = XEXP (operands[1], 0);
7190 if (GET_CODE (addr) == LABEL_REF
7191 || (GET_CODE (addr) == CONST
7192 && GET_CODE (XEXP (addr, 0)) == PLUS
7193 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
7194 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
7195 {
7196 /* Constant pool entry. */
7197 return \"ldr\\t%0, %1\";
7198 }
7199 return \"ldrh\\t%0, %1\";
7200 }
7201 case 2: return \"strh\\t%1, %0\";
7202 default: return \"mov\\t%0, %1\";
7203 }
7204 "
7205 [(set_attr "length" "2")
7206 (set_attr "type" "*,load1,store1,*,*")
7207 (set_attr "insn" "mov,*,*,mov,mov")
7208 (set_attr "pool_range" "*,1018,*,*,*")
7209 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
7210
7211 (define_expand "movsf"
7212 [(set (match_operand:SF 0 "general_operand" "")
7213 (match_operand:SF 1 "general_operand" ""))]
7214 "TARGET_EITHER"
7215 "
7216 if (TARGET_32BIT)
7217 {
7218 if (MEM_P (operands[0]))
7219 operands[1] = force_reg (SFmode, operands[1]);
7220 }
7221 else /* TARGET_THUMB1 */
7222 {
7223 if (can_create_pseudo_p ())
7224 {
7225 if (!REG_P (operands[0]))
7226 operands[1] = force_reg (SFmode, operands[1]);
7227 }
7228 }
7229 "
7230 )
7231
7232 ;; Transform a floating-point move of a constant into a core register into
7233 ;; an SImode operation.
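;; For example (illustrative), after reload a move of the SFmode constant 1.0
;; into a core register can become an SImode move of its bit image,
;; (set (reg:SI rN) (const_int 0x3f800000)), since gen_lowpart of the
;; CONST_DOUBLE yields the IEEE single-precision bit pattern of the value.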
7234 (define_split
7235 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7236 (match_operand:SF 1 "immediate_operand" ""))]
7237 "TARGET_EITHER
7238 && reload_completed
7239 && CONST_DOUBLE_P (operands[1])"
7240 [(set (match_dup 2) (match_dup 3))]
7241 "
7242 operands[2] = gen_lowpart (SImode, operands[0]);
7243 operands[3] = gen_lowpart (SImode, operands[1]);
7244 if (operands[2] == 0 || operands[3] == 0)
7245 FAIL;
7246 "
7247 )
7248
7249 (define_insn "*arm_movsf_soft_insn"
7250 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7251 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7252 "TARGET_32BIT
7253 && TARGET_SOFT_FLOAT
7254 && (!MEM_P (operands[0])
7255 || register_operand (operands[1], SFmode))"
7256 "@
7257 mov%?\\t%0, %1
7258 ldr%?\\t%0, %1\\t%@ float
7259 str%?\\t%1, %0\\t%@ float"
7260 [(set_attr "predicable" "yes")
7261 (set_attr "type" "*,load1,store1")
7262 (set_attr "insn" "mov,*,*")
7263 (set_attr "arm_pool_range" "*,4096,*")
7264 (set_attr "thumb2_pool_range" "*,4094,*")
7265 (set_attr "arm_neg_pool_range" "*,4084,*")
7266 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7267 )
7268
7269 ;;; ??? This should have alternatives for constants.
7270 (define_insn "*thumb1_movsf_insn"
7271 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
7272 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
7273 "TARGET_THUMB1
7274 && ( register_operand (operands[0], SFmode)
7275 || register_operand (operands[1], SFmode))"
7276 "@
7277 add\\t%0, %1, #0
7278 ldmia\\t%1, {%0}
7279 stmia\\t%0, {%1}
7280 ldr\\t%0, %1
7281 str\\t%1, %0
7282 mov\\t%0, %1
7283 mov\\t%0, %1"
7284 [(set_attr "length" "2")
7285 (set_attr "type" "*,load1,store1,load1,store1,*,*")
7286 (set_attr "pool_range" "*,*,*,1018,*,*,*")
7287 (set_attr "insn" "*,*,*,*,*,mov,mov")
7288 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
7289 )
7290
7291 (define_expand "movdf"
7292 [(set (match_operand:DF 0 "general_operand" "")
7293 (match_operand:DF 1 "general_operand" ""))]
7294 "TARGET_EITHER"
7295 "
7296 if (TARGET_32BIT)
7297 {
7298 if (MEM_P (operands[0]))
7299 operands[1] = force_reg (DFmode, operands[1]);
7300 }
7301 else /* TARGET_THUMB */
7302 {
7303 if (can_create_pseudo_p ())
7304 {
7305 if (!REG_P (operands[0]))
7306 operands[1] = force_reg (DFmode, operands[1]);
7307 }
7308 }
7309 "
7310 )
7311
7312 ;; Reloading a df mode value stored in integer regs to memory can require a
7313 ;; scratch reg.
7314 (define_expand "reload_outdf"
7315 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7316 (match_operand:DF 1 "s_register_operand" "r")
7317 (match_operand:SI 2 "s_register_operand" "=&r")]
7318 "TARGET_THUMB2"
7319 "
7320 {
7321 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7322
7323 if (code == REG)
7324 operands[2] = XEXP (operands[0], 0);
7325 else if (code == POST_INC || code == PRE_DEC)
7326 {
7327 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7328 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7329 emit_insn (gen_movdi (operands[0], operands[1]));
7330 DONE;
7331 }
7332 else if (code == PRE_INC)
7333 {
7334 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7335
7336 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7337 operands[2] = reg;
7338 }
7339 else if (code == POST_DEC)
7340 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7341 else
7342 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7343 XEXP (XEXP (operands[0], 0), 1)));
7344
7345 emit_insn (gen_rtx_SET (VOIDmode,
7346 replace_equiv_address (operands[0], operands[2]),
7347 operands[1]));
7348
7349 if (code == POST_DEC)
7350 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7351
7352 DONE;
7353 }"
7354 )
7355
7356 (define_insn "*movdf_soft_insn"
7357 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,q,m")
7358 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,q"))]
7359 "TARGET_32BIT && TARGET_SOFT_FLOAT
7360 && ( register_operand (operands[0], DFmode)
7361 || register_operand (operands[1], DFmode))"
7362 "*
7363 switch (which_alternative)
7364 {
7365 case 0:
7366 case 1:
7367 case 2:
7368 return \"#\";
7369 default:
7370 return output_move_double (operands, true, NULL);
7371 }
7372 "
7373 [(set_attr "length" "8,12,16,8,8")
7374 (set_attr "type" "*,*,*,load2,store2")
7375 (set_attr "arm_pool_range" "*,*,*,1020,*")
7376 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7377 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7378 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7379 )
7380
7381 ;;; ??? This should have alternatives for constants.
7382 ;;; ??? This was originally identical to the movdi_insn pattern.
7383 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
7384 ;;; thumb_reorg with a memory reference.
7385 (define_insn "*thumb_movdf_insn"
7386 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
7387 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
7388 "TARGET_THUMB1
7389 && ( register_operand (operands[0], DFmode)
7390 || register_operand (operands[1], DFmode))"
7391 "*
7392 switch (which_alternative)
7393 {
7394 default:
7395 case 0:
7396 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7397 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
7398 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
7399 case 1:
7400 return \"ldmia\\t%1, {%0, %H0}\";
7401 case 2:
7402 return \"stmia\\t%0, {%1, %H1}\";
7403 case 3:
7404 return thumb_load_double_from_address (operands);
7405 case 4:
7406 operands[2] = gen_rtx_MEM (SImode,
7407 plus_constant (Pmode,
7408 XEXP (operands[0], 0), 4));
7409 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
7410 return \"\";
7411 case 5:
7412 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7413 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
7414 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
7415 }
7416 "
7417 [(set_attr "length" "4,2,2,6,4,4")
7418 (set_attr "type" "*,load2,store2,load2,store2,*")
7419 (set_attr "insn" "*,*,*,*,*,mov")
7420 (set_attr "pool_range" "*,*,*,1018,*,*")]
7421 )
7422 \f
7423
7424 ;; load- and store-multiple insns
7425 ;; The ARM can load/store any set of registers, provided that they are in
7426 ;; ascending order, but these expanders assume a contiguous set.
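;; For example, loading three consecutive words from the address in r1 into
;; r4-r6 can be emitted as a single "ldmia r1, {r4, r5, r6}" (and stored back
;; with the matching "stmia"); the operand checks below restrict this to runs
;; of 2 to 14 consecutive core registers.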
7427
7428 (define_expand "load_multiple"
7429 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7430 (match_operand:SI 1 "" ""))
7431 (use (match_operand:SI 2 "" ""))])]
7432 "TARGET_32BIT"
7433 {
7434 HOST_WIDE_INT offset = 0;
7435
7436 /* Support only fixed-point (core) registers. */
7437 if (!CONST_INT_P (operands[2])
7438 || INTVAL (operands[2]) > 14
7439 || INTVAL (operands[2]) < 2
7440 || !MEM_P (operands[1])
7441 || !REG_P (operands[0])
7442 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7443 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7444 FAIL;
7445
7446 operands[3]
7447 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7448 INTVAL (operands[2]),
7449 force_reg (SImode, XEXP (operands[1], 0)),
7450 FALSE, operands[1], &offset);
7451 })
7452
7453 (define_expand "store_multiple"
7454 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7455 (match_operand:SI 1 "" ""))
7456 (use (match_operand:SI 2 "" ""))])]
7457 "TARGET_32BIT"
7458 {
7459 HOST_WIDE_INT offset = 0;
7460
7461 /* Support only fixed-point (core) registers. */
7462 if (!CONST_INT_P (operands[2])
7463 || INTVAL (operands[2]) > 14
7464 || INTVAL (operands[2]) < 2
7465 || !REG_P (operands[1])
7466 || !MEM_P (operands[0])
7467 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7468 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7469 FAIL;
7470
7471 operands[3]
7472 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7473 INTVAL (operands[2]),
7474 force_reg (SImode, XEXP (operands[0], 0)),
7475 FALSE, operands[0], &offset);
7476 })
7477
7478
7479 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7480 ;; We could let this apply for blocks of less than this, but it clobbers so
7481 ;; many registers that there is then probably a better way.
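;; For example (illustrative), a word-aligned copy of 16 bytes can be expanded
;; inline by this pattern rather than being left as a library call; in the
;; Thumb-1 case below only word-aligned copies of at most 48 bytes are
;; accepted, and everything else FAILs back to the generic code.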
7482
7483 (define_expand "movmemqi"
7484 [(match_operand:BLK 0 "general_operand" "")
7485 (match_operand:BLK 1 "general_operand" "")
7486 (match_operand:SI 2 "const_int_operand" "")
7487 (match_operand:SI 3 "const_int_operand" "")]
7488 ""
7489 "
7490 if (TARGET_32BIT)
7491 {
7492 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7493 && !optimize_function_for_size_p (cfun))
7494 {
7495 if (gen_movmem_ldrd_strd (operands))
7496 DONE;
7497 FAIL;
7498 }
7499
7500 if (arm_gen_movmemqi (operands))
7501 DONE;
7502 FAIL;
7503 }
7504 else /* TARGET_THUMB1 */
7505 {
7506 if ( INTVAL (operands[3]) != 4
7507 || INTVAL (operands[2]) > 48)
7508 FAIL;
7509
7510 thumb_expand_movmemqi (operands);
7511 DONE;
7512 }
7513 "
7514 )
7515
7516 ;; Thumb block-move insns
7517
7518 (define_insn "movmem12b"
7519 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7520 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7521 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7522 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7523 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
7524 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
7525 (set (match_operand:SI 0 "register_operand" "=l")
7526 (plus:SI (match_dup 2) (const_int 12)))
7527 (set (match_operand:SI 1 "register_operand" "=l")
7528 (plus:SI (match_dup 3) (const_int 12)))
7529 (clobber (match_scratch:SI 4 "=&l"))
7530 (clobber (match_scratch:SI 5 "=&l"))
7531 (clobber (match_scratch:SI 6 "=&l"))]
7532 "TARGET_THUMB1"
7533 "* return thumb_output_move_mem_multiple (3, operands);"
7534 [(set_attr "length" "4")
7535 ; This isn't entirely accurate... It loads as well, but in terms of
7536 ; scheduling the following insn it is better to consider it as a store
7537 (set_attr "type" "store3")]
7538 )
7539
7540 (define_insn "movmem8b"
7541 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7542 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7543 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7544 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7545 (set (match_operand:SI 0 "register_operand" "=l")
7546 (plus:SI (match_dup 2) (const_int 8)))
7547 (set (match_operand:SI 1 "register_operand" "=l")
7548 (plus:SI (match_dup 3) (const_int 8)))
7549 (clobber (match_scratch:SI 4 "=&l"))
7550 (clobber (match_scratch:SI 5 "=&l"))]
7551 "TARGET_THUMB1"
7552 "* return thumb_output_move_mem_multiple (2, operands);"
7553 [(set_attr "length" "4")
7554 ; This isn't entirely accurate... It loads as well, but in terms of
7555 ; scheduling the following insn it is better to consider it as a store
7556 (set_attr "type" "store2")]
7557 )
7558
7559 \f
7560
7561 ;; Compare & branch insns
7562 ;; The range calculations are derived as follows:
7563 ;; For forward branches, the address calculation returns the address of
7564 ;; the next instruction. This is 2 beyond the branch instruction.
7565 ;; For backward branches, the address calculation returns the address of
7566 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7567 ;; instruction for the shortest sequence, and 4 before the branch instruction
7568 ;; if we have to jump around an unconditional branch.
7569 ;; To the basic branch range the PC offset must be added (this is +4).
7570 ;; So for forward branches we have
7571 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7572 ;; And for backward branches we have
7573 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7574 ;;
7575 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7576 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250->256).
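;; Worked example: for 'b<cond>' the forward reach is 254 - 2 + 4 = 256 and
;; the backward reach is -256 - (-2) + 4 = -250; for 'b' the corresponding
;; figures are 2046 - 2 + 4 = 2048 and -2048 - (-4) + 4 = -2040. These are
;; exactly the bounds tested by the "length" attributes of the patterns below.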
7577
7578 (define_expand "cbranchsi4"
7579 [(set (pc) (if_then_else
7580 (match_operator 0 "expandable_comparison_operator"
7581 [(match_operand:SI 1 "s_register_operand" "")
7582 (match_operand:SI 2 "nonmemory_operand" "")])
7583 (label_ref (match_operand 3 "" ""))
7584 (pc)))]
7585 "TARGET_EITHER"
7586 "
7587 if (!TARGET_THUMB1)
7588 {
7589 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7590 FAIL;
7591 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7592 operands[3]));
7593 DONE;
7594 }
7595 if (thumb1_cmpneg_operand (operands[2], SImode))
7596 {
7597 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7598 operands[3], operands[0]));
7599 DONE;
7600 }
7601 if (!thumb1_cmp_operand (operands[2], SImode))
7602 operands[2] = force_reg (SImode, operands[2]);
7603 ")
7604
7605 ;; A pattern to recognize a special situation and optimize for it.
7606 ;; On the thumb, zero-extension from memory is preferable to sign-extension
7607 ;; due to the available addressing modes. Hence, convert a signed comparison
7608 ;; with zero into an unsigned comparison with 127 if possible.
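;; For example, for a 'signed char' value X loaded from memory, X >= 0 can be
;; tested after a zero-extending ldrb as (unsigned) X <= 127, and X < 0 as
;; (unsigned) X > 127; that is the rewrite the expander below performs.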
7609 (define_expand "cbranchqi4"
7610 [(set (pc) (if_then_else
7611 (match_operator 0 "lt_ge_comparison_operator"
7612 [(match_operand:QI 1 "memory_operand" "")
7613 (match_operand:QI 2 "const0_operand" "")])
7614 (label_ref (match_operand 3 "" ""))
7615 (pc)))]
7616 "TARGET_THUMB1"
7617 {
7618 rtx xops[4];
7619 xops[1] = gen_reg_rtx (SImode);
7620 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
7621 xops[2] = GEN_INT (127);
7622 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
7623 VOIDmode, xops[1], xops[2]);
7624 xops[3] = operands[3];
7625 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
7626 DONE;
7627 })
7628
7629 (define_expand "cbranchsf4"
7630 [(set (pc) (if_then_else
7631 (match_operator 0 "expandable_comparison_operator"
7632 [(match_operand:SF 1 "s_register_operand" "")
7633 (match_operand:SF 2 "arm_float_compare_operand" "")])
7634 (label_ref (match_operand 3 "" ""))
7635 (pc)))]
7636 "TARGET_32BIT && TARGET_HARD_FLOAT"
7637 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7638 operands[3])); DONE;"
7639 )
7640
7641 (define_expand "cbranchdf4"
7642 [(set (pc) (if_then_else
7643 (match_operator 0 "expandable_comparison_operator"
7644 [(match_operand:DF 1 "s_register_operand" "")
7645 (match_operand:DF 2 "arm_float_compare_operand" "")])
7646 (label_ref (match_operand 3 "" ""))
7647 (pc)))]
7648 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7649 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7650 operands[3])); DONE;"
7651 )
7652
7653 (define_expand "cbranchdi4"
7654 [(set (pc) (if_then_else
7655 (match_operator 0 "expandable_comparison_operator"
7656 [(match_operand:DI 1 "s_register_operand" "")
7657 (match_operand:DI 2 "cmpdi_operand" "")])
7658 (label_ref (match_operand 3 "" ""))
7659 (pc)))]
7660 "TARGET_32BIT"
7661 "{
7662 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7663 FAIL;
7664 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7665 operands[3]));
7666 DONE;
7667 }"
7668 )
7669
7670 (define_insn "cbranchsi4_insn"
7671 [(set (pc) (if_then_else
7672 (match_operator 0 "arm_comparison_operator"
7673 [(match_operand:SI 1 "s_register_operand" "l,l*h")
7674 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
7675 (label_ref (match_operand 3 "" ""))
7676 (pc)))]
7677 "TARGET_THUMB1"
7678 {
7679 rtx t = cfun->machine->thumb1_cc_insn;
7680 if (t != NULL_RTX)
7681 {
7682 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
7683 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
7684 t = NULL_RTX;
7685 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
7686 {
7687 if (!noov_comparison_operator (operands[0], VOIDmode))
7688 t = NULL_RTX;
7689 }
7690 else if (cfun->machine->thumb1_cc_mode != CCmode)
7691 t = NULL_RTX;
7692 }
7693 if (t == NULL_RTX)
7694 {
7695 output_asm_insn ("cmp\t%1, %2", operands);
7696 cfun->machine->thumb1_cc_insn = insn;
7697 cfun->machine->thumb1_cc_op0 = operands[1];
7698 cfun->machine->thumb1_cc_op1 = operands[2];
7699 cfun->machine->thumb1_cc_mode = CCmode;
7700 }
7701 else
7702 /* Ensure we emit the right type of condition code on the jump. */
7703 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
7704 CC_REGNUM);
7705
7706 switch (get_attr_length (insn))
7707 {
7708 case 4: return \"b%d0\\t%l3\";
7709 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7710 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7711 }
7712 }
7713 [(set (attr "far_jump")
7714 (if_then_else
7715 (eq_attr "length" "8")
7716 (const_string "yes")
7717 (const_string "no")))
7718 (set (attr "length")
7719 (if_then_else
7720 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7721 (le (minus (match_dup 3) (pc)) (const_int 256)))
7722 (const_int 4)
7723 (if_then_else
7724 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7725 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7726 (const_int 6)
7727 (const_int 8))))]
7728 )
7729
7730 (define_insn "cbranchsi4_scratch"
7731 [(set (pc) (if_then_else
7732 (match_operator 4 "arm_comparison_operator"
7733 [(match_operand:SI 1 "s_register_operand" "l,0")
7734 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7735 (label_ref (match_operand 3 "" ""))
7736 (pc)))
7737 (clobber (match_scratch:SI 0 "=l,l"))]
7738 "TARGET_THUMB1"
7739 "*
7740 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7741
7742 switch (get_attr_length (insn))
7743 {
7744 case 4: return \"b%d4\\t%l3\";
7745 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7746 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7747 }
7748 "
7749 [(set (attr "far_jump")
7750 (if_then_else
7751 (eq_attr "length" "8")
7752 (const_string "yes")
7753 (const_string "no")))
7754 (set (attr "length")
7755 (if_then_else
7756 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7757 (le (minus (match_dup 3) (pc)) (const_int 256)))
7758 (const_int 4)
7759 (if_then_else
7760 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7761 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7762 (const_int 6)
7763 (const_int 8))))]
7764 )
7765
7766 (define_insn "*negated_cbranchsi4"
7767 [(set (pc)
7768 (if_then_else
7769 (match_operator 0 "equality_operator"
7770 [(match_operand:SI 1 "s_register_operand" "l")
7771 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7772 (label_ref (match_operand 3 "" ""))
7773 (pc)))]
7774 "TARGET_THUMB1"
7775 "*
7776 output_asm_insn (\"cmn\\t%1, %2\", operands);
7777 switch (get_attr_length (insn))
7778 {
7779 case 4: return \"b%d0\\t%l3\";
7780 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7781 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7782 }
7783 "
7784 [(set (attr "far_jump")
7785 (if_then_else
7786 (eq_attr "length" "8")
7787 (const_string "yes")
7788 (const_string "no")))
7789 (set (attr "length")
7790 (if_then_else
7791 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7792 (le (minus (match_dup 3) (pc)) (const_int 256)))
7793 (const_int 4)
7794 (if_then_else
7795 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7796 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7797 (const_int 6)
7798 (const_int 8))))]
7799 )
7800
7801 (define_insn "*tbit_cbranch"
7802 [(set (pc)
7803 (if_then_else
7804 (match_operator 0 "equality_operator"
7805 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7806 (const_int 1)
7807 (match_operand:SI 2 "const_int_operand" "i"))
7808 (const_int 0)])
7809 (label_ref (match_operand 3 "" ""))
7810 (pc)))
7811 (clobber (match_scratch:SI 4 "=l"))]
7812 "TARGET_THUMB1"
7813 "*
7814 {
7815 rtx op[3];
7816 op[0] = operands[4];
7817 op[1] = operands[1];
7818 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7819
7820 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7821 switch (get_attr_length (insn))
7822 {
7823 case 4: return \"b%d0\\t%l3\";
7824 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7825 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7826 }
7827 }"
7828 [(set (attr "far_jump")
7829 (if_then_else
7830 (eq_attr "length" "8")
7831 (const_string "yes")
7832 (const_string "no")))
7833 (set (attr "length")
7834 (if_then_else
7835 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7836 (le (minus (match_dup 3) (pc)) (const_int 256)))
7837 (const_int 4)
7838 (if_then_else
7839 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7840 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7841 (const_int 6)
7842 (const_int 8))))]
7843 )
7844
7845 (define_insn "*tlobits_cbranch"
7846 [(set (pc)
7847 (if_then_else
7848 (match_operator 0 "equality_operator"
7849 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7850 (match_operand:SI 2 "const_int_operand" "i")
7851 (const_int 0))
7852 (const_int 0)])
7853 (label_ref (match_operand 3 "" ""))
7854 (pc)))
7855 (clobber (match_scratch:SI 4 "=l"))]
7856 "TARGET_THUMB1"
7857 "*
7858 {
7859 rtx op[3];
7860 op[0] = operands[4];
7861 op[1] = operands[1];
7862 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7863
7864 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7865 switch (get_attr_length (insn))
7866 {
7867 case 4: return \"b%d0\\t%l3\";
7868 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7869 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7870 }
7871 }"
7872 [(set (attr "far_jump")
7873 (if_then_else
7874 (eq_attr "length" "8")
7875 (const_string "yes")
7876 (const_string "no")))
7877 (set (attr "length")
7878 (if_then_else
7879 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7880 (le (minus (match_dup 3) (pc)) (const_int 256)))
7881 (const_int 4)
7882 (if_then_else
7883 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7884 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7885 (const_int 6)
7886 (const_int 8))))]
7887 )
7888
7889 (define_insn "*tstsi3_cbranch"
7890 [(set (pc)
7891 (if_then_else
7892 (match_operator 3 "equality_operator"
7893 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7894 (match_operand:SI 1 "s_register_operand" "l"))
7895 (const_int 0)])
7896 (label_ref (match_operand 2 "" ""))
7897 (pc)))]
7898 "TARGET_THUMB1"
7899 "*
7900 {
7901 output_asm_insn (\"tst\\t%0, %1\", operands);
7902 switch (get_attr_length (insn))
7903 {
7904 case 4: return \"b%d3\\t%l2\";
7905 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7906 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7907 }
7908 }"
7909 [(set (attr "far_jump")
7910 (if_then_else
7911 (eq_attr "length" "8")
7912 (const_string "yes")
7913 (const_string "no")))
7914 (set (attr "length")
7915 (if_then_else
7916 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7917 (le (minus (match_dup 2) (pc)) (const_int 256)))
7918 (const_int 4)
7919 (if_then_else
7920 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7921 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7922 (const_int 6)
7923 (const_int 8))))]
7924 )
7925
7926 (define_insn "*cbranchne_decr1"
7927 [(set (pc)
7928 (if_then_else (match_operator 3 "equality_operator"
7929 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7930 (const_int 0)])
7931 (label_ref (match_operand 4 "" ""))
7932 (pc)))
7933 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7934 (plus:SI (match_dup 2) (const_int -1)))
7935 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7936 "TARGET_THUMB1"
7937 "*
7938 {
7939 rtx cond[2];
7940 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7941 ? GEU : LTU),
7942 VOIDmode, operands[2], const1_rtx);
7943 cond[1] = operands[4];
7944
7945 if (which_alternative == 0)
7946 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7947 else if (which_alternative == 1)
7948 {
7949 /* We must provide an alternative for a hi reg because reload
7950 cannot handle output reloads on a jump instruction, but we
7951 can't subtract into that. Fortunately a mov from lo to hi
7952 does not clobber the condition codes. */
7953 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7954 output_asm_insn (\"mov\\t%0, %1\", operands);
7955 }
7956 else
7957 {
7958 /* Similarly, but the target is memory. */
7959 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7960 output_asm_insn (\"str\\t%1, %0\", operands);
7961 }
7962
7963 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7964 {
7965 case 4:
7966 output_asm_insn (\"b%d0\\t%l1\", cond);
7967 return \"\";
7968 case 6:
7969 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7970 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7971 default:
7972 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7973 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7974 }
7975 }
7976 "
7977 [(set (attr "far_jump")
7978 (if_then_else
7979 (ior (and (eq (symbol_ref ("which_alternative"))
7980 (const_int 0))
7981 (eq_attr "length" "8"))
7982 (eq_attr "length" "10"))
7983 (const_string "yes")
7984 (const_string "no")))
7985 (set_attr_alternative "length"
7986 [
7987 ;; Alternative 0
7988 (if_then_else
7989 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7990 (le (minus (match_dup 4) (pc)) (const_int 256)))
7991 (const_int 4)
7992 (if_then_else
7993 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7994 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7995 (const_int 6)
7996 (const_int 8)))
7997 ;; Alternative 1
7998 (if_then_else
7999 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8000 (le (minus (match_dup 4) (pc)) (const_int 256)))
8001 (const_int 6)
8002 (if_then_else
8003 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8004 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8005 (const_int 8)
8006 (const_int 10)))
8007 ;; Alternative 2
8008 (if_then_else
8009 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8010 (le (minus (match_dup 4) (pc)) (const_int 256)))
8011 (const_int 6)
8012 (if_then_else
8013 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8014 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8015 (const_int 8)
8016 (const_int 10)))
8017 ;; Alternative 3
8018 (if_then_else
8019 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8020 (le (minus (match_dup 4) (pc)) (const_int 256)))
8021 (const_int 6)
8022 (if_then_else
8023 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8024 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8025 (const_int 8)
8026 (const_int 10)))])]
8027 )
8028
8029 (define_insn "*addsi3_cbranch"
8030 [(set (pc)
8031 (if_then_else
8032 (match_operator 4 "arm_comparison_operator"
8033 [(plus:SI
8034 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
8035 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
8036 (const_int 0)])
8037 (label_ref (match_operand 5 "" ""))
8038 (pc)))
8039 (set
8040 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
8041 (plus:SI (match_dup 2) (match_dup 3)))
8042 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
8043 "TARGET_THUMB1
8044 && (GET_CODE (operands[4]) == EQ
8045 || GET_CODE (operands[4]) == NE
8046 || GET_CODE (operands[4]) == GE
8047 || GET_CODE (operands[4]) == LT)"
8048 "*
8049 {
8050 rtx cond[3];
8051
8052 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
8053 cond[1] = operands[2];
8054 cond[2] = operands[3];
8055
8056 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
8057 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
8058 else
8059 output_asm_insn (\"add\\t%0, %1, %2\", cond);
8060
8061 if (which_alternative >= 2
8062 && which_alternative < 4)
8063 output_asm_insn (\"mov\\t%0, %1\", operands);
8064 else if (which_alternative >= 4)
8065 output_asm_insn (\"str\\t%1, %0\", operands);
8066
8067 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
8068 {
8069 case 4:
8070 return \"b%d4\\t%l5\";
8071 case 6:
8072 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
8073 default:
8074 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
8075 }
8076 }
8077 "
8078 [(set (attr "far_jump")
8079 (if_then_else
8080 (ior (and (lt (symbol_ref ("which_alternative"))
8081 (const_int 2))
8082 (eq_attr "length" "8"))
8083 (eq_attr "length" "10"))
8084 (const_string "yes")
8085 (const_string "no")))
8086 (set (attr "length")
8087 (if_then_else
8088 (lt (symbol_ref ("which_alternative"))
8089 (const_int 2))
8090 (if_then_else
8091 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
8092 (le (minus (match_dup 5) (pc)) (const_int 256)))
8093 (const_int 4)
8094 (if_then_else
8095 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
8096 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8097 (const_int 6)
8098 (const_int 8)))
8099 (if_then_else
8100 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
8101 (le (minus (match_dup 5) (pc)) (const_int 256)))
8102 (const_int 6)
8103 (if_then_else
8104 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
8105 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8106 (const_int 8)
8107 (const_int 10)))))]
8108 )
8109
8110 (define_insn "*addsi3_cbranch_scratch"
8111 [(set (pc)
8112 (if_then_else
8113 (match_operator 3 "arm_comparison_operator"
8114 [(plus:SI
8115 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
8116 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
8117 (const_int 0)])
8118 (label_ref (match_operand 4 "" ""))
8119 (pc)))
8120 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
8121 "TARGET_THUMB1
8122 && (GET_CODE (operands[3]) == EQ
8123 || GET_CODE (operands[3]) == NE
8124 || GET_CODE (operands[3]) == GE
8125 || GET_CODE (operands[3]) == LT)"
8126 "*
8127 {
8128 switch (which_alternative)
8129 {
8130 case 0:
8131 output_asm_insn (\"cmp\t%1, #%n2\", operands);
8132 break;
8133 case 1:
8134 output_asm_insn (\"cmn\t%1, %2\", operands);
8135 break;
8136 case 2:
8137 if (INTVAL (operands[2]) < 0)
8138 output_asm_insn (\"sub\t%0, %1, %2\", operands);
8139 else
8140 output_asm_insn (\"add\t%0, %1, %2\", operands);
8141 break;
8142 case 3:
8143 if (INTVAL (operands[2]) < 0)
8144 output_asm_insn (\"sub\t%0, %0, %2\", operands);
8145 else
8146 output_asm_insn (\"add\t%0, %0, %2\", operands);
8147 break;
8148 }
8149
8150 switch (get_attr_length (insn))
8151 {
8152 case 4:
8153 return \"b%d3\\t%l4\";
8154 case 6:
8155 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
8156 default:
8157 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8158 }
8159 }
8160 "
8161 [(set (attr "far_jump")
8162 (if_then_else
8163 (eq_attr "length" "8")
8164 (const_string "yes")
8165 (const_string "no")))
8166 (set (attr "length")
8167 (if_then_else
8168 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8169 (le (minus (match_dup 4) (pc)) (const_int 256)))
8170 (const_int 4)
8171 (if_then_else
8172 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8173 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8174 (const_int 6)
8175 (const_int 8))))]
8176 )
8177
8178
8179 ;; Comparison and test insns
8180
8181 (define_insn "*arm_cmpsi_insn"
8182 [(set (reg:CC CC_REGNUM)
8183 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
8184 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
8185 "TARGET_32BIT"
8186 "@
8187 cmp%?\\t%0, %1
8188 cmp%?\\t%0, %1
8189 cmp%?\\t%0, %1
8190 cmn%?\\t%0, #%n1"
8191 [(set_attr "conds" "set")
8192 (set_attr "arch" "t2,t2,any,any")
8193 (set_attr "length" "2,2,4,4")
8194 (set_attr "predicable" "yes")
8195 (set_attr "type" "*,*,*,simple_alu_imm")]
8196 )
8197
8198 (define_insn "*cmpsi_shiftsi"
8199 [(set (reg:CC CC_REGNUM)
8200 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
8201 (match_operator:SI 3 "shift_operator"
8202 [(match_operand:SI 1 "s_register_operand" "r,r")
8203 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
8204 "TARGET_32BIT"
8205 "cmp%?\\t%0, %1%S3"
8206 [(set_attr "conds" "set")
8207 (set_attr "shift" "1")
8208 (set_attr "arch" "32,a")
8209 (set_attr "type" "alu_shift,alu_shift_reg")])
8210
8211 (define_insn "*cmpsi_shiftsi_swp"
8212 [(set (reg:CC_SWP CC_REGNUM)
8213 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
8214 [(match_operand:SI 1 "s_register_operand" "r,r")
8215 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
8216 (match_operand:SI 0 "s_register_operand" "r,r")))]
8217 "TARGET_32BIT"
8218 "cmp%?\\t%0, %1%S3"
8219 [(set_attr "conds" "set")
8220 (set_attr "shift" "1")
8221 (set_attr "arch" "32,a")
8222 (set_attr "type" "alu_shift,alu_shift_reg")])
8223
8224 (define_insn "*arm_cmpsi_negshiftsi_si"
8225 [(set (reg:CC_Z CC_REGNUM)
8226 (compare:CC_Z
8227 (neg:SI (match_operator:SI 1 "shift_operator"
8228 [(match_operand:SI 2 "s_register_operand" "r")
8229 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
8230 (match_operand:SI 0 "s_register_operand" "r")))]
8231 "TARGET_ARM"
8232 "cmn%?\\t%0, %2%S1"
8233 [(set_attr "conds" "set")
8234 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
8235 (const_string "alu_shift")
8236 (const_string "alu_shift_reg")))
8237 (set_attr "predicable" "yes")]
8238 )
8239
8240 ;; DImode comparisons. The generic code generates branches that
8241 ;; if-conversion cannot reduce to a conditional compare, so we do
8242 ;; that directly.
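;; After reload the signed/overflow form (*arm_cmpdi_insn) splits into a cmp
;; of the low words followed by an sbcs of the high words into a scratch,
;; while the unsigned form (*arm_cmpdi_unsigned) compares the high words
;; first and only re-compares the low words when they are equal, as shown in
;; the commented-out templates below.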
8243
8244 (define_insn_and_split "*arm_cmpdi_insn"
8245 [(set (reg:CC_NCV CC_REGNUM)
8246 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
8247 (match_operand:DI 1 "arm_di_operand" "rDi")))
8248 (clobber (match_scratch:SI 2 "=r"))]
8249 "TARGET_32BIT"
8250 "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
8251 "&& reload_completed"
8252 [(set (reg:CC CC_REGNUM)
8253 (compare:CC (match_dup 0) (match_dup 1)))
8254 (parallel [(set (reg:CC CC_REGNUM)
8255 (compare:CC (match_dup 3) (match_dup 4)))
8256 (set (match_dup 2)
8257 (minus:SI (match_dup 5)
8258 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
8259 {
8260 operands[3] = gen_highpart (SImode, operands[0]);
8261 operands[0] = gen_lowpart (SImode, operands[0]);
8262 if (CONST_INT_P (operands[1]))
8263 {
8264 operands[4] = GEN_INT (~INTVAL (gen_highpart_mode (SImode,
8265 DImode,
8266 operands[1])));
8267 operands[5] = gen_rtx_PLUS (SImode, operands[3], operands[4]);
8268 }
8269 else
8270 {
8271 operands[4] = gen_highpart (SImode, operands[1]);
8272 operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
8273 }
8274 operands[1] = gen_lowpart (SImode, operands[1]);
8275 operands[2] = gen_lowpart (SImode, operands[2]);
8276 }
8277 [(set_attr "conds" "set")
8278 (set_attr "length" "8")]
8279 )
8280
8281 (define_insn_and_split "*arm_cmpdi_unsigned"
8282 [(set (reg:CC_CZ CC_REGNUM)
8283 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
8284 (match_operand:DI 1 "arm_di_operand" "rDi")))]
8285 "TARGET_32BIT"
8286 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
8287 "&& reload_completed"
8288 [(set (reg:CC CC_REGNUM)
8289 (compare:CC (match_dup 2) (match_dup 3)))
8290 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
8291 (set (reg:CC CC_REGNUM)
8292 (compare:CC (match_dup 0) (match_dup 1))))]
8293 {
8294 operands[2] = gen_highpart (SImode, operands[0]);
8295 operands[0] = gen_lowpart (SImode, operands[0]);
8296 if (CONST_INT_P (operands[1]))
8297 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
8298 else
8299 operands[3] = gen_highpart (SImode, operands[1]);
8300 operands[1] = gen_lowpart (SImode, operands[1]);
8301 }
8302 [(set_attr "conds" "set")
8303 (set_attr "length" "8")]
8304 )
8305
8306 (define_insn "*arm_cmpdi_zero"
8307 [(set (reg:CC_Z CC_REGNUM)
8308 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
8309 (const_int 0)))
8310 (clobber (match_scratch:SI 1 "=r"))]
8311 "TARGET_32BIT"
8312 "orr%.\\t%1, %Q0, %R0"
8313 [(set_attr "conds" "set")]
8314 )
8315
8316 (define_insn "*thumb_cmpdi_zero"
8317 [(set (reg:CC_Z CC_REGNUM)
8318 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
8319 (const_int 0)))
8320 (clobber (match_scratch:SI 1 "=l"))]
8321 "TARGET_THUMB1"
8322 "orr\\t%1, %Q0, %R0"
8323 [(set_attr "conds" "set")
8324 (set_attr "length" "2")]
8325 )
8326
8327 ; This insn allows redundant compares to be removed by cse; nothing should
8328 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
8329 ; is deleted later on. The match_dup will match the mode here, so that
8330 ; mode changes of the condition codes aren't lost by this even though we don't
8331 ; specify what they are.
8332
8333 (define_insn "*deleted_compare"
8334 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
8335 "TARGET_32BIT"
8336 "\\t%@ deleted compare"
8337 [(set_attr "conds" "set")
8338 (set_attr "length" "0")]
8339 )
8340
8341 \f
8342 ;; Conditional branch insns
8343
8344 (define_expand "cbranch_cc"
8345 [(set (pc)
8346 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
8347 (match_operand 2 "" "")])
8348 (label_ref (match_operand 3 "" ""))
8349 (pc)))]
8350 "TARGET_32BIT"
8351 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
8352 operands[1], operands[2], NULL_RTX);
8353 operands[2] = const0_rtx;"
8354 )
8355
8356 ;;
8357 ;; Patterns to match conditional branch insns.
8358 ;;
8359
8360 (define_insn "arm_cond_branch"
8361 [(set (pc)
8362 (if_then_else (match_operator 1 "arm_comparison_operator"
8363 [(match_operand 2 "cc_register" "") (const_int 0)])
8364 (label_ref (match_operand 0 "" ""))
8365 (pc)))]
8366 "TARGET_32BIT"
8367 "*
8368 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8369 {
8370 arm_ccfsm_state += 2;
8371 return \"\";
8372 }
8373 return \"b%d1\\t%l0\";
8374 "
8375 [(set_attr "conds" "use")
8376 (set_attr "type" "branch")
8377 (set (attr "length")
8378 (if_then_else
8379 (and (match_test "TARGET_THUMB2")
8380 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8381 (le (minus (match_dup 0) (pc)) (const_int 256))))
8382 (const_int 2)
8383 (const_int 4)))]
8384 )
8385
8386 (define_insn "*arm_cond_branch_reversed"
8387 [(set (pc)
8388 (if_then_else (match_operator 1 "arm_comparison_operator"
8389 [(match_operand 2 "cc_register" "") (const_int 0)])
8390 (pc)
8391 (label_ref (match_operand 0 "" ""))))]
8392 "TARGET_32BIT"
8393 "*
8394 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8395 {
8396 arm_ccfsm_state += 2;
8397 return \"\";
8398 }
8399 return \"b%D1\\t%l0\";
8400 "
8401 [(set_attr "conds" "use")
8402 (set_attr "type" "branch")
8403 (set (attr "length")
8404 (if_then_else
8405 (and (match_test "TARGET_THUMB2")
8406 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8407 (le (minus (match_dup 0) (pc)) (const_int 256))))
8408 (const_int 2)
8409 (const_int 4)))]
8410 )
8411
8412 \f
8413
8414 ; scc insns
8415
8416 (define_expand "cstore_cc"
8417 [(set (match_operand:SI 0 "s_register_operand" "")
8418 (match_operator:SI 1 "" [(match_operand 2 "" "")
8419 (match_operand 3 "" "")]))]
8420 "TARGET_32BIT"
8421 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8422 operands[2], operands[3], NULL_RTX);
8423 operands[3] = const0_rtx;"
8424 )
8425
8426 (define_insn_and_split "*mov_scc"
8427 [(set (match_operand:SI 0 "s_register_operand" "=r")
8428 (match_operator:SI 1 "arm_comparison_operator"
8429 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8430 "TARGET_ARM"
8431 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8432 "TARGET_ARM"
8433 [(set (match_dup 0)
8434 (if_then_else:SI (match_dup 1)
8435 (const_int 1)
8436 (const_int 0)))]
8437 ""
8438 [(set_attr "conds" "use")
8439 (set_attr "length" "8")]
8440 )
8441
8442 (define_insn_and_split "*mov_negscc"
8443 [(set (match_operand:SI 0 "s_register_operand" "=r")
8444 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8445 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8446 "TARGET_ARM"
8447 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8448 "TARGET_ARM"
8449 [(set (match_dup 0)
8450 (if_then_else:SI (match_dup 1)
8451 (match_dup 3)
8452 (const_int 0)))]
8453 {
8454 operands[3] = GEN_INT (~0);
8455 }
8456 [(set_attr "conds" "use")
8457 (set_attr "length" "8")]
8458 )
8459
8460 (define_insn_and_split "*mov_notscc"
8461 [(set (match_operand:SI 0 "s_register_operand" "=r")
8462 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8463 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8464 "TARGET_ARM"
8465 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8466 "TARGET_ARM"
8467 [(set (match_dup 0)
8468 (if_then_else:SI (match_dup 1)
8469 (match_dup 3)
8470 (match_dup 4)))]
8471 {
8472 operands[3] = GEN_INT (~1);
8473 operands[4] = GEN_INT (~0);
8474 }
8475 [(set_attr "conds" "use")
8476 (set_attr "length" "8")]
8477 )
8478
8479 (define_expand "cstoresi4"
8480 [(set (match_operand:SI 0 "s_register_operand" "")
8481 (match_operator:SI 1 "expandable_comparison_operator"
8482 [(match_operand:SI 2 "s_register_operand" "")
8483 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8484 "TARGET_32BIT || TARGET_THUMB1"
8485 "{
8486 rtx op3, scratch, scratch2;
8487
8488 if (!TARGET_THUMB1)
8489 {
8490 if (!arm_add_operand (operands[3], SImode))
8491 operands[3] = force_reg (SImode, operands[3]);
8492 emit_insn (gen_cstore_cc (operands[0], operands[1],
8493 operands[2], operands[3]));
8494 DONE;
8495 }
8496
8497 if (operands[3] == const0_rtx)
8498 {
8499 switch (GET_CODE (operands[1]))
8500 {
8501 case EQ:
8502 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8503 break;
8504
8505 case NE:
8506 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8507 break;
8508
8509 case LE:
8510 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8511 NULL_RTX, 0, OPTAB_WIDEN);
8512 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8513 NULL_RTX, 0, OPTAB_WIDEN);
8514 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8515 operands[0], 1, OPTAB_WIDEN);
8516 break;
8517
8518 case GE:
8519 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8520 NULL_RTX, 1);
8521 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8522 NULL_RTX, 1, OPTAB_WIDEN);
8523 break;
8524
8525 case GT:
8526 scratch = expand_binop (SImode, ashr_optab, operands[2],
8527 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8528 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8529 NULL_RTX, 0, OPTAB_WIDEN);
8530 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8531 0, OPTAB_WIDEN);
8532 break;
8533
8534 /* LT is handled by generic code. No need for unsigned with 0. */
8535 default:
8536 FAIL;
8537 }
8538 DONE;
8539 }
8540
8541 switch (GET_CODE (operands[1]))
8542 {
8543 case EQ:
8544 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8545 NULL_RTX, 0, OPTAB_WIDEN);
8546 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8547 break;
8548
8549 case NE:
8550 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8551 NULL_RTX, 0, OPTAB_WIDEN);
8552 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8553 break;
8554
8555 case LE:
8556 op3 = force_reg (SImode, operands[3]);
8557
8558 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8559 NULL_RTX, 1, OPTAB_WIDEN);
8560 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8561 NULL_RTX, 0, OPTAB_WIDEN);
8562 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8563 op3, operands[2]));
8564 break;
8565
8566 case GE:
8567 op3 = operands[3];
8568 if (!thumb1_cmp_operand (op3, SImode))
8569 op3 = force_reg (SImode, op3);
8570 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8571 NULL_RTX, 0, OPTAB_WIDEN);
8572 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8573 NULL_RTX, 1, OPTAB_WIDEN);
8574 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8575 operands[2], op3));
8576 break;
8577
8578 case LEU:
8579 op3 = force_reg (SImode, operands[3]);
8580 scratch = force_reg (SImode, const0_rtx);
8581 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8582 op3, operands[2]));
8583 break;
8584
8585 case GEU:
8586 op3 = operands[3];
8587 if (!thumb1_cmp_operand (op3, SImode))
8588 op3 = force_reg (SImode, op3);
8589 scratch = force_reg (SImode, const0_rtx);
8590 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8591 operands[2], op3));
8592 break;
8593
8594 case LTU:
8595 op3 = operands[3];
8596 if (!thumb1_cmp_operand (op3, SImode))
8597 op3 = force_reg (SImode, op3);
8598 scratch = gen_reg_rtx (SImode);
8599 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8600 break;
8601
8602 case GTU:
8603 op3 = force_reg (SImode, operands[3]);
8604 scratch = gen_reg_rtx (SImode);
8605 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8606 break;
8607
8608 /* No good sequences for GT, LT. */
8609 default:
8610 FAIL;
8611 }
8612 DONE;
8613 }")
8614
8615 (define_expand "cstoresf4"
8616 [(set (match_operand:SI 0 "s_register_operand" "")
8617 (match_operator:SI 1 "expandable_comparison_operator"
8618 [(match_operand:SF 2 "s_register_operand" "")
8619 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8620 "TARGET_32BIT && TARGET_HARD_FLOAT"
8621 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8622 operands[2], operands[3])); DONE;"
8623 )
8624
8625 (define_expand "cstoredf4"
8626 [(set (match_operand:SI 0 "s_register_operand" "")
8627 (match_operator:SI 1 "expandable_comparison_operator"
8628 [(match_operand:DF 2 "s_register_operand" "")
8629 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8630 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8631 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8632 operands[2], operands[3])); DONE;"
8633 )
8634
8635 (define_expand "cstoredi4"
8636 [(set (match_operand:SI 0 "s_register_operand" "")
8637 (match_operator:SI 1 "expandable_comparison_operator"
8638 [(match_operand:DI 2 "s_register_operand" "")
8639 (match_operand:DI 3 "cmpdi_operand" "")]))]
8640 "TARGET_32BIT"
8641 "{
8642 if (!arm_validize_comparison (&operands[1],
8643 &operands[2],
8644 &operands[3]))
8645 FAIL;
8646 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8647 operands[3]));
8648 DONE;
8649 }"
8650 )
8651
8652 (define_expand "cstoresi_eq0_thumb1"
8653 [(parallel
8654 [(set (match_operand:SI 0 "s_register_operand" "")
8655 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8656 (const_int 0)))
8657 (clobber (match_dup:SI 2))])]
8658 "TARGET_THUMB1"
8659 "operands[2] = gen_reg_rtx (SImode);"
8660 )
8661
8662 (define_expand "cstoresi_ne0_thumb1"
8663 [(parallel
8664 [(set (match_operand:SI 0 "s_register_operand" "")
8665 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8666 (const_int 0)))
8667 (clobber (match_dup:SI 2))])]
8668 "TARGET_THUMB1"
8669 "operands[2] = gen_reg_rtx (SImode);"
8670 )
8671
8672 (define_insn "*cstoresi_eq0_thumb1_insn"
8673 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8674 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8675 (const_int 0)))
8676 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8677 "TARGET_THUMB1"
8678 "@
8679 neg\\t%0, %1\;adc\\t%0, %0, %1
8680 neg\\t%2, %1\;adc\\t%0, %1, %2"
8681 [(set_attr "length" "4")]
8682 )
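;; The Thumb-1 NEG here sets the flags and leaves the carry set only when
;; %1 is zero, so the following ADC computes (-%1) + %1 + C = C, i.e. 1 for
;; %1 == 0 and 0 otherwise.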
8683
8684 (define_insn "*cstoresi_ne0_thumb1_insn"
8685 [(set (match_operand:SI 0 "s_register_operand" "=l")
8686 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8687 (const_int 0)))
8688 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8689 "TARGET_THUMB1"
8690 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8691 [(set_attr "length" "4")]
8692 )
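;; Here the flag-setting SUB borrows (carry clear) only when %1 is zero, so
;; the SBC computes %1 - (%1 - 1) - !C = C, i.e. 1 for %1 != 0 and 0
;; otherwise.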
8693
8694 ;; Used as part of the expansion of the Thumb-1 LTU and GTU cstore sequences.
8695 (define_insn "cstoresi_nltu_thumb1"
8696 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8697 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8698 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8699 "TARGET_THUMB1"
8700 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8701 [(set_attr "length" "4")]
8702 )
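;; A short worked example: after the CMP the carry is clear exactly when
;; %1 < %2 (unsigned), so "sbc %0, %0, %0" yields %0 - %0 - !C, i.e. -1 when
;; the comparison holds and 0 otherwise, which is the negated LTU result the
;; RTL describes.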
8703
8704 (define_insn_and_split "cstoresi_ltu_thumb1"
8705 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8706 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8707 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8708 "TARGET_THUMB1"
8709 "#"
8710 "TARGET_THUMB1"
8711 [(set (match_dup 3)
8712 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8713 (set (match_dup 0) (neg:SI (match_dup 3)))]
8714 "operands[3] = gen_reg_rtx (SImode);"
8715 [(set_attr "length" "4")]
8716 )
8717
8718 ;; Used as part of the expansion of the Thumb-1 LE/GE/LEU/GEU cstore sequences.
8719 (define_insn "thumb1_addsi3_addgeu"
8720 [(set (match_operand:SI 0 "s_register_operand" "=l")
8721 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8722 (match_operand:SI 2 "s_register_operand" "l"))
8723 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8724 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8725 "TARGET_THUMB1"
8726 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8727 [(set_attr "length" "4")]
8728 )
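;; This computes %0 = %1 + %2 + (%3 >= %4, unsigned) in two instructions:
;; the CMP sets the carry when %3 >= %4 unsigned and the ADC folds it into
;; the sum.  The cstoresi4 expander above feeds it the sign bits of the
;; operands (via the shifts by 31) to reduce signed LE/GE to this form.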
8729
8730 \f
8731 ;; Conditional move insns
8732
8733 (define_expand "movsicc"
8734 [(set (match_operand:SI 0 "s_register_operand" "")
8735 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8736 (match_operand:SI 2 "arm_not_operand" "")
8737 (match_operand:SI 3 "arm_not_operand" "")))]
8738 "TARGET_32BIT"
8739 "
8740 {
8741 enum rtx_code code;
8742 rtx ccreg;
8743
8744 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8745 &XEXP (operands[1], 1)))
8746 FAIL;
8747
8748 code = GET_CODE (operands[1]);
8749 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8750 XEXP (operands[1], 1), NULL_RTX);
8751 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8752 }"
8753 )
8754
8755 (define_expand "movsfcc"
8756 [(set (match_operand:SF 0 "s_register_operand" "")
8757 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8758 (match_operand:SF 2 "s_register_operand" "")
8759 (match_operand:SF 3 "s_register_operand" "")))]
8760 "TARGET_32BIT && TARGET_HARD_FLOAT"
8761 "
8762 {
8763 enum rtx_code code = GET_CODE (operands[1]);
8764 rtx ccreg;
8765
8766 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8767 &XEXP (operands[1], 1)))
8768 FAIL;
8769
8770 code = GET_CODE (operands[1]);
8771 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8772 XEXP (operands[1], 1), NULL_RTX);
8773 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8774 }"
8775 )
8776
8777 (define_expand "movdfcc"
8778 [(set (match_operand:DF 0 "s_register_operand" "")
8779 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8780 (match_operand:DF 2 "s_register_operand" "")
8781 (match_operand:DF 3 "s_register_operand" "")))]
8782 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8783 "
8784 {
8785 enum rtx_code code = GET_CODE (operands[1]);
8786 rtx ccreg;
8787
8788 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8789 &XEXP (operands[1], 1)))
8790 FAIL;
8791 code = GET_CODE (operands[1]);
8792 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8793 XEXP (operands[1], 1), NULL_RTX);
8794 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8795 }"
8796 )
8797
8798 (define_insn "*cmov<mode>"
8799 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8800 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8801 [(match_operand 2 "cc_register" "") (const_int 0)])
8802 (match_operand:SDF 3 "s_register_operand"
8803 "<F_constraint>")
8804 (match_operand:SDF 4 "s_register_operand"
8805 "<F_constraint>")))]
8806 "TARGET_HARD_FLOAT && TARGET_FPU_ARMV8 <vfp_double_cond>"
8807 "*
8808 {
8809 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8810 switch (code)
8811 {
8812 case ARM_GE:
8813 case ARM_GT:
8814 case ARM_EQ:
8815 case ARM_VS:
8816 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8817 case ARM_LT:
8818 case ARM_LE:
8819 case ARM_NE:
8820 case ARM_VC:
8821 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8822 default:
8823 gcc_unreachable ();
8824 }
8825 return \"\";
8826 }"
8827 [(set_attr "conds" "use")
8828 (set_attr "type" "f_sel<vfp_type>")]
8829 )
8830
8831 (define_insn_and_split "*movsicc_insn"
8832 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8833 (if_then_else:SI
8834 (match_operator 3 "arm_comparison_operator"
8835 [(match_operand 4 "cc_register" "") (const_int 0)])
8836 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8837 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8838 "TARGET_ARM"
8839 "@
8840 mov%D3\\t%0, %2
8841 mvn%D3\\t%0, #%B2
8842 mov%d3\\t%0, %1
8843 mvn%d3\\t%0, #%B1
8844 #
8845 #
8846 #
8847 #"
8848 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8849 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8850 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8851 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8852 "&& reload_completed"
8853 [(const_int 0)]
8854 {
8855 enum rtx_code rev_code;
8856 enum machine_mode mode;
8857 rtx rev_cond;
8858
8859 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8860 operands[3],
8861 gen_rtx_SET (VOIDmode,
8862 operands[0],
8863 operands[1])));
8864
8865 rev_code = GET_CODE (operands[3]);
8866 mode = GET_MODE (operands[4]);
8867 if (mode == CCFPmode || mode == CCFPEmode)
8868 rev_code = reverse_condition_maybe_unordered (rev_code);
8869 else
8870 rev_code = reverse_condition (rev_code);
8871
8872 rev_cond = gen_rtx_fmt_ee (rev_code,
8873 VOIDmode,
8874 operands[4],
8875 const0_rtx);
8876 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8877 rev_cond,
8878 gen_rtx_SET (VOIDmode,
8879 operands[0],
8880 operands[2])));
8881 DONE;
8882 }
8883 [(set_attr "length" "4,4,4,4,8,8,8,8")
8884 (set_attr "conds" "use")
8885 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")
8886 (set_attr_alternative "type"
8887 [(if_then_else (match_operand 2 "const_int_operand" "")
8888 (const_string "simple_alu_imm")
8889 (const_string "*"))
8890 (const_string "simple_alu_imm")
8891 (if_then_else (match_operand 1 "const_int_operand" "")
8892 (const_string "simple_alu_imm")
8893 (const_string "*"))
8894 (const_string "simple_alu_imm")
8895 (const_string "*")
8896 (const_string "*")
8897 (const_string "*")
8898 (const_string "*")])]
8899 )
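;; After reload the two-instruction alternatives above split into a pair of
;; cond_exec moves, one guarded by the original condition and one by its
;; reverse (using reverse_condition_maybe_unordered for the FP CC modes),
;; e.g. (illustrative) "moveq r0, r1" followed by "movne r0, r2".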
8900
8901 (define_insn "*movsfcc_soft_insn"
8902 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8903 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8904 [(match_operand 4 "cc_register" "") (const_int 0)])
8905 (match_operand:SF 1 "s_register_operand" "0,r")
8906 (match_operand:SF 2 "s_register_operand" "r,0")))]
8907 "TARGET_ARM && TARGET_SOFT_FLOAT"
8908 "@
8909 mov%D3\\t%0, %2
8910 mov%d3\\t%0, %1"
8911 [(set_attr "conds" "use")
8912 (set_attr "insn" "mov")]
8913 )
8914
8915 \f
8916 ;; Jump and linkage insns
8917
8918 (define_expand "jump"
8919 [(set (pc)
8920 (label_ref (match_operand 0 "" "")))]
8921 "TARGET_EITHER"
8922 ""
8923 )
8924
8925 (define_insn "*arm_jump"
8926 [(set (pc)
8927 (label_ref (match_operand 0 "" "")))]
8928 "TARGET_32BIT"
8929 "*
8930 {
8931 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8932 {
8933 arm_ccfsm_state += 2;
8934 return \"\";
8935 }
8936 return \"b%?\\t%l0\";
8937 }
8938 "
8939 [(set_attr "predicable" "yes")
8940 (set (attr "length")
8941 (if_then_else
8942 (and (match_test "TARGET_THUMB2")
8943 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8944 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8945 (const_int 2)
8946 (const_int 4)))]
8947 )
8948
8949 (define_insn "*thumb_jump"
8950 [(set (pc)
8951 (label_ref (match_operand 0 "" "")))]
8952 "TARGET_THUMB1"
8953 "*
8954 if (get_attr_length (insn) == 2)
8955 return \"b\\t%l0\";
8956 return \"bl\\t%l0\\t%@ far jump\";
8957 "
8958 [(set (attr "far_jump")
8959 (if_then_else
8960 (eq_attr "length" "4")
8961 (const_string "yes")
8962 (const_string "no")))
8963 (set (attr "length")
8964 (if_then_else
8965 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8966 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8967 (const_int 2)
8968 (const_int 4)))]
8969 )
8970
8971 (define_expand "call"
8972 [(parallel [(call (match_operand 0 "memory_operand" "")
8973 (match_operand 1 "general_operand" ""))
8974 (use (match_operand 2 "" ""))
8975 (clobber (reg:SI LR_REGNUM))])]
8976 "TARGET_EITHER"
8977 "
8978 {
8979 rtx callee, pat;
8980
8981 /* In an untyped call, we can get NULL for operand 2. */
8982 if (operands[2] == NULL_RTX)
8983 operands[2] = const0_rtx;
8984
8985 /* Decide if we should generate indirect calls by loading the
8986 32-bit address of the callee into a register before performing the
8987 branch and link. */
8988 callee = XEXP (operands[0], 0);
8989 if (GET_CODE (callee) == SYMBOL_REF
8990 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8991 : !REG_P (callee))
8992 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8993
8994 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8995 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8996 DONE;
8997 }"
8998 )
8999
9000 (define_expand "call_internal"
9001 [(parallel [(call (match_operand 0 "memory_operand" "")
9002 (match_operand 1 "general_operand" ""))
9003 (use (match_operand 2 "" ""))
9004 (clobber (reg:SI LR_REGNUM))])])
9005
9006 (define_insn "*call_reg_armv5"
9007 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9008 (match_operand 1 "" ""))
9009 (use (match_operand 2 "" ""))
9010 (clobber (reg:SI LR_REGNUM))]
9011 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9012 "blx%?\\t%0"
9013 [(set_attr "type" "call")]
9014 )
9015
9016 (define_insn "*call_reg_arm"
9017 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9018 (match_operand 1 "" ""))
9019 (use (match_operand 2 "" ""))
9020 (clobber (reg:SI LR_REGNUM))]
9021 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9022 "*
9023 return output_call (operands);
9024 "
9025 ;; The length is the worst case; normally only two instructions are needed.
9026 [(set_attr "length" "12")
9027 (set_attr "type" "call")]
9028 )
9029
9030
9031 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
9032 ;; considered a function call by the branch predictor of some cores (PR40887).
9033 ;; Falls back to blx rN (*call_reg_armv5).
9034
9035 (define_insn "*call_mem"
9036 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
9037 (match_operand 1 "" ""))
9038 (use (match_operand 2 "" ""))
9039 (clobber (reg:SI LR_REGNUM))]
9040 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9041 "*
9042 return output_call_mem (operands);
9043 "
9044 [(set_attr "length" "12")
9045 (set_attr "type" "call")]
9046 )
9047
9048 (define_insn "*call_reg_thumb1_v5"
9049 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9050 (match_operand 1 "" ""))
9051 (use (match_operand 2 "" ""))
9052 (clobber (reg:SI LR_REGNUM))]
9053 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
9054 "blx\\t%0"
9055 [(set_attr "length" "2")
9056 (set_attr "type" "call")]
9057 )
9058
9059 (define_insn "*call_reg_thumb1"
9060 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9061 (match_operand 1 "" ""))
9062 (use (match_operand 2 "" ""))
9063 (clobber (reg:SI LR_REGNUM))]
9064 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
9065 "*
9066 {
9067 if (!TARGET_CALLER_INTERWORKING)
9068 return thumb_call_via_reg (operands[0]);
9069 else if (operands[1] == const0_rtx)
9070 return \"bl\\t%__interwork_call_via_%0\";
9071 else if (frame_pointer_needed)
9072 return \"bl\\t%__interwork_r7_call_via_%0\";
9073 else
9074 return \"bl\\t%__interwork_r11_call_via_%0\";
9075 }"
9076 [(set_attr "type" "call")]
9077 )
9078
9079 (define_expand "call_value"
9080 [(parallel [(set (match_operand 0 "" "")
9081 (call (match_operand 1 "memory_operand" "")
9082 (match_operand 2 "general_operand" "")))
9083 (use (match_operand 3 "" ""))
9084 (clobber (reg:SI LR_REGNUM))])]
9085 "TARGET_EITHER"
9086 "
9087 {
9088 rtx pat, callee;
9089
9090 /* In an untyped call, we can get NULL for operand 2. */
9091 if (operands[3] == 0)
9092 operands[3] = const0_rtx;
9093
9094 /* Decide if we should generate indirect calls by loading the
9095 32-bit address of the callee into a register before performing the
9096 branch and link. */
9097 callee = XEXP (operands[1], 0);
9098 if (GET_CODE (callee) == SYMBOL_REF
9099 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9100 : !REG_P (callee))
9101 XEXP (operands[1], 0) = force_reg (Pmode, callee);
9102
9103 pat = gen_call_value_internal (operands[0], operands[1],
9104 operands[2], operands[3]);
9105 arm_emit_call_insn (pat, XEXP (operands[1], 0));
9106 DONE;
9107 }"
9108 )
9109
9110 (define_expand "call_value_internal"
9111 [(parallel [(set (match_operand 0 "" "")
9112 (call (match_operand 1 "memory_operand" "")
9113 (match_operand 2 "general_operand" "")))
9114 (use (match_operand 3 "" ""))
9115 (clobber (reg:SI LR_REGNUM))])])
9116
9117 (define_insn "*call_value_reg_armv5"
9118 [(set (match_operand 0 "" "")
9119 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9120 (match_operand 2 "" "")))
9121 (use (match_operand 3 "" ""))
9122 (clobber (reg:SI LR_REGNUM))]
9123 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9124 "blx%?\\t%1"
9125 [(set_attr "type" "call")]
9126 )
9127
9128 (define_insn "*call_value_reg_arm"
9129 [(set (match_operand 0 "" "")
9130 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9131 (match_operand 2 "" "")))
9132 (use (match_operand 3 "" ""))
9133 (clobber (reg:SI LR_REGNUM))]
9134 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9135 "*
9136 return output_call (&operands[1]);
9137 "
9138 [(set_attr "length" "12")
9139 (set_attr "type" "call")]
9140 )
9141
9142 ;; Note: see *call_mem
9143
9144 (define_insn "*call_value_mem"
9145 [(set (match_operand 0 "" "")
9146 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
9147 (match_operand 2 "" "")))
9148 (use (match_operand 3 "" ""))
9149 (clobber (reg:SI LR_REGNUM))]
9150 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))
9151 && !SIBLING_CALL_P (insn)"
9152 "*
9153 return output_call_mem (&operands[1]);
9154 "
9155 [(set_attr "length" "12")
9156 (set_attr "type" "call")]
9157 )
9158
9159 (define_insn "*call_value_reg_thumb1_v5"
9160 [(set (match_operand 0 "" "")
9161 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9162 (match_operand 2 "" "")))
9163 (use (match_operand 3 "" ""))
9164 (clobber (reg:SI LR_REGNUM))]
9165 "TARGET_THUMB1 && arm_arch5"
9166 "blx\\t%1"
9167 [(set_attr "length" "2")
9168 (set_attr "type" "call")]
9169 )
9170
9171 (define_insn "*call_value_reg_thumb1"
9172 [(set (match_operand 0 "" "")
9173 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9174 (match_operand 2 "" "")))
9175 (use (match_operand 3 "" ""))
9176 (clobber (reg:SI LR_REGNUM))]
9177 "TARGET_THUMB1 && !arm_arch5"
9178 "*
9179 {
9180 if (!TARGET_CALLER_INTERWORKING)
9181 return thumb_call_via_reg (operands[1]);
9182 else if (operands[2] == const0_rtx)
9183 return \"bl\\t%__interwork_call_via_%1\";
9184 else if (frame_pointer_needed)
9185 return \"bl\\t%__interwork_r7_call_via_%1\";
9186 else
9187 return \"bl\\t%__interwork_r11_call_via_%1\";
9188 }"
9189 [(set_attr "type" "call")]
9190 )
9191
9192 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
9193 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
9194
9195 (define_insn "*call_symbol"
9196 [(call (mem:SI (match_operand:SI 0 "" ""))
9197 (match_operand 1 "" ""))
9198 (use (match_operand 2 "" ""))
9199 (clobber (reg:SI LR_REGNUM))]
9200 "TARGET_32BIT
9201 && !SIBLING_CALL_P (insn)
9202 && (GET_CODE (operands[0]) == SYMBOL_REF)
9203 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9204 "*
9205 {
9206 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
9207 }"
9208 [(set_attr "type" "call")]
9209 )
9210
9211 (define_insn "*call_value_symbol"
9212 [(set (match_operand 0 "" "")
9213 (call (mem:SI (match_operand:SI 1 "" ""))
9214 (match_operand:SI 2 "" "")))
9215 (use (match_operand 3 "" ""))
9216 (clobber (reg:SI LR_REGNUM))]
9217 "TARGET_32BIT
9218 && !SIBLING_CALL_P (insn)
9219 && (GET_CODE (operands[1]) == SYMBOL_REF)
9220 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9221 "*
9222 {
9223 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
9224 }"
9225 [(set_attr "type" "call")]
9226 )
9227
9228 (define_insn "*call_insn"
9229 [(call (mem:SI (match_operand:SI 0 "" ""))
9230 (match_operand:SI 1 "" ""))
9231 (use (match_operand 2 "" ""))
9232 (clobber (reg:SI LR_REGNUM))]
9233 "TARGET_THUMB1
9234 && GET_CODE (operands[0]) == SYMBOL_REF
9235 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9236 "bl\\t%a0"
9237 [(set_attr "length" "4")
9238 (set_attr "type" "call")]
9239 )
9240
9241 (define_insn "*call_value_insn"
9242 [(set (match_operand 0 "" "")
9243 (call (mem:SI (match_operand 1 "" ""))
9244 (match_operand 2 "" "")))
9245 (use (match_operand 3 "" ""))
9246 (clobber (reg:SI LR_REGNUM))]
9247 "TARGET_THUMB1
9248 && GET_CODE (operands[1]) == SYMBOL_REF
9249 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9250 "bl\\t%a1"
9251 [(set_attr "length" "4")
9252 (set_attr "type" "call")]
9253 )
9254
9255 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
9256 (define_expand "sibcall"
9257 [(parallel [(call (match_operand 0 "memory_operand" "")
9258 (match_operand 1 "general_operand" ""))
9259 (return)
9260 (use (match_operand 2 "" ""))])]
9261 "TARGET_32BIT"
9262 "
9263 {
9264 if (!REG_P (XEXP (operands[0], 0))
9265 && (GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF))
9266 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
9267
9268 if (operands[2] == NULL_RTX)
9269 operands[2] = const0_rtx;
9270 }"
9271 )
9272
9273 (define_expand "sibcall_value"
9274 [(parallel [(set (match_operand 0 "" "")
9275 (call (match_operand 1 "memory_operand" "")
9276 (match_operand 2 "general_operand" "")))
9277 (return)
9278 (use (match_operand 3 "" ""))])]
9279 "TARGET_32BIT"
9280 "
9281 {
9282 if (!REG_P (XEXP (operands[1], 0))
9283 && (GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF))
9284 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
9285
9286 if (operands[3] == NULL_RTX)
9287 operands[3] = const0_rtx;
9288 }"
9289 )
9290
9291 (define_insn "*sibcall_insn"
9292 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs,Ss"))
9293 (match_operand 1 "" ""))
9294 (return)
9295 (use (match_operand 2 "" ""))]
9296 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9297 "*
9298 if (which_alternative == 1)
9299 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
9300 else
9301 {
9302 if (arm_arch5 || arm_arch4t)
9303 return \" bx\\t%0\\t%@ indirect register sibling call\";
9304 else
9305 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
9306 }
9307 "
9308 [(set_attr "type" "call")]
9309 )
9310
9311 (define_insn "*sibcall_value_insn"
9312 [(set (match_operand 0 "s_register_operand" "")
9313 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,Ss"))
9314 (match_operand 2 "" "")))
9315 (return)
9316 (use (match_operand 3 "" ""))]
9317 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9318 "*
9319 if (which_alternative == 1)
9320 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
9321 else
9322 {
9323 if (arm_arch5 || arm_arch4t)
9324 return \"bx\\t%1\";
9325 else
9326 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
9327 }
9328 "
9329 [(set_attr "type" "call")]
9330 )
9331
9332 (define_expand "<return_str>return"
9333 [(returns)]
9334 "(TARGET_ARM || (TARGET_THUMB2
9335 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
9336 && !IS_STACKALIGN (arm_current_func_type ())))
9337 <return_cond_false>"
9338 "
9339 {
9340 if (TARGET_THUMB2)
9341 {
9342 thumb2_expand_return (<return_simple_p>);
9343 DONE;
9344 }
9345 }
9346 "
9347 )
9348
9349 ;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
9350 (define_insn "*arm_return"
9351 [(return)]
9352 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9353 "*
9354 {
9355 if (arm_ccfsm_state == 2)
9356 {
9357 arm_ccfsm_state += 2;
9358 return \"\";
9359 }
9360 return output_return_instruction (const_true_rtx, true, false, false);
9361 }"
9362 [(set_attr "type" "load1")
9363 (set_attr "length" "12")
9364 (set_attr "predicable" "yes")]
9365 )
9366
9367 (define_insn "*cond_<return_str>return"
9368 [(set (pc)
9369 (if_then_else (match_operator 0 "arm_comparison_operator"
9370 [(match_operand 1 "cc_register" "") (const_int 0)])
9371 (returns)
9372 (pc)))]
9373 "TARGET_ARM <return_cond_true>"
9374 "*
9375 {
9376 if (arm_ccfsm_state == 2)
9377 {
9378 arm_ccfsm_state += 2;
9379 return \"\";
9380 }
9381 return output_return_instruction (operands[0], true, false,
9382 <return_simple_p>);
9383 }"
9384 [(set_attr "conds" "use")
9385 (set_attr "length" "12")
9386 (set_attr "type" "load1")]
9387 )
9388
9389 (define_insn "*cond_<return_str>return_inverted"
9390 [(set (pc)
9391 (if_then_else (match_operator 0 "arm_comparison_operator"
9392 [(match_operand 1 "cc_register" "") (const_int 0)])
9393 (pc)
9394 (returns)))]
9395 "TARGET_ARM <return_cond_true>"
9396 "*
9397 {
9398 if (arm_ccfsm_state == 2)
9399 {
9400 arm_ccfsm_state += 2;
9401 return \"\";
9402 }
9403 return output_return_instruction (operands[0], true, true,
9404 <return_simple_p>);
9405 }"
9406 [(set_attr "conds" "use")
9407 (set_attr "length" "12")
9408 (set_attr "type" "load1")]
9409 )
9410
9411 (define_insn "*arm_simple_return"
9412 [(simple_return)]
9413 "TARGET_ARM"
9414 "*
9415 {
9416 if (arm_ccfsm_state == 2)
9417 {
9418 arm_ccfsm_state += 2;
9419 return \"\";
9420 }
9421 return output_return_instruction (const_true_rtx, true, false, true);
9422 }"
9423 [(set_attr "type" "branch")
9424 (set_attr "length" "4")
9425 (set_attr "predicable" "yes")]
9426 )
9427
9428 ;; Generate a sequence of instructions to determine if the processor is
9429 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9430 ;; mask.
9431
9432 (define_expand "return_addr_mask"
9433 [(set (match_dup 1)
9434 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9435 (const_int 0)))
9436 (set (match_operand:SI 0 "s_register_operand" "")
9437 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9438 (const_int -1)
9439 (const_int 67108860)))] ; 0x03fffffc
9440 "TARGET_ARM"
9441 "
9442 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
9443 ")
9444
9445 (define_insn "*check_arch2"
9446 [(set (match_operand:CC_NOOV 0 "cc_register" "")
9447 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9448 (const_int 0)))]
9449 "TARGET_ARM"
9450 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9451 [(set_attr "length" "8")
9452 (set_attr "conds" "set")]
9453 )
9454
9455 ;; Call subroutine returning any type.
9456
9457 (define_expand "untyped_call"
9458 [(parallel [(call (match_operand 0 "" "")
9459 (const_int 0))
9460 (match_operand 1 "" "")
9461 (match_operand 2 "" "")])]
9462 "TARGET_EITHER"
9463 "
9464 {
9465 int i;
9466 rtx par = gen_rtx_PARALLEL (VOIDmode,
9467 rtvec_alloc (XVECLEN (operands[2], 0)));
9468 rtx addr = gen_reg_rtx (Pmode);
9469 rtx mem;
9470 int size = 0;
9471
9472 emit_move_insn (addr, XEXP (operands[1], 0));
9473 mem = change_address (operands[1], BLKmode, addr);
9474
9475 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9476 {
9477 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9478
9479 /* Default code only uses r0 as a return value, but we could
9480 be using anything up to 4 registers. */
9481 if (REGNO (src) == R0_REGNUM)
9482 src = gen_rtx_REG (TImode, R0_REGNUM);
9483
9484 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9485 GEN_INT (size));
9486 size += GET_MODE_SIZE (GET_MODE (src));
9487 }
9488
9489 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
9490 const0_rtx));
9491
9492 size = 0;
9493
9494 for (i = 0; i < XVECLEN (par, 0); i++)
9495 {
9496 HOST_WIDE_INT offset = 0;
9497 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9498
9499 if (size != 0)
9500 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9501
9502 mem = change_address (mem, GET_MODE (reg), NULL);
9503 if (REGNO (reg) == R0_REGNUM)
9504 {
9505 /* On thumb we have to use a write-back instruction. */
9506 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9507 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9508 size = TARGET_ARM ? 16 : 0;
9509 }
9510 else
9511 {
9512 emit_move_insn (mem, reg);
9513 size = GET_MODE_SIZE (GET_MODE (reg));
9514 }
9515 }
9516
9517 /* The optimizer does not know that the call sets the function value
9518 registers we stored in the result block. We avoid problems by
9519 claiming that all hard registers are used and clobbered at this
9520 point. */
9521 emit_insn (gen_blockage ());
9522
9523 DONE;
9524 }"
9525 )
9526
9527 (define_expand "untyped_return"
9528 [(match_operand:BLK 0 "memory_operand" "")
9529 (match_operand 1 "" "")]
9530 "TARGET_EITHER"
9531 "
9532 {
9533 int i;
9534 rtx addr = gen_reg_rtx (Pmode);
9535 rtx mem;
9536 int size = 0;
9537
9538 emit_move_insn (addr, XEXP (operands[0], 0));
9539 mem = change_address (operands[0], BLKmode, addr);
9540
9541 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9542 {
9543 HOST_WIDE_INT offset = 0;
9544 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9545
9546 if (size != 0)
9547 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9548
9549 mem = change_address (mem, GET_MODE (reg), NULL);
9550 if (REGNO (reg) == R0_REGNUM)
9551 {
9552 /* On thumb we have to use a write-back instruction. */
9553 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9554 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9555 size = TARGET_ARM ? 16 : 0;
9556 }
9557 else
9558 {
9559 emit_move_insn (reg, mem);
9560 size = GET_MODE_SIZE (GET_MODE (reg));
9561 }
9562 }
9563
9564 /* Emit USE insns before the return. */
9565 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9566 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9567
9568 /* Construct the return. */
9569 expand_naked_return ();
9570
9571 DONE;
9572 }"
9573 )
9574
9575 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9576 ;; all of memory. This blocks insns from being moved across this point.
9577
9578 (define_insn "blockage"
9579 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9580 "TARGET_EITHER"
9581 ""
9582 [(set_attr "length" "0")
9583 (set_attr "type" "block")]
9584 )
9585
9586 (define_expand "casesi"
9587 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
9588 (match_operand:SI 1 "const_int_operand" "") ; lower bound
9589 (match_operand:SI 2 "const_int_operand" "") ; total range
9590 (match_operand:SI 3 "" "") ; table label
9591 (match_operand:SI 4 "" "")] ; Out of range label
9592 "TARGET_32BIT || optimize_size || flag_pic"
9593 "
9594 {
9595 enum insn_code code;
9596 if (operands[1] != const0_rtx)
9597 {
9598 rtx reg = gen_reg_rtx (SImode);
9599
9600 emit_insn (gen_addsi3 (reg, operands[0],
9601 gen_int_mode (-INTVAL (operands[1]),
9602 SImode)));
9603 operands[0] = reg;
9604 }
9605
9606 if (TARGET_ARM)
9607 code = CODE_FOR_arm_casesi_internal;
9608 else if (TARGET_THUMB1)
9609 code = CODE_FOR_thumb1_casesi_internal_pic;
9610 else if (flag_pic)
9611 code = CODE_FOR_thumb2_casesi_internal_pic;
9612 else
9613 code = CODE_FOR_thumb2_casesi_internal;
9614
9615 if (!insn_data[(int) code].operand[1].predicate (operands[2], SImode))
9616 operands[2] = force_reg (SImode, operands[2]);
9617
9618 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9619 operands[3], operands[4]));
9620 DONE;
9621 }"
9622 )
9623
9624 ;; The USE in this pattern is needed to tell flow analysis that this is
9625 ;; a CASESI insn. It has no other purpose.
9626 (define_insn "arm_casesi_internal"
9627 [(parallel [(set (pc)
9628 (if_then_else
9629 (leu (match_operand:SI 0 "s_register_operand" "r")
9630 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9631 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9632 (label_ref (match_operand 2 "" ""))))
9633 (label_ref (match_operand 3 "" ""))))
9634 (clobber (reg:CC CC_REGNUM))
9635 (use (label_ref (match_dup 2)))])]
9636 "TARGET_ARM"
9637 "*
9638 if (flag_pic)
9639 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9640 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9641 "
9642 [(set_attr "conds" "clob")
9643 (set_attr "length" "12")]
9644 )
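;; Note on the non-PIC template above: LDRLS indexes the jump table placed
;; right after this insn (the PC operand reads as the table base because of
;; the 8-byte ARM pipeline offset), while out-of-range indices skip the load
;; and take the branch to %l3 instead.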
9645
9646 (define_expand "thumb1_casesi_internal_pic"
9647 [(match_operand:SI 0 "s_register_operand" "")
9648 (match_operand:SI 1 "thumb1_cmp_operand" "")
9649 (match_operand 2 "" "")
9650 (match_operand 3 "" "")]
9651 "TARGET_THUMB1"
9652 {
9653 rtx reg0;
9654 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9655 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9656 operands[3]));
9657 reg0 = gen_rtx_REG (SImode, 0);
9658 emit_move_insn (reg0, operands[0]);
9659 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9660 DONE;
9661 }
9662 )
9663
9664 (define_insn "thumb1_casesi_dispatch"
9665 [(parallel [(set (pc) (unspec [(reg:SI 0)
9666 (label_ref (match_operand 0 "" ""))
9667 ;; (label_ref (match_operand 1 "" ""))
9668 ]
9669 UNSPEC_THUMB1_CASESI))
9670 (clobber (reg:SI IP_REGNUM))
9671 (clobber (reg:SI LR_REGNUM))])]
9672 "TARGET_THUMB1"
9673 "* return thumb1_output_casesi(operands);"
9674 [(set_attr "length" "4")]
9675 )
9676
9677 (define_expand "indirect_jump"
9678 [(set (pc)
9679 (match_operand:SI 0 "s_register_operand" ""))]
9680 "TARGET_EITHER"
9681 "
9682 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9683 address and use bx. */
9684 if (TARGET_THUMB2)
9685 {
9686 rtx tmp;
9687 tmp = gen_reg_rtx (SImode);
9688 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9689 operands[0] = tmp;
9690 }
9691 "
9692 )
9693
9694 ;; NB Never uses BX.
9695 (define_insn "*arm_indirect_jump"
9696 [(set (pc)
9697 (match_operand:SI 0 "s_register_operand" "r"))]
9698 "TARGET_ARM"
9699 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9700 [(set_attr "predicable" "yes")]
9701 )
9702
9703 (define_insn "*load_indirect_jump"
9704 [(set (pc)
9705 (match_operand:SI 0 "memory_operand" "m"))]
9706 "TARGET_ARM"
9707 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9708 [(set_attr "type" "load1")
9709 (set_attr "pool_range" "4096")
9710 (set_attr "neg_pool_range" "4084")
9711 (set_attr "predicable" "yes")]
9712 )
9713
9714 ;; NB Never uses BX.
9715 (define_insn "*thumb1_indirect_jump"
9716 [(set (pc)
9717 (match_operand:SI 0 "register_operand" "l*r"))]
9718 "TARGET_THUMB1"
9719 "mov\\tpc, %0"
9720 [(set_attr "conds" "clob")
9721 (set_attr "length" "2")]
9722 )
9723
9724 \f
9725 ;; Misc insns
9726
9727 (define_insn "nop"
9728 [(const_int 0)]
9729 "TARGET_EITHER"
9730 "*
9731 if (TARGET_UNIFIED_ASM)
9732 return \"nop\";
9733 if (TARGET_ARM)
9734 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9735 return \"mov\\tr8, r8\";
9736 "
9737 [(set (attr "length")
9738 (if_then_else (eq_attr "is_thumb" "yes")
9739 (const_int 2)
9740 (const_int 4)))]
9741 )
9742
9743 \f
9744 ;; Patterns to allow combination of arithmetic, cond code and shifts
9745
9746 (define_insn "*arith_shiftsi"
9747 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9748 (match_operator:SI 1 "shiftable_operator"
9749 [(match_operator:SI 3 "shift_operator"
9750 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
9751 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
9752 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
9753 "TARGET_32BIT"
9754 "%i1%?\\t%0, %2, %4%S3"
9755 [(set_attr "predicable" "yes")
9756 (set_attr "shift" "4")
9757 (set_attr "arch" "a,t2,t2,a")
9758 ;; Thumb-2 does not allow the stack pointer to be used as operand 1
9759 ;; for any operation other than add and sub.  In that case the minus
9760 ;; operation is a candidate for an rsub, so the alternative has to be
9761 ;; disabled.
9762 ;; We have to make sure to disable the fourth alternative if
9763 ;; the shift_operator is MULT, since otherwise the insn will
9764 ;; also match a multiply_accumulate pattern and validate_change
9765 ;; will allow a replacement of the constant with a register
9766 ;; despite the checks done in shift_operator.
9767 (set_attr_alternative "insn_enabled"
9768 [(const_string "yes")
9769 (if_then_else
9770 (match_operand:SI 1 "add_operator" "")
9771 (const_string "yes") (const_string "no"))
9772 (const_string "yes")
9773 (if_then_else
9774 (match_operand:SI 3 "mult_operator" "")
9775 (const_string "no") (const_string "yes"))])
9776 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
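;; For example (illustrative), this pattern lets "r0 = r1 + (r2 << 3)" be
;; emitted as the single instruction "add r0, r1, r2, lsl #3"; the %S3
;; template modifier prints the shift operator and amount.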
9777
9778 (define_split
9779 [(set (match_operand:SI 0 "s_register_operand" "")
9780 (match_operator:SI 1 "shiftable_operator"
9781 [(match_operator:SI 2 "shiftable_operator"
9782 [(match_operator:SI 3 "shift_operator"
9783 [(match_operand:SI 4 "s_register_operand" "")
9784 (match_operand:SI 5 "reg_or_int_operand" "")])
9785 (match_operand:SI 6 "s_register_operand" "")])
9786 (match_operand:SI 7 "arm_rhs_operand" "")]))
9787 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9788 "TARGET_32BIT"
9789 [(set (match_dup 8)
9790 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9791 (match_dup 6)]))
9792 (set (match_dup 0)
9793 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9794 "")
9795
9796 (define_insn "*arith_shiftsi_compare0"
9797 [(set (reg:CC_NOOV CC_REGNUM)
9798 (compare:CC_NOOV
9799 (match_operator:SI 1 "shiftable_operator"
9800 [(match_operator:SI 3 "shift_operator"
9801 [(match_operand:SI 4 "s_register_operand" "r,r")
9802 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9803 (match_operand:SI 2 "s_register_operand" "r,r")])
9804 (const_int 0)))
9805 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9806 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9807 (match_dup 2)]))]
9808 "TARGET_32BIT"
9809 "%i1%.\\t%0, %2, %4%S3"
9810 [(set_attr "conds" "set")
9811 (set_attr "shift" "4")
9812 (set_attr "arch" "32,a")
9813 (set_attr "type" "alu_shift,alu_shift_reg")])
9814
9815 (define_insn "*arith_shiftsi_compare0_scratch"
9816 [(set (reg:CC_NOOV CC_REGNUM)
9817 (compare:CC_NOOV
9818 (match_operator:SI 1 "shiftable_operator"
9819 [(match_operator:SI 3 "shift_operator"
9820 [(match_operand:SI 4 "s_register_operand" "r,r")
9821 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9822 (match_operand:SI 2 "s_register_operand" "r,r")])
9823 (const_int 0)))
9824 (clobber (match_scratch:SI 0 "=r,r"))]
9825 "TARGET_32BIT"
9826 "%i1%.\\t%0, %2, %4%S3"
9827 [(set_attr "conds" "set")
9828 (set_attr "shift" "4")
9829 (set_attr "arch" "32,a")
9830 (set_attr "type" "alu_shift,alu_shift_reg")])
9831
9832 (define_insn "*sub_shiftsi"
9833 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9834 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9835 (match_operator:SI 2 "shift_operator"
9836 [(match_operand:SI 3 "s_register_operand" "r,r")
9837 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9838 "TARGET_32BIT"
9839 "sub%?\\t%0, %1, %3%S2"
9840 [(set_attr "predicable" "yes")
9841 (set_attr "shift" "3")
9842 (set_attr "arch" "32,a")
9843 (set_attr "type" "alu_shift,alu_shift_reg")])
9844
9845 (define_insn "*sub_shiftsi_compare0"
9846 [(set (reg:CC_NOOV CC_REGNUM)
9847 (compare:CC_NOOV
9848 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9849 (match_operator:SI 2 "shift_operator"
9850 [(match_operand:SI 3 "s_register_operand" "r,r")
9851 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9852 (const_int 0)))
9853 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9854 (minus:SI (match_dup 1)
9855 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9856 "TARGET_32BIT"
9857 "sub%.\\t%0, %1, %3%S2"
9858 [(set_attr "conds" "set")
9859 (set_attr "shift" "3")
9860 (set_attr "arch" "32,a")
9861 (set_attr "type" "alu_shift,alu_shift_reg")])
9862
9863 (define_insn "*sub_shiftsi_compare0_scratch"
9864 [(set (reg:CC_NOOV CC_REGNUM)
9865 (compare:CC_NOOV
9866 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9867 (match_operator:SI 2 "shift_operator"
9868 [(match_operand:SI 3 "s_register_operand" "r,r")
9869 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9870 (const_int 0)))
9871 (clobber (match_scratch:SI 0 "=r,r"))]
9872 "TARGET_32BIT"
9873 "sub%.\\t%0, %1, %3%S2"
9874 [(set_attr "conds" "set")
9875 (set_attr "shift" "3")
9876 (set_attr "arch" "32,a")
9877 (set_attr "type" "alu_shift,alu_shift_reg")])
9878 \f
9879
9880 (define_insn_and_split "*and_scc"
9881 [(set (match_operand:SI 0 "s_register_operand" "=r")
9882 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9883 [(match_operand 2 "cc_register" "") (const_int 0)])
9884 (match_operand:SI 3 "s_register_operand" "r")))]
9885 "TARGET_ARM"
9886 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9887 "&& reload_completed"
9888 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9889 (cond_exec (match_dup 4) (set (match_dup 0)
9890 (and:SI (match_dup 3) (const_int 1))))]
9891 {
9892 enum machine_mode mode = GET_MODE (operands[2]);
9893 enum rtx_code rc = GET_CODE (operands[1]);
9894
9895 /* Note that operands[4] is the same as operands[1],
9896 but with VOIDmode as the result. */
9897 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9898 if (mode == CCFPmode || mode == CCFPEmode)
9899 rc = reverse_condition_maybe_unordered (rc);
9900 else
9901 rc = reverse_condition (rc);
9902 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9903 }
9904 [(set_attr "conds" "use")
9905 (set_attr "insn" "mov")
9906 (set_attr "length" "8")]
9907 )
9908
9909 (define_insn_and_split "*ior_scc"
9910 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9911 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9912 [(match_operand 2 "cc_register" "") (const_int 0)])
9913 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9914 "TARGET_ARM"
9915 "@
9916 orr%d1\\t%0, %3, #1
9917 #"
9918 "&& reload_completed
9919 && REGNO (operands [0]) != REGNO (operands[3])"
9920 ;; && which_alternative == 1
9921 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9922 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9923 (cond_exec (match_dup 4) (set (match_dup 0)
9924 (ior:SI (match_dup 3) (const_int 1))))]
9925 {
9926 enum machine_mode mode = GET_MODE (operands[2]);
9927 enum rtx_code rc = GET_CODE (operands[1]);
9928
9929 /* Note that operands[4] is the same as operands[1],
9930 but with VOIDmode as the result. */
9931 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9932 if (mode == CCFPmode || mode == CCFPEmode)
9933 rc = reverse_condition_maybe_unordered (rc);
9934 else
9935 rc = reverse_condition (rc);
9936 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9937 }
9938 [(set_attr "conds" "use")
9939 (set_attr "length" "4,8")]
9940 )
9941
9942 ; A series of splitters for the compare_scc pattern below. Note that
9943 ; order is important.
9944 (define_split
9945 [(set (match_operand:SI 0 "s_register_operand" "")
9946 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9947 (const_int 0)))
9948 (clobber (reg:CC CC_REGNUM))]
9949 "TARGET_32BIT && reload_completed"
9950 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9951
9952 (define_split
9953 [(set (match_operand:SI 0 "s_register_operand" "")
9954 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9955 (const_int 0)))
9956 (clobber (reg:CC CC_REGNUM))]
9957 "TARGET_32BIT && reload_completed"
9958 [(set (match_dup 0) (not:SI (match_dup 1)))
9959 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
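;; These two splitters turn sign-bit tests into shifts once the flags are
;; not needed: "x < 0" becomes roughly "lsr dst, x, #31", and "x >= 0"
;; becomes "mvn dst, x" followed by "lsr dst, dst, #31" (illustrative
;; spellings of the lshiftrt/not RTL above).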
9960
9961 (define_split
9962 [(set (match_operand:SI 0 "s_register_operand" "")
9963 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9964 (const_int 0)))
9965 (clobber (reg:CC CC_REGNUM))]
9966 "TARGET_32BIT && reload_completed"
9967 [(parallel
9968 [(set (reg:CC CC_REGNUM)
9969 (compare:CC (const_int 1) (match_dup 1)))
9970 (set (match_dup 0)
9971 (minus:SI (const_int 1) (match_dup 1)))])
9972 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9973 (set (match_dup 0) (const_int 0)))])
9974
9975 (define_split
9976 [(set (match_operand:SI 0 "s_register_operand" "")
9977 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9978 (match_operand:SI 2 "const_int_operand" "")))
9979 (clobber (reg:CC CC_REGNUM))]
9980 "TARGET_32BIT && reload_completed"
9981 [(parallel
9982 [(set (reg:CC CC_REGNUM)
9983 (compare:CC (match_dup 1) (match_dup 2)))
9984 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9985 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9986 (set (match_dup 0) (const_int 1)))]
9987 {
9988 operands[3] = GEN_INT (-INTVAL (operands[2]));
9989 })
9990
9991 (define_split
9992 [(set (match_operand:SI 0 "s_register_operand" "")
9993 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9994 (match_operand:SI 2 "arm_add_operand" "")))
9995 (clobber (reg:CC CC_REGNUM))]
9996 "TARGET_32BIT && reload_completed"
9997 [(parallel
9998 [(set (reg:CC_NOOV CC_REGNUM)
9999 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
10000 (const_int 0)))
10001 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
10002 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
10003 (set (match_dup 0) (const_int 1)))])
10004
10005 (define_insn_and_split "*compare_scc"
10006 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10007 (match_operator:SI 1 "arm_comparison_operator"
10008 [(match_operand:SI 2 "s_register_operand" "r,r")
10009 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
10010 (clobber (reg:CC CC_REGNUM))]
10011 "TARGET_32BIT"
10012 "#"
10013 "&& reload_completed"
10014 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
10015 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
10016 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
10017 {
10018 rtx tmp1;
10019 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10020 operands[2], operands[3]);
10021 enum rtx_code rc = GET_CODE (operands[1]);
10022
10023 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
10024
10025 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10026 if (mode == CCFPmode || mode == CCFPEmode)
10027 rc = reverse_condition_maybe_unordered (rc);
10028 else
10029 rc = reverse_condition (rc);
10030 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10031 })
10032
10033 ;; Attempt to improve the sequence generated by the compare_scc splitters
10034 ;; so that it does not use conditional execution.
10035 (define_peephole2
10036 [(set (reg:CC CC_REGNUM)
10037 (compare:CC (match_operand:SI 1 "register_operand" "")
10038 (match_operand:SI 2 "arm_rhs_operand" "")))
10039 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10040 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10041 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10042 (set (match_dup 0) (const_int 1)))
10043 (match_scratch:SI 3 "r")]
10044 "TARGET_32BIT"
10045 [(parallel
10046 [(set (reg:CC CC_REGNUM)
10047 (compare:CC (match_dup 1) (match_dup 2)))
10048 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
10049 (parallel
10050 [(set (reg:CC CC_REGNUM)
10051 (compare:CC (const_int 0) (match_dup 3)))
10052 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10053 (parallel
10054 [(set (match_dup 0)
10055 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10056 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
10057 (clobber (reg:CC CC_REGNUM))])])
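;; The replacement sequence above computes the equality result without
;; conditional execution: SUBS gives t = op1 - op2, RSBS gives -t and sets
;; the carry only when t is zero, and the final addition (-t) + t + carry
;; collapses to the carry, i.e. 1 when the operands were equal and 0
;; otherwise.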
10058
10059 (define_insn "*cond_move"
10060 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10061 (if_then_else:SI (match_operator 3 "equality_operator"
10062 [(match_operator 4 "arm_comparison_operator"
10063 [(match_operand 5 "cc_register" "") (const_int 0)])
10064 (const_int 0)])
10065 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10066 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10067 "TARGET_ARM"
10068 "*
10069 if (GET_CODE (operands[3]) == NE)
10070 {
10071 if (which_alternative != 1)
10072 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10073 if (which_alternative != 0)
10074 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10075 return \"\";
10076 }
10077 if (which_alternative != 0)
10078 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10079 if (which_alternative != 1)
10080 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10081 return \"\";
10082 "
10083 [(set_attr "conds" "use")
10084 (set_attr "insn" "mov")
10085 (set_attr "length" "4,4,8")]
10086 )
10087
10088 (define_insn "*cond_arith"
10089 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090 (match_operator:SI 5 "shiftable_operator"
10091 [(match_operator:SI 4 "arm_comparison_operator"
10092 [(match_operand:SI 2 "s_register_operand" "r,r")
10093 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10094 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10095 (clobber (reg:CC CC_REGNUM))]
10096 "TARGET_ARM"
10097 "*
10098 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10099 return \"%i5\\t%0, %1, %2, lsr #31\";
10100
10101 output_asm_insn (\"cmp\\t%2, %3\", operands);
10102 if (GET_CODE (operands[5]) == AND)
10103 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10104 else if (GET_CODE (operands[5]) == MINUS)
10105 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10106 else if (which_alternative != 0)
10107 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10108 return \"%i5%d4\\t%0, %1, #1\";
10109 "
10110 [(set_attr "conds" "clob")
10111 (set_attr "length" "12")]
10112 )
10113
10114 (define_insn "*cond_sub"
10115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10116 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10117 (match_operator:SI 4 "arm_comparison_operator"
10118 [(match_operand:SI 2 "s_register_operand" "r,r")
10119 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10120 (clobber (reg:CC CC_REGNUM))]
10121 "TARGET_ARM"
10122 "*
10123 output_asm_insn (\"cmp\\t%2, %3\", operands);
10124 if (which_alternative != 0)
10125 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10126 return \"sub%d4\\t%0, %1, #1\";
10127 "
10128 [(set_attr "conds" "clob")
10129 (set_attr "length" "8,12")]
10130 )
10131
10132 (define_insn "*cmp_ite0"
10133 [(set (match_operand 6 "dominant_cc_register" "")
10134 (compare
10135 (if_then_else:SI
10136 (match_operator 4 "arm_comparison_operator"
10137 [(match_operand:SI 0 "s_register_operand"
10138 "l,l,l,r,r,r,r,r,r")
10139 (match_operand:SI 1 "arm_add_operand"
10140 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10141 (match_operator:SI 5 "arm_comparison_operator"
10142 [(match_operand:SI 2 "s_register_operand"
10143 "l,r,r,l,l,r,r,r,r")
10144 (match_operand:SI 3 "arm_add_operand"
10145 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10146 (const_int 0))
10147 (const_int 0)))]
10148 "TARGET_32BIT"
10149 "*
10150 {
10151 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10152 {
10153 {\"cmp%d5\\t%0, %1\",
10154 \"cmp%d4\\t%2, %3\"},
10155 {\"cmn%d5\\t%0, #%n1\",
10156 \"cmp%d4\\t%2, %3\"},
10157 {\"cmp%d5\\t%0, %1\",
10158 \"cmn%d4\\t%2, #%n3\"},
10159 {\"cmn%d5\\t%0, #%n1\",
10160 \"cmn%d4\\t%2, #%n3\"}
10161 };
10162 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10163 {
10164 {\"cmp\\t%2, %3\",
10165 \"cmp\\t%0, %1\"},
10166 {\"cmp\\t%2, %3\",
10167 \"cmn\\t%0, #%n1\"},
10168 {\"cmn\\t%2, #%n3\",
10169 \"cmp\\t%0, %1\"},
10170 {\"cmn\\t%2, #%n3\",
10171 \"cmn\\t%0, #%n1\"}
10172 };
10173 static const char * const ite[2] =
10174 {
10175 \"it\\t%d5\",
10176 \"it\\t%d4\"
10177 };
10178 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10179 CMP_CMP, CMN_CMP, CMP_CMP,
10180 CMN_CMP, CMP_CMN, CMN_CMN};
10181 int swap =
10182 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10183
10184 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10185 if (TARGET_THUMB2) {
10186 output_asm_insn (ite[swap], operands);
10187 }
10188 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10189 return \"\";
10190 }"
10191 [(set_attr "conds" "set")
10192 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10193 (set_attr_alternative "length"
10194 [(const_int 6)
10195 (const_int 8)
10196 (const_int 8)
10197 (const_int 8)
10198 (const_int 8)
10199 (if_then_else (eq_attr "is_thumb" "no")
10200 (const_int 8)
10201 (const_int 10))
10202 (if_then_else (eq_attr "is_thumb" "no")
10203 (const_int 8)
10204 (const_int 10))
10205 (if_then_else (eq_attr "is_thumb" "no")
10206 (const_int 8)
10207 (const_int 10))
10208 (if_then_else (eq_attr "is_thumb" "no")
10209 (const_int 8)
10210 (const_int 10))])]
10211 )
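;; Roughly, this pattern lets a combined condition such as
;; "(a op1 b) && (c op2 d)" be evaluated as one unconditional compare
;; followed by a conditional compare (with an IT block on Thumb-2), so a
;; single flag result reaches the conditional branch.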
10212
10213 (define_insn "*cmp_ite1"
10214 [(set (match_operand 6 "dominant_cc_register" "")
10215 (compare
10216 (if_then_else:SI
10217 (match_operator 4 "arm_comparison_operator"
10218 [(match_operand:SI 0 "s_register_operand"
10219 "l,l,l,r,r,r,r,r,r")
10220 (match_operand:SI 1 "arm_add_operand"
10221 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10222 (match_operator:SI 5 "arm_comparison_operator"
10223 [(match_operand:SI 2 "s_register_operand"
10224 "l,r,r,l,l,r,r,r,r")
10225 (match_operand:SI 3 "arm_add_operand"
10226 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10227 (const_int 1))
10228 (const_int 0)))]
10229 "TARGET_32BIT"
10230 "*
10231 {
10232 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10233 {
10234 {\"cmp\\t%0, %1\",
10235 \"cmp\\t%2, %3\"},
10236 {\"cmn\\t%0, #%n1\",
10237 \"cmp\\t%2, %3\"},
10238 {\"cmp\\t%0, %1\",
10239 \"cmn\\t%2, #%n3\"},
10240 {\"cmn\\t%0, #%n1\",
10241 \"cmn\\t%2, #%n3\"}
10242 };
10243 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10244 {
10245 {\"cmp%d4\\t%2, %3\",
10246 \"cmp%D5\\t%0, %1\"},
10247 {\"cmp%d4\\t%2, %3\",
10248 \"cmn%D5\\t%0, #%n1\"},
10249 {\"cmn%d4\\t%2, #%n3\",
10250 \"cmp%D5\\t%0, %1\"},
10251 {\"cmn%d4\\t%2, #%n3\",
10252 \"cmn%D5\\t%0, #%n1\"}
10253 };
10254 static const char * const ite[2] =
10255 {
10256 \"it\\t%d4\",
10257 \"it\\t%D5\"
10258 };
10259 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10260 CMP_CMP, CMN_CMP, CMP_CMP,
10261 CMN_CMP, CMP_CMN, CMN_CMN};
10262 int swap =
10263 comparison_dominates_p (GET_CODE (operands[5]),
10264 reverse_condition (GET_CODE (operands[4])));
10265
10266 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10267 if (TARGET_THUMB2) {
10268 output_asm_insn (ite[swap], operands);
10269 }
10270 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10271 return \"\";
10272 }"
10273 [(set_attr "conds" "set")
10274 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10275 (set_attr_alternative "length"
10276 [(const_int 6)
10277 (const_int 8)
10278 (const_int 8)
10279 (const_int 8)
10280 (const_int 8)
10281 (if_then_else (eq_attr "is_thumb" "no")
10282 (const_int 8)
10283 (const_int 10))
10284 (if_then_else (eq_attr "is_thumb" "no")
10285 (const_int 8)
10286 (const_int 10))
10287 (if_then_else (eq_attr "is_thumb" "no")
10288 (const_int 8)
10289 (const_int 10))
10290 (if_then_else (eq_attr "is_thumb" "no")
10291 (const_int 8)
10292 (const_int 10))])]
10293 )
10294
10295 (define_insn "*cmp_and"
10296 [(set (match_operand 6 "dominant_cc_register" "")
10297 (compare
10298 (and:SI
10299 (match_operator 4 "arm_comparison_operator"
10300 [(match_operand:SI 0 "s_register_operand"
10301 "l,l,l,r,r,r,r,r,r")
10302 (match_operand:SI 1 "arm_add_operand"
10303 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10304 (match_operator:SI 5 "arm_comparison_operator"
10305 [(match_operand:SI 2 "s_register_operand"
10306 "l,r,r,l,l,r,r,r,r")
10307 (match_operand:SI 3 "arm_add_operand"
10308 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10309 (const_int 0)))]
10310 "TARGET_32BIT"
10311 "*
10312 {
10313 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10314 {
10315 {\"cmp%d5\\t%0, %1\",
10316 \"cmp%d4\\t%2, %3\"},
10317 {\"cmn%d5\\t%0, #%n1\",
10318 \"cmp%d4\\t%2, %3\"},
10319 {\"cmp%d5\\t%0, %1\",
10320 \"cmn%d4\\t%2, #%n3\"},
10321 {\"cmn%d5\\t%0, #%n1\",
10322 \"cmn%d4\\t%2, #%n3\"}
10323 };
10324 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10325 {
10326 {\"cmp\\t%2, %3\",
10327 \"cmp\\t%0, %1\"},
10328 {\"cmp\\t%2, %3\",
10329 \"cmn\\t%0, #%n1\"},
10330 {\"cmn\\t%2, #%n3\",
10331 \"cmp\\t%0, %1\"},
10332 {\"cmn\\t%2, #%n3\",
10333 \"cmn\\t%0, #%n1\"}
10334 };
10335 static const char *const ite[2] =
10336 {
10337 \"it\\t%d5\",
10338 \"it\\t%d4\"
10339 };
10340 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10341 CMP_CMP, CMN_CMP, CMP_CMP,
10342 CMN_CMP, CMP_CMN, CMN_CMN};
10343 int swap =
10344 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10345
10346 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10347 if (TARGET_THUMB2) {
10348 output_asm_insn (ite[swap], operands);
10349 }
10350 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10351 return \"\";
10352 }"
10353 [(set_attr "conds" "set")
10354 (set_attr "predicable" "no")
10355 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10356 (set_attr_alternative "length"
10357 [(const_int 6)
10358 (const_int 8)
10359 (const_int 8)
10360 (const_int 8)
10361 (const_int 8)
10362 (if_then_else (eq_attr "is_thumb" "no")
10363 (const_int 8)
10364 (const_int 10))
10365 (if_then_else (eq_attr "is_thumb" "no")
10366 (const_int 8)
10367 (const_int 10))
10368 (if_then_else (eq_attr "is_thumb" "no")
10369 (const_int 8)
10370 (const_int 10))
10371 (if_then_else (eq_attr "is_thumb" "no")
10372 (const_int 8)
10373 (const_int 10))])]
10374 )
10375
10376 (define_insn "*cmp_ior"
10377 [(set (match_operand 6 "dominant_cc_register" "")
10378 (compare
10379 (ior:SI
10380 (match_operator 4 "arm_comparison_operator"
10381 [(match_operand:SI 0 "s_register_operand"
10382 "l,l,l,r,r,r,r,r,r")
10383 (match_operand:SI 1 "arm_add_operand"
10384 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10385 (match_operator:SI 5 "arm_comparison_operator"
10386 [(match_operand:SI 2 "s_register_operand"
10387 "l,r,r,l,l,r,r,r,r")
10388 (match_operand:SI 3 "arm_add_operand"
10389 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10390 (const_int 0)))]
10391 "TARGET_32BIT"
10392 "*
10393 {
10394 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10395 {
10396 {\"cmp\\t%0, %1\",
10397 \"cmp\\t%2, %3\"},
10398 {\"cmn\\t%0, #%n1\",
10399 \"cmp\\t%2, %3\"},
10400 {\"cmp\\t%0, %1\",
10401 \"cmn\\t%2, #%n3\"},
10402 {\"cmn\\t%0, #%n1\",
10403 \"cmn\\t%2, #%n3\"}
10404 };
10405 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10406 {
10407 {\"cmp%D4\\t%2, %3\",
10408 \"cmp%D5\\t%0, %1\"},
10409 {\"cmp%D4\\t%2, %3\",
10410 \"cmn%D5\\t%0, #%n1\"},
10411 {\"cmn%D4\\t%2, #%n3\",
10412 \"cmp%D5\\t%0, %1\"},
10413 {\"cmn%D4\\t%2, #%n3\",
10414 \"cmn%D5\\t%0, #%n1\"}
10415 };
10416 static const char *const ite[2] =
10417 {
10418 \"it\\t%D4\",
10419 \"it\\t%D5\"
10420 };
10421 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10422 CMP_CMP, CMN_CMP, CMP_CMP,
10423 CMN_CMP, CMP_CMN, CMN_CMN};
10424 int swap =
10425 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10426
10427 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10428 if (TARGET_THUMB2) {
10429 output_asm_insn (ite[swap], operands);
10430 }
10431 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10432 return \"\";
10433 }
10434 "
10435 [(set_attr "conds" "set")
10436 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10437 (set_attr_alternative "length"
10438 [(const_int 6)
10439 (const_int 8)
10440 (const_int 8)
10441 (const_int 8)
10442 (const_int 8)
10443 (if_then_else (eq_attr "is_thumb" "no")
10444 (const_int 8)
10445 (const_int 10))
10446 (if_then_else (eq_attr "is_thumb" "no")
10447 (const_int 8)
10448 (const_int 10))
10449 (if_then_else (eq_attr "is_thumb" "no")
10450 (const_int 8)
10451 (const_int 10))
10452 (if_then_else (eq_attr "is_thumb" "no")
10453 (const_int 8)
10454 (const_int 10))])]
10455 )
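;; Editorial illustration (not part of the original sources): for C code such as
;;
;;   void f (int a, int b) { if (a == 0 || b == 0) g (); }
;;
;; the *cmp_ior pattern above lets the two tests share the condition-code
;; register, so the output can be along the lines of
;;
;;   cmp   a, #0
;;   cmpne b, #0        @ second compare only executed if the first test failed
;;
;; with an "it ne" emitted between the two compares when compiling for Thumb-2.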
10456
10457 (define_insn_and_split "*ior_scc_scc"
10458 [(set (match_operand:SI 0 "s_register_operand" "=r")
10459 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10460 [(match_operand:SI 1 "s_register_operand" "r")
10461 (match_operand:SI 2 "arm_add_operand" "rIL")])
10462 (match_operator:SI 6 "arm_comparison_operator"
10463 [(match_operand:SI 4 "s_register_operand" "r")
10464 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10465 (clobber (reg:CC CC_REGNUM))]
10466 "TARGET_32BIT
10467 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10468 != CCmode)"
10469 "#"
10470 "TARGET_32BIT && reload_completed"
10471 [(set (match_dup 7)
10472 (compare
10473 (ior:SI
10474 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10475 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10476 (const_int 0)))
10477 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10478 "operands[7]
10479 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10480 DOM_CC_X_OR_Y),
10481 CC_REGNUM);"
10482 [(set_attr "conds" "clob")
10483 (set_attr "length" "16")])
10484
10485 ; If the above pattern is followed by a CMP insn, then the compare is
10486 ; redundant, since we can rework the conditional instruction that follows.
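; For example (editorial sketch): in code like
;   int t = (a == 0 || b == 0);
;   if (t) g ();
; combine first builds the *ior_scc_scc form for t and then compares t against
; zero for the branch; the pattern below keeps the condition codes from the OR
; of the two tests live, so the branch can use them directly and the second
; compare disappears.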
10487 (define_insn_and_split "*ior_scc_scc_cmp"
10488 [(set (match_operand 0 "dominant_cc_register" "")
10489 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10490 [(match_operand:SI 1 "s_register_operand" "r")
10491 (match_operand:SI 2 "arm_add_operand" "rIL")])
10492 (match_operator:SI 6 "arm_comparison_operator"
10493 [(match_operand:SI 4 "s_register_operand" "r")
10494 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10495 (const_int 0)))
10496 (set (match_operand:SI 7 "s_register_operand" "=r")
10497 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10498 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10499 "TARGET_32BIT"
10500 "#"
10501 "TARGET_32BIT && reload_completed"
10502 [(set (match_dup 0)
10503 (compare
10504 (ior:SI
10505 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10506 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10507 (const_int 0)))
10508 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10509 ""
10510 [(set_attr "conds" "set")
10511 (set_attr "length" "16")])
10512
10513 (define_insn_and_split "*and_scc_scc"
10514 [(set (match_operand:SI 0 "s_register_operand" "=r")
10515 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10516 [(match_operand:SI 1 "s_register_operand" "r")
10517 (match_operand:SI 2 "arm_add_operand" "rIL")])
10518 (match_operator:SI 6 "arm_comparison_operator"
10519 [(match_operand:SI 4 "s_register_operand" "r")
10520 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10521 (clobber (reg:CC CC_REGNUM))]
10522 "TARGET_32BIT
10523 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10524 != CCmode)"
10525 "#"
10526 "TARGET_32BIT && reload_completed
10527 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10528 != CCmode)"
10529 [(set (match_dup 7)
10530 (compare
10531 (and:SI
10532 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10533 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10534 (const_int 0)))
10535 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10536 "operands[7]
10537 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10538 DOM_CC_X_AND_Y),
10539 CC_REGNUM);"
10540 [(set_attr "conds" "clob")
10541 (set_attr "length" "16")])
10542
10543 ; If the above pattern is followed by a CMP insn, then the compare is
10544 ; redundant, since we can rework the conditional instruction that follows.
10545 (define_insn_and_split "*and_scc_scc_cmp"
10546 [(set (match_operand 0 "dominant_cc_register" "")
10547 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10548 [(match_operand:SI 1 "s_register_operand" "r")
10549 (match_operand:SI 2 "arm_add_operand" "rIL")])
10550 (match_operator:SI 6 "arm_comparison_operator"
10551 [(match_operand:SI 4 "s_register_operand" "r")
10552 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10553 (const_int 0)))
10554 (set (match_operand:SI 7 "s_register_operand" "=r")
10555 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10556 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10557 "TARGET_32BIT"
10558 "#"
10559 "TARGET_32BIT && reload_completed"
10560 [(set (match_dup 0)
10561 (compare
10562 (and:SI
10563 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10564 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10565 (const_int 0)))
10566 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10567 ""
10568 [(set_attr "conds" "set")
10569 (set_attr "length" "16")])
10570
10571 ;; If there is no dominance in the comparison, then we can still save an
10572 ;; instruction in the AND case, since we know that the second compare
10573 ;; need only zero the value if false (if true, then the value is already
10574 ;; correct).
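;; In other words (editorial sketch), the split below computes the first
;; condition with an ordinary scc into the destination, performs the compare
;; for the second condition, and then conditionally clears the destination
;; when that second test is false, roughly
;;
;;   <scc of condition 3 into %0>
;;   cmp       %4, %5
;;   mov<!6>   %0, #0      @ zero the result only when condition 6 is false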
10575 (define_insn_and_split "*and_scc_scc_nodom"
10576 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
10577 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10578 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10579 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10580 (match_operator:SI 6 "arm_comparison_operator"
10581 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10582 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10583 (clobber (reg:CC CC_REGNUM))]
10584 "TARGET_32BIT
10585 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10586 == CCmode)"
10587 "#"
10588 "TARGET_32BIT && reload_completed"
10589 [(parallel [(set (match_dup 0)
10590 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10591 (clobber (reg:CC CC_REGNUM))])
10592 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10593 (set (match_dup 0)
10594 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10595 (match_dup 0)
10596 (const_int 0)))]
10597 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10598 operands[4], operands[5]),
10599 CC_REGNUM);
10600 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10601 operands[5]);"
10602 [(set_attr "conds" "clob")
10603 (set_attr "length" "20")])
10604
10605 (define_split
10606 [(set (reg:CC_NOOV CC_REGNUM)
10607 (compare:CC_NOOV (ior:SI
10608 (and:SI (match_operand:SI 0 "s_register_operand" "")
10609 (const_int 1))
10610 (match_operator:SI 1 "arm_comparison_operator"
10611 [(match_operand:SI 2 "s_register_operand" "")
10612 (match_operand:SI 3 "arm_add_operand" "")]))
10613 (const_int 0)))
10614 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10615 "TARGET_ARM"
10616 [(set (match_dup 4)
10617 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10618 (match_dup 0)))
10619 (set (reg:CC_NOOV CC_REGNUM)
10620 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10621 (const_int 0)))]
10622 "")
10623
10624 (define_split
10625 [(set (reg:CC_NOOV CC_REGNUM)
10626 (compare:CC_NOOV (ior:SI
10627 (match_operator:SI 1 "arm_comparison_operator"
10628 [(match_operand:SI 2 "s_register_operand" "")
10629 (match_operand:SI 3 "arm_add_operand" "")])
10630 (and:SI (match_operand:SI 0 "s_register_operand" "")
10631 (const_int 1)))
10632 (const_int 0)))
10633 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10634 "TARGET_ARM"
10635 [(set (match_dup 4)
10636 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10637 (match_dup 0)))
10638 (set (reg:CC_NOOV CC_REGNUM)
10639 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10640 (const_int 0)))]
10641 "")
10642 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10643
10644 (define_insn_and_split "*negscc"
10645 [(set (match_operand:SI 0 "s_register_operand" "=r")
10646 (neg:SI (match_operator 3 "arm_comparison_operator"
10647 [(match_operand:SI 1 "s_register_operand" "r")
10648 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10649 (clobber (reg:CC CC_REGNUM))]
10650 "TARGET_ARM"
10651 "#"
10652 "&& reload_completed"
10653 [(const_int 0)]
10654 {
10655 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10656
10657 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10658 {
10659 /* Emit mov\\t%0, %1, asr #31 */
10660 emit_insn (gen_rtx_SET (VOIDmode,
10661 operands[0],
10662 gen_rtx_ASHIFTRT (SImode,
10663 operands[1],
10664 GEN_INT (31))));
10665 DONE;
10666 }
10667 else if (GET_CODE (operands[3]) == NE)
10668 {
10669 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10670 if (CONST_INT_P (operands[2]))
10671 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10672 GEN_INT (- INTVAL (operands[2]))));
10673 else
10674 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10675
10676 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10677 gen_rtx_NE (SImode,
10678 cc_reg,
10679 const0_rtx),
10680 gen_rtx_SET (SImode,
10681 operands[0],
10682 GEN_INT (~0))));
10683 DONE;
10684 }
10685 else
10686 {
10687 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10688 emit_insn (gen_rtx_SET (VOIDmode,
10689 cc_reg,
10690 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10691 enum rtx_code rc = GET_CODE (operands[3]);
10692
10693 rc = reverse_condition (rc);
10694 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10695 gen_rtx_fmt_ee (rc,
10696 VOIDmode,
10697 cc_reg,
10698 const0_rtx),
10699 gen_rtx_SET (VOIDmode, operands[0], const0_rtx)));
10700 rc = GET_CODE (operands[3]);
10701 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10702 gen_rtx_fmt_ee (rc,
10703 VOIDmode,
10704 cc_reg,
10705 const0_rtx),
10706 gen_rtx_SET (VOIDmode,
10707 operands[0],
10708 GEN_INT (~0))));
10709 DONE;
10710 }
10711 FAIL;
10712 }
10713 [(set_attr "conds" "clob")
10714 (set_attr "length" "12")]
10715 )
10716
10717 (define_insn "movcond"
10718 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10719 (if_then_else:SI
10720 (match_operator 5 "arm_comparison_operator"
10721 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10722 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10723 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10724 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10725 (clobber (reg:CC CC_REGNUM))]
10726 "TARGET_ARM"
10727 "*
10728 if (GET_CODE (operands[5]) == LT
10729 && (operands[4] == const0_rtx))
10730 {
10731 if (which_alternative != 1 && REG_P (operands[1]))
10732 {
10733 if (operands[2] == const0_rtx)
10734 return \"and\\t%0, %1, %3, asr #31\";
10735 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10736 }
10737 else if (which_alternative != 0 && REG_P (operands[2]))
10738 {
10739 if (operands[1] == const0_rtx)
10740 return \"bic\\t%0, %2, %3, asr #31\";
10741 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10742 }
10743 /* The only case that falls through to here is when both ops 1 & 2
10744 are constants. */
10745 }
10746
10747 if (GET_CODE (operands[5]) == GE
10748 && (operands[4] == const0_rtx))
10749 {
10750 if (which_alternative != 1 && REG_P (operands[1]))
10751 {
10752 if (operands[2] == const0_rtx)
10753 return \"bic\\t%0, %1, %3, asr #31\";
10754 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10755 }
10756 else if (which_alternative != 0 && REG_P (operands[2]))
10757 {
10758 if (operands[1] == const0_rtx)
10759 return \"and\\t%0, %2, %3, asr #31\";
10760 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10761 }
10762 /* The only case that falls through to here is when both ops 1 & 2
10763 are constants. */
10764 }
10765 if (CONST_INT_P (operands[4])
10766 && !const_ok_for_arm (INTVAL (operands[4])))
10767 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10768 else
10769 output_asm_insn (\"cmp\\t%3, %4\", operands);
10770 if (which_alternative != 0)
10771 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10772 if (which_alternative != 1)
10773 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10774 return \"\";
10775 "
10776 [(set_attr "conds" "clob")
10777 (set_attr "length" "8,8,12")]
10778 )
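;; A worked example of the "asr #31" trick used above (editorial note): for
;; x < 0 the arithmetic shift "x, asr #31" yields all ones (0xffffffff) and
;; for x >= 0 it yields zero, so when the else-value is the constant 0
;;
;;   and  %0, %1, %3, asr #31
;;
;; implements (%3 < 0) ? %1 : 0 in a single instruction, and the "bic" form
;; gives the mirrored GE variant.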
10779
10780 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10781
10782 (define_insn "*ifcompare_plus_move"
10783 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10784 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10785 [(match_operand:SI 4 "s_register_operand" "r,r")
10786 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10787 (plus:SI
10788 (match_operand:SI 2 "s_register_operand" "r,r")
10789 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10790 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10791 (clobber (reg:CC CC_REGNUM))]
10792 "TARGET_ARM"
10793 "#"
10794 [(set_attr "conds" "clob")
10795 (set_attr "length" "8,12")]
10796 )
10797
10798 (define_insn "*if_plus_move"
10799 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10800 (if_then_else:SI
10801 (match_operator 4 "arm_comparison_operator"
10802 [(match_operand 5 "cc_register" "") (const_int 0)])
10803 (plus:SI
10804 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10805 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10806 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10807 "TARGET_ARM"
10808 "@
10809 add%d4\\t%0, %2, %3
10810 sub%d4\\t%0, %2, #%n3
10811 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10812 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10813 [(set_attr "conds" "use")
10814 (set_attr "length" "4,4,8,8")
10815 (set_attr_alternative "type"
10816 [(if_then_else (match_operand 3 "const_int_operand" "")
10817 (const_string "simple_alu_imm" )
10818 (const_string "*"))
10819 (const_string "simple_alu_imm")
10820 (const_string "*")
10821 (const_string "*")])]
10822 )
10823
10824 (define_insn "*ifcompare_move_plus"
10825 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10826 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10827 [(match_operand:SI 4 "s_register_operand" "r,r")
10828 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10829 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10830 (plus:SI
10831 (match_operand:SI 2 "s_register_operand" "r,r")
10832 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10833 (clobber (reg:CC CC_REGNUM))]
10834 "TARGET_ARM"
10835 "#"
10836 [(set_attr "conds" "clob")
10837 (set_attr "length" "8,12")]
10838 )
10839
10840 (define_insn "*if_move_plus"
10841 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10842 (if_then_else:SI
10843 (match_operator 4 "arm_comparison_operator"
10844 [(match_operand 5 "cc_register" "") (const_int 0)])
10845 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10846 (plus:SI
10847 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10848 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10849 "TARGET_ARM"
10850 "@
10851 add%D4\\t%0, %2, %3
10852 sub%D4\\t%0, %2, #%n3
10853 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10854 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10855 [(set_attr "conds" "use")
10856 (set_attr "length" "4,4,8,8")
10857 (set_attr_alternative "type"
10858 [(if_then_else (match_operand 3 "const_int_operand" "")
10859 (const_string "simple_alu_imm" )
10860 (const_string "*"))
10861 (const_string "simple_alu_imm")
10862 (const_string "*")
10863 (const_string "*")])]
10864 )
10865
10866 (define_insn "*ifcompare_arith_arith"
10867 [(set (match_operand:SI 0 "s_register_operand" "=r")
10868 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10869 [(match_operand:SI 5 "s_register_operand" "r")
10870 (match_operand:SI 6 "arm_add_operand" "rIL")])
10871 (match_operator:SI 8 "shiftable_operator"
10872 [(match_operand:SI 1 "s_register_operand" "r")
10873 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10874 (match_operator:SI 7 "shiftable_operator"
10875 [(match_operand:SI 3 "s_register_operand" "r")
10876 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10877 (clobber (reg:CC CC_REGNUM))]
10878 "TARGET_ARM"
10879 "#"
10880 [(set_attr "conds" "clob")
10881 (set_attr "length" "12")]
10882 )
10883
10884 (define_insn "*if_arith_arith"
10885 [(set (match_operand:SI 0 "s_register_operand" "=r")
10886 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10887 [(match_operand 8 "cc_register" "") (const_int 0)])
10888 (match_operator:SI 6 "shiftable_operator"
10889 [(match_operand:SI 1 "s_register_operand" "r")
10890 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10891 (match_operator:SI 7 "shiftable_operator"
10892 [(match_operand:SI 3 "s_register_operand" "r")
10893 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10894 "TARGET_ARM"
10895 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10896 [(set_attr "conds" "use")
10897 (set_attr "length" "8")]
10898 )
10899
10900 (define_insn "*ifcompare_arith_move"
10901 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10902 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10903 [(match_operand:SI 2 "s_register_operand" "r,r")
10904 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10905 (match_operator:SI 7 "shiftable_operator"
10906 [(match_operand:SI 4 "s_register_operand" "r,r")
10907 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10908 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10909 (clobber (reg:CC CC_REGNUM))]
10910 "TARGET_ARM"
10911 "*
10912 /* If we have an operation where (op x 0) is the identity operation, the
10913 conditional operator is LT or GE, we are comparing against zero, and
10914 everything is in registers, then we can do this in two instructions. */
10915 if (operands[3] == const0_rtx
10916 && GET_CODE (operands[7]) != AND
10917 && REG_P (operands[5])
10918 && REG_P (operands[1])
10919 && REGNO (operands[1]) == REGNO (operands[4])
10920 && REGNO (operands[4]) != REGNO (operands[0]))
10921 {
10922 if (GET_CODE (operands[6]) == LT)
10923 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10924 else if (GET_CODE (operands[6]) == GE)
10925 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10926 }
10927 if (CONST_INT_P (operands[3])
10928 && !const_ok_for_arm (INTVAL (operands[3])))
10929 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10930 else
10931 output_asm_insn (\"cmp\\t%2, %3\", operands);
10932 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10933 if (which_alternative != 0)
10934 return \"mov%D6\\t%0, %1\";
10935 return \"\";
10936 "
10937 [(set_attr "conds" "clob")
10938 (set_attr "length" "8,12")]
10939 )
10940
10941 (define_insn "*if_arith_move"
10942 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10943 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10944 [(match_operand 6 "cc_register" "") (const_int 0)])
10945 (match_operator:SI 5 "shiftable_operator"
10946 [(match_operand:SI 2 "s_register_operand" "r,r")
10947 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10948 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10949 "TARGET_ARM"
10950 "@
10951 %I5%d4\\t%0, %2, %3
10952 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10953 [(set_attr "conds" "use")
10954 (set_attr "length" "4,8")
10955 (set_attr "type" "*,*")]
10956 )
10957
10958 (define_insn "*ifcompare_move_arith"
10959 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10960 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10961 [(match_operand:SI 4 "s_register_operand" "r,r")
10962 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10963 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10964 (match_operator:SI 7 "shiftable_operator"
10965 [(match_operand:SI 2 "s_register_operand" "r,r")
10966 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10967 (clobber (reg:CC CC_REGNUM))]
10968 "TARGET_ARM"
10969 "*
10970 /* If we have an operation where (op x 0) is the identity operation, the
10971 conditional operator is LT or GE, we are comparing against zero, and
10972 everything is in registers, then we can do this in two instructions. */
10973 if (operands[5] == const0_rtx
10974 && GET_CODE (operands[7]) != AND
10975 && REG_P (operands[3])
10976 && REG_P (operands[1])
10977 && REGNO (operands[1]) == REGNO (operands[2])
10978 && REGNO (operands[2]) != REGNO (operands[0]))
10979 {
10980 if (GET_CODE (operands[6]) == GE)
10981 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10982 else if (GET_CODE (operands[6]) == LT)
10983 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10984 }
10985
10986 if (CONST_INT_P (operands[5])
10987 && !const_ok_for_arm (INTVAL (operands[5])))
10988 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10989 else
10990 output_asm_insn (\"cmp\\t%4, %5\", operands);
10991
10992 if (which_alternative != 0)
10993 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10994 return \"%I7%D6\\t%0, %2, %3\";
10995 "
10996 [(set_attr "conds" "clob")
10997 (set_attr "length" "8,12")]
10998 )
10999
11000 (define_insn "*if_move_arith"
11001 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11002 (if_then_else:SI
11003 (match_operator 4 "arm_comparison_operator"
11004 [(match_operand 6 "cc_register" "") (const_int 0)])
11005 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11006 (match_operator:SI 5 "shiftable_operator"
11007 [(match_operand:SI 2 "s_register_operand" "r,r")
11008 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11009 "TARGET_ARM"
11010 "@
11011 %I5%D4\\t%0, %2, %3
11012 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11013 [(set_attr "conds" "use")
11014 (set_attr "length" "4,8")
11015 (set_attr "type" "*,*")]
11016 )
11017
11018 (define_insn "*ifcompare_move_not"
11019 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11020 (if_then_else:SI
11021 (match_operator 5 "arm_comparison_operator"
11022 [(match_operand:SI 3 "s_register_operand" "r,r")
11023 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11024 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11025 (not:SI
11026 (match_operand:SI 2 "s_register_operand" "r,r"))))
11027 (clobber (reg:CC CC_REGNUM))]
11028 "TARGET_ARM"
11029 "#"
11030 [(set_attr "conds" "clob")
11031 (set_attr "length" "8,12")]
11032 )
11033
11034 (define_insn "*if_move_not"
11035 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11036 (if_then_else:SI
11037 (match_operator 4 "arm_comparison_operator"
11038 [(match_operand 3 "cc_register" "") (const_int 0)])
11039 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11040 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11041 "TARGET_ARM"
11042 "@
11043 mvn%D4\\t%0, %2
11044 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11045 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11046 [(set_attr "conds" "use")
11047 (set_attr "insn" "mvn")
11048 (set_attr "length" "4,8,8")]
11049 )
11050
11051 (define_insn "*ifcompare_not_move"
11052 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11053 (if_then_else:SI
11054 (match_operator 5 "arm_comparison_operator"
11055 [(match_operand:SI 3 "s_register_operand" "r,r")
11056 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11057 (not:SI
11058 (match_operand:SI 2 "s_register_operand" "r,r"))
11059 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11060 (clobber (reg:CC CC_REGNUM))]
11061 "TARGET_ARM"
11062 "#"
11063 [(set_attr "conds" "clob")
11064 (set_attr "length" "8,12")]
11065 )
11066
11067 (define_insn "*if_not_move"
11068 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11069 (if_then_else:SI
11070 (match_operator 4 "arm_comparison_operator"
11071 [(match_operand 3 "cc_register" "") (const_int 0)])
11072 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11073 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11074 "TARGET_ARM"
11075 "@
11076 mvn%d4\\t%0, %2
11077 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11078 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11079 [(set_attr "conds" "use")
11080 (set_attr "insn" "mvn")
11081 (set_attr "length" "4,8,8")]
11082 )
11083
11084 (define_insn "*ifcompare_shift_move"
11085 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11086 (if_then_else:SI
11087 (match_operator 6 "arm_comparison_operator"
11088 [(match_operand:SI 4 "s_register_operand" "r,r")
11089 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11090 (match_operator:SI 7 "shift_operator"
11091 [(match_operand:SI 2 "s_register_operand" "r,r")
11092 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11093 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11094 (clobber (reg:CC CC_REGNUM))]
11095 "TARGET_ARM"
11096 "#"
11097 [(set_attr "conds" "clob")
11098 (set_attr "length" "8,12")]
11099 )
11100
11101 (define_insn "*if_shift_move"
11102 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11103 (if_then_else:SI
11104 (match_operator 5 "arm_comparison_operator"
11105 [(match_operand 6 "cc_register" "") (const_int 0)])
11106 (match_operator:SI 4 "shift_operator"
11107 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11108 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11109 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11110 "TARGET_ARM"
11111 "@
11112 mov%d5\\t%0, %2%S4
11113 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11114 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11115 [(set_attr "conds" "use")
11116 (set_attr "shift" "2")
11117 (set_attr "length" "4,8,8")
11118 (set_attr "insn" "mov")
11119 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11120 (const_string "alu_shift")
11121 (const_string "alu_shift_reg")))]
11122 )
11123
11124 (define_insn "*ifcompare_move_shift"
11125 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11126 (if_then_else:SI
11127 (match_operator 6 "arm_comparison_operator"
11128 [(match_operand:SI 4 "s_register_operand" "r,r")
11129 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11130 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11131 (match_operator:SI 7 "shift_operator"
11132 [(match_operand:SI 2 "s_register_operand" "r,r")
11133 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11134 (clobber (reg:CC CC_REGNUM))]
11135 "TARGET_ARM"
11136 "#"
11137 [(set_attr "conds" "clob")
11138 (set_attr "length" "8,12")]
11139 )
11140
11141 (define_insn "*if_move_shift"
11142 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11143 (if_then_else:SI
11144 (match_operator 5 "arm_comparison_operator"
11145 [(match_operand 6 "cc_register" "") (const_int 0)])
11146 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11147 (match_operator:SI 4 "shift_operator"
11148 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11149 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11150 "TARGET_ARM"
11151 "@
11152 mov%D5\\t%0, %2%S4
11153 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11154 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11155 [(set_attr "conds" "use")
11156 (set_attr "shift" "2")
11157 (set_attr "length" "4,8,8")
11158 (set_attr "insn" "mov")
11159 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11160 (const_string "alu_shift")
11161 (const_string "alu_shift_reg")))]
11162 )
11163
11164 (define_insn "*ifcompare_shift_shift"
11165 [(set (match_operand:SI 0 "s_register_operand" "=r")
11166 (if_then_else:SI
11167 (match_operator 7 "arm_comparison_operator"
11168 [(match_operand:SI 5 "s_register_operand" "r")
11169 (match_operand:SI 6 "arm_add_operand" "rIL")])
11170 (match_operator:SI 8 "shift_operator"
11171 [(match_operand:SI 1 "s_register_operand" "r")
11172 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11173 (match_operator:SI 9 "shift_operator"
11174 [(match_operand:SI 3 "s_register_operand" "r")
11175 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11176 (clobber (reg:CC CC_REGNUM))]
11177 "TARGET_ARM"
11178 "#"
11179 [(set_attr "conds" "clob")
11180 (set_attr "length" "12")]
11181 )
11182
11183 (define_insn "*if_shift_shift"
11184 [(set (match_operand:SI 0 "s_register_operand" "=r")
11185 (if_then_else:SI
11186 (match_operator 5 "arm_comparison_operator"
11187 [(match_operand 8 "cc_register" "") (const_int 0)])
11188 (match_operator:SI 6 "shift_operator"
11189 [(match_operand:SI 1 "s_register_operand" "r")
11190 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11191 (match_operator:SI 7 "shift_operator"
11192 [(match_operand:SI 3 "s_register_operand" "r")
11193 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11194 "TARGET_ARM"
11195 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11196 [(set_attr "conds" "use")
11197 (set_attr "shift" "1")
11198 (set_attr "length" "8")
11199 (set_attr "insn" "mov")
11200 (set (attr "type") (if_then_else
11201 (and (match_operand 2 "const_int_operand" "")
11202 (match_operand 4 "const_int_operand" ""))
11203 (const_string "alu_shift")
11204 (const_string "alu_shift_reg")))]
11205 )
11206
11207 (define_insn "*ifcompare_not_arith"
11208 [(set (match_operand:SI 0 "s_register_operand" "=r")
11209 (if_then_else:SI
11210 (match_operator 6 "arm_comparison_operator"
11211 [(match_operand:SI 4 "s_register_operand" "r")
11212 (match_operand:SI 5 "arm_add_operand" "rIL")])
11213 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11214 (match_operator:SI 7 "shiftable_operator"
11215 [(match_operand:SI 2 "s_register_operand" "r")
11216 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11217 (clobber (reg:CC CC_REGNUM))]
11218 "TARGET_ARM"
11219 "#"
11220 [(set_attr "conds" "clob")
11221 (set_attr "length" "12")]
11222 )
11223
11224 (define_insn "*if_not_arith"
11225 [(set (match_operand:SI 0 "s_register_operand" "=r")
11226 (if_then_else:SI
11227 (match_operator 5 "arm_comparison_operator"
11228 [(match_operand 4 "cc_register" "") (const_int 0)])
11229 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11230 (match_operator:SI 6 "shiftable_operator"
11231 [(match_operand:SI 2 "s_register_operand" "r")
11232 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11233 "TARGET_ARM"
11234 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11235 [(set_attr "conds" "use")
11236 (set_attr "insn" "mvn")
11237 (set_attr "length" "8")]
11238 )
11239
11240 (define_insn "*ifcompare_arith_not"
11241 [(set (match_operand:SI 0 "s_register_operand" "=r")
11242 (if_then_else:SI
11243 (match_operator 6 "arm_comparison_operator"
11244 [(match_operand:SI 4 "s_register_operand" "r")
11245 (match_operand:SI 5 "arm_add_operand" "rIL")])
11246 (match_operator:SI 7 "shiftable_operator"
11247 [(match_operand:SI 2 "s_register_operand" "r")
11248 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11249 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11250 (clobber (reg:CC CC_REGNUM))]
11251 "TARGET_ARM"
11252 "#"
11253 [(set_attr "conds" "clob")
11254 (set_attr "length" "12")]
11255 )
11256
11257 (define_insn "*if_arith_not"
11258 [(set (match_operand:SI 0 "s_register_operand" "=r")
11259 (if_then_else:SI
11260 (match_operator 5 "arm_comparison_operator"
11261 [(match_operand 4 "cc_register" "") (const_int 0)])
11262 (match_operator:SI 6 "shiftable_operator"
11263 [(match_operand:SI 2 "s_register_operand" "r")
11264 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11265 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11266 "TARGET_ARM"
11267 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11268 [(set_attr "conds" "use")
11269 (set_attr "insn" "mvn")
11270 (set_attr "length" "8")]
11271 )
11272
11273 (define_insn "*ifcompare_neg_move"
11274 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11275 (if_then_else:SI
11276 (match_operator 5 "arm_comparison_operator"
11277 [(match_operand:SI 3 "s_register_operand" "r,r")
11278 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11279 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11280 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11281 (clobber (reg:CC CC_REGNUM))]
11282 "TARGET_ARM"
11283 "#"
11284 [(set_attr "conds" "clob")
11285 (set_attr "length" "8,12")]
11286 )
11287
11288 (define_insn "*if_neg_move"
11289 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11290 (if_then_else:SI
11291 (match_operator 4 "arm_comparison_operator"
11292 [(match_operand 3 "cc_register" "") (const_int 0)])
11293 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11294 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11295 "TARGET_ARM"
11296 "@
11297 rsb%d4\\t%0, %2, #0
11298 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
11299 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
11300 [(set_attr "conds" "use")
11301 (set_attr "length" "4,8,8")]
11302 )
11303
11304 (define_insn "*ifcompare_move_neg"
11305 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11306 (if_then_else:SI
11307 (match_operator 5 "arm_comparison_operator"
11308 [(match_operand:SI 3 "s_register_operand" "r,r")
11309 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11310 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11311 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11312 (clobber (reg:CC CC_REGNUM))]
11313 "TARGET_ARM"
11314 "#"
11315 [(set_attr "conds" "clob")
11316 (set_attr "length" "8,12")]
11317 )
11318
11319 (define_insn "*if_move_neg"
11320 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11321 (if_then_else:SI
11322 (match_operator 4 "arm_comparison_operator"
11323 [(match_operand 3 "cc_register" "") (const_int 0)])
11324 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11325 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11326 "TARGET_ARM"
11327 "@
11328 rsb%D4\\t%0, %2, #0
11329 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
11330 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
11331 [(set_attr "conds" "use")
11332 (set_attr "length" "4,8,8")]
11333 )
11334
11335 (define_insn "*arith_adjacentmem"
11336 [(set (match_operand:SI 0 "s_register_operand" "=r")
11337 (match_operator:SI 1 "shiftable_operator"
11338 [(match_operand:SI 2 "memory_operand" "m")
11339 (match_operand:SI 3 "memory_operand" "m")]))
11340 (clobber (match_scratch:SI 4 "=r"))]
11341 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11342 "*
11343 {
11344 rtx ldm[3];
11345 rtx arith[4];
11346 rtx base_reg;
11347 HOST_WIDE_INT val1 = 0, val2 = 0;
11348
11349 if (REGNO (operands[0]) > REGNO (operands[4]))
11350 {
11351 ldm[1] = operands[4];
11352 ldm[2] = operands[0];
11353 }
11354 else
11355 {
11356 ldm[1] = operands[0];
11357 ldm[2] = operands[4];
11358 }
11359
11360 base_reg = XEXP (operands[2], 0);
11361
11362 if (!REG_P (base_reg))
11363 {
11364 val1 = INTVAL (XEXP (base_reg, 1));
11365 base_reg = XEXP (base_reg, 0);
11366 }
11367
11368 if (!REG_P (XEXP (operands[3], 0)))
11369 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11370
11371 arith[0] = operands[0];
11372 arith[3] = operands[1];
11373
11374 if (val1 < val2)
11375 {
11376 arith[1] = ldm[1];
11377 arith[2] = ldm[2];
11378 }
11379 else
11380 {
11381 arith[1] = ldm[2];
11382 arith[2] = ldm[1];
11383 }
11384
11385 ldm[0] = base_reg;
11386 if (val1 != 0 && val2 != 0)
11387 {
11388 rtx ops[3];
11389
11390 if (val1 == 4 || val2 == 4)
11391 /* Other val must be 8, since we know they are adjacent and neither
11392 is zero. */
11393 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
11394 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11395 {
11396 ldm[0] = ops[0] = operands[4];
11397 ops[1] = base_reg;
11398 ops[2] = GEN_INT (val1);
11399 output_add_immediate (ops);
11400 if (val1 < val2)
11401 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11402 else
11403 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11404 }
11405 else
11406 {
11407 /* Offset is out of range for a single add, so use two ldr instructions. */
11408 ops[0] = ldm[1];
11409 ops[1] = base_reg;
11410 ops[2] = GEN_INT (val1);
11411 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11412 ops[0] = ldm[2];
11413 ops[2] = GEN_INT (val2);
11414 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11415 }
11416 }
11417 else if (val1 != 0)
11418 {
11419 if (val1 < val2)
11420 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11421 else
11422 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11423 }
11424 else
11425 {
11426 if (val1 < val2)
11427 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11428 else
11429 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11430 }
11431 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11432 return \"\";
11433 }"
11434 [(set_attr "length" "12")
11435 (set_attr "predicable" "yes")
11436 (set_attr "type" "load1")]
11437 )
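;; A typical source for the pattern above (editorial sketch) is an operation
;; on two adjacent words, e.g.
;;
;;   int f (int *p) { return p[0] + p[1]; }
;;
;; where the two loads can be replaced by a single ldm of the base register
;; followed by the arithmetic instruction, subject to the offset adjustments
;; handled in the output code above.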
11438
11439 ; This pattern is never tried by combine, so do it as a peephole
11440
11441 (define_peephole2
11442 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11443 (match_operand:SI 1 "arm_general_register_operand" ""))
11444 (set (reg:CC CC_REGNUM)
11445 (compare:CC (match_dup 1) (const_int 0)))]
11446 "TARGET_ARM"
11447 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11448 (set (match_dup 0) (match_dup 1))])]
11449 ""
11450 )
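;; Editorial note: the peephole above merges a register-to-register move that
;; is immediately followed by a compare of the source against zero into a
;; single flag-setting move, so a sequence like "mov r0, r1" + "cmp r1, #0"
;; can be emitted as one instruction (e.g. "subs r0, r1, #0").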
11451
11452 (define_split
11453 [(set (match_operand:SI 0 "s_register_operand" "")
11454 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11455 (const_int 0))
11456 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11457 [(match_operand:SI 3 "s_register_operand" "")
11458 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11459 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11460 "TARGET_ARM"
11461 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11462 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11463 (match_dup 5)))]
11464 ""
11465 )
11466
11467 ;; This split can be used because CC_Z mode implies that the following
11468 ;; branch will be an equality, or an unsigned inequality, so the sign
11469 ;; extension is not needed.
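;; Editorial sketch: this shape can arise when a byte loaded from memory ends
;; up shifted to the top of a register before an (in)equality test, e.g. for
;; code along the lines of
;;
;;   int f (unsigned char *p) { return *p == 3; }
;;
;; the split below replaces the shift with a zero-extending byte load and
;; compares against the constant shifted back down (3 rather than 3 << 24).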
11470
11471 (define_split
11472 [(set (reg:CC_Z CC_REGNUM)
11473 (compare:CC_Z
11474 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11475 (const_int 24))
11476 (match_operand 1 "const_int_operand" "")))
11477 (clobber (match_scratch:SI 2 ""))]
11478 "TARGET_ARM
11479 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
11480 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
11481 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11482 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11483 "
11484 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11485 "
11486 )
11487 ;; ??? Check the patterns above for Thumb-2 usefulness
11488
11489 (define_expand "prologue"
11490 [(clobber (const_int 0))]
11491 "TARGET_EITHER"
11492 "if (TARGET_32BIT)
11493 arm_expand_prologue ();
11494 else
11495 thumb1_expand_prologue ();
11496 DONE;
11497 "
11498 )
11499
11500 (define_expand "epilogue"
11501 [(clobber (const_int 0))]
11502 "TARGET_EITHER"
11503 "
11504 if (crtl->calls_eh_return)
11505 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11506 if (TARGET_THUMB1)
11507 {
11508 thumb1_expand_epilogue ();
11509 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11510 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11511 }
11512 else if (HAVE_return)
11513 {
11514 /* HAVE_return already tests USE_RETURN_INSN (FALSE), so there is
11515 no need to test it again explicitly. */
11516 emit_jump_insn (gen_return ());
11517 }
11518 else if (TARGET_32BIT)
11519 {
11520 arm_expand_epilogue (true);
11521 }
11522 DONE;
11523 "
11524 )
11525
11526 (define_insn "prologue_thumb1_interwork"
11527 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
11528 "TARGET_THUMB1"
11529 "* return thumb1_output_interwork ();"
11530 [(set_attr "length" "8")]
11531 )
11532
11533 ;; Note - although unspec_volatiles USE all hard registers,
11534 ;; USEs are ignored after reload has completed. Thus we need
11535 ;; to add an unspec of the link register to ensure that flow
11536 ;; does not think that it is unused by the sibcall branch that
11537 ;; will replace the standard function epilogue.
11538 (define_expand "sibcall_epilogue"
11539 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11540 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11541 "TARGET_32BIT"
11542 "
11543 arm_expand_epilogue (false);
11544 DONE;
11545 "
11546 )
11547
11548 (define_insn "*epilogue_insns"
11549 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
11550 "TARGET_THUMB1"
11551 "*
11552 return thumb1_unexpanded_epilogue ();
11553 "
11554 ; Length is the absolute worst case.
11555 [(set_attr "length" "44")
11556 (set_attr "type" "block")
11557 ;; We don't clobber the conditions, but the potential length of this
11558 ;; operation is sufficient to make conditionalizing the sequence
11559 ;; unlikely to be profitable.
11560 (set_attr "conds" "clob")]
11561 )
11562
11563 (define_expand "eh_epilogue"
11564 [(use (match_operand:SI 0 "register_operand" ""))
11565 (use (match_operand:SI 1 "register_operand" ""))
11566 (use (match_operand:SI 2 "register_operand" ""))]
11567 "TARGET_EITHER"
11568 "
11569 {
11570 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11571 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11572 {
11573 rtx ra = gen_rtx_REG (Pmode, 2);
11574
11575 emit_move_insn (ra, operands[2]);
11576 operands[2] = ra;
11577 }
11578 /* This is a hack -- we may have crystallized the function type too
11579 early. */
11580 cfun->machine->func_type = 0;
11581 }"
11582 )
11583
11584 ;; This split is only used during output to reduce the number of patterns
11585 ;; that need assembler instructions added to them. We allowed the setting
11586 ;; of the conditions to be implicit during rtl generation so that
11587 ;; the conditional compare patterns would work. However, this conflicts to
11588 ;; some extent with the conditional data operations, so we have to split them
11589 ;; up again here.
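;; Editorial sketch of what the splits below do: an insn such as
;;
;;   (set (reg r0) (if_then_else (lt (reg r1) (reg r2)) (reg r0) (reg r3)))
;;
;; becomes, after reload, an explicit compare that sets the CC register
;; followed by a cond_exec that performs the move only when the (reversed)
;; condition holds, i.e. roughly "cmp r1, r2" + "movge r0, r3".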
11590
11591 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11592 ;; conditional execution sufficient?
11593
11594 (define_split
11595 [(set (match_operand:SI 0 "s_register_operand" "")
11596 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11597 [(match_operand 2 "" "") (match_operand 3 "" "")])
11598 (match_dup 0)
11599 (match_operand 4 "" "")))
11600 (clobber (reg:CC CC_REGNUM))]
11601 "TARGET_ARM && reload_completed"
11602 [(set (match_dup 5) (match_dup 6))
11603 (cond_exec (match_dup 7)
11604 (set (match_dup 0) (match_dup 4)))]
11605 "
11606 {
11607 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11608 operands[2], operands[3]);
11609 enum rtx_code rc = GET_CODE (operands[1]);
11610
11611 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11612 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11613 if (mode == CCFPmode || mode == CCFPEmode)
11614 rc = reverse_condition_maybe_unordered (rc);
11615 else
11616 rc = reverse_condition (rc);
11617
11618 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11619 }"
11620 )
11621
11622 (define_split
11623 [(set (match_operand:SI 0 "s_register_operand" "")
11624 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11625 [(match_operand 2 "" "") (match_operand 3 "" "")])
11626 (match_operand 4 "" "")
11627 (match_dup 0)))
11628 (clobber (reg:CC CC_REGNUM))]
11629 "TARGET_ARM && reload_completed"
11630 [(set (match_dup 5) (match_dup 6))
11631 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11632 (set (match_dup 0) (match_dup 4)))]
11633 "
11634 {
11635 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11636 operands[2], operands[3]);
11637
11638 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11639 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11640 }"
11641 )
11642
11643 (define_split
11644 [(set (match_operand:SI 0 "s_register_operand" "")
11645 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11646 [(match_operand 2 "" "") (match_operand 3 "" "")])
11647 (match_operand 4 "" "")
11648 (match_operand 5 "" "")))
11649 (clobber (reg:CC CC_REGNUM))]
11650 "TARGET_ARM && reload_completed"
11651 [(set (match_dup 6) (match_dup 7))
11652 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11653 (set (match_dup 0) (match_dup 4)))
11654 (cond_exec (match_dup 8)
11655 (set (match_dup 0) (match_dup 5)))]
11656 "
11657 {
11658 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11659 operands[2], operands[3]);
11660 enum rtx_code rc = GET_CODE (operands[1]);
11661
11662 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11663 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11664 if (mode == CCFPmode || mode == CCFPEmode)
11665 rc = reverse_condition_maybe_unordered (rc);
11666 else
11667 rc = reverse_condition (rc);
11668
11669 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11670 }"
11671 )
11672
11673 (define_split
11674 [(set (match_operand:SI 0 "s_register_operand" "")
11675 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11676 [(match_operand:SI 2 "s_register_operand" "")
11677 (match_operand:SI 3 "arm_add_operand" "")])
11678 (match_operand:SI 4 "arm_rhs_operand" "")
11679 (not:SI
11680 (match_operand:SI 5 "s_register_operand" ""))))
11681 (clobber (reg:CC CC_REGNUM))]
11682 "TARGET_ARM && reload_completed"
11683 [(set (match_dup 6) (match_dup 7))
11684 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11685 (set (match_dup 0) (match_dup 4)))
11686 (cond_exec (match_dup 8)
11687 (set (match_dup 0) (not:SI (match_dup 5))))]
11688 "
11689 {
11690 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11691 operands[2], operands[3]);
11692 enum rtx_code rc = GET_CODE (operands[1]);
11693
11694 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11695 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11696 if (mode == CCFPmode || mode == CCFPEmode)
11697 rc = reverse_condition_maybe_unordered (rc);
11698 else
11699 rc = reverse_condition (rc);
11700
11701 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11702 }"
11703 )
11704
11705 (define_insn "*cond_move_not"
11706 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11707 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11708 [(match_operand 3 "cc_register" "") (const_int 0)])
11709 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11710 (not:SI
11711 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11712 "TARGET_ARM"
11713 "@
11714 mvn%D4\\t%0, %2
11715 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11716 [(set_attr "conds" "use")
11717 (set_attr "insn" "mvn")
11718 (set_attr "length" "4,8")]
11719 )
11720
11721 ;; The next two patterns occur when an AND operation is followed by a
11722 ;; scc insn sequence
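;; Editorial sketch: such a sequence can come from extracting a single bit as
;; a signed value, e.g.
;;
;;   struct s { int b : 1; };
;;   int f (struct s *p) { return p->b; }
;;
;; where the result must be 0 or -1; the first pattern below tests the bit
;; with "ands" and then uses "mvnne" to produce the all-ones value when it is
;; set.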
11723
11724 (define_insn "*sign_extract_onebit"
11725 [(set (match_operand:SI 0 "s_register_operand" "=r")
11726 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11727 (const_int 1)
11728 (match_operand:SI 2 "const_int_operand" "n")))
11729 (clobber (reg:CC CC_REGNUM))]
11730 "TARGET_ARM"
11731 "*
11732 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11733 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11734 return \"mvnne\\t%0, #0\";
11735 "
11736 [(set_attr "conds" "clob")
11737 (set_attr "length" "8")]
11738 )
11739
11740 (define_insn "*not_signextract_onebit"
11741 [(set (match_operand:SI 0 "s_register_operand" "=r")
11742 (not:SI
11743 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11744 (const_int 1)
11745 (match_operand:SI 2 "const_int_operand" "n"))))
11746 (clobber (reg:CC CC_REGNUM))]
11747 "TARGET_ARM"
11748 "*
11749 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11750 output_asm_insn (\"tst\\t%1, %2\", operands);
11751 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11752 return \"movne\\t%0, #0\";
11753 "
11754 [(set_attr "conds" "clob")
11755 (set_attr "length" "12")]
11756 )
11757 ;; ??? The above patterns need auditing for Thumb-2
11758
11759 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11760 ;; expressions. For simplicity, the first register is also in the unspec
11761 ;; part.
11762 ;; To avoid the use of a GNU extension, the length attribute is computed
11763 ;; in the C function arm_attr_length_push_multi.
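;; Editorial note: the emitted instruction is the usual multi-register store,
;; e.g. "stmfd sp!, {r4, r5, lr}" in ARM state or "push {r4, r5, lr}" in
;; Thumb state, as built up in the output template below.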
11764 (define_insn "*push_multi"
11765 [(match_parallel 2 "multi_register_push"
11766 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11767 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11768 UNSPEC_PUSH_MULT))])]
11769 ""
11770 "*
11771 {
11772 int num_saves = XVECLEN (operands[2], 0);
11773
11774 /* For the StrongARM at least it is faster to
11775 use STR to store only a single register.
11776 In Thumb mode always use push, and the assembler will pick
11777 something appropriate. */
11778 if (num_saves == 1 && TARGET_ARM)
11779 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11780 else
11781 {
11782 int i;
11783 char pattern[100];
11784
11785 if (TARGET_ARM)
11786 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
11787 else if (TARGET_THUMB2)
11788 strcpy (pattern, \"push%?\\t{%1\");
11789 else
11790 strcpy (pattern, \"push\\t{%1\");
11791
11792 for (i = 1; i < num_saves; i++)
11793 {
11794 strcat (pattern, \", %|\");
11795 strcat (pattern,
11796 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11797 }
11798
11799 strcat (pattern, \"}\");
11800 output_asm_insn (pattern, operands);
11801 }
11802
11803 return \"\";
11804 }"
11805 [(set_attr "type" "store4")
11806 (set (attr "length")
11807 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11808 )
11809
11810 (define_insn "stack_tie"
11811 [(set (mem:BLK (scratch))
11812 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11813 (match_operand:SI 1 "s_register_operand" "rk")]
11814 UNSPEC_PRLG_STK))]
11815 ""
11816 ""
11817 [(set_attr "length" "0")]
11818 )
11819
11820 ;; Pop (as used in epilogue RTL)
11821 ;;
11822 (define_insn "*load_multiple_with_writeback"
11823 [(match_parallel 0 "load_multiple_operation"
11824 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11825 (plus:SI (match_dup 1)
11826 (match_operand:SI 2 "const_int_operand" "I")))
11827 (set (match_operand:SI 3 "s_register_operand" "=rk")
11828 (mem:SI (match_dup 1)))
11829 ])]
11830 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11831 "*
11832 {
11833 arm_output_multireg_pop (operands, /*return_pc=*/false,
11834 /*cond=*/const_true_rtx,
11835 /*reverse=*/false,
11836 /*update=*/true);
11837 return \"\";
11838 }
11839 "
11840 [(set_attr "type" "load4")
11841 (set_attr "predicable" "yes")]
11842 )
11843
11844 ;; Pop with return (as used in epilogue RTL)
11845 ;;
11846 ;; This instruction is generated when the registers are popped at the end of the
11847 ;; epilogue. Here, instead of popping the value into LR and then generating a
11848 ;; jump to LR, the value is popped into PC directly. Hence, the pattern is
11849 ;; combined with (return).
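;; Editorial note: the typical output is therefore a single epilogue
;; instruction such as "ldmfd sp!, {r4, r5, pc}" (or "pop {r4, r5, pc}" in
;; Thumb state), produced by arm_output_multireg_pop in the template below.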
11850 (define_insn "*pop_multiple_with_writeback_and_return"
11851 [(match_parallel 0 "pop_multiple_return"
11852 [(return)
11853 (set (match_operand:SI 1 "s_register_operand" "+rk")
11854 (plus:SI (match_dup 1)
11855 (match_operand:SI 2 "const_int_operand" "I")))
11856 (set (match_operand:SI 3 "s_register_operand" "=rk")
11857 (mem:SI (match_dup 1)))
11858 ])]
11859 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11860 "*
11861 {
11862 arm_output_multireg_pop (operands, /*return_pc=*/true,
11863 /*cond=*/const_true_rtx,
11864 /*reverse=*/false,
11865 /*update=*/true);
11866 return \"\";
11867 }
11868 "
11869 [(set_attr "type" "load4")
11870 (set_attr "predicable" "yes")]
11871 )
11872
11873 (define_insn "*pop_multiple_with_return"
11874 [(match_parallel 0 "pop_multiple_return"
11875 [(return)
11876 (set (match_operand:SI 2 "s_register_operand" "=rk")
11877 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11878 ])]
11879 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11880 "*
11881 {
11882 arm_output_multireg_pop (operands, /*return_pc=*/true,
11883 /*cond=*/const_true_rtx,
11884 /*reverse=*/false,
11885 /*update=*/false);
11886 return \"\";
11887 }
11888 "
11889 [(set_attr "type" "load4")
11890 (set_attr "predicable" "yes")]
11891 )
11892
11893 ;; Load into PC and return
11894 (define_insn "*ldr_with_return"
11895 [(return)
11896 (set (reg:SI PC_REGNUM)
11897 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11898 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11899 "ldr%?\t%|pc, [%0], #4"
11900 [(set_attr "type" "load1")
11901 (set_attr "predicable" "yes")]
11902 )
11903 ;; Pop for floating-point registers (as used in epilogue RTL)
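;; The output code below builds a single "fldmfdd <base>!, {dN-dM}" string
;; covering the consecutive double-precision registers in the parallel, e.g.
;; "fldmfdd sp!, {d8-d15}" (register range illustrative).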
11904 (define_insn "*vfp_pop_multiple_with_writeback"
11905 [(match_parallel 0 "pop_multiple_fp"
11906 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11907 (plus:SI (match_dup 1)
11908 (match_operand:SI 2 "const_int_operand" "I")))
11909 (set (match_operand:DF 3 "arm_hard_register_operand" "")
11910 (mem:DF (match_dup 1)))])]
11911 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
11912 "*
11913 {
11914 int num_regs = XVECLEN (operands[0], 0);
11915 char pattern[100];
11916 rtx op_list[2];
11917 strcpy (pattern, \"fldmfdd\\t\");
11918 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11919 strcat (pattern, \"!, {\");
11920 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11921 strcat (pattern, \"%P0\");
11922 if ((num_regs - 1) > 1)
11923 {
11924 strcat (pattern, \"-%P1\");
11925 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11926 }
11927
11928 strcat (pattern, \"}\");
11929 output_asm_insn (pattern, op_list);
11930 return \"\";
11931 }
11932 "
11933 [(set_attr "type" "load4")
11934 (set_attr "conds" "unconditional")
11935 (set_attr "predicable" "no")]
11936 )
11937
11938 ;; Special patterns for dealing with the constant pool
11939
11940 (define_insn "align_4"
11941 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11942 "TARGET_EITHER"
11943 "*
11944 assemble_align (32);
11945 return \"\";
11946 "
11947 )
11948
11949 (define_insn "align_8"
11950 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11951 "TARGET_EITHER"
11952 "*
11953 assemble_align (64);
11954 return \"\";
11955 "
11956 )
11957
11958 (define_insn "consttable_end"
11959 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11960 "TARGET_EITHER"
11961 "*
11962 making_const_table = FALSE;
11963 return \"\";
11964 "
11965 )
11966
11967 (define_insn "consttable_1"
11968 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11969 "TARGET_THUMB1"
11970 "*
11971 making_const_table = TRUE;
11972 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11973 assemble_zeros (3);
11974 return \"\";
11975 "
11976 [(set_attr "length" "4")]
11977 )
11978
11979 (define_insn "consttable_2"
11980 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11981 "TARGET_THUMB1"
11982 "*
11983 making_const_table = TRUE;
11984 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
11985 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11986 assemble_zeros (2);
11987 return \"\";
11988 "
11989 [(set_attr "length" "4")]
11990 )
11991
11992 (define_insn "consttable_4"
11993 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11994 "TARGET_EITHER"
11995 "*
11996 {
11997 rtx x = operands[0];
11998 making_const_table = TRUE;
11999 switch (GET_MODE_CLASS (GET_MODE (x)))
12000 {
12001 case MODE_FLOAT:
12002 if (GET_MODE (x) == HFmode)
12003 arm_emit_fp16_const (x);
12004 else
12005 {
12006 REAL_VALUE_TYPE r;
12007 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
12008 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
12009 }
12010 break;
12011 default:
12012 /* XXX: Sometimes gcc does something really dumb and ends up with
12013 a HIGH in a constant pool entry, usually because it's trying to
12014 load into a VFP register. We know this will always be used in
12015 combination with a LO_SUM which ignores the high bits, so just
12016 strip off the HIGH. */
12017 if (GET_CODE (x) == HIGH)
12018 x = XEXP (x, 0);
12019 assemble_integer (x, 4, BITS_PER_WORD, 1);
12020 mark_symbol_refs_as_used (x);
12021 break;
12022 }
12023 return \"\";
12024 }"
12025 [(set_attr "length" "4")]
12026 )
12027
12028 (define_insn "consttable_8"
12029 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12030 "TARGET_EITHER"
12031 "*
12032 {
12033 making_const_table = TRUE;
12034 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12035 {
12036 case MODE_FLOAT:
12037 {
12038 REAL_VALUE_TYPE r;
12039 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12040 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12041 break;
12042 }
12043 default:
12044 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12045 break;
12046 }
12047 return \"\";
12048 }"
12049 [(set_attr "length" "8")]
12050 )
12051
12052 (define_insn "consttable_16"
12053 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12054 "TARGET_EITHER"
12055 "*
12056 {
12057 making_const_table = TRUE;
12058 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12059 {
12060 case MODE_FLOAT:
12061 {
12062 REAL_VALUE_TYPE r;
12063 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12064 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12065 break;
12066 }
12067 default:
12068 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12069 break;
12070 }
12071 return \"\";
12072 }"
12073 [(set_attr "length" "16")]
12074 )
12075
12076 ;; Miscellaneous Thumb patterns
12077
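;; When generating PIC code, the dispatch table holds label-relative offsets
;; rather than absolute addresses, so the expander below adds the address of
;; the table label to the loaded entry before jumping to it.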
12078 (define_expand "tablejump"
12079 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
12080 (use (label_ref (match_operand 1 "" "")))])]
12081 "TARGET_THUMB1"
12082 "
12083 if (flag_pic)
12084 {
12085 /* Hopefully, CSE will eliminate this copy. */
12086 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
12087 rtx reg2 = gen_reg_rtx (SImode);
12088
12089 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
12090 operands[0] = reg2;
12091 }
12092 "
12093 )
12094
12095 ;; NB never uses BX.
12096 (define_insn "*thumb1_tablejump"
12097 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
12098 (use (label_ref (match_operand 1 "" "")))]
12099 "TARGET_THUMB1"
12100 "mov\\t%|pc, %0"
12101 [(set_attr "length" "2")]
12102 )
12103
12104 ;; V5 instructions.
12105
12106 (define_insn "clzsi2"
12107 [(set (match_operand:SI 0 "s_register_operand" "=r")
12108 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12109 "TARGET_32BIT && arm_arch5"
12110 "clz%?\\t%0, %1"
12111 [(set_attr "predicable" "yes")
12112 (set_attr "insn" "clz")])
12113
12114 (define_insn "rbitsi2"
12115 [(set (match_operand:SI 0 "s_register_operand" "=r")
12116 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12117 "TARGET_32BIT && arm_arch_thumb2"
12118 "rbit%?\\t%0, %1"
12119 [(set_attr "predicable" "yes")
12120 (set_attr "insn" "clz")])
12121
12122 (define_expand "ctzsi2"
12123 [(set (match_operand:SI 0 "s_register_operand" "")
12124 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
12125 "TARGET_32BIT && arm_arch_thumb2"
12126 "
12127 {
12128 rtx tmp = gen_reg_rtx (SImode);
12129 emit_insn (gen_rbitsi2 (tmp, operands[1]));
12130 emit_insn (gen_clzsi2 (operands[0], tmp));
12131 }
12132 DONE;
12133 "
12134 )
12135
12136 ;; V5E instructions.
12137
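;; Only the address operand is used here: ARMv5E provides just the PLD
;; data-preload hint, so the standard prefetch operands for write intent
;; (operand 1) and locality (operand 2) are ignored.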
12138 (define_insn "prefetch"
12139 [(prefetch (match_operand:SI 0 "address_operand" "p")
12140 (match_operand:SI 1 "" "")
12141 (match_operand:SI 2 "" ""))]
12142 "TARGET_32BIT && arm_arch5e"
12143 "pld\\t%a0")
12144
12145 ;; General predication pattern
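;; define_cond_exec lets insns marked "predicable" be executed under a
;; condition tested against the CC register; the "predicated" attribute marks
;; the conditional variants so generated.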
12146
12147 (define_cond_exec
12148 [(match_operator 0 "arm_comparison_operator"
12149 [(match_operand 1 "cc_register" "")
12150 (const_int 0)])]
12151 "TARGET_32BIT"
12152 ""
12153 [(set_attr "predicated" "yes")]
12154 )
12155
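;; A zero-length marker that merely flags operand 0 as used (its template is
;; just an assembler comment), keeping the register live where it would
;; otherwise appear dead.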
12156 (define_insn "force_register_use"
12157 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12158 ""
12159 "%@ %0 needed"
12160 [(set_attr "length" "0")]
12161 )
12162
12163
12164 ;; Patterns for exception handling
12165
12166 (define_expand "eh_return"
12167 [(use (match_operand 0 "general_operand" ""))]
12168 "TARGET_EITHER"
12169 "
12170 {
12171 if (TARGET_32BIT)
12172 emit_insn (gen_arm_eh_return (operands[0]));
12173 else
12174 emit_insn (gen_thumb_eh_return (operands[0]));
12175 DONE;
12176 }"
12177 )
12178
12179 ;; We can't expand this before we know where the link register is stored.
12180 (define_insn_and_split "arm_eh_return"
12181 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12182 VUNSPEC_EH_RETURN)
12183 (clobber (match_scratch:SI 1 "=&r"))]
12184 "TARGET_ARM"
12185 "#"
12186 "&& reload_completed"
12187 [(const_int 0)]
12188 "
12189 {
12190 arm_set_return_address (operands[0], operands[1]);
12191 DONE;
12192 }"
12193 )
12194
12195 (define_insn_and_split "thumb_eh_return"
12196 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
12197 VUNSPEC_EH_RETURN)
12198 (clobber (match_scratch:SI 1 "=&l"))]
12199 "TARGET_THUMB1"
12200 "#"
12201 "&& reload_completed"
12202 [(const_int 0)]
12203 "
12204 {
12205 thumb_set_return_address (operands[0], operands[1]);
12206 DONE;
12207 }"
12208 )
12209
12210 \f
12211 ;; TLS support
12212
12213 (define_insn "load_tp_hard"
12214 [(set (match_operand:SI 0 "register_operand" "=r")
12215 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12216 "TARGET_HARD_TP"
12217 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12218 [(set_attr "predicable" "yes")]
12219 )
12220
12221 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
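;; __aeabi_read_tp is the AEABI helper that returns the thread pointer in r0;
;; it has a restricted clobber list, which is why only ip, lr and the condition
;; codes are clobbered here.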
12222 (define_insn "load_tp_soft"
12223 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12224 (clobber (reg:SI LR_REGNUM))
12225 (clobber (reg:SI IP_REGNUM))
12226 (clobber (reg:CC CC_REGNUM))]
12227 "TARGET_SOFT_TP"
12228 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12229 [(set_attr "conds" "clob")]
12230 )
12231
12232 ;; TLS descriptor call
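;; With -mtls-dialect=gnu2 the descriptor address is passed in r0 and the
;; resolver returns the TLS offset in r0, clobbering only r1, lr and the
;; condition codes; operand 1 numbers the local "LPIC" label emitted at the
;; call site.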
12233 (define_insn "tlscall"
12234 [(set (reg:SI R0_REGNUM)
12235 (unspec:SI [(reg:SI R0_REGNUM)
12236 (match_operand:SI 0 "" "X")
12237 (match_operand 1 "" "")] UNSPEC_TLS))
12238 (clobber (reg:SI R1_REGNUM))
12239 (clobber (reg:SI LR_REGNUM))
12240 (clobber (reg:SI CC_REGNUM))]
12241 "TARGET_GNU2_TLS"
12242 {
12243 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12244 INTVAL (operands[1]));
12245 return "bl\\t%c0(tlscall)";
12246 }
12247 [(set_attr "conds" "clob")
12248 (set_attr "length" "4")]
12249 )
12250
12251 ;; For thread pointer builtin
12252 (define_expand "get_thread_pointersi"
12253 [(match_operand:SI 0 "s_register_operand" "=r")]
12254 ""
12255 "
12256 {
12257 arm_load_tp (operands[0]);
12258 DONE;
12259 }")
12260
12261 ;;
12262
12263 ;; We only care about the lower 16 bits of the constant
12264 ;; being inserted into the upper 16 bits of the register.
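;; MOVT writes a 16-bit immediate into bits [31:16] of the destination and
;; leaves bits [15:0] unchanged, matching the zero_extract on the left-hand
;; side; %L1 prints the low 16 bits of the constant.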
12265 (define_insn "*arm_movtas_ze"
12266 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
12267 (const_int 16)
12268 (const_int 16))
12269 (match_operand:SI 1 "const_int_operand" ""))]
12270 "arm_arch_thumb2"
12271 "movt%?\t%0, %L1"
12272 [(set_attr "predicable" "yes")
12273 (set_attr "length" "4")]
12274 )
12275
12276 (define_insn "*arm_rev"
12277 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12278 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12279 "arm_arch6"
12280 "@
12281 rev\t%0, %1
12282 rev%?\t%0, %1
12283 rev%?\t%0, %1"
12284 [(set_attr "arch" "t1,t2,32")
12285 (set_attr "length" "2,2,4")]
12286 )
12287
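;; Byte-swap a word without the REV instruction (pre-ARMv6).  The sequence
;; below computes t = ((x ^ ror (x, 16)) >> 8) & 0xffff00ff and then forms the
;; result as ror (x, 8) ^ t.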
12288 (define_expand "arm_legacy_rev"
12289 [(set (match_operand:SI 2 "s_register_operand" "")
12290 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
12291 (const_int 16))
12292 (match_dup 1)))
12293 (set (match_dup 2)
12294 (lshiftrt:SI (match_dup 2)
12295 (const_int 8)))
12296 (set (match_operand:SI 3 "s_register_operand" "")
12297 (rotatert:SI (match_dup 1)
12298 (const_int 8)))
12299 (set (match_dup 2)
12300 (and:SI (match_dup 2)
12301 (const_int -65281)))
12302 (set (match_operand:SI 0 "s_register_operand" "")
12303 (xor:SI (match_dup 3)
12304 (match_dup 2)))]
12305 "TARGET_32BIT"
12306 ""
12307 )
12308
12309 ;; Reuse temporaries to keep register pressure down.
12310 (define_expand "thumb_legacy_rev"
12311 [(set (match_operand:SI 2 "s_register_operand" "")
12312 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
12313 (const_int 24)))
12314 (set (match_operand:SI 3 "s_register_operand" "")
12315 (lshiftrt:SI (match_dup 1)
12316 (const_int 24)))
12317 (set (match_dup 3)
12318 (ior:SI (match_dup 3)
12319 (match_dup 2)))
12320 (set (match_operand:SI 4 "s_register_operand" "")
12321 (const_int 16))
12322 (set (match_operand:SI 5 "s_register_operand" "")
12323 (rotatert:SI (match_dup 1)
12324 (match_dup 4)))
12325 (set (match_dup 2)
12326 (ashift:SI (match_dup 5)
12327 (const_int 24)))
12328 (set (match_dup 5)
12329 (lshiftrt:SI (match_dup 5)
12330 (const_int 24)))
12331 (set (match_dup 5)
12332 (ior:SI (match_dup 5)
12333 (match_dup 2)))
12334 (set (match_dup 5)
12335 (rotatert:SI (match_dup 5)
12336 (match_dup 4)))
12337 (set (match_operand:SI 0 "s_register_operand" "")
12338 (ior:SI (match_dup 5)
12339 (match_dup 3)))]
12340 "TARGET_THUMB"
12341 ""
12342 )
12343
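;; Use the REV instruction when it is available (ARMv6 and later); otherwise
;; fall back to the open-coded sequences above, which are only worthwhile when
;; not optimizing for size.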
12344 (define_expand "bswapsi2"
12345 [(set (match_operand:SI 0 "s_register_operand" "=r")
12346 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
12347 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12348 "
12349 if (!arm_arch6)
12350 {
12351 rtx op2 = gen_reg_rtx (SImode);
12352 rtx op3 = gen_reg_rtx (SImode);
12353
12354 if (TARGET_THUMB)
12355 {
12356 rtx op4 = gen_reg_rtx (SImode);
12357 rtx op5 = gen_reg_rtx (SImode);
12358
12359 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12360 op2, op3, op4, op5));
12361 }
12362 else
12363 {
12364 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12365 op2, op3));
12366 }
12367
12368 DONE;
12369 }
12370 "
12371 )
12372
12373 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12374 ;; and unsigned variants, respectively. For rev16, expose
12375 ;; byte-swapping in the lower 16 bits only.
12376 (define_insn "*arm_revsh"
12377 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12378 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12379 "arm_arch6"
12380 "@
12381 revsh\t%0, %1
12382 revsh%?\t%0, %1
12383 revsh%?\t%0, %1"
12384 [(set_attr "arch" "t1,t2,32")
12385 (set_attr "length" "2,2,4")]
12386 )
12387
12388 (define_insn "*arm_rev16"
12389 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12390 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12391 "arm_arch6"
12392 "@
12393 rev16\t%0, %1
12394 rev16%?\t%0, %1
12395 rev16%?\t%0, %1"
12396 [(set_attr "arch" "t1,t2,32")
12397 (set_attr "length" "2,2,4")]
12398 )
12399
12400 (define_expand "bswaphi2"
12401 [(set (match_operand:HI 0 "s_register_operand" "=r")
12402 (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
12403 "arm_arch6"
12404 ""
12405 )
12406
12407 ;; Patterns for LDRD/STRD in Thumb2 mode
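;; Each pattern below matches, after reload, a pair of SImode accesses to
;; consecutive words and emits a single LDRD or STRD when the tuning prefers it
;; and operands_ok_ldrd_strd accepts the register/offset combination, e.g.
;; "ldrd r0, r1, [r2, #8]" (registers illustrative).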
12408
12409 (define_insn "*thumb2_ldrd"
12410 [(set (match_operand:SI 0 "s_register_operand" "=r")
12411 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12412 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12413 (set (match_operand:SI 3 "s_register_operand" "=r")
12414 (mem:SI (plus:SI (match_dup 1)
12415 (match_operand:SI 4 "const_int_operand" ""))))]
12416 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12417 && current_tune->prefer_ldrd_strd
12418 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12419 && (operands_ok_ldrd_strd (operands[0], operands[3],
12420 operands[1], INTVAL (operands[2]),
12421 false, true))"
12422 "ldrd%?\t%0, %3, [%1, %2]"
12423 [(set_attr "type" "load2")
12424 (set_attr "predicable" "yes")])
12425
12426 (define_insn "*thumb2_ldrd_base"
12427 [(set (match_operand:SI 0 "s_register_operand" "=r")
12428 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12429 (set (match_operand:SI 2 "s_register_operand" "=r")
12430 (mem:SI (plus:SI (match_dup 1)
12431 (const_int 4))))]
12432 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12433 && current_tune->prefer_ldrd_strd
12434 && (operands_ok_ldrd_strd (operands[0], operands[2],
12435 operands[1], 0, false, true))"
12436 "ldrd%?\t%0, %2, [%1]"
12437 [(set_attr "type" "load2")
12438 (set_attr "predicable" "yes")])
12439
12440 (define_insn "*thumb2_ldrd_base_neg"
12441 [(set (match_operand:SI 0 "s_register_operand" "=r")
12442 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12443 (const_int -4))))
12444 (set (match_operand:SI 2 "s_register_operand" "=r")
12445 (mem:SI (match_dup 1)))]
12446 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12447 && current_tune->prefer_ldrd_strd
12448 && (operands_ok_ldrd_strd (operands[0], operands[2],
12449 operands[1], -4, false, true))"
12450 "ldrd%?\t%0, %2, [%1, #-4]"
12451 [(set_attr "type" "load2")
12452 (set_attr "predicable" "yes")])
12453
12454 (define_insn "*thumb2_strd"
12455 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12456 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12457 (match_operand:SI 2 "s_register_operand" "r"))
12458 (set (mem:SI (plus:SI (match_dup 0)
12459 (match_operand:SI 3 "const_int_operand" "")))
12460 (match_operand:SI 4 "s_register_operand" "r"))]
12461 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12462 && current_tune->prefer_ldrd_strd
12463 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12464 && (operands_ok_ldrd_strd (operands[2], operands[4],
12465 operands[0], INTVAL (operands[1]),
12466 false, false))"
12467 "strd%?\t%2, %4, [%0, %1]"
12468 [(set_attr "type" "store2")
12469 (set_attr "predicable" "yes")])
12470
12471 (define_insn "*thumb2_strd_base"
12472 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12473 (match_operand:SI 1 "s_register_operand" "r"))
12474 (set (mem:SI (plus:SI (match_dup 0)
12475 (const_int 4)))
12476 (match_operand:SI 2 "s_register_operand" "r"))]
12477 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12478 && current_tune->prefer_ldrd_strd
12479 && (operands_ok_ldrd_strd (operands[1], operands[2],
12480 operands[0], 0, false, false))"
12481 "strd%?\t%1, %2, [%0]"
12482 [(set_attr "type" "store2")
12483 (set_attr "predicable" "yes")])
12484
12485 (define_insn "*thumb2_strd_base_neg"
12486 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12487 (const_int -4)))
12488 (match_operand:SI 1 "s_register_operand" "r"))
12489 (set (mem:SI (match_dup 0))
12490 (match_operand:SI 2 "s_register_operand" "r"))]
12491 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12492 && current_tune->prefer_ldrd_strd
12493 && (operands_ok_ldrd_strd (operands[1], operands[2],
12494 operands[0], -4, false, false))"
12495 "strd%?\t%1, %2, [%0, #-4]"
12496 [(set_attr "type" "store2")
12497 (set_attr "predicable" "yes")])
12498
12499
12500 ;; Load the load/store double peephole optimizations.
12501 (include "ldrdstrd.md")
12502
12503 ;; Load the load/store multiple patterns.
12504 (include "ldmstm.md")
12505
12506 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12507 ;; the large register lists without explicit writeback generated for the APCS_FRAME epilogue.
12508 (define_insn "*load_multiple"
12509 [(match_parallel 0 "load_multiple_operation"
12510 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12511 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12512 ])]
12513 "TARGET_32BIT"
12514 "*
12515 {
12516 arm_output_multireg_pop (operands, /*return_pc=*/false,
12517 /*cond=*/const_true_rtx,
12518 /*reverse=*/false,
12519 /*update=*/false);
12520 return \"\";
12521 }
12522 "
12523 [(set_attr "predicable" "yes")]
12524 )
12525
12526 ;; Vector bits common to IWMMXT and Neon
12527 (include "vec-common.md")
12528 ;; Load the Intel Wireless Multimedia Extension patterns
12529 (include "iwmmxt.md")
12530 ;; Load the VFP co-processor patterns
12531 (include "vfp.md")
12532 ;; Thumb-2 patterns
12533 (include "thumb2.md")
12534 ;; Neon patterns
12535 (include "neon.md")
12536 ;; Synchronization Primitives
12537 (include "sync.md")
12538 ;; Fixed-point patterns
12539 (include "arm-fixed.md")