1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2013 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
25 ;; Beware of splitting Thumb1 patterns that output multiple
26 ;; assembly instructions, in particular instruction such as SBC and
27 ;; ADC which consume flags. For example, in the pattern thumb_subdi3
28 ;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29 ;; and then the Carry flag is used by SBC to compute the correct
30 ;; result. If we split thumb_subdi3 pattern into two separate RTL
31 ;; insns (using define_insn_and_split), the scheduler might place
32 ;; other RTL insns between SUB and SBC, possibly modifying the Carry
33 ;; flag used by SBC. This might happen because most Thumb1 patterns
34 ;; for flag-setting instructions do not have explicit RTL for setting
35 ;; or clobbering the flags. Instead, they have the attribute "conds"
36 ;; with value "set" or "clob". However, this attribute is not used to
37 ;; identify dependencies and therefore the scheduler might reorder
38 ;; these instruction. Currenly, this problem cannot happen because
39 ;; there are no separate Thumb1 patterns for individual instruction
40 ;; that consume flags (except conditional execution, which is treated
41 ;; differently). In particular there is no Thumb1 armv6-m pattern for
45 ;;---------------------------------------------------------------------------
48 ;; Register numbers -- All machine registers should be defined here
50 [(R0_REGNUM 0) ; First CORE register
51 (R1_REGNUM 1) ; Second CORE register
52 (IP_REGNUM 12) ; Scratch register
53 (SP_REGNUM 13) ; Stack pointer
54 (LR_REGNUM 14) ; Return address register
55 (PC_REGNUM 15) ; Program counter
56 (LAST_ARM_REGNUM 15) ;
57 (CC_REGNUM 100) ; Condition code pseudo register
58 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
61 ;; 3rd operand to select_dominance_cc_mode
68 ;; conditional compare combination
79 ;;---------------------------------------------------------------------------
82 ;; Processor type. This is created automatically from arm-cores.def.
83 (include "arm-tune.md")
85 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
86 ; generating ARM code. This is used to control the length of some insn
87 ; patterns that share the same RTL in both ARM and Thumb code.
88 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
90 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
91 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
93 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
94 (define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
96 ; We use this attribute to disable alternatives that can produce 32-bit
97 ; instructions inside an IT-block in Thumb2 state. ARMv8 deprecates IT blocks
98 ; that contain 32-bit instructions.
99 (define_attr "enabled_for_depr_it" "no,yes" (const_string "yes"))
101 ; This attribute is used to disable a predicated alternative when we have
103 (define_attr "predicable_short_it" "no,yes" (const_string "yes"))
105 ;; Operand number of an input operand that is shifted. Zero if the
106 ;; given instruction does not shift one of its input operands.
107 (define_attr "shift" "" (const_int 0))
109 ; Floating Point Unit. If we only have floating point emulation, then there
110 ; is no point in scheduling the floating point insns. (Well, for best
111 ; performance we should try and group them together).
112 (define_attr "fpu" "none,vfp"
113 (const (symbol_ref "arm_fpu_attr")))
115 (define_attr "predicated" "yes,no" (const_string "no"))
117 ; LENGTH of an instruction (in bytes)
118 (define_attr "length" ""
121 ; The architecture which supports the instruction (or alternative).
122 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
123 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
124 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
125 ; arm_arch6. This attribute is used to compute attribute "enabled",
126 ; use type "any" to enable an alternative in all cases.
127 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
128 (const_string "any"))
130 (define_attr "arch_enabled" "no,yes"
131 (cond [(eq_attr "arch" "any")
134 (and (eq_attr "arch" "a")
135 (match_test "TARGET_ARM"))
138 (and (eq_attr "arch" "t")
139 (match_test "TARGET_THUMB"))
142 (and (eq_attr "arch" "t1")
143 (match_test "TARGET_THUMB1"))
146 (and (eq_attr "arch" "t2")
147 (match_test "TARGET_THUMB2"))
150 (and (eq_attr "arch" "32")
151 (match_test "TARGET_32BIT"))
154 (and (eq_attr "arch" "v6")
155 (match_test "TARGET_32BIT && arm_arch6"))
158 (and (eq_attr "arch" "nov6")
159 (match_test "TARGET_32BIT && !arm_arch6"))
162 (and (eq_attr "arch" "avoid_neon_for_64bits")
163 (match_test "TARGET_NEON")
164 (not (match_test "TARGET_PREFER_NEON_64BITS")))
167 (and (eq_attr "arch" "neon_for_64bits")
168 (match_test "TARGET_NEON")
169 (match_test "TARGET_PREFER_NEON_64BITS"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
174 (const_string "yes")]
176 (const_string "no")))
178 (define_attr "opt" "any,speed,size"
179 (const_string "any"))
181 (define_attr "opt_enabled" "no,yes"
182 (cond [(eq_attr "opt" "any")
185 (and (eq_attr "opt" "speed")
186 (match_test "optimize_function_for_speed_p (cfun)"))
189 (and (eq_attr "opt" "size")
190 (match_test "optimize_function_for_size_p (cfun)"))
191 (const_string "yes")]
192 (const_string "no")))
194 ; Allows an insn to disable certain alternatives for reasons other than
196 (define_attr "insn_enabled" "no,yes"
197 (const_string "yes"))
199 ; Enable all alternatives that are both arch_enabled and insn_enabled.
200 (define_attr "enabled" "no,yes"
201 (cond [(eq_attr "insn_enabled" "no")
204 (and (eq_attr "predicable_short_it" "no")
205 (and (eq_attr "predicated" "yes")
206 (match_test "arm_restrict_it")))
209 (and (eq_attr "enabled_for_depr_it" "no")
210 (match_test "arm_restrict_it"))
213 (eq_attr "arch_enabled" "no")
216 (eq_attr "opt_enabled" "no")
218 (const_string "yes")))
220 ; POOL_RANGE is how far away from a constant pool entry that this insn
221 ; can be placed. If the distance is zero, then this insn will never
222 ; reference the pool.
223 ; Note that for Thumb constant pools the PC value is rounded down to the
224 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
225 ; Thumb insns) should be set to <max_range> - 2.
226 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
227 ; before its address. It is set to <max_range> - (8 + <data_size>).
228 (define_attr "arm_pool_range" "" (const_int 0))
229 (define_attr "thumb2_pool_range" "" (const_int 0))
230 (define_attr "arm_neg_pool_range" "" (const_int 0))
231 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
233 (define_attr "pool_range" ""
234 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
235 (attr "arm_pool_range")))
236 (define_attr "neg_pool_range" ""
237 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
238 (attr "arm_neg_pool_range")))
240 ; An assembler sequence may clobber the condition codes without us knowing.
241 ; If such an insn references the pool, then we have no way of knowing how,
242 ; so use the most conservative value for pool_range.
243 (define_asm_attributes
244 [(set_attr "conds" "clob")
245 (set_attr "length" "4")
246 (set_attr "pool_range" "250")])
248 ;; The instruction used to implement a particular pattern. This
249 ;; information is used by pipeline descriptions to provide accurate
250 ;; scheduling information.
253 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,sat,other"
254 (const_string "other"))
256 ; TYPE attribute is used to detect floating point instructions which, if
257 ; running on a co-processor can run in parallel with other, basic instructions
258 ; If write-buffer scheduling is enabled then it can also be used in the
259 ; scheduling of writes.
261 ; Classification of each insn
262 ; Note: vfp.md has different meanings for some of these, and some further
263 ; types as well. See that file for details.
264 ; simple_alu_imm a simple alu instruction that doesn't hit memory or fp
265 ; regs or have a shifted source operand and has an immediate
266 ; operand. This currently only tracks very basic immediate
268 ; alu_reg any alu instruction that doesn't hit memory or fp
269 ; regs or have a shifted source operand
270 ; and does not have an immediate operand. This is
272 ; simple_alu_shift covers UXTH, UXTB, SXTH, SXTB
273 ; alu_shift any data instruction that doesn't hit memory or fp
274 ; regs, but has a source operand shifted by a constant
275 ; alu_shift_reg any data instruction that doesn't hit memory or fp
276 ; regs, but has a source operand shifted by a register value
277 ; mult a multiply instruction
278 ; block blockage insn, this blocks all functional units
279 ; float a floating point arithmetic operation (subject to expansion)
280 ; fdivd DFmode floating point division
281 ; fdivs SFmode floating point division
282 ; f_load[sd] A single/double load from memory. Used for VFP unit.
283 ; f_store[sd] A single/double store to memory. Used for VFP unit.
284 ; f_flag a transfer of co-processor flags to the CPSR
285 ; f_2_r transfer float to core (no memory needed)
286 ; r_2_f transfer core to float
287 ; f_cvt convert floating<->integral
289 ; call a subroutine call
290 ; load_byte load byte(s) from memory to arm registers
291 ; load1 load 1 word from memory to arm registers
292 ; load2 load 2 words from memory to arm registers
293 ; load3 load 3 words from memory to arm registers
294 ; load4 load 4 words from memory to arm registers
295 ; store store 1 word to memory from arm registers
296 ; store2 store 2 words
297 ; store3 store 3 words
298 ; store4 store 4 (or more) words
353 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,\
354 umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
355 (const_string "mult")
356 (const_string "alu_reg")))
358 ; Is this an (integer side) multiply with a 64-bit result?
359 (define_attr "mul64" "no,yes"
362 "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
364 (const_string "no")))
366 ; wtype for WMMX insn scheduling purposes.
368 "none,wor,wxor,wand,wandn,wmov,tmcrr,tmrrc,wldr,wstr,tmcr,tmrc,wadd,wsub,wmul,wmac,wavg2,tinsr,textrm,wshufh,wcmpeq,wcmpgt,wmax,wmin,wpack,wunpckih,wunpckil,wunpckeh,wunpckel,wror,wsra,wsrl,wsll,wmadd,tmia,tmiaph,tmiaxy,tbcst,tmovmsk,wacc,waligni,walignr,tandc,textrc,torc,torvsc,wsad,wabs,wabsdiff,waddsubhx,wsubaddhx,wavg4,wmulw,wqmulm,wqmulwm,waddbhus,wqmiaxy,wmiaxy,wmiawxy,wmerge" (const_string "none"))
370 ; Load scheduling, set from the arm_ld_sched variable
371 ; initialized by arm_option_override()
372 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
374 ;; Classification of NEON instructions for scheduling purposes.
375 (define_attr "neon_type"
386 neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
387 neon_mul_qqq_8_16_32_ddd_32,\
388 neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
389 neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
391 neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
392 neon_mla_qqq_32_qqd_32_scalar,\
393 neon_mul_ddd_16_scalar_32_16_long_scalar,\
394 neon_mul_qqd_32_scalar,\
395 neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
400 neon_vqshl_vrshl_vqrshl_qqq,\
402 neon_fp_vadd_ddd_vabs_dd,\
403 neon_fp_vadd_qqq_vabs_qq,\
409 neon_fp_vmla_ddd_scalar,\
410 neon_fp_vmla_qqq_scalar,\
411 neon_fp_vrecps_vrsqrts_ddd,\
412 neon_fp_vrecps_vrsqrts_qqq,\
420 neon_vld2_2_regs_vld1_vld2_all_lanes,\
423 neon_vst1_1_2_regs_vst2_2_regs,\
425 neon_vst2_4_regs_vst3_vst4,\
427 neon_vld1_vld2_lane,\
428 neon_vld3_vld4_lane,\
429 neon_vst1_vst2_lane,\
430 neon_vst3_vst4_lane,\
431 neon_vld3_vld4_all_lanes,\
439 (const_string "none"))
441 ; condition codes: this one is used by final_prescan_insn to speed up
442 ; conditionalizing instructions. It saves having to scan the rtl to see if
443 ; it uses or alters the condition codes.
445 ; USE means that the condition codes are used by the insn in the process of
446 ; outputting code, this means (at present) that we can't use the insn in
449 ; SET means that the purpose of the insn is to set the condition codes in a
450 ; well defined manner.
452 ; CLOB means that the condition codes are altered in an undefined manner, if
453 ; they are altered at all
455 ; UNCONDITIONAL means the instruction can not be conditionally executed and
456 ; that the instruction does not use or alter the condition codes.
458 ; NOCOND means that the instruction does not use or alter the condition
459 ; codes but can be converted into a conditionally exectuted instruction.
461 (define_attr "conds" "use,set,clob,unconditional,nocond"
463 (ior (eq_attr "is_thumb1" "yes")
464 (eq_attr "type" "call"))
465 (const_string "clob")
466 (if_then_else (eq_attr "neon_type" "none")
467 (const_string "nocond")
468 (const_string "unconditional"))))
470 ; Predicable means that the insn can be conditionally executed based on
471 ; an automatically added predicate (additional patterns are generated by
472 ; gen...). We default to 'no' because no Thumb patterns match this rule
473 ; and not all ARM patterns do.
474 (define_attr "predicable" "no,yes" (const_string "no"))
476 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
477 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
478 ; suffer blockages enough to warrant modelling this (and it can adversely
479 ; affect the schedule).
480 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
482 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
483 ; to stall the processor. Used with model_wbuf above.
484 (define_attr "write_conflict" "no,yes"
485 (if_then_else (eq_attr "type"
488 (const_string "no")))
490 ; Classify the insns into those that take one cycle and those that take more
491 ; than one on the main cpu execution unit.
492 (define_attr "core_cycles" "single,multi"
493 (if_then_else (eq_attr "type"
494 "simple_alu_imm,alu_reg,\
495 simple_alu_shift,alu_shift,\
497 (const_string "single")
498 (const_string "multi")))
500 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
501 ;; distant label. Only applicable to Thumb code.
502 (define_attr "far_jump" "yes,no" (const_string "no"))
505 ;; The number of machine instructions this pattern expands to.
506 ;; Used for Thumb-2 conditional execution.
507 (define_attr "ce_count" "" (const_int 1))
509 ;;---------------------------------------------------------------------------
512 (include "unspecs.md")
514 ;;---------------------------------------------------------------------------
517 (include "iterators.md")
519 ;;---------------------------------------------------------------------------
522 (include "predicates.md")
523 (include "constraints.md")
525 ;;---------------------------------------------------------------------------
526 ;; Pipeline descriptions
528 (define_attr "tune_cortexr4" "yes,no"
530 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
532 (const_string "no"))))
534 ;; True if the generic scheduling description should be used.
536 (define_attr "generic_sched" "yes,no"
538 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexa53,cortexm4,marvell_pj4")
539 (eq_attr "tune_cortexr4" "yes"))
541 (const_string "yes"))))
543 (define_attr "generic_vfp" "yes,no"
545 (and (eq_attr "fpu" "vfp")
546 (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexa53,cortexm4,marvell_pj4")
547 (eq_attr "tune_cortexr4" "no"))
549 (const_string "no"))))
551 (include "marvell-f-iwmmxt.md")
552 (include "arm-generic.md")
553 (include "arm926ejs.md")
554 (include "arm1020e.md")
555 (include "arm1026ejs.md")
556 (include "arm1136jfs.md")
558 (include "fa606te.md")
559 (include "fa626te.md")
560 (include "fmp626.md")
561 (include "fa726te.md")
562 (include "cortex-a5.md")
563 (include "cortex-a7.md")
564 (include "cortex-a8.md")
565 (include "cortex-a9.md")
566 (include "cortex-a15.md")
567 (include "cortex-a53.md")
568 (include "cortex-r4.md")
569 (include "cortex-r4f.md")
570 (include "cortex-m4.md")
571 (include "cortex-m4-fpu.md")
573 (include "marvell-pj4.md")
576 ;;---------------------------------------------------------------------------
581 ;; Note: For DImode insns, there is normally no reason why operands should
582 ;; not be in the same register, what we don't want is for something being
583 ;; written to partially overlap something that is an input.
585 (define_expand "adddi3"
587 [(set (match_operand:DI 0 "s_register_operand" "")
588 (plus:DI (match_operand:DI 1 "s_register_operand" "")
589 (match_operand:DI 2 "arm_adddi_operand" "")))
590 (clobber (reg:CC CC_REGNUM))])]
595 if (!REG_P (operands[1]))
596 operands[1] = force_reg (DImode, operands[1]);
597 if (!REG_P (operands[2]))
598 operands[2] = force_reg (DImode, operands[2]);
603 (define_insn "*thumb1_adddi3"
604 [(set (match_operand:DI 0 "register_operand" "=l")
605 (plus:DI (match_operand:DI 1 "register_operand" "%0")
606 (match_operand:DI 2 "register_operand" "l")))
607 (clobber (reg:CC CC_REGNUM))
610 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
611 [(set_attr "length" "4")]
614 (define_insn_and_split "*arm_adddi3"
615 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,&r")
616 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
617 (match_operand:DI 2 "arm_adddi_operand" "r, 0, r, Dd, Dd")))
618 (clobber (reg:CC CC_REGNUM))]
619 "TARGET_32BIT && !TARGET_NEON"
621 "TARGET_32BIT && reload_completed
622 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
623 [(parallel [(set (reg:CC_C CC_REGNUM)
624 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
626 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
627 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
628 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
631 operands[3] = gen_highpart (SImode, operands[0]);
632 operands[0] = gen_lowpart (SImode, operands[0]);
633 operands[4] = gen_highpart (SImode, operands[1]);
634 operands[1] = gen_lowpart (SImode, operands[1]);
635 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
636 operands[2] = gen_lowpart (SImode, operands[2]);
638 [(set_attr "conds" "clob")
639 (set_attr "length" "8")]
642 (define_insn_and_split "*adddi_sesidi_di"
643 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
644 (plus:DI (sign_extend:DI
645 (match_operand:SI 2 "s_register_operand" "r,r"))
646 (match_operand:DI 1 "s_register_operand" "0,r")))
647 (clobber (reg:CC CC_REGNUM))]
650 "TARGET_32BIT && reload_completed"
651 [(parallel [(set (reg:CC_C CC_REGNUM)
652 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
654 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
655 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
658 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
661 operands[3] = gen_highpart (SImode, operands[0]);
662 operands[0] = gen_lowpart (SImode, operands[0]);
663 operands[4] = gen_highpart (SImode, operands[1]);
664 operands[1] = gen_lowpart (SImode, operands[1]);
665 operands[2] = gen_lowpart (SImode, operands[2]);
667 [(set_attr "conds" "clob")
668 (set_attr "length" "8")]
671 (define_insn_and_split "*adddi_zesidi_di"
672 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
673 (plus:DI (zero_extend:DI
674 (match_operand:SI 2 "s_register_operand" "r,r"))
675 (match_operand:DI 1 "s_register_operand" "0,r")))
676 (clobber (reg:CC CC_REGNUM))]
679 "TARGET_32BIT && reload_completed"
680 [(parallel [(set (reg:CC_C CC_REGNUM)
681 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
683 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
684 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
685 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
688 operands[3] = gen_highpart (SImode, operands[0]);
689 operands[0] = gen_lowpart (SImode, operands[0]);
690 operands[4] = gen_highpart (SImode, operands[1]);
691 operands[1] = gen_lowpart (SImode, operands[1]);
692 operands[2] = gen_lowpart (SImode, operands[2]);
694 [(set_attr "conds" "clob")
695 (set_attr "length" "8")]
698 (define_expand "addsi3"
699 [(set (match_operand:SI 0 "s_register_operand" "")
700 (plus:SI (match_operand:SI 1 "s_register_operand" "")
701 (match_operand:SI 2 "reg_or_int_operand" "")))]
704 if (TARGET_32BIT && CONST_INT_P (operands[2]))
706 arm_split_constant (PLUS, SImode, NULL_RTX,
707 INTVAL (operands[2]), operands[0], operands[1],
708 optimize && can_create_pseudo_p ());
714 ; If there is a scratch available, this will be faster than synthesizing the
717 [(match_scratch:SI 3 "r")
718 (set (match_operand:SI 0 "arm_general_register_operand" "")
719 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
720 (match_operand:SI 2 "const_int_operand" "")))]
722 !(const_ok_for_arm (INTVAL (operands[2]))
723 || const_ok_for_arm (-INTVAL (operands[2])))
724 && const_ok_for_arm (~INTVAL (operands[2]))"
725 [(set (match_dup 3) (match_dup 2))
726 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
730 ;; The r/r/k alternative is required when reloading the address
731 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
732 ;; put the duplicated register first, and not try the commutative version.
733 (define_insn_and_split "*arm_addsi3"
734 [(set (match_operand:SI 0 "s_register_operand" "=rk, r,k, r,r, k, r, k,k,r, k, r")
735 (plus:SI (match_operand:SI 1 "s_register_operand" "%0, rk,k, r,rk,k, rk,k,r,rk,k, rk")
736 (match_operand:SI 2 "reg_or_int_operand" "rk, rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
748 subw%?\\t%0, %1, #%n2
749 subw%?\\t%0, %1, #%n2
752 && CONST_INT_P (operands[2])
753 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
754 && (reload_completed || !arm_eliminable_register (operands[1]))"
755 [(clobber (const_int 0))]
757 arm_split_constant (PLUS, SImode, curr_insn,
758 INTVAL (operands[2]), operands[0],
762 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,16")
763 (set_attr "predicable" "yes")
764 (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
765 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
766 (const_string "simple_alu_imm")
767 (const_string "alu_reg")))
771 (define_insn_and_split "*thumb1_addsi3"
772 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
773 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
774 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
777 static const char * const asms[] =
779 \"add\\t%0, %0, %2\",
780 \"sub\\t%0, %0, #%n2\",
781 \"add\\t%0, %1, %2\",
782 \"add\\t%0, %0, %2\",
783 \"add\\t%0, %0, %2\",
784 \"add\\t%0, %1, %2\",
785 \"add\\t%0, %1, %2\",
790 if ((which_alternative == 2 || which_alternative == 6)
791 && CONST_INT_P (operands[2])
792 && INTVAL (operands[2]) < 0)
793 return \"sub\\t%0, %1, #%n2\";
794 return asms[which_alternative];
796 "&& reload_completed && CONST_INT_P (operands[2])
797 && ((operands[1] != stack_pointer_rtx
798 && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
799 || (operands[1] == stack_pointer_rtx
800 && INTVAL (operands[2]) > 1020))"
801 [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
802 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
804 HOST_WIDE_INT offset = INTVAL (operands[2]);
805 if (operands[1] == stack_pointer_rtx)
811 else if (offset < -255)
814 operands[3] = GEN_INT (offset);
815 operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
817 [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
820 ;; Reloading and elimination of the frame pointer can
821 ;; sometimes cause this optimization to be missed.
823 [(set (match_operand:SI 0 "arm_general_register_operand" "")
824 (match_operand:SI 1 "const_int_operand" ""))
826 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
828 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
829 && (INTVAL (operands[1]) & 3) == 0"
830 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
834 (define_insn "addsi3_compare0"
835 [(set (reg:CC_NOOV CC_REGNUM)
837 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
838 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
840 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
841 (plus:SI (match_dup 1) (match_dup 2)))]
847 [(set_attr "conds" "set")
848 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")]
851 (define_insn "*addsi3_compare0_scratch"
852 [(set (reg:CC_NOOV CC_REGNUM)
854 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
855 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
862 [(set_attr "conds" "set")
863 (set_attr "predicable" "yes")
864 (set_attr "type" "simple_alu_imm, simple_alu_imm, *")
868 (define_insn "*compare_negsi_si"
869 [(set (reg:CC_Z CC_REGNUM)
871 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
872 (match_operand:SI 1 "s_register_operand" "r")))]
875 [(set_attr "conds" "set")
876 (set_attr "predicable" "yes")]
879 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
880 ;; addend is a constant.
881 (define_insn "cmpsi2_addneg"
882 [(set (reg:CC CC_REGNUM)
884 (match_operand:SI 1 "s_register_operand" "r,r")
885 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
886 (set (match_operand:SI 0 "s_register_operand" "=r,r")
887 (plus:SI (match_dup 1)
888 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
889 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
892 sub%.\\t%0, %1, #%n3"
893 [(set_attr "conds" "set")]
896 ;; Convert the sequence
898 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
902 ;; bcs dest ((unsigned)rn >= 1)
903 ;; similarly for the beq variant using bcc.
904 ;; This is a common looping idiom (while (n--))
906 [(set (match_operand:SI 0 "arm_general_register_operand" "")
907 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
909 (set (match_operand 2 "cc_register" "")
910 (compare (match_dup 0) (const_int -1)))
912 (if_then_else (match_operator 3 "equality_operator"
913 [(match_dup 2) (const_int 0)])
914 (match_operand 4 "" "")
915 (match_operand 5 "" "")))]
916 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
920 (match_dup 1) (const_int 1)))
921 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
923 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
926 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
927 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
930 operands[2], const0_rtx);"
933 ;; The next four insns work because they compare the result with one of
934 ;; the operands, and we know that the use of the condition code is
935 ;; either GEU or LTU, so we can use the carry flag from the addition
936 ;; instead of doing the compare a second time.
937 (define_insn "*addsi3_compare_op1"
938 [(set (reg:CC_C CC_REGNUM)
940 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
941 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
943 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
944 (plus:SI (match_dup 1) (match_dup 2)))]
950 [(set_attr "conds" "set")
951 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
954 (define_insn "*addsi3_compare_op2"
955 [(set (reg:CC_C CC_REGNUM)
957 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
958 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
960 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
961 (plus:SI (match_dup 1) (match_dup 2)))]
966 sub%.\\t%0, %1, #%n2"
967 [(set_attr "conds" "set")
968 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
971 (define_insn "*compare_addsi2_op0"
972 [(set (reg:CC_C CC_REGNUM)
974 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
975 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
982 [(set_attr "conds" "set")
983 (set_attr "predicable" "yes")
984 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
987 (define_insn "*compare_addsi2_op1"
988 [(set (reg:CC_C CC_REGNUM)
990 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
991 (match_operand:SI 1 "arm_add_operand" "I,L,r"))
998 [(set_attr "conds" "set")
999 (set_attr "predicable" "yes")
1000 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
1003 (define_insn "*addsi3_carryin_<optab>"
1004 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1005 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r")
1006 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1007 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1011 sbc%?\\t%0, %1, #%B2"
1012 [(set_attr "conds" "use")
1013 (set_attr "predicable" "yes")]
1016 (define_insn "*addsi3_carryin_alt2_<optab>"
1017 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1018 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
1019 (match_operand:SI 1 "s_register_operand" "%r,r"))
1020 (match_operand:SI 2 "arm_rhs_operand" "rI,K")))]
1024 sbc%?\\t%0, %1, #%B2"
1025 [(set_attr "conds" "use")
1026 (set_attr "predicable" "yes")]
1029 (define_insn "*addsi3_carryin_shift_<optab>"
1030 [(set (match_operand:SI 0 "s_register_operand" "=r")
1032 (match_operator:SI 2 "shift_operator"
1033 [(match_operand:SI 3 "s_register_operand" "r")
1034 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1035 (match_operand:SI 1 "s_register_operand" "r"))
1036 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1038 "adc%?\\t%0, %1, %3%S2"
1039 [(set_attr "conds" "use")
1040 (set_attr "predicable" "yes")
1041 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1042 (const_string "alu_shift")
1043 (const_string "alu_shift_reg")))]
1046 (define_insn "*addsi3_carryin_clobercc_<optab>"
1047 [(set (match_operand:SI 0 "s_register_operand" "=r")
1048 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1049 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1050 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1051 (clobber (reg:CC CC_REGNUM))]
1053 "adc%.\\t%0, %1, %2"
1054 [(set_attr "conds" "set")]
1057 (define_insn "*subsi3_carryin"
1058 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1059 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I")
1060 (match_operand:SI 2 "s_register_operand" "r,r"))
1061 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1066 [(set_attr "conds" "use")
1067 (set_attr "arch" "*,a")
1068 (set_attr "predicable" "yes")]
1071 (define_insn "*subsi3_carryin_const"
1072 [(set (match_operand:SI 0 "s_register_operand" "=r")
1073 (minus:SI (plus:SI (match_operand:SI 1 "reg_or_int_operand" "r")
1074 (match_operand:SI 2 "arm_not_operand" "K"))
1075 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1077 "sbc\\t%0, %1, #%B2"
1078 [(set_attr "conds" "use")]
1081 (define_insn "*subsi3_carryin_compare"
1082 [(set (reg:CC CC_REGNUM)
1083 (compare:CC (match_operand:SI 1 "s_register_operand" "r")
1084 (match_operand:SI 2 "s_register_operand" "r")))
1085 (set (match_operand:SI 0 "s_register_operand" "=r")
1086 (minus:SI (minus:SI (match_dup 1)
1088 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1091 [(set_attr "conds" "set")]
1094 (define_insn "*subsi3_carryin_compare_const"
1095 [(set (reg:CC CC_REGNUM)
1096 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1097 (match_operand:SI 2 "arm_not_operand" "K")))
1098 (set (match_operand:SI 0 "s_register_operand" "=r")
1099 (minus:SI (plus:SI (match_dup 1)
1101 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1103 "sbcs\\t%0, %1, #%B2"
1104 [(set_attr "conds" "set")]
1107 (define_insn "*subsi3_carryin_shift"
1108 [(set (match_operand:SI 0 "s_register_operand" "=r")
1110 (match_operand:SI 1 "s_register_operand" "r")
1111 (match_operator:SI 2 "shift_operator"
1112 [(match_operand:SI 3 "s_register_operand" "r")
1113 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1114 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1116 "sbc%?\\t%0, %1, %3%S2"
1117 [(set_attr "conds" "use")
1118 (set_attr "predicable" "yes")
1119 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1120 (const_string "alu_shift")
1121 (const_string "alu_shift_reg")))]
1124 (define_insn "*rsbsi3_carryin_shift"
1125 [(set (match_operand:SI 0 "s_register_operand" "=r")
1127 (match_operator:SI 2 "shift_operator"
1128 [(match_operand:SI 3 "s_register_operand" "r")
1129 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1130 (match_operand:SI 1 "s_register_operand" "r"))
1131 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1133 "rsc%?\\t%0, %1, %3%S2"
1134 [(set_attr "conds" "use")
1135 (set_attr "predicable" "yes")
1136 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1137 (const_string "alu_shift")
1138 (const_string "alu_shift_reg")))]
1141 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1143 [(set (match_operand:SI 0 "s_register_operand" "")
1144 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1145 (match_operand:SI 2 "s_register_operand" ""))
1147 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1149 [(set (match_dup 3) (match_dup 1))
1150 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1152 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1155 (define_expand "addsf3"
1156 [(set (match_operand:SF 0 "s_register_operand" "")
1157 (plus:SF (match_operand:SF 1 "s_register_operand" "")
1158 (match_operand:SF 2 "s_register_operand" "")))]
1159 "TARGET_32BIT && TARGET_HARD_FLOAT"
1163 (define_expand "adddf3"
1164 [(set (match_operand:DF 0 "s_register_operand" "")
1165 (plus:DF (match_operand:DF 1 "s_register_operand" "")
1166 (match_operand:DF 2 "s_register_operand" "")))]
1167 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1171 (define_expand "subdi3"
1173 [(set (match_operand:DI 0 "s_register_operand" "")
1174 (minus:DI (match_operand:DI 1 "s_register_operand" "")
1175 (match_operand:DI 2 "s_register_operand" "")))
1176 (clobber (reg:CC CC_REGNUM))])]
1181 if (!REG_P (operands[1]))
1182 operands[1] = force_reg (DImode, operands[1]);
1183 if (!REG_P (operands[2]))
1184 operands[2] = force_reg (DImode, operands[2]);
1189 (define_insn_and_split "*arm_subdi3"
1190 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
1191 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1192 (match_operand:DI 2 "s_register_operand" "r,0,0")))
1193 (clobber (reg:CC CC_REGNUM))]
1194 "TARGET_32BIT && !TARGET_NEON"
1195 "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1196 "&& reload_completed"
1197 [(parallel [(set (reg:CC CC_REGNUM)
1198 (compare:CC (match_dup 1) (match_dup 2)))
1199 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1200 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1201 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1203 operands[3] = gen_highpart (SImode, operands[0]);
1204 operands[0] = gen_lowpart (SImode, operands[0]);
1205 operands[4] = gen_highpart (SImode, operands[1]);
1206 operands[1] = gen_lowpart (SImode, operands[1]);
1207 operands[5] = gen_highpart (SImode, operands[2]);
1208 operands[2] = gen_lowpart (SImode, operands[2]);
1210 [(set_attr "conds" "clob")
1211 (set_attr "length" "8")]
1214 (define_insn "*thumb_subdi3"
1215 [(set (match_operand:DI 0 "register_operand" "=l")
1216 (minus:DI (match_operand:DI 1 "register_operand" "0")
1217 (match_operand:DI 2 "register_operand" "l")))
1218 (clobber (reg:CC CC_REGNUM))]
1220 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1221 [(set_attr "length" "4")]
1224 (define_insn_and_split "*subdi_di_zesidi"
1225 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1226 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1228 (match_operand:SI 2 "s_register_operand" "r,r"))))
1229 (clobber (reg:CC CC_REGNUM))]
1231 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1232 "&& reload_completed"
1233 [(parallel [(set (reg:CC CC_REGNUM)
1234 (compare:CC (match_dup 1) (match_dup 2)))
1235 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1236 (set (match_dup 3) (minus:SI (plus:SI (match_dup 4) (match_dup 5))
1237 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1239 operands[3] = gen_highpart (SImode, operands[0]);
1240 operands[0] = gen_lowpart (SImode, operands[0]);
1241 operands[4] = gen_highpart (SImode, operands[1]);
1242 operands[1] = gen_lowpart (SImode, operands[1]);
1243 operands[5] = GEN_INT (~0);
1245 [(set_attr "conds" "clob")
1246 (set_attr "length" "8")]
1249 (define_insn_and_split "*subdi_di_sesidi"
1250 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1251 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1253 (match_operand:SI 2 "s_register_operand" "r,r"))))
1254 (clobber (reg:CC CC_REGNUM))]
1256 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1257 "&& reload_completed"
1258 [(parallel [(set (reg:CC CC_REGNUM)
1259 (compare:CC (match_dup 1) (match_dup 2)))
1260 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1261 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
1262 (ashiftrt:SI (match_dup 2)
1264 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1266 operands[3] = gen_highpart (SImode, operands[0]);
1267 operands[0] = gen_lowpart (SImode, operands[0]);
1268 operands[4] = gen_highpart (SImode, operands[1]);
1269 operands[1] = gen_lowpart (SImode, operands[1]);
1271 [(set_attr "conds" "clob")
1272 (set_attr "length" "8")]
1275 (define_insn_and_split "*subdi_zesidi_di"
1276 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1277 (minus:DI (zero_extend:DI
1278 (match_operand:SI 2 "s_register_operand" "r,r"))
1279 (match_operand:DI 1 "s_register_operand" "0,r")))
1280 (clobber (reg:CC CC_REGNUM))]
1282 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1284 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
1285 "&& reload_completed"
1286 [(parallel [(set (reg:CC CC_REGNUM)
1287 (compare:CC (match_dup 2) (match_dup 1)))
1288 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1289 (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
1290 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1292 operands[3] = gen_highpart (SImode, operands[0]);
1293 operands[0] = gen_lowpart (SImode, operands[0]);
1294 operands[4] = gen_highpart (SImode, operands[1]);
1295 operands[1] = gen_lowpart (SImode, operands[1]);
1297 [(set_attr "conds" "clob")
1298 (set_attr "length" "8")]
1301 (define_insn_and_split "*subdi_sesidi_di"
1302 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1303 (minus:DI (sign_extend:DI
1304 (match_operand:SI 2 "s_register_operand" "r,r"))
1305 (match_operand:DI 1 "s_register_operand" "0,r")))
1306 (clobber (reg:CC CC_REGNUM))]
1308 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1310 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
1311 "&& reload_completed"
1312 [(parallel [(set (reg:CC CC_REGNUM)
1313 (compare:CC (match_dup 2) (match_dup 1)))
1314 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1315 (set (match_dup 3) (minus:SI (minus:SI
1316 (ashiftrt:SI (match_dup 2)
1319 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1321 operands[3] = gen_highpart (SImode, operands[0]);
1322 operands[0] = gen_lowpart (SImode, operands[0]);
1323 operands[4] = gen_highpart (SImode, operands[1]);
1324 operands[1] = gen_lowpart (SImode, operands[1]);
1326 [(set_attr "conds" "clob")
1327 (set_attr "length" "8")]
1330 (define_insn_and_split "*subdi_zesidi_zesidi"
1331 [(set (match_operand:DI 0 "s_register_operand" "=r")
1332 (minus:DI (zero_extend:DI
1333 (match_operand:SI 1 "s_register_operand" "r"))
1335 (match_operand:SI 2 "s_register_operand" "r"))))
1336 (clobber (reg:CC CC_REGNUM))]
1338 "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1339 "&& reload_completed"
1340 [(parallel [(set (reg:CC CC_REGNUM)
1341 (compare:CC (match_dup 1) (match_dup 2)))
1342 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1343 (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
1344 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
1346 operands[3] = gen_highpart (SImode, operands[0]);
1347 operands[0] = gen_lowpart (SImode, operands[0]);
1349 [(set_attr "conds" "clob")
1350 (set_attr "length" "8")]
1353 (define_expand "subsi3"
1354 [(set (match_operand:SI 0 "s_register_operand" "")
1355 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1356 (match_operand:SI 2 "s_register_operand" "")))]
1359 if (CONST_INT_P (operands[1]))
1363 arm_split_constant (MINUS, SImode, NULL_RTX,
1364 INTVAL (operands[1]), operands[0],
1365 operands[2], optimize && can_create_pseudo_p ());
1368 else /* TARGET_THUMB1 */
1369 operands[1] = force_reg (SImode, operands[1]);
1374 (define_insn "thumb1_subsi3_insn"
1375 [(set (match_operand:SI 0 "register_operand" "=l")
1376 (minus:SI (match_operand:SI 1 "register_operand" "l")
1377 (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1380 [(set_attr "length" "2")
1381 (set_attr "conds" "set")])
1383 ; ??? Check Thumb-2 split length
1384 (define_insn_and_split "*arm_subsi3_insn"
1385 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,rk,r")
1386 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,r,k,?n")
1387 (match_operand:SI 2 "reg_or_int_operand" "r,I,r,r, r")))]
1395 "&& (CONST_INT_P (operands[1])
1396 && !const_ok_for_arm (INTVAL (operands[1])))"
1397 [(clobber (const_int 0))]
1399 arm_split_constant (MINUS, SImode, curr_insn,
1400 INTVAL (operands[1]), operands[0], operands[2], 0);
1403 [(set_attr "length" "4,4,4,4,16")
1404 (set_attr "predicable" "yes")
1405 (set_attr "type" "*,simple_alu_imm,*,*,*")]
1409 [(match_scratch:SI 3 "r")
1410 (set (match_operand:SI 0 "arm_general_register_operand" "")
1411 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1412 (match_operand:SI 2 "arm_general_register_operand" "")))]
1414 && !const_ok_for_arm (INTVAL (operands[1]))
1415 && const_ok_for_arm (~INTVAL (operands[1]))"
1416 [(set (match_dup 3) (match_dup 1))
1417 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1421 (define_insn "*subsi3_compare0"
1422 [(set (reg:CC_NOOV CC_REGNUM)
1424 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1425 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1427 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1428 (minus:SI (match_dup 1) (match_dup 2)))]
1434 [(set_attr "conds" "set")
1435 (set_attr "type" "simple_alu_imm,*,*")]
1438 (define_insn "subsi3_compare"
1439 [(set (reg:CC CC_REGNUM)
1440 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1441 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1442 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1443 (minus:SI (match_dup 1) (match_dup 2)))]
1449 [(set_attr "conds" "set")
1450 (set_attr "type" "simple_alu_imm,*,*")]
1453 (define_expand "subsf3"
1454 [(set (match_operand:SF 0 "s_register_operand" "")
1455 (minus:SF (match_operand:SF 1 "s_register_operand" "")
1456 (match_operand:SF 2 "s_register_operand" "")))]
1457 "TARGET_32BIT && TARGET_HARD_FLOAT"
1461 (define_expand "subdf3"
1462 [(set (match_operand:DF 0 "s_register_operand" "")
1463 (minus:DF (match_operand:DF 1 "s_register_operand" "")
1464 (match_operand:DF 2 "s_register_operand" "")))]
1465 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1470 ;; Multiplication insns
1472 (define_expand "mulsi3"
1473 [(set (match_operand:SI 0 "s_register_operand" "")
1474 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1475 (match_operand:SI 1 "s_register_operand" "")))]
1480 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1481 (define_insn "*arm_mulsi3"
1482 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1483 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1484 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1485 "TARGET_32BIT && !arm_arch6"
1486 "mul%?\\t%0, %2, %1"
1487 [(set_attr "insn" "mul")
1488 (set_attr "predicable" "yes")]
1491 (define_insn "*arm_mulsi3_v6"
1492 [(set (match_operand:SI 0 "s_register_operand" "=r")
1493 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1494 (match_operand:SI 2 "s_register_operand" "r")))]
1495 "TARGET_32BIT && arm_arch6"
1496 "mul%?\\t%0, %1, %2"
1497 [(set_attr "insn" "mul")
1498 (set_attr "predicable" "yes")]
1501 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1502 ; 1 and 2; are the same, because reload will make operand 0 match
1503 ; operand 1 without realizing that this conflicts with operand 2. We fix
1504 ; this by adding another alternative to match this case, and then `reload'
1505 ; it ourselves. This alternative must come first.
1506 (define_insn "*thumb_mulsi3"
1507 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1508 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1509 (match_operand:SI 2 "register_operand" "l,l,l")))]
1510 "TARGET_THUMB1 && !arm_arch6"
1512 if (which_alternative < 2)
1513 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1515 return \"mul\\t%0, %2\";
1517 [(set_attr "length" "4,4,2")
1518 (set_attr "insn" "mul")]
1521 (define_insn "*thumb_mulsi3_v6"
1522 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1523 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1524 (match_operand:SI 2 "register_operand" "l,0,0")))]
1525 "TARGET_THUMB1 && arm_arch6"
1530 [(set_attr "length" "2")
1531 (set_attr "insn" "mul")]
1534 (define_insn "*mulsi3_compare0"
1535 [(set (reg:CC_NOOV CC_REGNUM)
1536 (compare:CC_NOOV (mult:SI
1537 (match_operand:SI 2 "s_register_operand" "r,r")
1538 (match_operand:SI 1 "s_register_operand" "%0,r"))
1540 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1541 (mult:SI (match_dup 2) (match_dup 1)))]
1542 "TARGET_ARM && !arm_arch6"
1543 "mul%.\\t%0, %2, %1"
1544 [(set_attr "conds" "set")
1545 (set_attr "insn" "muls")]
1548 (define_insn "*mulsi3_compare0_v6"
1549 [(set (reg:CC_NOOV CC_REGNUM)
1550 (compare:CC_NOOV (mult:SI
1551 (match_operand:SI 2 "s_register_operand" "r")
1552 (match_operand:SI 1 "s_register_operand" "r"))
1554 (set (match_operand:SI 0 "s_register_operand" "=r")
1555 (mult:SI (match_dup 2) (match_dup 1)))]
1556 "TARGET_ARM && arm_arch6 && optimize_size"
1557 "mul%.\\t%0, %2, %1"
1558 [(set_attr "conds" "set")
1559 (set_attr "insn" "muls")]
1562 (define_insn "*mulsi_compare0_scratch"
1563 [(set (reg:CC_NOOV CC_REGNUM)
1564 (compare:CC_NOOV (mult:SI
1565 (match_operand:SI 2 "s_register_operand" "r,r")
1566 (match_operand:SI 1 "s_register_operand" "%0,r"))
1568 (clobber (match_scratch:SI 0 "=&r,&r"))]
1569 "TARGET_ARM && !arm_arch6"
1570 "mul%.\\t%0, %2, %1"
1571 [(set_attr "conds" "set")
1572 (set_attr "insn" "muls")]
1575 (define_insn "*mulsi_compare0_scratch_v6"
1576 [(set (reg:CC_NOOV CC_REGNUM)
1577 (compare:CC_NOOV (mult:SI
1578 (match_operand:SI 2 "s_register_operand" "r")
1579 (match_operand:SI 1 "s_register_operand" "r"))
1581 (clobber (match_scratch:SI 0 "=r"))]
1582 "TARGET_ARM && arm_arch6 && optimize_size"
1583 "mul%.\\t%0, %2, %1"
1584 [(set_attr "conds" "set")
1585 (set_attr "insn" "muls")]
1588 ;; Unnamed templates to match MLA instruction.
1590 (define_insn "*mulsi3addsi"
1591 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1593 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1594 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1595 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1596 "TARGET_32BIT && !arm_arch6"
1597 "mla%?\\t%0, %2, %1, %3"
1598 [(set_attr "insn" "mla")
1599 (set_attr "predicable" "yes")]
1602 (define_insn "*mulsi3addsi_v6"
1603 [(set (match_operand:SI 0 "s_register_operand" "=r")
1605 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1606 (match_operand:SI 1 "s_register_operand" "r"))
1607 (match_operand:SI 3 "s_register_operand" "r")))]
1608 "TARGET_32BIT && arm_arch6"
1609 "mla%?\\t%0, %2, %1, %3"
1610 [(set_attr "insn" "mla")
1611 (set_attr "predicable" "yes")]
1614 (define_insn "*mulsi3addsi_compare0"
1615 [(set (reg:CC_NOOV CC_REGNUM)
1618 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1619 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1620 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1622 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1623 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1625 "TARGET_ARM && arm_arch6"
1626 "mla%.\\t%0, %2, %1, %3"
1627 [(set_attr "conds" "set")
1628 (set_attr "insn" "mlas")]
1631 (define_insn "*mulsi3addsi_compare0_v6"
1632 [(set (reg:CC_NOOV CC_REGNUM)
1635 (match_operand:SI 2 "s_register_operand" "r")
1636 (match_operand:SI 1 "s_register_operand" "r"))
1637 (match_operand:SI 3 "s_register_operand" "r"))
1639 (set (match_operand:SI 0 "s_register_operand" "=r")
1640 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1642 "TARGET_ARM && arm_arch6 && optimize_size"
1643 "mla%.\\t%0, %2, %1, %3"
1644 [(set_attr "conds" "set")
1645 (set_attr "insn" "mlas")]
1648 (define_insn "*mulsi3addsi_compare0_scratch"
1649 [(set (reg:CC_NOOV CC_REGNUM)
1652 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1653 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1654 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1656 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1657 "TARGET_ARM && !arm_arch6"
1658 "mla%.\\t%0, %2, %1, %3"
1659 [(set_attr "conds" "set")
1660 (set_attr "insn" "mlas")]
1663 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1664 [(set (reg:CC_NOOV CC_REGNUM)
1667 (match_operand:SI 2 "s_register_operand" "r")
1668 (match_operand:SI 1 "s_register_operand" "r"))
1669 (match_operand:SI 3 "s_register_operand" "r"))
1671 (clobber (match_scratch:SI 0 "=r"))]
1672 "TARGET_ARM && arm_arch6 && optimize_size"
1673 "mla%.\\t%0, %2, %1, %3"
1674 [(set_attr "conds" "set")
1675 (set_attr "insn" "mlas")]
1678 (define_insn "*mulsi3subsi"
1679 [(set (match_operand:SI 0 "s_register_operand" "=r")
1681 (match_operand:SI 3 "s_register_operand" "r")
1682 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1683 (match_operand:SI 1 "s_register_operand" "r"))))]
1684 "TARGET_32BIT && arm_arch_thumb2"
1685 "mls%?\\t%0, %2, %1, %3"
1686 [(set_attr "insn" "mla")
1687 (set_attr "predicable" "yes")]
1690 (define_expand "maddsidi4"
1691 [(set (match_operand:DI 0 "s_register_operand" "")
1694 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1695 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1696 (match_operand:DI 3 "s_register_operand" "")))]
1697 "TARGET_32BIT && arm_arch3m"
1700 (define_insn "*mulsidi3adddi"
1701 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1704 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1705 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1706 (match_operand:DI 1 "s_register_operand" "0")))]
1707 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1708 "smlal%?\\t%Q0, %R0, %3, %2"
1709 [(set_attr "insn" "smlal")
1710 (set_attr "predicable" "yes")]
1713 (define_insn "*mulsidi3adddi_v6"
1714 [(set (match_operand:DI 0 "s_register_operand" "=r")
1717 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1718 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1719 (match_operand:DI 1 "s_register_operand" "0")))]
1720 "TARGET_32BIT && arm_arch6"
1721 "smlal%?\\t%Q0, %R0, %3, %2"
1722 [(set_attr "insn" "smlal")
1723 (set_attr "predicable" "yes")]
1726 ;; 32x32->64 widening multiply.
1727 ;; As with mulsi3, the only difference between the v3-5 and v6+
1728 ;; versions of these patterns is the requirement that the output not
1729 ;; overlap the inputs, but that still means we have to have a named
1730 ;; expander and two different starred insns.
1732 (define_expand "mulsidi3"
1733 [(set (match_operand:DI 0 "s_register_operand" "")
1735 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1736 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1737 "TARGET_32BIT && arm_arch3m"
1741 (define_insn "*mulsidi3_nov6"
1742 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1744 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1745 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1746 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1747 "smull%?\\t%Q0, %R0, %1, %2"
1748 [(set_attr "insn" "smull")
1749 (set_attr "predicable" "yes")]
1752 (define_insn "*mulsidi3_v6"
1753 [(set (match_operand:DI 0 "s_register_operand" "=r")
1755 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1756 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1757 "TARGET_32BIT && arm_arch6"
1758 "smull%?\\t%Q0, %R0, %1, %2"
1759 [(set_attr "insn" "smull")
1760 (set_attr "predicable" "yes")]
1763 (define_expand "umulsidi3"
1764 [(set (match_operand:DI 0 "s_register_operand" "")
1766 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1767 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1768 "TARGET_32BIT && arm_arch3m"
1772 (define_insn "*umulsidi3_nov6"
1773 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1775 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1776 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1777 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1778 "umull%?\\t%Q0, %R0, %1, %2"
1779 [(set_attr "insn" "umull")
1780 (set_attr "predicable" "yes")]
1783 (define_insn "*umulsidi3_v6"
1784 [(set (match_operand:DI 0 "s_register_operand" "=r")
1786 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1787 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1788 "TARGET_32BIT && arm_arch6"
1789 "umull%?\\t%Q0, %R0, %1, %2"
1790 [(set_attr "insn" "umull")
1791 (set_attr "predicable" "yes")]
1794 (define_expand "umaddsidi4"
1795 [(set (match_operand:DI 0 "s_register_operand" "")
1798 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1799 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1800 (match_operand:DI 3 "s_register_operand" "")))]
1801 "TARGET_32BIT && arm_arch3m"
1804 (define_insn "*umulsidi3adddi"
1805 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1808 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1809 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1810 (match_operand:DI 1 "s_register_operand" "0")))]
1811 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1812 "umlal%?\\t%Q0, %R0, %3, %2"
1813 [(set_attr "insn" "umlal")
1814 (set_attr "predicable" "yes")]
1817 (define_insn "*umulsidi3adddi_v6"
1818 [(set (match_operand:DI 0 "s_register_operand" "=r")
1821 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1822 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1823 (match_operand:DI 1 "s_register_operand" "0")))]
1824 "TARGET_32BIT && arm_arch6"
1825 "umlal%?\\t%Q0, %R0, %3, %2"
1826 [(set_attr "insn" "umlal")
1827 (set_attr "predicable" "yes")]
1830 (define_expand "smulsi3_highpart"
1832 [(set (match_operand:SI 0 "s_register_operand" "")
1836 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1837 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1839 (clobber (match_scratch:SI 3 ""))])]
1840 "TARGET_32BIT && arm_arch3m"
1844 (define_insn "*smulsi3_highpart_nov6"
1845 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1849 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1850 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1852 (clobber (match_scratch:SI 3 "=&r,&r"))]
1853 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1854 "smull%?\\t%3, %0, %2, %1"
1855 [(set_attr "insn" "smull")
1856 (set_attr "predicable" "yes")]
1859 (define_insn "*smulsi3_highpart_v6"
1860 [(set (match_operand:SI 0 "s_register_operand" "=r")
1864 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1865 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1867 (clobber (match_scratch:SI 3 "=r"))]
1868 "TARGET_32BIT && arm_arch6"
1869 "smull%?\\t%3, %0, %2, %1"
1870 [(set_attr "insn" "smull")
1871 (set_attr "predicable" "yes")]
1874 (define_expand "umulsi3_highpart"
1876 [(set (match_operand:SI 0 "s_register_operand" "")
1880 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1881 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1883 (clobber (match_scratch:SI 3 ""))])]
1884 "TARGET_32BIT && arm_arch3m"
1888 (define_insn "*umulsi3_highpart_nov6"
1889 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1893 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1894 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1896 (clobber (match_scratch:SI 3 "=&r,&r"))]
1897 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1898 "umull%?\\t%3, %0, %2, %1"
1899 [(set_attr "insn" "umull")
1900 (set_attr "predicable" "yes")]
1903 (define_insn "*umulsi3_highpart_v6"
1904 [(set (match_operand:SI 0 "s_register_operand" "=r")
1908 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1909 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1911 (clobber (match_scratch:SI 3 "=r"))]
1912 "TARGET_32BIT && arm_arch6"
1913 "umull%?\\t%3, %0, %2, %1"
1914 [(set_attr "insn" "umull")
1915 (set_attr "predicable" "yes")]
1918 (define_insn "mulhisi3"
1919 [(set (match_operand:SI 0 "s_register_operand" "=r")
1920 (mult:SI (sign_extend:SI
1921 (match_operand:HI 1 "s_register_operand" "%r"))
1923 (match_operand:HI 2 "s_register_operand" "r"))))]
1924 "TARGET_DSP_MULTIPLY"
1925 "smulbb%?\\t%0, %1, %2"
1926 [(set_attr "insn" "smulxy")
1927 (set_attr "predicable" "yes")]
1930 (define_insn "*mulhisi3tb"
1931 [(set (match_operand:SI 0 "s_register_operand" "=r")
1932 (mult:SI (ashiftrt:SI
1933 (match_operand:SI 1 "s_register_operand" "r")
1936 (match_operand:HI 2 "s_register_operand" "r"))))]
1937 "TARGET_DSP_MULTIPLY"
1938 "smultb%?\\t%0, %1, %2"
1939 [(set_attr "insn" "smulxy")
1940 (set_attr "predicable" "yes")]
1943 (define_insn "*mulhisi3bt"
1944 [(set (match_operand:SI 0 "s_register_operand" "=r")
1945 (mult:SI (sign_extend:SI
1946 (match_operand:HI 1 "s_register_operand" "r"))
1948 (match_operand:SI 2 "s_register_operand" "r")
1950 "TARGET_DSP_MULTIPLY"
1951 "smulbt%?\\t%0, %1, %2"
1952 [(set_attr "insn" "smulxy")
1953 (set_attr "predicable" "yes")]
1956 (define_insn "*mulhisi3tt"
1957 [(set (match_operand:SI 0 "s_register_operand" "=r")
1958 (mult:SI (ashiftrt:SI
1959 (match_operand:SI 1 "s_register_operand" "r")
1962 (match_operand:SI 2 "s_register_operand" "r")
1964 "TARGET_DSP_MULTIPLY"
1965 "smultt%?\\t%0, %1, %2"
1966 [(set_attr "insn" "smulxy")
1967 (set_attr "predicable" "yes")]
1970 (define_insn "maddhisi4"
1971 [(set (match_operand:SI 0 "s_register_operand" "=r")
1972 (plus:SI (mult:SI (sign_extend:SI
1973 (match_operand:HI 1 "s_register_operand" "r"))
1975 (match_operand:HI 2 "s_register_operand" "r")))
1976 (match_operand:SI 3 "s_register_operand" "r")))]
1977 "TARGET_DSP_MULTIPLY"
1978 "smlabb%?\\t%0, %1, %2, %3"
1979 [(set_attr "insn" "smlaxy")
1980 (set_attr "predicable" "yes")]
1983 ;; Note: there is no maddhisi4ibt because this one is canonical form
1984 (define_insn "*maddhisi4tb"
1985 [(set (match_operand:SI 0 "s_register_operand" "=r")
1986 (plus:SI (mult:SI (ashiftrt:SI
1987 (match_operand:SI 1 "s_register_operand" "r")
1990 (match_operand:HI 2 "s_register_operand" "r")))
1991 (match_operand:SI 3 "s_register_operand" "r")))]
1992 "TARGET_DSP_MULTIPLY"
1993 "smlatb%?\\t%0, %1, %2, %3"
1994 [(set_attr "insn" "smlaxy")
1995 (set_attr "predicable" "yes")]
1998 (define_insn "*maddhisi4tt"
1999 [(set (match_operand:SI 0 "s_register_operand" "=r")
2000 (plus:SI (mult:SI (ashiftrt:SI
2001 (match_operand:SI 1 "s_register_operand" "r")
2004 (match_operand:SI 2 "s_register_operand" "r")
2006 (match_operand:SI 3 "s_register_operand" "r")))]
2007 "TARGET_DSP_MULTIPLY"
2008 "smlatt%?\\t%0, %1, %2, %3"
2009 [(set_attr "insn" "smlaxy")
2010 (set_attr "predicable" "yes")]
2013 (define_insn "maddhidi4"
2014 [(set (match_operand:DI 0 "s_register_operand" "=r")
2016 (mult:DI (sign_extend:DI
2017 (match_operand:HI 1 "s_register_operand" "r"))
2019 (match_operand:HI 2 "s_register_operand" "r")))
2020 (match_operand:DI 3 "s_register_operand" "0")))]
2021 "TARGET_DSP_MULTIPLY"
2022 "smlalbb%?\\t%Q0, %R0, %1, %2"
2023 [(set_attr "insn" "smlalxy")
2024 (set_attr "predicable" "yes")])
2026 ;; Note: there is no maddhidi4ibt because this one is canonical form
2027 (define_insn "*maddhidi4tb"
2028 [(set (match_operand:DI 0 "s_register_operand" "=r")
2030 (mult:DI (sign_extend:DI
2032 (match_operand:SI 1 "s_register_operand" "r")
2035 (match_operand:HI 2 "s_register_operand" "r")))
2036 (match_operand:DI 3 "s_register_operand" "0")))]
2037 "TARGET_DSP_MULTIPLY"
2038 "smlaltb%?\\t%Q0, %R0, %1, %2"
2039 [(set_attr "insn" "smlalxy")
2040 (set_attr "predicable" "yes")])
2042 (define_insn "*maddhidi4tt"
2043 [(set (match_operand:DI 0 "s_register_operand" "=r")
2045 (mult:DI (sign_extend:DI
2047 (match_operand:SI 1 "s_register_operand" "r")
2051 (match_operand:SI 2 "s_register_operand" "r")
2053 (match_operand:DI 3 "s_register_operand" "0")))]
2054 "TARGET_DSP_MULTIPLY"
2055 "smlaltt%?\\t%Q0, %R0, %1, %2"
2056 [(set_attr "insn" "smlalxy")
2057 (set_attr "predicable" "yes")])
2059 (define_expand "mulsf3"
2060 [(set (match_operand:SF 0 "s_register_operand" "")
2061 (mult:SF (match_operand:SF 1 "s_register_operand" "")
2062 (match_operand:SF 2 "s_register_operand" "")))]
2063 "TARGET_32BIT && TARGET_HARD_FLOAT"
2067 (define_expand "muldf3"
2068 [(set (match_operand:DF 0 "s_register_operand" "")
2069 (mult:DF (match_operand:DF 1 "s_register_operand" "")
2070 (match_operand:DF 2 "s_register_operand" "")))]
2071 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2077 (define_expand "divsf3"
2078 [(set (match_operand:SF 0 "s_register_operand" "")
2079 (div:SF (match_operand:SF 1 "s_register_operand" "")
2080 (match_operand:SF 2 "s_register_operand" "")))]
2081 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
2084 (define_expand "divdf3"
2085 [(set (match_operand:DF 0 "s_register_operand" "")
2086 (div:DF (match_operand:DF 1 "s_register_operand" "")
2087 (match_operand:DF 2 "s_register_operand" "")))]
2088 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2091 ;; Boolean and,ior,xor insns
2093 ;; Split up double word logical operations
2095 ;; Split up simple DImode logical operations. Simply perform the logical
2096 ;; operation on the upper and lower halves of the registers.
2098 [(set (match_operand:DI 0 "s_register_operand" "")
2099 (match_operator:DI 6 "logical_binary_operator"
2100 [(match_operand:DI 1 "s_register_operand" "")
2101 (match_operand:DI 2 "s_register_operand" "")]))]
2102 "TARGET_32BIT && reload_completed
2103 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2104 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2105 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2106 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2109 operands[3] = gen_highpart (SImode, operands[0]);
2110 operands[0] = gen_lowpart (SImode, operands[0]);
2111 operands[4] = gen_highpart (SImode, operands[1]);
2112 operands[1] = gen_lowpart (SImode, operands[1]);
2113 operands[5] = gen_highpart (SImode, operands[2]);
2114 operands[2] = gen_lowpart (SImode, operands[2]);
2119 [(set (match_operand:DI 0 "s_register_operand" "")
2120 (match_operator:DI 6 "logical_binary_operator"
2121 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2122 (match_operand:DI 1 "s_register_operand" "")]))]
2123 "TARGET_32BIT && reload_completed"
2124 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2125 (set (match_dup 3) (match_op_dup:SI 6
2126 [(ashiftrt:SI (match_dup 2) (const_int 31))
2130 operands[3] = gen_highpart (SImode, operands[0]);
2131 operands[0] = gen_lowpart (SImode, operands[0]);
2132 operands[4] = gen_highpart (SImode, operands[1]);
2133 operands[1] = gen_lowpart (SImode, operands[1]);
2134 operands[5] = gen_highpart (SImode, operands[2]);
2135 operands[2] = gen_lowpart (SImode, operands[2]);
2139 ;; The zero extend of operand 2 means we can just copy the high part of
2140 ;; operand1 into operand0.
2142 [(set (match_operand:DI 0 "s_register_operand" "")
2144 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2145 (match_operand:DI 1 "s_register_operand" "")))]
2146 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2147 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2148 (set (match_dup 3) (match_dup 4))]
2151 operands[4] = gen_highpart (SImode, operands[1]);
2152 operands[3] = gen_highpart (SImode, operands[0]);
2153 operands[0] = gen_lowpart (SImode, operands[0]);
2154 operands[1] = gen_lowpart (SImode, operands[1]);
2158 ;; The zero extend of operand 2 means we can just copy the high part of
2159 ;; operand1 into operand0.
2161 [(set (match_operand:DI 0 "s_register_operand" "")
2163 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2164 (match_operand:DI 1 "s_register_operand" "")))]
2165 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2166 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2167 (set (match_dup 3) (match_dup 4))]
2170 operands[4] = gen_highpart (SImode, operands[1]);
2171 operands[3] = gen_highpart (SImode, operands[0]);
2172 operands[0] = gen_lowpart (SImode, operands[0]);
2173 operands[1] = gen_lowpart (SImode, operands[1]);
2177 (define_expand "anddi3"
2178 [(set (match_operand:DI 0 "s_register_operand" "")
2179 (and:DI (match_operand:DI 1 "s_register_operand" "")
2180 (match_operand:DI 2 "neon_inv_logic_op2" "")))]
2185 (define_insn_and_split "*anddi3_insn"
2186 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r,&r,w,w ,?&r,?&r,?w,?w")
2187 (and:DI (match_operand:DI 1 "s_register_operand" "%0 ,r ,0,r ,w,0 ,0 ,r ,w ,0")
2188 (match_operand:DI 2 "arm_anddi_operand_neon" "r ,r ,De,De,w,DL,r ,r ,w ,DL")))]
2189 "TARGET_32BIT && !TARGET_IWMMXT"
2191 switch (which_alternative)
2196 case 3: /* fall through */
2198 case 4: /* fall through */
2199 case 8: return "vand\t%P0, %P1, %P2";
2200 case 5: /* fall through */
2201 case 9: return neon_output_logic_immediate ("vand", &operands[2],
2202 DImode, 1, VALID_NEON_QREG_MODE (DImode));
2205 default: gcc_unreachable ();
2208 "TARGET_32BIT && !TARGET_IWMMXT"
2209 [(set (match_dup 3) (match_dup 4))
2210 (set (match_dup 5) (match_dup 6))]
2213 operands[3] = gen_lowpart (SImode, operands[0]);
2214 operands[5] = gen_highpart (SImode, operands[0]);
2216 operands[4] = simplify_gen_binary (AND, SImode,
2217 gen_lowpart (SImode, operands[1]),
2218 gen_lowpart (SImode, operands[2]));
2219 operands[6] = simplify_gen_binary (AND, SImode,
2220 gen_highpart (SImode, operands[1]),
2221 gen_highpart_mode (SImode, DImode, operands[2]));
2224 [(set_attr "neon_type" "*,*,*,*,neon_int_1,neon_int_1,*,*,neon_int_1,neon_int_1")
2225 (set_attr "arch" "*,*,*,*,neon_for_64bits,neon_for_64bits,*,*,
2226 avoid_neon_for_64bits,avoid_neon_for_64bits")
2227 (set_attr "length" "8,8,8,8,*,*,8,8,*,*")
2228 (set (attr "insn_enabled") (if_then_else
2229 (lt (symbol_ref "which_alternative")
2231 (if_then_else (match_test "!TARGET_NEON")
2232 (const_string "yes")
2233 (const_string "no"))
2234 (if_then_else (match_test "TARGET_NEON")
2235 (const_string "yes")
2236 (const_string "no"))))]
2239 (define_insn_and_split "*anddi_zesidi_di"
2240 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2241 (and:DI (zero_extend:DI
2242 (match_operand:SI 2 "s_register_operand" "r,r"))
2243 (match_operand:DI 1 "s_register_operand" "0,r")))]
2246 "TARGET_32BIT && reload_completed"
2247 ; The zero extend of operand 2 clears the high word of the output
2249 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2250 (set (match_dup 3) (const_int 0))]
2253 operands[3] = gen_highpart (SImode, operands[0]);
2254 operands[0] = gen_lowpart (SImode, operands[0]);
2255 operands[1] = gen_lowpart (SImode, operands[1]);
2257 [(set_attr "length" "8")]
2260 (define_insn "*anddi_sesdi_di"
2261 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2262 (and:DI (sign_extend:DI
2263 (match_operand:SI 2 "s_register_operand" "r,r"))
2264 (match_operand:DI 1 "s_register_operand" "0,r")))]
2267 [(set_attr "length" "8")]
2270 (define_expand "andsi3"
2271 [(set (match_operand:SI 0 "s_register_operand" "")
2272 (and:SI (match_operand:SI 1 "s_register_operand" "")
2273 (match_operand:SI 2 "reg_or_int_operand" "")))]
2278 if (CONST_INT_P (operands[2]))
2280 if (INTVAL (operands[2]) == 255 && arm_arch6)
2282 operands[1] = convert_to_mode (QImode, operands[1], 1);
2283 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2287 arm_split_constant (AND, SImode, NULL_RTX,
2288 INTVAL (operands[2]), operands[0],
2290 optimize && can_create_pseudo_p ());
2295 else /* TARGET_THUMB1 */
2297 if (!CONST_INT_P (operands[2]))
2299 rtx tmp = force_reg (SImode, operands[2]);
2300 if (rtx_equal_p (operands[0], operands[1]))
2304 operands[2] = operands[1];
2312 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2314 operands[2] = force_reg (SImode,
2315 GEN_INT (~INTVAL (operands[2])));
2317 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2322 for (i = 9; i <= 31; i++)
2324 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2326 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2330 else if ((((HOST_WIDE_INT) 1) << i) - 1
2331 == ~INTVAL (operands[2]))
2333 rtx shift = GEN_INT (i);
2334 rtx reg = gen_reg_rtx (SImode);
2336 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2337 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2343 operands[2] = force_reg (SImode, operands[2]);
2349 ; ??? Check split length for Thumb-2
2350 (define_insn_and_split "*arm_andsi3_insn"
2351 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
2352 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r,r")
2353 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2357 bic%?\\t%0, %1, #%B2
2361 && CONST_INT_P (operands[2])
2362 && !(const_ok_for_arm (INTVAL (operands[2]))
2363 || const_ok_for_arm (~INTVAL (operands[2])))"
2364 [(clobber (const_int 0))]
2366 arm_split_constant (AND, SImode, curr_insn,
2367 INTVAL (operands[2]), operands[0], operands[1], 0);
2370 [(set_attr "length" "4,4,4,16")
2371 (set_attr "predicable" "yes")
2372 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,simple_alu_imm")]
2375 (define_insn "*thumb1_andsi3_insn"
2376 [(set (match_operand:SI 0 "register_operand" "=l")
2377 (and:SI (match_operand:SI 1 "register_operand" "%0")
2378 (match_operand:SI 2 "register_operand" "l")))]
2381 [(set_attr "length" "2")
2382 (set_attr "type" "simple_alu_imm")
2383 (set_attr "conds" "set")])
2385 (define_insn "*andsi3_compare0"
2386 [(set (reg:CC_NOOV CC_REGNUM)
2388 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2389 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2391 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2392 (and:SI (match_dup 1) (match_dup 2)))]
2396 bic%.\\t%0, %1, #%B2
2398 [(set_attr "conds" "set")
2399 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2402 (define_insn "*andsi3_compare0_scratch"
2403 [(set (reg:CC_NOOV CC_REGNUM)
2405 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2406 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2408 (clobber (match_scratch:SI 2 "=X,r,X"))]
2412 bic%.\\t%2, %0, #%B1
2414 [(set_attr "conds" "set")
2415 (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2418 (define_insn "*zeroextractsi_compare0_scratch"
2419 [(set (reg:CC_NOOV CC_REGNUM)
2420 (compare:CC_NOOV (zero_extract:SI
2421 (match_operand:SI 0 "s_register_operand" "r")
2422 (match_operand 1 "const_int_operand" "n")
2423 (match_operand 2 "const_int_operand" "n"))
2426 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2427 && INTVAL (operands[1]) > 0
2428 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2429 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2431 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2432 << INTVAL (operands[2]));
2433 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2436 [(set_attr "conds" "set")
2437 (set_attr "predicable" "yes")
2438 (set_attr "type" "simple_alu_imm")]
2441 (define_insn_and_split "*ne_zeroextractsi"
2442 [(set (match_operand:SI 0 "s_register_operand" "=r")
2443 (ne:SI (zero_extract:SI
2444 (match_operand:SI 1 "s_register_operand" "r")
2445 (match_operand:SI 2 "const_int_operand" "n")
2446 (match_operand:SI 3 "const_int_operand" "n"))
2448 (clobber (reg:CC CC_REGNUM))]
2450 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2451 && INTVAL (operands[2]) > 0
2452 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2453 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2456 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2457 && INTVAL (operands[2]) > 0
2458 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2459 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2460 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2461 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2463 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2465 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2466 (match_dup 0) (const_int 1)))]
2468 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2469 << INTVAL (operands[3]));
2471 [(set_attr "conds" "clob")
2472 (set (attr "length")
2473 (if_then_else (eq_attr "is_thumb" "yes")
2478 (define_insn_and_split "*ne_zeroextractsi_shifted"
2479 [(set (match_operand:SI 0 "s_register_operand" "=r")
2480 (ne:SI (zero_extract:SI
2481 (match_operand:SI 1 "s_register_operand" "r")
2482 (match_operand:SI 2 "const_int_operand" "n")
2485 (clobber (reg:CC CC_REGNUM))]
2489 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2490 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2492 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2494 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2495 (match_dup 0) (const_int 1)))]
2497 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2499 [(set_attr "conds" "clob")
2500 (set_attr "length" "8")]
2503 (define_insn_and_split "*ite_ne_zeroextractsi"
2504 [(set (match_operand:SI 0 "s_register_operand" "=r")
2505 (if_then_else:SI (ne (zero_extract:SI
2506 (match_operand:SI 1 "s_register_operand" "r")
2507 (match_operand:SI 2 "const_int_operand" "n")
2508 (match_operand:SI 3 "const_int_operand" "n"))
2510 (match_operand:SI 4 "arm_not_operand" "rIK")
2512 (clobber (reg:CC CC_REGNUM))]
2514 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2515 && INTVAL (operands[2]) > 0
2516 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2517 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2518 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2521 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2522 && INTVAL (operands[2]) > 0
2523 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2524 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2525 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2526 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2527 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2529 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2531 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2532 (match_dup 0) (match_dup 4)))]
2534 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2535 << INTVAL (operands[3]));
2537 [(set_attr "conds" "clob")
2538 (set_attr "length" "8")]
2541 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2542 [(set (match_operand:SI 0 "s_register_operand" "=r")
2543 (if_then_else:SI (ne (zero_extract:SI
2544 (match_operand:SI 1 "s_register_operand" "r")
2545 (match_operand:SI 2 "const_int_operand" "n")
2548 (match_operand:SI 3 "arm_not_operand" "rIK")
2550 (clobber (reg:CC CC_REGNUM))]
2551 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2553 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2554 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2555 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2557 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2559 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2560 (match_dup 0) (match_dup 3)))]
2562 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2564 [(set_attr "conds" "clob")
2565 (set_attr "length" "8")]
2569 [(set (match_operand:SI 0 "s_register_operand" "")
2570 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2571 (match_operand:SI 2 "const_int_operand" "")
2572 (match_operand:SI 3 "const_int_operand" "")))
2573 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2575 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2576 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2578 HOST_WIDE_INT temp = INTVAL (operands[2]);
2580 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2581 operands[3] = GEN_INT (32 - temp);
2585 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2587 [(set (match_operand:SI 0 "s_register_operand" "")
2588 (match_operator:SI 1 "shiftable_operator"
2589 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2590 (match_operand:SI 3 "const_int_operand" "")
2591 (match_operand:SI 4 "const_int_operand" ""))
2592 (match_operand:SI 5 "s_register_operand" "")]))
2593 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2595 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2598 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2601 HOST_WIDE_INT temp = INTVAL (operands[3]);
2603 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2604 operands[4] = GEN_INT (32 - temp);
2609 [(set (match_operand:SI 0 "s_register_operand" "")
2610 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2611 (match_operand:SI 2 "const_int_operand" "")
2612 (match_operand:SI 3 "const_int_operand" "")))]
2614 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2615 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2617 HOST_WIDE_INT temp = INTVAL (operands[2]);
2619 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2620 operands[3] = GEN_INT (32 - temp);
2625 [(set (match_operand:SI 0 "s_register_operand" "")
2626 (match_operator:SI 1 "shiftable_operator"
2627 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2628 (match_operand:SI 3 "const_int_operand" "")
2629 (match_operand:SI 4 "const_int_operand" ""))
2630 (match_operand:SI 5 "s_register_operand" "")]))
2631 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2633 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2636 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2639 HOST_WIDE_INT temp = INTVAL (operands[3]);
2641 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2642 operands[4] = GEN_INT (32 - temp);
2646 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2647 ;;; represented by the bitfield, then this will produce incorrect results.
2648 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2649 ;;; which have a real bit-field insert instruction, the truncation happens
2650 ;;; in the bit-field insert instruction itself. Since arm does not have a
2651 ;;; bit-field insert instruction, we would have to emit code here to truncate
2652 ;;; the value before we insert. This loses some of the advantage of having
2653 ;;; this insv pattern, so this pattern needs to be reevalutated.
2655 (define_expand "insv"
2656 [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2657 (match_operand 1 "general_operand" "")
2658 (match_operand 2 "general_operand" ""))
2659 (match_operand 3 "reg_or_int_operand" ""))]
2660 "TARGET_ARM || arm_arch_thumb2"
2663 int start_bit = INTVAL (operands[2]);
2664 int width = INTVAL (operands[1]);
2665 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2666 rtx target, subtarget;
2668 if (arm_arch_thumb2)
2670 if (unaligned_access && MEM_P (operands[0])
2671 && s_register_operand (operands[3], GET_MODE (operands[3]))
2672 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2676 if (BYTES_BIG_ENDIAN)
2677 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2682 base_addr = adjust_address (operands[0], SImode,
2683 start_bit / BITS_PER_UNIT);
2684 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2688 rtx tmp = gen_reg_rtx (HImode);
2690 base_addr = adjust_address (operands[0], HImode,
2691 start_bit / BITS_PER_UNIT);
2692 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2693 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2697 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2699 bool use_bfi = TRUE;
2701 if (CONST_INT_P (operands[3]))
2703 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2707 emit_insn (gen_insv_zero (operands[0], operands[1],
2712 /* See if the set can be done with a single orr instruction. */
2713 if (val == mask && const_ok_for_arm (val << start_bit))
2719 if (!REG_P (operands[3]))
2720 operands[3] = force_reg (SImode, operands[3]);
2722 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2731 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2734 target = copy_rtx (operands[0]);
2735 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2736 subreg as the final target. */
2737 if (GET_CODE (target) == SUBREG)
2739 subtarget = gen_reg_rtx (SImode);
2740 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2741 < GET_MODE_SIZE (SImode))
2742 target = SUBREG_REG (target);
2747 if (CONST_INT_P (operands[3]))
2749 /* Since we are inserting a known constant, we may be able to
2750 reduce the number of bits that we have to clear so that
2751 the mask becomes simple. */
2752 /* ??? This code does not check to see if the new mask is actually
2753 simpler. It may not be. */
2754 rtx op1 = gen_reg_rtx (SImode);
2755 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2756 start of this pattern. */
2757 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2758 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2760 emit_insn (gen_andsi3 (op1, operands[0],
2761 gen_int_mode (~mask2, SImode)));
2762 emit_insn (gen_iorsi3 (subtarget, op1,
2763 gen_int_mode (op3_value << start_bit, SImode)));
2765 else if (start_bit == 0
2766 && !(const_ok_for_arm (mask)
2767 || const_ok_for_arm (~mask)))
2769 /* A Trick, since we are setting the bottom bits in the word,
2770 we can shift operand[3] up, operand[0] down, OR them together
2771 and rotate the result back again. This takes 3 insns, and
2772 the third might be mergeable into another op. */
2773 /* The shift up copes with the possibility that operand[3] is
2774 wider than the bitfield. */
2775 rtx op0 = gen_reg_rtx (SImode);
2776 rtx op1 = gen_reg_rtx (SImode);
2778 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2779 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2780 emit_insn (gen_iorsi3 (op1, op1, op0));
2781 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2783 else if ((width + start_bit == 32)
2784 && !(const_ok_for_arm (mask)
2785 || const_ok_for_arm (~mask)))
2787 /* Similar trick, but slightly less efficient. */
2789 rtx op0 = gen_reg_rtx (SImode);
2790 rtx op1 = gen_reg_rtx (SImode);
2792 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2793 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2794 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2795 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2799 rtx op0 = gen_int_mode (mask, SImode);
2800 rtx op1 = gen_reg_rtx (SImode);
2801 rtx op2 = gen_reg_rtx (SImode);
2803 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2805 rtx tmp = gen_reg_rtx (SImode);
2807 emit_insn (gen_movsi (tmp, op0));
2811 /* Mask out any bits in operand[3] that are not needed. */
2812 emit_insn (gen_andsi3 (op1, operands[3], op0));
2814 if (CONST_INT_P (op0)
2815 && (const_ok_for_arm (mask << start_bit)
2816 || const_ok_for_arm (~(mask << start_bit))))
2818 op0 = gen_int_mode (~(mask << start_bit), SImode);
2819 emit_insn (gen_andsi3 (op2, operands[0], op0));
2823 if (CONST_INT_P (op0))
2825 rtx tmp = gen_reg_rtx (SImode);
2827 emit_insn (gen_movsi (tmp, op0));
2832 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2834 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2838 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2840 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2843 if (subtarget != target)
2845 /* If TARGET is still a SUBREG, then it must be wider than a word,
2846 so we must be careful only to set the subword we were asked to. */
2847 if (GET_CODE (target) == SUBREG)
2848 emit_move_insn (target, subtarget);
2850 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2857 (define_insn "insv_zero"
2858 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2859 (match_operand:SI 1 "const_int_operand" "M")
2860 (match_operand:SI 2 "const_int_operand" "M"))
2864 [(set_attr "length" "4")
2865 (set_attr "predicable" "yes")]
2868 (define_insn "insv_t2"
2869 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2870 (match_operand:SI 1 "const_int_operand" "M")
2871 (match_operand:SI 2 "const_int_operand" "M"))
2872 (match_operand:SI 3 "s_register_operand" "r"))]
2874 "bfi%?\t%0, %3, %2, %1"
2875 [(set_attr "length" "4")
2876 (set_attr "predicable" "yes")]
2879 ; constants for op 2 will never be given to these patterns.
2880 (define_insn_and_split "*anddi_notdi_di"
2881 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2882 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2883 (match_operand:DI 2 "s_register_operand" "r,0")))]
2886 "TARGET_32BIT && reload_completed
2887 && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2888 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2889 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2890 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2893 operands[3] = gen_highpart (SImode, operands[0]);
2894 operands[0] = gen_lowpart (SImode, operands[0]);
2895 operands[4] = gen_highpart (SImode, operands[1]);
2896 operands[1] = gen_lowpart (SImode, operands[1]);
2897 operands[5] = gen_highpart (SImode, operands[2]);
2898 operands[2] = gen_lowpart (SImode, operands[2]);
2900 [(set_attr "length" "8")
2901 (set_attr "predicable" "yes")]
2904 (define_insn_and_split "*anddi_notzesidi_di"
2905 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2906 (and:DI (not:DI (zero_extend:DI
2907 (match_operand:SI 2 "s_register_operand" "r,r")))
2908 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2911 bic%?\\t%Q0, %Q1, %2
2913 ; (not (zero_extend ...)) allows us to just copy the high word from
2914 ; operand1 to operand0.
2917 && operands[0] != operands[1]"
2918 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2919 (set (match_dup 3) (match_dup 4))]
2922 operands[3] = gen_highpart (SImode, operands[0]);
2923 operands[0] = gen_lowpart (SImode, operands[0]);
2924 operands[4] = gen_highpart (SImode, operands[1]);
2925 operands[1] = gen_lowpart (SImode, operands[1]);
2927 [(set_attr "length" "4,8")
2928 (set_attr "predicable" "yes")]
2931 (define_insn_and_split "*anddi_notsesidi_di"
2932 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2933 (and:DI (not:DI (sign_extend:DI
2934 (match_operand:SI 2 "s_register_operand" "r,r")))
2935 (match_operand:DI 1 "s_register_operand" "0,r")))]
2938 "TARGET_32BIT && reload_completed"
2939 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2940 (set (match_dup 3) (and:SI (not:SI
2941 (ashiftrt:SI (match_dup 2) (const_int 31)))
2945 operands[3] = gen_highpart (SImode, operands[0]);
2946 operands[0] = gen_lowpart (SImode, operands[0]);
2947 operands[4] = gen_highpart (SImode, operands[1]);
2948 operands[1] = gen_lowpart (SImode, operands[1]);
2950 [(set_attr "length" "8")
2951 (set_attr "predicable" "yes")]
2954 (define_insn "andsi_notsi_si"
2955 [(set (match_operand:SI 0 "s_register_operand" "=r")
2956 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2957 (match_operand:SI 1 "s_register_operand" "r")))]
2959 "bic%?\\t%0, %1, %2"
2960 [(set_attr "predicable" "yes")]
2963 (define_insn "thumb1_bicsi3"
2964 [(set (match_operand:SI 0 "register_operand" "=l")
2965 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2966 (match_operand:SI 2 "register_operand" "0")))]
2969 [(set_attr "length" "2")
2970 (set_attr "conds" "set")])
2972 (define_insn "andsi_not_shiftsi_si"
2973 [(set (match_operand:SI 0 "s_register_operand" "=r")
2974 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2975 [(match_operand:SI 2 "s_register_operand" "r")
2976 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2977 (match_operand:SI 1 "s_register_operand" "r")))]
2979 "bic%?\\t%0, %1, %2%S4"
2980 [(set_attr "predicable" "yes")
2981 (set_attr "shift" "2")
2982 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2983 (const_string "alu_shift")
2984 (const_string "alu_shift_reg")))]
2987 (define_insn "*andsi_notsi_si_compare0"
2988 [(set (reg:CC_NOOV CC_REGNUM)
2990 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2991 (match_operand:SI 1 "s_register_operand" "r"))
2993 (set (match_operand:SI 0 "s_register_operand" "=r")
2994 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2996 "bic%.\\t%0, %1, %2"
2997 [(set_attr "conds" "set")]
3000 (define_insn "*andsi_notsi_si_compare0_scratch"
3001 [(set (reg:CC_NOOV CC_REGNUM)
3003 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3004 (match_operand:SI 1 "s_register_operand" "r"))
3006 (clobber (match_scratch:SI 0 "=r"))]
3008 "bic%.\\t%0, %1, %2"
3009 [(set_attr "conds" "set")]
3012 (define_expand "iordi3"
3013 [(set (match_operand:DI 0 "s_register_operand" "")
3014 (ior:DI (match_operand:DI 1 "s_register_operand" "")
3015 (match_operand:DI 2 "neon_logic_op2" "")))]
3020 (define_insn_and_split "*iordi3_insn"
3021 [(set (match_operand:DI 0 "s_register_operand" "=w,w ,&r,&r,&r,&r,?w,?w")
3022 (ior:DI (match_operand:DI 1 "s_register_operand" "%w,0 ,0 ,r ,0 ,r ,w ,0")
3023 (match_operand:DI 2 "arm_iordi_operand_neon" "w ,Dl,r ,r ,Df,Df,w ,Dl")))]
3024 "TARGET_32BIT && !TARGET_IWMMXT"
3026 switch (which_alternative)
3028 case 0: /* fall through */
3029 case 6: return "vorr\t%P0, %P1, %P2";
3030 case 1: /* fall through */
3031 case 7: return neon_output_logic_immediate ("vorr", &operands[2],
3032 DImode, 0, VALID_NEON_QREG_MODE (DImode));
3038 default: gcc_unreachable ();
3041 "TARGET_32BIT && !TARGET_IWMMXT && reload_completed
3042 && !(IS_VFP_REGNUM (REGNO (operands[0])))"
3043 [(set (match_dup 3) (match_dup 4))
3044 (set (match_dup 5) (match_dup 6))]
3047 operands[3] = gen_lowpart (SImode, operands[0]);
3048 operands[5] = gen_highpart (SImode, operands[0]);
3050 operands[4] = simplify_gen_binary (IOR, SImode,
3051 gen_lowpart (SImode, operands[1]),
3052 gen_lowpart (SImode, operands[2]));
3053 operands[6] = simplify_gen_binary (IOR, SImode,
3054 gen_highpart (SImode, operands[1]),
3055 gen_highpart_mode (SImode, DImode, operands[2]));
3058 [(set_attr "neon_type" "neon_int_1,neon_int_1,*,*,*,*,neon_int_1,neon_int_1")
3059 (set_attr "length" "*,*,8,8,8,8,*,*")
3060 (set_attr "arch" "neon_for_64bits,neon_for_64bits,*,*,*,*,avoid_neon_for_64bits,avoid_neon_for_64bits")]
3063 (define_insn "*iordi_zesidi_di"
3064 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3065 (ior:DI (zero_extend:DI
3066 (match_operand:SI 2 "s_register_operand" "r,r"))
3067 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3070 orr%?\\t%Q0, %Q1, %2
3072 [(set_attr "length" "4,8")
3073 (set_attr "predicable" "yes")]
3076 (define_insn "*iordi_sesidi_di"
3077 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3078 (ior:DI (sign_extend:DI
3079 (match_operand:SI 2 "s_register_operand" "r,r"))
3080 (match_operand:DI 1 "s_register_operand" "0,r")))]
3083 [(set_attr "length" "8")
3084 (set_attr "predicable" "yes")]
3087 (define_expand "iorsi3"
3088 [(set (match_operand:SI 0 "s_register_operand" "")
3089 (ior:SI (match_operand:SI 1 "s_register_operand" "")
3090 (match_operand:SI 2 "reg_or_int_operand" "")))]
3093 if (CONST_INT_P (operands[2]))
3097 arm_split_constant (IOR, SImode, NULL_RTX,
3098 INTVAL (operands[2]), operands[0], operands[1],
3099 optimize && can_create_pseudo_p ());
3102 else /* TARGET_THUMB1 */
3104 rtx tmp = force_reg (SImode, operands[2]);
3105 if (rtx_equal_p (operands[0], operands[1]))
3109 operands[2] = operands[1];
3117 (define_insn_and_split "*iorsi3_insn"
3118 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
3119 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r,r")
3120 (match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
3124 orn%?\\t%0, %1, #%B2
3128 && CONST_INT_P (operands[2])
3129 && !(const_ok_for_arm (INTVAL (operands[2]))
3130 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3131 [(clobber (const_int 0))]
3133 arm_split_constant (IOR, SImode, curr_insn,
3134 INTVAL (operands[2]), operands[0], operands[1], 0);
3137 [(set_attr "length" "4,4,4,16")
3138 (set_attr "arch" "32,t2,32,32")
3139 (set_attr "predicable" "yes")
3140 (set_attr "type" "simple_alu_imm,simple_alu_imm,*,*")]
3143 (define_insn "*thumb1_iorsi3_insn"
3144 [(set (match_operand:SI 0 "register_operand" "=l")
3145 (ior:SI (match_operand:SI 1 "register_operand" "%0")
3146 (match_operand:SI 2 "register_operand" "l")))]
3149 [(set_attr "length" "2")
3150 (set_attr "conds" "set")])
3153 [(match_scratch:SI 3 "r")
3154 (set (match_operand:SI 0 "arm_general_register_operand" "")
3155 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3156 (match_operand:SI 2 "const_int_operand" "")))]
3158 && !const_ok_for_arm (INTVAL (operands[2]))
3159 && const_ok_for_arm (~INTVAL (operands[2]))"
3160 [(set (match_dup 3) (match_dup 2))
3161 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3165 (define_insn "*iorsi3_compare0"
3166 [(set (reg:CC_NOOV CC_REGNUM)
3167 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3168 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3170 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3171 (ior:SI (match_dup 1) (match_dup 2)))]
3173 "orr%.\\t%0, %1, %2"
3174 [(set_attr "conds" "set")
3175 (set_attr "type" "simple_alu_imm,*")]
3178 (define_insn "*iorsi3_compare0_scratch"
3179 [(set (reg:CC_NOOV CC_REGNUM)
3180 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
3181 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3183 (clobber (match_scratch:SI 0 "=r,r"))]
3185 "orr%.\\t%0, %1, %2"
3186 [(set_attr "conds" "set")
3187 (set_attr "type" "simple_alu_imm, *")]
3190 (define_expand "xordi3"
3191 [(set (match_operand:DI 0 "s_register_operand" "")
3192 (xor:DI (match_operand:DI 1 "s_register_operand" "")
3193 (match_operand:DI 2 "s_register_operand" "")))]
3198 (define_insn "*xordi3_insn"
3199 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3200 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
3201 (match_operand:DI 2 "s_register_operand" "r,r")))]
3202 "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
3204 [(set_attr "length" "8")
3205 (set_attr "predicable" "yes")]
3208 (define_insn "*xordi_zesidi_di"
3209 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3210 (xor:DI (zero_extend:DI
3211 (match_operand:SI 2 "s_register_operand" "r,r"))
3212 (match_operand:DI 1 "s_register_operand" "0,?r")))]
3215 eor%?\\t%Q0, %Q1, %2
3217 [(set_attr "length" "4,8")
3218 (set_attr "predicable" "yes")]
3221 (define_insn "*xordi_sesidi_di"
3222 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3223 (xor:DI (sign_extend:DI
3224 (match_operand:SI 2 "s_register_operand" "r,r"))
3225 (match_operand:DI 1 "s_register_operand" "0,r")))]
3228 [(set_attr "length" "8")
3229 (set_attr "predicable" "yes")]
3232 (define_expand "xorsi3"
3233 [(set (match_operand:SI 0 "s_register_operand" "")
3234 (xor:SI (match_operand:SI 1 "s_register_operand" "")
3235 (match_operand:SI 2 "reg_or_int_operand" "")))]
3237 "if (CONST_INT_P (operands[2]))
3241 arm_split_constant (XOR, SImode, NULL_RTX,
3242 INTVAL (operands[2]), operands[0], operands[1],
3243 optimize && can_create_pseudo_p ());
3246 else /* TARGET_THUMB1 */
3248 rtx tmp = force_reg (SImode, operands[2]);
3249 if (rtx_equal_p (operands[0], operands[1]))
3253 operands[2] = operands[1];
3260 (define_insn_and_split "*arm_xorsi3"
3261 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3262 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
3263 (match_operand:SI 2 "reg_or_int_operand" "I,r,?n")))]
3270 && CONST_INT_P (operands[2])
3271 && !const_ok_for_arm (INTVAL (operands[2]))"
3272 [(clobber (const_int 0))]
3274 arm_split_constant (XOR, SImode, curr_insn,
3275 INTVAL (operands[2]), operands[0], operands[1], 0);
3278 [(set_attr "length" "4,4,16")
3279 (set_attr "predicable" "yes")
3280 (set_attr "type" "simple_alu_imm,*,*")]
3283 (define_insn "*thumb1_xorsi3_insn"
3284 [(set (match_operand:SI 0 "register_operand" "=l")
3285 (xor:SI (match_operand:SI 1 "register_operand" "%0")
3286 (match_operand:SI 2 "register_operand" "l")))]
3289 [(set_attr "length" "2")
3290 (set_attr "conds" "set")
3291 (set_attr "type" "simple_alu_imm")]
3294 (define_insn "*xorsi3_compare0"
3295 [(set (reg:CC_NOOV CC_REGNUM)
3296 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3297 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3299 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3300 (xor:SI (match_dup 1) (match_dup 2)))]
3302 "eor%.\\t%0, %1, %2"
3303 [(set_attr "conds" "set")
3304 (set_attr "type" "simple_alu_imm,*")]
3307 (define_insn "*xorsi3_compare0_scratch"
3308 [(set (reg:CC_NOOV CC_REGNUM)
3309 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3310 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3314 [(set_attr "conds" "set")
3315 (set_attr "type" "simple_alu_imm, *")]
3318 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3319 ; (NOT D) we can sometimes merge the final NOT into one of the following
3323 [(set (match_operand:SI 0 "s_register_operand" "")
3324 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3325 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3326 (match_operand:SI 3 "arm_rhs_operand" "")))
3327 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3329 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3330 (not:SI (match_dup 3))))
3331 (set (match_dup 0) (not:SI (match_dup 4)))]
3335 (define_insn_and_split "*andsi_iorsi3_notsi"
3336 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3337 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3338 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3339 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3341 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3342 "&& reload_completed"
3343 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3344 (set (match_dup 0) (and:SI (not:SI (match_dup 3)) (match_dup 0)))]
3346 [(set_attr "length" "8")
3347 (set_attr "ce_count" "2")
3348 (set_attr "predicable" "yes")]
3351 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3352 ; insns are available?
3354 [(set (match_operand:SI 0 "s_register_operand" "")
3355 (match_operator:SI 1 "logical_binary_operator"
3356 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3357 (match_operand:SI 3 "const_int_operand" "")
3358 (match_operand:SI 4 "const_int_operand" ""))
3359 (match_operator:SI 9 "logical_binary_operator"
3360 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3361 (match_operand:SI 6 "const_int_operand" ""))
3362 (match_operand:SI 7 "s_register_operand" "")])]))
3363 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3365 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3366 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3369 [(ashift:SI (match_dup 2) (match_dup 4))
3373 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3376 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3380 [(set (match_operand:SI 0 "s_register_operand" "")
3381 (match_operator:SI 1 "logical_binary_operator"
3382 [(match_operator:SI 9 "logical_binary_operator"
3383 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3384 (match_operand:SI 6 "const_int_operand" ""))
3385 (match_operand:SI 7 "s_register_operand" "")])
3386 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3387 (match_operand:SI 3 "const_int_operand" "")
3388 (match_operand:SI 4 "const_int_operand" ""))]))
3389 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3391 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3392 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3395 [(ashift:SI (match_dup 2) (match_dup 4))
3399 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3402 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3406 [(set (match_operand:SI 0 "s_register_operand" "")
3407 (match_operator:SI 1 "logical_binary_operator"
3408 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3409 (match_operand:SI 3 "const_int_operand" "")
3410 (match_operand:SI 4 "const_int_operand" ""))
3411 (match_operator:SI 9 "logical_binary_operator"
3412 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3413 (match_operand:SI 6 "const_int_operand" ""))
3414 (match_operand:SI 7 "s_register_operand" "")])]))
3415 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3417 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3418 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3421 [(ashift:SI (match_dup 2) (match_dup 4))
3425 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3428 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3432 [(set (match_operand:SI 0 "s_register_operand" "")
3433 (match_operator:SI 1 "logical_binary_operator"
3434 [(match_operator:SI 9 "logical_binary_operator"
3435 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3436 (match_operand:SI 6 "const_int_operand" ""))
3437 (match_operand:SI 7 "s_register_operand" "")])
3438 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3439 (match_operand:SI 3 "const_int_operand" "")
3440 (match_operand:SI 4 "const_int_operand" ""))]))
3441 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3443 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3444 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3447 [(ashift:SI (match_dup 2) (match_dup 4))
3451 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3454 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3458 ;; Minimum and maximum insns
3460 (define_expand "smaxsi3"
3462 (set (match_operand:SI 0 "s_register_operand" "")
3463 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3464 (match_operand:SI 2 "arm_rhs_operand" "")))
3465 (clobber (reg:CC CC_REGNUM))])]
3468 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3470 /* No need for a clobber of the condition code register here. */
3471 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3472 gen_rtx_SMAX (SImode, operands[1],
3478 (define_insn "*smax_0"
3479 [(set (match_operand:SI 0 "s_register_operand" "=r")
3480 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3483 "bic%?\\t%0, %1, %1, asr #31"
3484 [(set_attr "predicable" "yes")]
3487 (define_insn "*smax_m1"
3488 [(set (match_operand:SI 0 "s_register_operand" "=r")
3489 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3492 "orr%?\\t%0, %1, %1, asr #31"
3493 [(set_attr "predicable" "yes")]
3496 (define_insn_and_split "*arm_smax_insn"
3497 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3498 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3499 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3500 (clobber (reg:CC CC_REGNUM))]
3503 ; cmp\\t%1, %2\;movlt\\t%0, %2
3504 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3506 [(set (reg:CC CC_REGNUM)
3507 (compare:CC (match_dup 1) (match_dup 2)))
3509 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3513 [(set_attr "conds" "clob")
3514 (set_attr "length" "8,12")]
3517 (define_expand "sminsi3"
3519 (set (match_operand:SI 0 "s_register_operand" "")
3520 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3521 (match_operand:SI 2 "arm_rhs_operand" "")))
3522 (clobber (reg:CC CC_REGNUM))])]
3525 if (operands[2] == const0_rtx)
3527 /* No need for a clobber of the condition code register here. */
3528 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3529 gen_rtx_SMIN (SImode, operands[1],
3535 (define_insn "*smin_0"
3536 [(set (match_operand:SI 0 "s_register_operand" "=r")
3537 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3540 "and%?\\t%0, %1, %1, asr #31"
3541 [(set_attr "predicable" "yes")]
3544 (define_insn_and_split "*arm_smin_insn"
3545 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3546 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3547 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3548 (clobber (reg:CC CC_REGNUM))]
3551 ; cmp\\t%1, %2\;movge\\t%0, %2
3552 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3554 [(set (reg:CC CC_REGNUM)
3555 (compare:CC (match_dup 1) (match_dup 2)))
3557 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3561 [(set_attr "conds" "clob")
3562 (set_attr "length" "8,12")]
3565 (define_expand "umaxsi3"
3567 (set (match_operand:SI 0 "s_register_operand" "")
3568 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3569 (match_operand:SI 2 "arm_rhs_operand" "")))
3570 (clobber (reg:CC CC_REGNUM))])]
3575 (define_insn_and_split "*arm_umaxsi3"
3576 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3577 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3578 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3579 (clobber (reg:CC CC_REGNUM))]
3582 ; cmp\\t%1, %2\;movcc\\t%0, %2
3583 ; cmp\\t%1, %2\;movcs\\t%0, %1
3584 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3586 [(set (reg:CC CC_REGNUM)
3587 (compare:CC (match_dup 1) (match_dup 2)))
3589 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3593 [(set_attr "conds" "clob")
3594 (set_attr "length" "8,8,12")]
3597 (define_expand "uminsi3"
3599 (set (match_operand:SI 0 "s_register_operand" "")
3600 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3601 (match_operand:SI 2 "arm_rhs_operand" "")))
3602 (clobber (reg:CC CC_REGNUM))])]
3607 (define_insn_and_split "*arm_uminsi3"
3608 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3609 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3610 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3611 (clobber (reg:CC CC_REGNUM))]
3614 ; cmp\\t%1, %2\;movcs\\t%0, %2
3615 ; cmp\\t%1, %2\;movcc\\t%0, %1
3616 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3618 [(set (reg:CC CC_REGNUM)
3619 (compare:CC (match_dup 1) (match_dup 2)))
3621 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3625 [(set_attr "conds" "clob")
3626 (set_attr "length" "8,8,12")]
3629 (define_insn "*store_minmaxsi"
3630 [(set (match_operand:SI 0 "memory_operand" "=m")
3631 (match_operator:SI 3 "minmax_operator"
3632 [(match_operand:SI 1 "s_register_operand" "r")
3633 (match_operand:SI 2 "s_register_operand" "r")]))
3634 (clobber (reg:CC CC_REGNUM))]
3635 "TARGET_32BIT && optimize_insn_for_size_p()"
3637 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3638 operands[1], operands[2]);
3639 output_asm_insn (\"cmp\\t%1, %2\", operands);
3641 output_asm_insn (\"ite\t%d3\", operands);
3642 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3643 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3646 [(set_attr "conds" "clob")
3647 (set (attr "length")
3648 (if_then_else (eq_attr "is_thumb" "yes")
3651 (set_attr "type" "store1")]
3654 ; Reject the frame pointer in operand[1], since reloading this after
3655 ; it has been eliminated can cause carnage.
3656 (define_insn "*minmax_arithsi"
3657 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3658 (match_operator:SI 4 "shiftable_operator"
3659 [(match_operator:SI 5 "minmax_operator"
3660 [(match_operand:SI 2 "s_register_operand" "r,r")
3661 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3662 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3663 (clobber (reg:CC CC_REGNUM))]
3664 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3667 enum rtx_code code = GET_CODE (operands[4]);
3670 if (which_alternative != 0 || operands[3] != const0_rtx
3671 || (code != PLUS && code != IOR && code != XOR))
3676 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3677 operands[2], operands[3]);
3678 output_asm_insn (\"cmp\\t%2, %3\", operands);
3682 output_asm_insn (\"ite\\t%d5\", operands);
3684 output_asm_insn (\"it\\t%d5\", operands);
3686 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3688 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3691 [(set_attr "conds" "clob")
3692 (set (attr "length")
3693 (if_then_else (eq_attr "is_thumb" "yes")
3698 ; Reject the frame pointer in operand[1], since reloading this after
3699 ; it has been eliminated can cause carnage.
3700 (define_insn_and_split "*minmax_arithsi_non_canon"
3701 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3703 (match_operand:SI 1 "s_register_operand" "0,?r")
3704 (match_operator:SI 4 "minmax_operator"
3705 [(match_operand:SI 2 "s_register_operand" "r,r")
3706 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
3707 (clobber (reg:CC CC_REGNUM))]
3708 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3710 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3711 [(set (reg:CC CC_REGNUM)
3712 (compare:CC (match_dup 2) (match_dup 3)))
3714 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3716 (minus:SI (match_dup 1)
3718 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3720 (minus:SI (match_dup 1)
3723 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3724 operands[2], operands[3]);
3725 enum rtx_code rc = minmax_code (operands[4]);
3726 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3727 operands[2], operands[3]);
3729 if (mode == CCFPmode || mode == CCFPEmode)
3730 rc = reverse_condition_maybe_unordered (rc);
3732 rc = reverse_condition (rc);
3733 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3735 [(set_attr "conds" "clob")
3736 (set (attr "length")
3737 (if_then_else (eq_attr "is_thumb" "yes")
3742 (define_code_iterator SAT [smin smax])
3743 (define_code_iterator SATrev [smin smax])
3744 (define_code_attr SATlo [(smin "1") (smax "2")])
3745 (define_code_attr SAThi [(smin "2") (smax "1")])
3747 (define_insn "*satsi_<SAT:code>"
3748 [(set (match_operand:SI 0 "s_register_operand" "=r")
3749 (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
3750 (match_operand:SI 1 "const_int_operand" "i"))
3751 (match_operand:SI 2 "const_int_operand" "i")))]
3752 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3753 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3757 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3758 &mask, &signed_sat))
3761 operands[1] = GEN_INT (mask);
3763 return "ssat%?\t%0, %1, %3";
3765 return "usat%?\t%0, %1, %3";
3767 [(set_attr "predicable" "yes")
3768 (set_attr "insn" "sat")])
3770 (define_insn "*satsi_<SAT:code>_shift"
3771 [(set (match_operand:SI 0 "s_register_operand" "=r")
3772 (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
3773 [(match_operand:SI 4 "s_register_operand" "r")
3774 (match_operand:SI 5 "const_int_operand" "i")])
3775 (match_operand:SI 1 "const_int_operand" "i"))
3776 (match_operand:SI 2 "const_int_operand" "i")))]
3777 "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3778 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3782 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3783 &mask, &signed_sat))
3786 operands[1] = GEN_INT (mask);
3788 return "ssat%?\t%0, %1, %4%S3";
3790 return "usat%?\t%0, %1, %4%S3";
3792 [(set_attr "predicable" "yes")
3793 (set_attr "insn" "sat")
3794 (set_attr "shift" "3")
3795 (set_attr "type" "alu_shift")])
3797 ;; Shift and rotation insns
3799 (define_expand "ashldi3"
3800 [(set (match_operand:DI 0 "s_register_operand" "")
3801 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3802 (match_operand:SI 2 "general_operand" "")))]
3807 /* Delay the decision whether to use NEON or core-regs until
3808 register allocation. */
3809 emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
3814 /* Only the NEON case can handle in-memory shift counts. */
3815 if (!reg_or_int_operand (operands[2], SImode))
3816 operands[2] = force_reg (SImode, operands[2]);
3819 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3820 ; /* No special preparation statements; expand pattern as above. */
3823 rtx scratch1, scratch2;
3825 if (CONST_INT_P (operands[2])
3826 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3828 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3832 /* Ideally we should use iwmmxt here if we could know that operands[1]
3833 ends up already living in an iwmmxt register. Otherwise it's
3834 cheaper to have the alternate code being generated than moving
3835 values to iwmmxt regs and back. */
3837 /* If we're optimizing for size, we prefer the libgcc calls. */
3838 if (optimize_function_for_size_p (cfun))
3841 /* Expand operation using core-registers.
3842 'FAIL' would achieve the same thing, but this is a bit smarter. */
3843 scratch1 = gen_reg_rtx (SImode);
3844 scratch2 = gen_reg_rtx (SImode);
3845 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3846 operands[2], scratch1, scratch2);
3852 (define_insn_and_split "arm_ashldi3_1bit"
3853 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3854 (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3856 (clobber (reg:CC CC_REGNUM))]
3858 "#" ; "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3859 "&& reload_completed"
3860 [(parallel [(set (reg:CC CC_REGNUM)
3861 (compare:CC (ashift:SI (match_dup 1) (const_int 1))
3863 (set (match_dup 0) (ashift:SI (match_dup 1) (const_int 1)))])
3864 (set (match_dup 2) (plus:SI (plus:SI (match_dup 3) (match_dup 3))
3865 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
3867 operands[2] = gen_highpart (SImode, operands[0]);
3868 operands[0] = gen_lowpart (SImode, operands[0]);
3869 operands[3] = gen_highpart (SImode, operands[1]);
3870 operands[1] = gen_lowpart (SImode, operands[1]);
3872 [(set_attr "conds" "clob")
3873 (set_attr "length" "8")]
3876 (define_expand "ashlsi3"
3877 [(set (match_operand:SI 0 "s_register_operand" "")
3878 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3879 (match_operand:SI 2 "arm_rhs_operand" "")))]
3882 if (CONST_INT_P (operands[2])
3883 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3885 emit_insn (gen_movsi (operands[0], const0_rtx));
3891 (define_insn "*thumb1_ashlsi3"
3892 [(set (match_operand:SI 0 "register_operand" "=l,l")
3893 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3894 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3897 [(set_attr "length" "2")
3898 (set_attr "conds" "set")])
3900 (define_expand "ashrdi3"
3901 [(set (match_operand:DI 0 "s_register_operand" "")
3902 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3903 (match_operand:SI 2 "reg_or_int_operand" "")))]
3908 /* Delay the decision whether to use NEON or core-regs until
3909 register allocation. */
3910 emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
3914 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3915 ; /* No special preparation statements; expand pattern as above. */
3918 rtx scratch1, scratch2;
3920 if (CONST_INT_P (operands[2])
3921 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3923 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3927 /* Ideally we should use iwmmxt here if we could know that operands[1]
3928 ends up already living in an iwmmxt register. Otherwise it's
3929 cheaper to have the alternate code being generated than moving
3930 values to iwmmxt regs and back. */
3932 /* If we're optimizing for size, we prefer the libgcc calls. */
3933 if (optimize_function_for_size_p (cfun))
3936 /* Expand operation using core-registers.
3937 'FAIL' would achieve the same thing, but this is a bit smarter. */
3938 scratch1 = gen_reg_rtx (SImode);
3939 scratch2 = gen_reg_rtx (SImode);
3940 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3941 operands[2], scratch1, scratch2);
3947 (define_insn_and_split "arm_ashrdi3_1bit"
3948 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
3949 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3951 (clobber (reg:CC CC_REGNUM))]
3953 "#" ; "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3954 "&& reload_completed"
3955 [(parallel [(set (reg:CC CC_REGNUM)
3956 (compare:CC (ashiftrt:SI (match_dup 3) (const_int 1))
3958 (set (match_dup 2) (ashiftrt:SI (match_dup 3) (const_int 1)))])
3959 (set (match_dup 0) (unspec:SI [(match_dup 1)
3960 (reg:CC_C CC_REGNUM)]
3963 operands[2] = gen_highpart (SImode, operands[0]);
3964 operands[0] = gen_lowpart (SImode, operands[0]);
3965 operands[3] = gen_highpart (SImode, operands[1]);
3966 operands[1] = gen_lowpart (SImode, operands[1]);
3968 [(set_attr "conds" "clob")
3969 (set_attr "length" "8")]
3973 [(set (match_operand:SI 0 "s_register_operand" "=r")
3974 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
3975 (reg:CC_C CC_REGNUM)]
3979 [(set_attr "conds" "use")
3980 (set_attr "insn" "mov")
3981 (set_attr "type" "alu_shift")]
3984 (define_expand "ashrsi3"
3985 [(set (match_operand:SI 0 "s_register_operand" "")
3986 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3987 (match_operand:SI 2 "arm_rhs_operand" "")))]
3990 if (CONST_INT_P (operands[2])
3991 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3992 operands[2] = GEN_INT (31);
3996 (define_insn "*thumb1_ashrsi3"
3997 [(set (match_operand:SI 0 "register_operand" "=l,l")
3998 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3999 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4002 [(set_attr "length" "2")
4003 (set_attr "conds" "set")])
4005 (define_expand "lshrdi3"
4006 [(set (match_operand:DI 0 "s_register_operand" "")
4007 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
4008 (match_operand:SI 2 "reg_or_int_operand" "")))]
4013 /* Delay the decision whether to use NEON or core-regs until
4014 register allocation. */
4015 emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
4019 if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
4020 ; /* No special preparation statements; expand pattern as above. */
4023 rtx scratch1, scratch2;
4025 if (CONST_INT_P (operands[2])
4026 && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
4028 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
4032 /* Ideally we should use iwmmxt here if we could know that operands[1]
4033 ends up already living in an iwmmxt register. Otherwise it's
4034 cheaper to have the alternate code being generated than moving
4035 values to iwmmxt regs and back. */
4037 /* If we're optimizing for size, we prefer the libgcc calls. */
4038 if (optimize_function_for_size_p (cfun))
4041 /* Expand operation using core-registers.
4042 'FAIL' would achieve the same thing, but this is a bit smarter. */
4043 scratch1 = gen_reg_rtx (SImode);
4044 scratch2 = gen_reg_rtx (SImode);
4045 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4046 operands[2], scratch1, scratch2);
4052 (define_insn_and_split "arm_lshrdi3_1bit"
4053 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4054 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
4056 (clobber (reg:CC CC_REGNUM))]
4058 "#" ; "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
4059 "&& reload_completed"
4060 [(parallel [(set (reg:CC CC_REGNUM)
4061 (compare:CC (lshiftrt:SI (match_dup 3) (const_int 1))
4063 (set (match_dup 2) (lshiftrt:SI (match_dup 3) (const_int 1)))])
4064 (set (match_dup 0) (unspec:SI [(match_dup 1)
4065 (reg:CC_C CC_REGNUM)]
4068 operands[2] = gen_highpart (SImode, operands[0]);
4069 operands[0] = gen_lowpart (SImode, operands[0]);
4070 operands[3] = gen_highpart (SImode, operands[1]);
4071 operands[1] = gen_lowpart (SImode, operands[1]);
4073 [(set_attr "conds" "clob")
4074 (set_attr "length" "8")]
4077 (define_expand "lshrsi3"
4078 [(set (match_operand:SI 0 "s_register_operand" "")
4079 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
4080 (match_operand:SI 2 "arm_rhs_operand" "")))]
4083 if (CONST_INT_P (operands[2])
4084 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4086 emit_insn (gen_movsi (operands[0], const0_rtx));
4092 (define_insn "*thumb1_lshrsi3"
4093 [(set (match_operand:SI 0 "register_operand" "=l,l")
4094 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
4095 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
4098 [(set_attr "length" "2")
4099 (set_attr "conds" "set")])
4101 (define_expand "rotlsi3"
4102 [(set (match_operand:SI 0 "s_register_operand" "")
4103 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4104 (match_operand:SI 2 "reg_or_int_operand" "")))]
4107 if (CONST_INT_P (operands[2]))
4108 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4111 rtx reg = gen_reg_rtx (SImode);
4112 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4118 (define_expand "rotrsi3"
4119 [(set (match_operand:SI 0 "s_register_operand" "")
4120 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
4121 (match_operand:SI 2 "arm_rhs_operand" "")))]
4126 if (CONST_INT_P (operands[2])
4127 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
4128 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4130 else /* TARGET_THUMB1 */
4132 if (CONST_INT_P (operands [2]))
4133 operands [2] = force_reg (SImode, operands[2]);
4138 (define_insn "*thumb1_rotrsi3"
4139 [(set (match_operand:SI 0 "register_operand" "=l")
4140 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
4141 (match_operand:SI 2 "register_operand" "l")))]
4144 [(set_attr "length" "2")]
4147 (define_insn "*arm_shiftsi3"
4148 [(set (match_operand:SI 0 "s_register_operand" "=r")
4149 (match_operator:SI 3 "shift_operator"
4150 [(match_operand:SI 1 "s_register_operand" "r")
4151 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
4153 "* return arm_output_shift(operands, 0);"
4154 [(set_attr "predicable" "yes")
4155 (set_attr "shift" "1")
4156 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4157 (const_string "alu_shift")
4158 (const_string "alu_shift_reg")))]
4161 (define_insn "*shiftsi3_compare"
4162 [(set (reg:CC CC_REGNUM)
4163 (compare:CC (match_operator:SI 3 "shift_operator"
4164 [(match_operand:SI 1 "s_register_operand" "r")
4165 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4167 (set (match_operand:SI 0 "s_register_operand" "=r")
4168 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4170 "* return arm_output_shift(operands, 1);"
4171 [(set_attr "conds" "set")
4172 (set_attr "shift" "1")
4173 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4174 (const_string "alu_shift")
4175 (const_string "alu_shift_reg")))]
4178 (define_insn "*shiftsi3_compare0"
4179 [(set (reg:CC_NOOV CC_REGNUM)
4180 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4181 [(match_operand:SI 1 "s_register_operand" "r")
4182 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4184 (set (match_operand:SI 0 "s_register_operand" "=r")
4185 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4187 "* return arm_output_shift(operands, 1);"
4188 [(set_attr "conds" "set")
4189 (set_attr "shift" "1")
4190 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
4191 (const_string "alu_shift")
4192 (const_string "alu_shift_reg")))]
4195 (define_insn "*shiftsi3_compare0_scratch"
4196 [(set (reg:CC_NOOV CC_REGNUM)
4197 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4198 [(match_operand:SI 1 "s_register_operand" "r")
4199 (match_operand:SI 2 "arm_rhs_operand" "rM")])
4201 (clobber (match_scratch:SI 0 "=r"))]
4203 "* return arm_output_shift(operands, 1);"
4204 [(set_attr "conds" "set")
4205 (set_attr "shift" "1")]
4208 (define_insn "*not_shiftsi"
4209 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4210 (not:SI (match_operator:SI 3 "shift_operator"
4211 [(match_operand:SI 1 "s_register_operand" "r,r")
4212 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4215 [(set_attr "predicable" "yes")
4216 (set_attr "shift" "1")
4217 (set_attr "insn" "mvn")
4218 (set_attr "arch" "32,a")
4219 (set_attr "type" "alu_shift,alu_shift_reg")])
4221 (define_insn "*not_shiftsi_compare0"
4222 [(set (reg:CC_NOOV CC_REGNUM)
4224 (not:SI (match_operator:SI 3 "shift_operator"
4225 [(match_operand:SI 1 "s_register_operand" "r,r")
4226 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4228 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4229 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4232 [(set_attr "conds" "set")
4233 (set_attr "shift" "1")
4234 (set_attr "insn" "mvn")
4235 (set_attr "arch" "32,a")
4236 (set_attr "type" "alu_shift,alu_shift_reg")])
4238 (define_insn "*not_shiftsi_compare0_scratch"
4239 [(set (reg:CC_NOOV CC_REGNUM)
4241 (not:SI (match_operator:SI 3 "shift_operator"
4242 [(match_operand:SI 1 "s_register_operand" "r,r")
4243 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4245 (clobber (match_scratch:SI 0 "=r,r"))]
4248 [(set_attr "conds" "set")
4249 (set_attr "shift" "1")
4250 (set_attr "insn" "mvn")
4251 (set_attr "arch" "32,a")
4252 (set_attr "type" "alu_shift,alu_shift_reg")])
4254 ;; We don't really have extzv, but defining this using shifts helps
4255 ;; to reduce register pressure later on.
4257 (define_expand "extzv"
4258 [(set (match_operand 0 "s_register_operand" "")
4259 (zero_extract (match_operand 1 "nonimmediate_operand" "")
4260 (match_operand 2 "const_int_operand" "")
4261 (match_operand 3 "const_int_operand" "")))]
4262 "TARGET_THUMB1 || arm_arch_thumb2"
4265 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4266 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4268 if (arm_arch_thumb2)
4270 HOST_WIDE_INT width = INTVAL (operands[2]);
4271 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4273 if (unaligned_access && MEM_P (operands[1])
4274 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4278 if (BYTES_BIG_ENDIAN)
4279 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4284 base_addr = adjust_address (operands[1], SImode,
4285 bitpos / BITS_PER_UNIT);
4286 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4290 rtx dest = operands[0];
4291 rtx tmp = gen_reg_rtx (SImode);
4293 /* We may get a paradoxical subreg here. Strip it off. */
4294 if (GET_CODE (dest) == SUBREG
4295 && GET_MODE (dest) == SImode
4296 && GET_MODE (SUBREG_REG (dest)) == HImode)
4297 dest = SUBREG_REG (dest);
4299 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4302 base_addr = adjust_address (operands[1], HImode,
4303 bitpos / BITS_PER_UNIT);
4304 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4305 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4309 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4311 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4319 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4322 operands[3] = GEN_INT (rshift);
4326 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4330 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4331 operands[3], gen_reg_rtx (SImode)));
4336 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4338 (define_expand "extzv_t1"
4339 [(set (match_operand:SI 4 "s_register_operand" "")
4340 (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
4341 (match_operand:SI 2 "const_int_operand" "")))
4342 (set (match_operand:SI 0 "s_register_operand" "")
4343 (lshiftrt:SI (match_dup 4)
4344 (match_operand:SI 3 "const_int_operand" "")))]
4348 (define_expand "extv"
4349 [(set (match_operand 0 "s_register_operand" "")
4350 (sign_extract (match_operand 1 "nonimmediate_operand" "")
4351 (match_operand 2 "const_int_operand" "")
4352 (match_operand 3 "const_int_operand" "")))]
4355 HOST_WIDE_INT width = INTVAL (operands[2]);
4356 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4358 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4359 && (bitpos % BITS_PER_UNIT) == 0)
4363 if (BYTES_BIG_ENDIAN)
4364 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4368 base_addr = adjust_address (operands[1], SImode,
4369 bitpos / BITS_PER_UNIT);
4370 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4374 rtx dest = operands[0];
4375 rtx tmp = gen_reg_rtx (SImode);
4377 /* We may get a paradoxical subreg here. Strip it off. */
4378 if (GET_CODE (dest) == SUBREG
4379 && GET_MODE (dest) == SImode
4380 && GET_MODE (SUBREG_REG (dest)) == HImode)
4381 dest = SUBREG_REG (dest);
4383 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4386 base_addr = adjust_address (operands[1], HImode,
4387 bitpos / BITS_PER_UNIT);
4388 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4389 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4394 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4396 else if (GET_MODE (operands[0]) == SImode
4397 && GET_MODE (operands[1]) == SImode)
4399 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4407 ; Helper to expand register forms of extv with the proper modes.
4409 (define_expand "extv_regsi"
4410 [(set (match_operand:SI 0 "s_register_operand" "")
4411 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4412 (match_operand 2 "const_int_operand" "")
4413 (match_operand 3 "const_int_operand" "")))]
4418 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4420 (define_insn "unaligned_loadsi"
4421 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4422 (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4423 UNSPEC_UNALIGNED_LOAD))]
4424 "unaligned_access && TARGET_32BIT"
4425 "ldr%?\t%0, %1\t@ unaligned"
4426 [(set_attr "arch" "t2,any")
4427 (set_attr "length" "2,4")
4428 (set_attr "predicable" "yes")
4429 (set_attr "type" "load1")])
4431 (define_insn "unaligned_loadhis"
4432 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4434 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4435 UNSPEC_UNALIGNED_LOAD)))]
4436 "unaligned_access && TARGET_32BIT"
4437 "ldr%(sh%)\t%0, %1\t@ unaligned"
4438 [(set_attr "arch" "t2,any")
4439 (set_attr "length" "2,4")
4440 (set_attr "predicable" "yes")
4441 (set_attr "type" "load_byte")])
4443 (define_insn "unaligned_loadhiu"
4444 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4446 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4447 UNSPEC_UNALIGNED_LOAD)))]
4448 "unaligned_access && TARGET_32BIT"
4449 "ldr%(h%)\t%0, %1\t@ unaligned"
4450 [(set_attr "arch" "t2,any")
4451 (set_attr "length" "2,4")
4452 (set_attr "predicable" "yes")
4453 (set_attr "type" "load_byte")])
4455 (define_insn "unaligned_storesi"
4456 [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4457 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4458 UNSPEC_UNALIGNED_STORE))]
4459 "unaligned_access && TARGET_32BIT"
4460 "str%?\t%1, %0\t@ unaligned"
4461 [(set_attr "arch" "t2,any")
4462 (set_attr "length" "2,4")
4463 (set_attr "predicable" "yes")
4464 (set_attr "type" "store1")])
4466 (define_insn "unaligned_storehi"
4467 [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4468 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4469 UNSPEC_UNALIGNED_STORE))]
4470 "unaligned_access && TARGET_32BIT"
4471 "str%(h%)\t%1, %0\t@ unaligned"
4472 [(set_attr "arch" "t2,any")
4473 (set_attr "length" "2,4")
4474 (set_attr "predicable" "yes")
4475 (set_attr "type" "store1")])
4477 ;; Unaligned double-word load and store.
4478 ;; Split after reload into two unaligned single-word accesses.
4479 ;; It prevents lower_subreg from splitting some other aligned
4480 ;; double-word accesses too early. Used for internal memcpy.
4482 (define_insn_and_split "unaligned_loaddi"
4483 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
4484 (unspec:DI [(match_operand:DI 1 "memory_operand" "o,o")]
4485 UNSPEC_UNALIGNED_LOAD))]
4486 "unaligned_access && TARGET_32BIT"
4488 "&& reload_completed"
4489 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_LOAD))
4490 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_LOAD))]
4492 operands[2] = gen_highpart (SImode, operands[0]);
4493 operands[0] = gen_lowpart (SImode, operands[0]);
4494 operands[3] = gen_highpart (SImode, operands[1]);
4495 operands[1] = gen_lowpart (SImode, operands[1]);
4497 /* If the first destination register overlaps with the base address,
4498 swap the order in which the loads are emitted. */
4499 if (reg_overlap_mentioned_p (operands[0], operands[1]))
4501 rtx tmp = operands[1];
4502 operands[1] = operands[3];
4505 operands[0] = operands[2];
4509 [(set_attr "arch" "t2,any")
4510 (set_attr "length" "4,8")
4511 (set_attr "predicable" "yes")
4512 (set_attr "type" "load2")])
4514 (define_insn_and_split "unaligned_storedi"
4515 [(set (match_operand:DI 0 "memory_operand" "=o,o")
4516 (unspec:DI [(match_operand:DI 1 "s_register_operand" "l,r")]
4517 UNSPEC_UNALIGNED_STORE))]
4518 "unaligned_access && TARGET_32BIT"
4520 "&& reload_completed"
4521 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_UNALIGNED_STORE))
4522 (set (match_dup 2) (unspec:SI [(match_dup 3)] UNSPEC_UNALIGNED_STORE))]
4524 operands[2] = gen_highpart (SImode, operands[0]);
4525 operands[0] = gen_lowpart (SImode, operands[0]);
4526 operands[3] = gen_highpart (SImode, operands[1]);
4527 operands[1] = gen_lowpart (SImode, operands[1]);
4529 [(set_attr "arch" "t2,any")
4530 (set_attr "length" "4,8")
4531 (set_attr "predicable" "yes")
4532 (set_attr "type" "store2")])
4535 (define_insn "*extv_reg"
4536 [(set (match_operand:SI 0 "s_register_operand" "=r")
4537 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4538 (match_operand:SI 2 "const_int_operand" "M")
4539 (match_operand:SI 3 "const_int_operand" "M")))]
4541 "sbfx%?\t%0, %1, %3, %2"
4542 [(set_attr "length" "4")
4543 (set_attr "predicable" "yes")]
4546 (define_insn "extzv_t2"
4547 [(set (match_operand:SI 0 "s_register_operand" "=r")
4548 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4549 (match_operand:SI 2 "const_int_operand" "M")
4550 (match_operand:SI 3 "const_int_operand" "M")))]
4552 "ubfx%?\t%0, %1, %3, %2"
4553 [(set_attr "length" "4")
4554 (set_attr "predicable" "yes")]
4558 ;; Division instructions
4559 (define_insn "divsi3"
4560 [(set (match_operand:SI 0 "s_register_operand" "=r")
4561 (div:SI (match_operand:SI 1 "s_register_operand" "r")
4562 (match_operand:SI 2 "s_register_operand" "r")))]
4564 "sdiv%?\t%0, %1, %2"
4565 [(set_attr "predicable" "yes")
4566 (set_attr "insn" "sdiv")]
4569 (define_insn "udivsi3"
4570 [(set (match_operand:SI 0 "s_register_operand" "=r")
4571 (udiv:SI (match_operand:SI 1 "s_register_operand" "r")
4572 (match_operand:SI 2 "s_register_operand" "r")))]
4574 "udiv%?\t%0, %1, %2"
4575 [(set_attr "predicable" "yes")
4576 (set_attr "insn" "udiv")]
4580 ;; Unary arithmetic insns
4582 (define_expand "negdi2"
4584 [(set (match_operand:DI 0 "s_register_operand" "")
4585 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4586 (clobber (reg:CC CC_REGNUM))])]
4591 emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4597 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4598 ;; The first alternative allows the common case of a *full* overlap.
4599 (define_insn_and_split "*arm_negdi2"
4600 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4601 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4602 (clobber (reg:CC CC_REGNUM))]
4604 "#" ; "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4605 "&& reload_completed"
4606 [(parallel [(set (reg:CC CC_REGNUM)
4607 (compare:CC (const_int 0) (match_dup 1)))
4608 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4609 (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
4610 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4612 operands[2] = gen_highpart (SImode, operands[0]);
4613 operands[0] = gen_lowpart (SImode, operands[0]);
4614 operands[3] = gen_highpart (SImode, operands[1]);
4615 operands[1] = gen_lowpart (SImode, operands[1]);
4617 [(set_attr "conds" "clob")
4618 (set_attr "length" "8")]
4621 (define_insn "*thumb1_negdi2"
4622 [(set (match_operand:DI 0 "register_operand" "=&l")
4623 (neg:DI (match_operand:DI 1 "register_operand" "l")))
4624 (clobber (reg:CC CC_REGNUM))]
4626 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4627 [(set_attr "length" "6")]
4630 (define_expand "negsi2"
4631 [(set (match_operand:SI 0 "s_register_operand" "")
4632 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4637 (define_insn "*arm_negsi2"
4638 [(set (match_operand:SI 0 "s_register_operand" "=r")
4639 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4641 "rsb%?\\t%0, %1, #0"
4642 [(set_attr "predicable" "yes")]
4645 (define_insn "*thumb1_negsi2"
4646 [(set (match_operand:SI 0 "register_operand" "=l")
4647 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
4650 [(set_attr "length" "2")]
4653 (define_expand "negsf2"
4654 [(set (match_operand:SF 0 "s_register_operand" "")
4655 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4656 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4660 (define_expand "negdf2"
4661 [(set (match_operand:DF 0 "s_register_operand" "")
4662 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4663 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4666 ;; Negate an extended 32-bit value.
4667 (define_insn_and_split "*negdi_extendsidi"
4668 [(set (match_operand:DI 0 "s_register_operand" "=r,&r,l,&l")
4669 (neg:DI (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r,0,l"))))
4670 (clobber (reg:CC CC_REGNUM))]
4672 "#" ; rsb\\t%Q0, %1, #0\;asr\\t%R0, %Q0, #31
4673 "&& reload_completed"
4676 operands[2] = gen_highpart (SImode, operands[0]);
4677 operands[0] = gen_lowpart (SImode, operands[0]);
4678 rtx tmp = gen_rtx_SET (VOIDmode,
4680 gen_rtx_MINUS (SImode,
4689 /* Set the flags, to emit the short encoding in Thumb2. */
4690 rtx flags = gen_rtx_SET (VOIDmode,
4691 gen_rtx_REG (CCmode, CC_REGNUM),
4692 gen_rtx_COMPARE (CCmode,
4695 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4700 emit_insn (gen_rtx_SET (VOIDmode,
4702 gen_rtx_ASHIFTRT (SImode,
4707 [(set_attr "length" "8,8,4,4")
4708 (set_attr "arch" "a,a,t2,t2")]
4711 (define_insn_and_split "*negdi_zero_extendsidi"
4712 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4713 (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
4714 (clobber (reg:CC CC_REGNUM))]
4716 "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
4717 ;; Don't care what register is input to sbc,
4718 ;; since we just just need to propagate the carry.
4719 "&& reload_completed"
4720 [(parallel [(set (reg:CC CC_REGNUM)
4721 (compare:CC (const_int 0) (match_dup 1)))
4722 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4723 (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
4724 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
4726 operands[2] = gen_highpart (SImode, operands[0]);
4727 operands[0] = gen_lowpart (SImode, operands[0]);
4729 [(set_attr "conds" "clob")
4730 (set_attr "length" "8")] ;; length in thumb is 4
4733 ;; abssi2 doesn't really clobber the condition codes if a different register
4734 ;; is being set. To keep things simple, assume during rtl manipulations that
4735 ;; it does, but tell the final scan operator the truth. Similarly for
4738 (define_expand "abssi2"
4740 [(set (match_operand:SI 0 "s_register_operand" "")
4741 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4742 (clobber (match_dup 2))])]
4746 operands[2] = gen_rtx_SCRATCH (SImode);
4748 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4751 (define_insn_and_split "*arm_abssi2"
4752 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4753 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4754 (clobber (reg:CC CC_REGNUM))]
4757 "&& reload_completed"
4760 /* if (which_alternative == 0) */
4761 if (REGNO(operands[0]) == REGNO(operands[1]))
4763 /* Emit the pattern:
4764 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4765 [(set (reg:CC CC_REGNUM)
4766 (compare:CC (match_dup 0) (const_int 0)))
4767 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4768 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4770 emit_insn (gen_rtx_SET (VOIDmode,
4771 gen_rtx_REG (CCmode, CC_REGNUM),
4772 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4773 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4774 (gen_rtx_LT (SImode,
4775 gen_rtx_REG (CCmode, CC_REGNUM),
4777 (gen_rtx_SET (VOIDmode,
4779 (gen_rtx_MINUS (SImode,
4786 /* Emit the pattern:
4787 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4789 (xor:SI (match_dup 1)
4790 (ashiftrt:SI (match_dup 1) (const_int 31))))
4792 (minus:SI (match_dup 0)
4793 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4795 emit_insn (gen_rtx_SET (VOIDmode,
4797 gen_rtx_XOR (SImode,
4798 gen_rtx_ASHIFTRT (SImode,
4802 emit_insn (gen_rtx_SET (VOIDmode,
4804 gen_rtx_MINUS (SImode,
4806 gen_rtx_ASHIFTRT (SImode,
4812 [(set_attr "conds" "clob,*")
4813 (set_attr "shift" "1")
4814 (set_attr "predicable" "no, yes")
4815 (set_attr "length" "8")]
4818 (define_insn_and_split "*thumb1_abssi2"
4819 [(set (match_operand:SI 0 "s_register_operand" "=l")
4820 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4821 (clobber (match_scratch:SI 2 "=&l"))]
4824 "TARGET_THUMB1 && reload_completed"
4825 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4826 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4827 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4829 [(set_attr "length" "6")]
4832 (define_insn_and_split "*arm_neg_abssi2"
4833 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4834 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4835 (clobber (reg:CC CC_REGNUM))]
4838 "&& reload_completed"
4841 /* if (which_alternative == 0) */
4842 if (REGNO (operands[0]) == REGNO (operands[1]))
4844 /* Emit the pattern:
4845 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4847 emit_insn (gen_rtx_SET (VOIDmode,
4848 gen_rtx_REG (CCmode, CC_REGNUM),
4849 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4850 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4852 gen_rtx_REG (CCmode, CC_REGNUM),
4854 gen_rtx_SET (VOIDmode,
4856 (gen_rtx_MINUS (SImode,
4862 /* Emit the pattern:
4863 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4865 emit_insn (gen_rtx_SET (VOIDmode,
4867 gen_rtx_XOR (SImode,
4868 gen_rtx_ASHIFTRT (SImode,
4872 emit_insn (gen_rtx_SET (VOIDmode,
4874 gen_rtx_MINUS (SImode,
4875 gen_rtx_ASHIFTRT (SImode,
4882 [(set_attr "conds" "clob,*")
4883 (set_attr "shift" "1")
4884 (set_attr "predicable" "no, yes")
4885 (set_attr "length" "8")]
4888 (define_insn_and_split "*thumb1_neg_abssi2"
4889 [(set (match_operand:SI 0 "s_register_operand" "=l")
4890 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4891 (clobber (match_scratch:SI 2 "=&l"))]
4894 "TARGET_THUMB1 && reload_completed"
4895 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4896 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4897 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4899 [(set_attr "length" "6")]
4902 (define_expand "abssf2"
4903 [(set (match_operand:SF 0 "s_register_operand" "")
4904 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4905 "TARGET_32BIT && TARGET_HARD_FLOAT"
4908 (define_expand "absdf2"
4909 [(set (match_operand:DF 0 "s_register_operand" "")
4910 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4911 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4914 (define_expand "sqrtsf2"
4915 [(set (match_operand:SF 0 "s_register_operand" "")
4916 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4917 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4920 (define_expand "sqrtdf2"
4921 [(set (match_operand:DF 0 "s_register_operand" "")
4922 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4923 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4926 (define_insn_and_split "one_cmpldi2"
4927 [(set (match_operand:DI 0 "s_register_operand" "=w,&r,&r,?w")
4928 (not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
4935 "TARGET_32BIT && reload_completed
4936 && arm_general_register_operand (operands[0], DImode)"
4937 [(set (match_dup 0) (not:SI (match_dup 1)))
4938 (set (match_dup 2) (not:SI (match_dup 3)))]
4941 operands[2] = gen_highpart (SImode, operands[0]);
4942 operands[0] = gen_lowpart (SImode, operands[0]);
4943 operands[3] = gen_highpart (SImode, operands[1]);
4944 operands[1] = gen_lowpart (SImode, operands[1]);
4946 [(set_attr "length" "*,8,8,*")
4947 (set_attr "predicable" "no,yes,yes,no")
4948 (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
4949 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")]
4952 (define_expand "one_cmplsi2"
4953 [(set (match_operand:SI 0 "s_register_operand" "")
4954 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
4959 (define_insn "*arm_one_cmplsi2"
4960 [(set (match_operand:SI 0 "s_register_operand" "=r")
4961 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
4964 [(set_attr "predicable" "yes")
4965 (set_attr "insn" "mvn")]
4968 (define_insn "*thumb1_one_cmplsi2"
4969 [(set (match_operand:SI 0 "register_operand" "=l")
4970 (not:SI (match_operand:SI 1 "register_operand" "l")))]
4973 [(set_attr "length" "2")
4974 (set_attr "insn" "mvn")]
4977 (define_insn "*notsi_compare0"
4978 [(set (reg:CC_NOOV CC_REGNUM)
4979 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4981 (set (match_operand:SI 0 "s_register_operand" "=r")
4982 (not:SI (match_dup 1)))]
4985 [(set_attr "conds" "set")
4986 (set_attr "insn" "mvn")]
4989 (define_insn "*notsi_compare0_scratch"
4990 [(set (reg:CC_NOOV CC_REGNUM)
4991 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4993 (clobber (match_scratch:SI 0 "=r"))]
4996 [(set_attr "conds" "set")
4997 (set_attr "insn" "mvn")]
5000 ;; Fixed <--> Floating conversion insns
5002 (define_expand "floatsihf2"
5003 [(set (match_operand:HF 0 "general_operand" "")
5004 (float:HF (match_operand:SI 1 "general_operand" "")))]
5008 rtx op1 = gen_reg_rtx (SFmode);
5009 expand_float (op1, operands[1], 0);
5010 op1 = convert_to_mode (HFmode, op1, 0);
5011 emit_move_insn (operands[0], op1);
5016 (define_expand "floatdihf2"
5017 [(set (match_operand:HF 0 "general_operand" "")
5018 (float:HF (match_operand:DI 1 "general_operand" "")))]
5022 rtx op1 = gen_reg_rtx (SFmode);
5023 expand_float (op1, operands[1], 0);
5024 op1 = convert_to_mode (HFmode, op1, 0);
5025 emit_move_insn (operands[0], op1);
5030 (define_expand "floatsisf2"
5031 [(set (match_operand:SF 0 "s_register_operand" "")
5032 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
5033 "TARGET_32BIT && TARGET_HARD_FLOAT"
5037 (define_expand "floatsidf2"
5038 [(set (match_operand:DF 0 "s_register_operand" "")
5039 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
5040 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5044 (define_expand "fix_trunchfsi2"
5045 [(set (match_operand:SI 0 "general_operand" "")
5046 (fix:SI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5050 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5051 expand_fix (operands[0], op1, 0);
5056 (define_expand "fix_trunchfdi2"
5057 [(set (match_operand:DI 0 "general_operand" "")
5058 (fix:DI (fix:HF (match_operand:HF 1 "general_operand" ""))))]
5062 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5063 expand_fix (operands[0], op1, 0);
5068 (define_expand "fix_truncsfsi2"
5069 [(set (match_operand:SI 0 "s_register_operand" "")
5070 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
5071 "TARGET_32BIT && TARGET_HARD_FLOAT"
5075 (define_expand "fix_truncdfsi2"
5076 [(set (match_operand:SI 0 "s_register_operand" "")
5077 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
5078 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5084 (define_expand "truncdfsf2"
5085 [(set (match_operand:SF 0 "s_register_operand" "")
5087 (match_operand:DF 1 "s_register_operand" "")))]
5088 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5092 /* DFmode -> HFmode conversions have to go through SFmode. */
5093 (define_expand "truncdfhf2"
5094 [(set (match_operand:HF 0 "general_operand" "")
5096 (match_operand:DF 1 "general_operand" "")))]
5101 op1 = convert_to_mode (SFmode, operands[1], 0);
5102 op1 = convert_to_mode (HFmode, op1, 0);
5103 emit_move_insn (operands[0], op1);
5108 ;; Zero and sign extension instructions.
5110 (define_insn "zero_extend<mode>di2"
5111 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
5112 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
5113 "<qhs_zextenddi_cstr>")))]
5114 "TARGET_32BIT <qhs_zextenddi_cond>"
5116 [(set_attr "length" "8,4,8,8")
5117 (set_attr "arch" "neon_for_64bits,*,*,avoid_neon_for_64bits")
5118 (set_attr "ce_count" "2")
5119 (set_attr "predicable" "yes")]
5122 (define_insn "extend<mode>di2"
5123 [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
5124 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
5125 "<qhs_extenddi_cstr>")))]
5126 "TARGET_32BIT <qhs_sextenddi_cond>"
5128 [(set_attr "length" "8,4,8,8,8")
5129 (set_attr "ce_count" "2")
5130 (set_attr "shift" "1")
5131 (set_attr "predicable" "yes")
5132 (set_attr "arch" "neon_for_64bits,*,a,t,avoid_neon_for_64bits")]
5135 ;; Splits for all extensions to DImode
5137 [(set (match_operand:DI 0 "s_register_operand" "")
5138 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5139 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5140 [(set (match_dup 0) (match_dup 1))]
5142 rtx lo_part = gen_lowpart (SImode, operands[0]);
5143 enum machine_mode src_mode = GET_MODE (operands[1]);
5145 if (REG_P (operands[0])
5146 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5147 emit_clobber (operands[0]);
5148 if (!REG_P (lo_part) || src_mode != SImode
5149 || !rtx_equal_p (lo_part, operands[1]))
5151 if (src_mode == SImode)
5152 emit_move_insn (lo_part, operands[1]);
5154 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5155 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5156 operands[1] = lo_part;
5158 operands[0] = gen_highpart (SImode, operands[0]);
5159 operands[1] = const0_rtx;
5163 [(set (match_operand:DI 0 "s_register_operand" "")
5164 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5165 "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
5166 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5168 rtx lo_part = gen_lowpart (SImode, operands[0]);
5169 enum machine_mode src_mode = GET_MODE (operands[1]);
5171 if (REG_P (operands[0])
5172 && !reg_overlap_mentioned_p (operands[0], operands[1]))
5173 emit_clobber (operands[0]);
5175 if (!REG_P (lo_part) || src_mode != SImode
5176 || !rtx_equal_p (lo_part, operands[1]))
5178 if (src_mode == SImode)
5179 emit_move_insn (lo_part, operands[1]);
5181 emit_insn (gen_rtx_SET (VOIDmode, lo_part,
5182 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5183 operands[1] = lo_part;
5185 operands[0] = gen_highpart (SImode, operands[0]);
5188 (define_expand "zero_extendhisi2"
5189 [(set (match_operand:SI 0 "s_register_operand" "")
5190 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5193 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5195 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5198 if (!arm_arch6 && !MEM_P (operands[1]))
5200 rtx t = gen_lowpart (SImode, operands[1]);
5201 rtx tmp = gen_reg_rtx (SImode);
5202 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5203 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5209 [(set (match_operand:SI 0 "s_register_operand" "")
5210 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5211 "!TARGET_THUMB2 && !arm_arch6"
5212 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5213 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5215 operands[2] = gen_lowpart (SImode, operands[1]);
5218 (define_insn "*thumb1_zero_extendhisi2"
5219 [(set (match_operand:SI 0 "register_operand" "=l,l")
5220 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
5225 if (which_alternative == 0 && arm_arch6)
5226 return "uxth\t%0, %1";
5227 if (which_alternative == 0)
5230 mem = XEXP (operands[1], 0);
5232 if (GET_CODE (mem) == CONST)
5233 mem = XEXP (mem, 0);
5235 if (GET_CODE (mem) == PLUS)
5237 rtx a = XEXP (mem, 0);
5239 /* This can happen due to bugs in reload. */
5240 if (REG_P (a) && REGNO (a) == SP_REGNUM)
5243 ops[0] = operands[0];
5246 output_asm_insn ("mov\t%0, %1", ops);
5248 XEXP (mem, 0) = operands[0];
5252 return "ldrh\t%0, %1";
5254 [(set_attr_alternative "length"
5255 [(if_then_else (eq_attr "is_arch6" "yes")
5256 (const_int 2) (const_int 4))
5258 (set_attr "type" "simple_alu_shift, load_byte")]
5261 (define_insn "*arm_zero_extendhisi2"
5262 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5263 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5264 "TARGET_ARM && arm_arch4 && !arm_arch6"
5268 [(set_attr "type" "alu_shift,load_byte")
5269 (set_attr "predicable" "yes")]
5272 (define_insn "*arm_zero_extendhisi2_v6"
5273 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5274 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5275 "TARGET_ARM && arm_arch6"
5279 [(set_attr "predicable" "yes")
5280 (set_attr "type" "simple_alu_shift,load_byte")]
5283 (define_insn "*arm_zero_extendhisi2addsi"
5284 [(set (match_operand:SI 0 "s_register_operand" "=r")
5285 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5286 (match_operand:SI 2 "s_register_operand" "r")))]
5288 "uxtah%?\\t%0, %2, %1"
5289 [(set_attr "type" "alu_shift")
5290 (set_attr "predicable" "yes")]
5293 (define_expand "zero_extendqisi2"
5294 [(set (match_operand:SI 0 "s_register_operand" "")
5295 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
5298 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5300 emit_insn (gen_andsi3 (operands[0],
5301 gen_lowpart (SImode, operands[1]),
5305 if (!arm_arch6 && !MEM_P (operands[1]))
5307 rtx t = gen_lowpart (SImode, operands[1]);
5308 rtx tmp = gen_reg_rtx (SImode);
5309 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5310 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5316 [(set (match_operand:SI 0 "s_register_operand" "")
5317 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5319 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5320 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5322 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5325 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5330 (define_insn "*thumb1_zero_extendqisi2"
5331 [(set (match_operand:SI 0 "register_operand" "=l,l")
5332 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5333 "TARGET_THUMB1 && !arm_arch6"
5337 [(set_attr "length" "4,2")
5338 (set_attr "type" "alu_shift,load_byte")
5339 (set_attr "pool_range" "*,32")]
5342 (define_insn "*thumb1_zero_extendqisi2_v6"
5343 [(set (match_operand:SI 0 "register_operand" "=l,l")
5344 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
5345 "TARGET_THUMB1 && arm_arch6"
5349 [(set_attr "length" "2")
5350 (set_attr "type" "simple_alu_shift,load_byte")]
5353 (define_insn "*arm_zero_extendqisi2"
5354 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5355 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5356 "TARGET_ARM && !arm_arch6"
5359 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5360 [(set_attr "length" "8,4")
5361 (set_attr "type" "alu_shift,load_byte")
5362 (set_attr "predicable" "yes")]
5365 (define_insn "*arm_zero_extendqisi2_v6"
5366 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5367 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5368 "TARGET_ARM && arm_arch6"
5371 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
5372 [(set_attr "type" "simple_alu_shift,load_byte")
5373 (set_attr "predicable" "yes")]
5376 (define_insn "*arm_zero_extendqisi2addsi"
5377 [(set (match_operand:SI 0 "s_register_operand" "=r")
5378 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5379 (match_operand:SI 2 "s_register_operand" "r")))]
5381 "uxtab%?\\t%0, %2, %1"
5382 [(set_attr "predicable" "yes")
5383 (set_attr "insn" "xtab")
5384 (set_attr "type" "alu_shift")]
5388 [(set (match_operand:SI 0 "s_register_operand" "")
5389 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5390 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5391 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5392 [(set (match_dup 2) (match_dup 1))
5393 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5398 [(set (match_operand:SI 0 "s_register_operand" "")
5399 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5400 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5401 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5402 [(set (match_dup 2) (match_dup 1))
5403 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5409 [(set (match_operand:SI 0 "s_register_operand" "")
5410 (ior_xor:SI (and:SI (ashift:SI
5411 (match_operand:SI 1 "s_register_operand" "")
5412 (match_operand:SI 2 "const_int_operand" ""))
5413 (match_operand:SI 3 "const_int_operand" ""))
5415 (match_operator 5 "subreg_lowpart_operator"
5416 [(match_operand:SI 4 "s_register_operand" "")]))))]
5418 && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
5419 == (GET_MODE_MASK (GET_MODE (operands[5]))
5420 & (GET_MODE_MASK (GET_MODE (operands[5]))
5421 << (INTVAL (operands[2])))))"
5422 [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
5424 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5425 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5428 (define_insn "*compareqi_eq0"
5429 [(set (reg:CC_Z CC_REGNUM)
5430 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5434 [(set_attr "conds" "set")
5435 (set_attr "predicable" "yes")]
5438 (define_expand "extendhisi2"
5439 [(set (match_operand:SI 0 "s_register_operand" "")
5440 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
5445 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5448 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5450 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5454 if (!arm_arch6 && !MEM_P (operands[1]))
5456 rtx t = gen_lowpart (SImode, operands[1]);
5457 rtx tmp = gen_reg_rtx (SImode);
5458 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5459 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5466 [(set (match_operand:SI 0 "register_operand" "")
5467 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5468 (clobber (match_scratch:SI 2 ""))])]
5470 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5471 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5473 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5476 ;; We used to have an early-clobber on the scratch register here.
5477 ;; However, there's a bug somewhere in reload which means that this
5478 ;; can be partially ignored during spill allocation if the memory
5479 ;; address also needs reloading; this causes us to die later on when
5480 ;; we try to verify the operands. Fortunately, we don't really need
5481 ;; the early-clobber: we can always use operand 0 if operand 2
5482 ;; overlaps the address.
5483 (define_insn "thumb1_extendhisi2"
5484 [(set (match_operand:SI 0 "register_operand" "=l,l")
5485 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
5486 (clobber (match_scratch:SI 2 "=X,l"))]
5493 if (which_alternative == 0 && !arm_arch6)
5495 if (which_alternative == 0)
5496 return \"sxth\\t%0, %1\";
5498 mem = XEXP (operands[1], 0);
5500 /* This code used to try to use 'V', and fix the address only if it was
5501 offsettable, but this fails for e.g. REG+48 because 48 is outside the
5502 range of QImode offsets, and offsettable_address_p does a QImode
5505 if (GET_CODE (mem) == CONST)
5506 mem = XEXP (mem, 0);
5508 if (GET_CODE (mem) == LABEL_REF)
5509 return \"ldr\\t%0, %1\";
5511 if (GET_CODE (mem) == PLUS)
5513 rtx a = XEXP (mem, 0);
5514 rtx b = XEXP (mem, 1);
5516 if (GET_CODE (a) == LABEL_REF
5518 return \"ldr\\t%0, %1\";
5521 return \"ldrsh\\t%0, %1\";
5529 ops[2] = const0_rtx;
5532 gcc_assert (REG_P (ops[1]));
5534 ops[0] = operands[0];
5535 if (reg_mentioned_p (operands[2], ops[1]))
5538 ops[3] = operands[2];
5539 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
5542 [(set_attr_alternative "length"
5543 [(if_then_else (eq_attr "is_arch6" "yes")
5544 (const_int 2) (const_int 4))
5546 (set_attr "type" "simple_alu_shift,load_byte")
5547 (set_attr "pool_range" "*,1018")]
5550 ;; This pattern will only be used when ldsh is not available
5551 (define_expand "extendhisi2_mem"
5552 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5554 (zero_extend:SI (match_dup 7)))
5555 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5556 (set (match_operand:SI 0 "" "")
5557 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5562 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5564 mem1 = change_address (operands[1], QImode, addr);
5565 mem2 = change_address (operands[1], QImode,
5566 plus_constant (Pmode, addr, 1));
5567 operands[0] = gen_lowpart (SImode, operands[0]);
5569 operands[2] = gen_reg_rtx (SImode);
5570 operands[3] = gen_reg_rtx (SImode);
5571 operands[6] = gen_reg_rtx (SImode);
5574 if (BYTES_BIG_ENDIAN)
5576 operands[4] = operands[2];
5577 operands[5] = operands[3];
5581 operands[4] = operands[3];
5582 operands[5] = operands[2];
5588 [(set (match_operand:SI 0 "register_operand" "")
5589 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5591 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5592 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5594 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5597 (define_insn "*arm_extendhisi2"
5598 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5599 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5600 "TARGET_ARM && arm_arch4 && !arm_arch6"
5604 [(set_attr "length" "8,4")
5605 (set_attr "type" "alu_shift,load_byte")
5606 (set_attr "predicable" "yes")
5607 (set_attr "pool_range" "*,256")
5608 (set_attr "neg_pool_range" "*,244")]
5611 ;; ??? Check Thumb-2 pool range
5612 (define_insn "*arm_extendhisi2_v6"
5613 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5614 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5615 "TARGET_32BIT && arm_arch6"
5619 [(set_attr "type" "simple_alu_shift,load_byte")
5620 (set_attr "predicable" "yes")
5621 (set_attr "pool_range" "*,256")
5622 (set_attr "neg_pool_range" "*,244")]
5625 (define_insn "*arm_extendhisi2addsi"
5626 [(set (match_operand:SI 0 "s_register_operand" "=r")
5627 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5628 (match_operand:SI 2 "s_register_operand" "r")))]
5630 "sxtah%?\\t%0, %2, %1"
5633 (define_expand "extendqihi2"
5635 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5637 (set (match_operand:HI 0 "s_register_operand" "")
5638 (ashiftrt:SI (match_dup 2)
5643 if (arm_arch4 && MEM_P (operands[1]))
5645 emit_insn (gen_rtx_SET (VOIDmode,
5647 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5650 if (!s_register_operand (operands[1], QImode))
5651 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5652 operands[0] = gen_lowpart (SImode, operands[0]);
5653 operands[1] = gen_lowpart (SImode, operands[1]);
5654 operands[2] = gen_reg_rtx (SImode);
5658 (define_insn "*arm_extendqihi_insn"
5659 [(set (match_operand:HI 0 "s_register_operand" "=r")
5660 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5661 "TARGET_ARM && arm_arch4"
5662 "ldr%(sb%)\\t%0, %1"
5663 [(set_attr "type" "load_byte")
5664 (set_attr "predicable" "yes")
5665 (set_attr "pool_range" "256")
5666 (set_attr "neg_pool_range" "244")]
5669 (define_expand "extendqisi2"
5670 [(set (match_operand:SI 0 "s_register_operand" "")
5671 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
5674 if (!arm_arch4 && MEM_P (operands[1]))
5675 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5677 if (!arm_arch6 && !MEM_P (operands[1]))
5679 rtx t = gen_lowpart (SImode, operands[1]);
5680 rtx tmp = gen_reg_rtx (SImode);
5681 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5682 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5688 [(set (match_operand:SI 0 "register_operand" "")
5689 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5691 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5692 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5694 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5697 (define_insn "*arm_extendqisi"
5698 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5699 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5700 "TARGET_ARM && arm_arch4 && !arm_arch6"
5704 [(set_attr "length" "8,4")
5705 (set_attr "type" "alu_shift,load_byte")
5706 (set_attr "predicable" "yes")
5707 (set_attr "pool_range" "*,256")
5708 (set_attr "neg_pool_range" "*,244")]
5711 (define_insn "*arm_extendqisi_v6"
5712 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5714 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5715 "TARGET_ARM && arm_arch6"
5719 [(set_attr "type" "simple_alu_shift,load_byte")
5720 (set_attr "predicable" "yes")
5721 (set_attr "pool_range" "*,256")
5722 (set_attr "neg_pool_range" "*,244")]
5725 (define_insn "*arm_extendqisi2addsi"
5726 [(set (match_operand:SI 0 "s_register_operand" "=r")
5727 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5728 (match_operand:SI 2 "s_register_operand" "r")))]
5730 "sxtab%?\\t%0, %2, %1"
5731 [(set_attr "type" "alu_shift")
5732 (set_attr "insn" "xtab")
5733 (set_attr "predicable" "yes")]
5737 [(set (match_operand:SI 0 "register_operand" "")
5738 (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5739 "TARGET_THUMB1 && reload_completed"
5740 [(set (match_dup 0) (match_dup 2))
5741 (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5743 rtx addr = XEXP (operands[1], 0);
5745 if (GET_CODE (addr) == CONST)
5746 addr = XEXP (addr, 0);
5748 if (GET_CODE (addr) == PLUS
5749 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5750 /* No split necessary. */
5753 if (GET_CODE (addr) == PLUS
5754 && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5757 if (reg_overlap_mentioned_p (operands[0], addr))
5759 rtx t = gen_lowpart (QImode, operands[0]);
5760 emit_move_insn (t, operands[1]);
5761 emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5767 addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5768 operands[2] = const0_rtx;
5770 else if (GET_CODE (addr) != PLUS)
5772 else if (REG_P (XEXP (addr, 0)))
5774 operands[2] = XEXP (addr, 1);
5775 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5779 operands[2] = XEXP (addr, 0);
5780 addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5783 operands[3] = change_address (operands[1], QImode, addr);
5787 [(set (match_operand:SI 0 "register_operand" "")
5788 (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5789 (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5790 (set (match_operand:SI 3 "register_operand" "")
5791 (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5793 && GET_CODE (XEXP (operands[4], 0)) == PLUS
5794 && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5795 && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5796 && (peep2_reg_dead_p (3, operands[0])
5797 || rtx_equal_p (operands[0], operands[3]))
5798 && (peep2_reg_dead_p (3, operands[2])
5799 || rtx_equal_p (operands[2], operands[3]))"
5800 [(set (match_dup 2) (match_dup 1))
5801 (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5803 rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5804 operands[4] = change_address (operands[4], QImode, addr);
5807 (define_insn "thumb1_extendqisi2"
5808 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5809 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5814 if (which_alternative == 0 && arm_arch6)
5815 return "sxtb\\t%0, %1";
5816 if (which_alternative == 0)
5819 addr = XEXP (operands[1], 0);
5820 if (GET_CODE (addr) == PLUS
5821 && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5822 return "ldrsb\\t%0, %1";
5826 [(set_attr_alternative "length"
5827 [(if_then_else (eq_attr "is_arch6" "yes")
5828 (const_int 2) (const_int 4))
5830 (if_then_else (eq_attr "is_arch6" "yes")
5831 (const_int 4) (const_int 6))])
5832 (set_attr "type" "simple_alu_shift,load_byte,load_byte")]
5835 (define_expand "extendsfdf2"
5836 [(set (match_operand:DF 0 "s_register_operand" "")
5837 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
5838 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5842 /* HFmode -> DFmode conversions have to go through SFmode. */
5843 (define_expand "extendhfdf2"
5844 [(set (match_operand:DF 0 "general_operand" "")
5845 (float_extend:DF (match_operand:HF 1 "general_operand" "")))]
5850 op1 = convert_to_mode (SFmode, operands[1], 0);
5851 op1 = convert_to_mode (DFmode, op1, 0);
5852 emit_insn (gen_movdf (operands[0], op1));
5857 ;; Move insns (including loads and stores)
5859 ;; XXX Just some ideas about movti.
5860 ;; I don't think these are a good idea on the arm, there just aren't enough
5862 ;;(define_expand "loadti"
5863 ;; [(set (match_operand:TI 0 "s_register_operand" "")
5864 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
5867 ;;(define_expand "storeti"
5868 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5869 ;; (match_operand:TI 1 "s_register_operand" ""))]
5872 ;;(define_expand "movti"
5873 ;; [(set (match_operand:TI 0 "general_operand" "")
5874 ;; (match_operand:TI 1 "general_operand" ""))]
5880 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5881 ;; operands[1] = copy_to_reg (operands[1]);
5882 ;; if (MEM_P (operands[0]))
5883 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5884 ;; else if (MEM_P (operands[1]))
5885 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5889 ;; emit_insn (insn);
5893 ;; Recognize garbage generated above.
5896 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5897 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5901 ;; register mem = (which_alternative < 3);
5902 ;; register const char *template;
5904 ;; operands[mem] = XEXP (operands[mem], 0);
5905 ;; switch (which_alternative)
5907 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5908 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5909 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5910 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5911 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5912 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5914 ;; output_asm_insn (template, operands);
5918 (define_expand "movdi"
5919 [(set (match_operand:DI 0 "general_operand" "")
5920 (match_operand:DI 1 "general_operand" ""))]
5923 if (can_create_pseudo_p ())
5925 if (!REG_P (operands[0]))
5926 operands[1] = force_reg (DImode, operands[1]);
5931 (define_insn "*arm_movdi"
5932 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, q, m")
5933 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,q"))]
5935 && !(TARGET_HARD_FLOAT && TARGET_VFP)
5937 && ( register_operand (operands[0], DImode)
5938 || register_operand (operands[1], DImode))"
5940 switch (which_alternative)
5947 return output_move_double (operands, true, NULL);
5950 [(set_attr "length" "8,12,16,8,8")
5951 (set_attr "type" "*,*,*,load2,store2")
5952 (set_attr "arm_pool_range" "*,*,*,1020,*")
5953 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5954 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5955 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5959 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5960 (match_operand:ANY64 1 "const_double_operand" ""))]
5963 && (arm_const_double_inline_cost (operands[1])
5964 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5967 arm_split_constant (SET, SImode, curr_insn,
5968 INTVAL (gen_lowpart (SImode, operands[1])),
5969 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5970 arm_split_constant (SET, SImode, curr_insn,
5971 INTVAL (gen_highpart_mode (SImode,
5972 GET_MODE (operands[0]),
5974 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5979 ; If optimizing for size, or if we have load delay slots, then
5980 ; we want to split the constant into two separate operations.
5981 ; In both cases this may split a trivial part into a single data op
5982 ; leaving a single complex constant to load. We can also get longer
5983 ; offsets in a LDR which means we get better chances of sharing the pool
5984 ; entries. Finally, we can normally do a better job of scheduling
5985 ; LDR instructions than we can with LDM.
5986 ; This pattern will only match if the one above did not.
5988 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5989 (match_operand:ANY64 1 "const_double_operand" ""))]
5990 "TARGET_ARM && reload_completed
5991 && arm_const_double_by_parts (operands[1])"
5992 [(set (match_dup 0) (match_dup 1))
5993 (set (match_dup 2) (match_dup 3))]
5995 operands[2] = gen_highpart (SImode, operands[0]);
5996 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5998 operands[0] = gen_lowpart (SImode, operands[0]);
5999 operands[1] = gen_lowpart (SImode, operands[1]);
6004 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6005 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
6006 "TARGET_EITHER && reload_completed"
6007 [(set (match_dup 0) (match_dup 1))
6008 (set (match_dup 2) (match_dup 3))]
6010 operands[2] = gen_highpart (SImode, operands[0]);
6011 operands[3] = gen_highpart (SImode, operands[1]);
6012 operands[0] = gen_lowpart (SImode, operands[0]);
6013 operands[1] = gen_lowpart (SImode, operands[1]);
6015 /* Handle a partial overlap. */
6016 if (rtx_equal_p (operands[0], operands[3]))
6018 rtx tmp0 = operands[0];
6019 rtx tmp1 = operands[1];
6021 operands[0] = operands[2];
6022 operands[1] = operands[3];
6029 ;; We can't actually do base+index doubleword loads if the index and
6030 ;; destination overlap. Split here so that we at least have chance to
6033 [(set (match_operand:DI 0 "s_register_operand" "")
6034 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6035 (match_operand:SI 2 "s_register_operand" ""))))]
6037 && reg_overlap_mentioned_p (operands[0], operands[1])
6038 && reg_overlap_mentioned_p (operands[0], operands[2])"
6040 (plus:SI (match_dup 1)
6043 (mem:DI (match_dup 4)))]
6045 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6049 ;;; ??? This should have alternatives for constants.
6050 ;;; ??? This was originally identical to the movdf_insn pattern.
6051 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
6052 ;;; thumb_reorg with a memory reference.
6053 (define_insn "*thumb1_movdi_insn"
6054 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
6055 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
6057 && ( register_operand (operands[0], DImode)
6058 || register_operand (operands[1], DImode))"
6061 switch (which_alternative)
6065 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6066 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6067 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6069 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
6071 operands[1] = GEN_INT (- INTVAL (operands[1]));
6072 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
6074 return \"ldmia\\t%1, {%0, %H0}\";
6076 return \"stmia\\t%0, {%1, %H1}\";
6078 return thumb_load_double_from_address (operands);
6080 operands[2] = gen_rtx_MEM (SImode,
6081 plus_constant (Pmode, XEXP (operands[0], 0), 4));
6082 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6085 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6086 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6087 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6090 [(set_attr "length" "4,4,6,2,2,6,4,4")
6091 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
6092 (set_attr "insn" "*,mov,*,*,*,*,*,mov")
6093 (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
6096 (define_expand "movsi"
6097 [(set (match_operand:SI 0 "general_operand" "")
6098 (match_operand:SI 1 "general_operand" ""))]
6102 rtx base, offset, tmp;
6106 /* Everything except mem = const or mem = mem can be done easily. */
6107 if (MEM_P (operands[0]))
6108 operands[1] = force_reg (SImode, operands[1]);
6109 if (arm_general_register_operand (operands[0], SImode)
6110 && CONST_INT_P (operands[1])
6111 && !(const_ok_for_arm (INTVAL (operands[1]))
6112 || const_ok_for_arm (~INTVAL (operands[1]))))
6114 arm_split_constant (SET, SImode, NULL_RTX,
6115 INTVAL (operands[1]), operands[0], NULL_RTX,
6116 optimize && can_create_pseudo_p ());
6120 else /* TARGET_THUMB1... */
6122 if (can_create_pseudo_p ())
6124 if (!REG_P (operands[0]))
6125 operands[1] = force_reg (SImode, operands[1]);
6129 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
6131 split_const (operands[1], &base, &offset);
6132 if (GET_CODE (base) == SYMBOL_REF
6133 && !offset_within_block_p (base, INTVAL (offset)))
6135 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6136 emit_move_insn (tmp, base);
6137 emit_insn (gen_addsi3 (operands[0], tmp, offset));
6142 /* Recognize the case where operand[1] is a reference to thread-local
6143 data and load its address to a register. */
6144 if (arm_tls_referenced_p (operands[1]))
6146 rtx tmp = operands[1];
6149 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
6151 addend = XEXP (XEXP (tmp, 0), 1);
6152 tmp = XEXP (XEXP (tmp, 0), 0);
6155 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
6156 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
6158 tmp = legitimize_tls_address (tmp,
6159 !can_create_pseudo_p () ? operands[0] : 0);
6162 tmp = gen_rtx_PLUS (SImode, tmp, addend);
6163 tmp = force_operand (tmp, operands[0]);
6168 && (CONSTANT_P (operands[1])
6169 || symbol_mentioned_p (operands[1])
6170 || label_mentioned_p (operands[1])))
6171 operands[1] = legitimize_pic_address (operands[1], SImode,
6172 (!can_create_pseudo_p ()
6179 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6180 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
6181 ;; so this does not matter.
6182 (define_insn "*arm_movt"
6183 [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
6184 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
6185 (match_operand:SI 2 "general_operand" "i")))]
6187 "movt%?\t%0, #:upper16:%c2"
6188 [(set_attr "predicable" "yes")
6189 (set_attr "length" "4")]
6192 (define_insn "*arm_movsi_insn"
6193 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6194 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
6195 "TARGET_ARM && ! TARGET_IWMMXT
6196 && !(TARGET_HARD_FLOAT && TARGET_VFP)
6197 && ( register_operand (operands[0], SImode)
6198 || register_operand (operands[1], SImode))"
6206 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,simple_alu_imm,load1,store1")
6207 (set_attr "insn" "mov,mov,mvn,mov,*,*")
6208 (set_attr "predicable" "yes")
6209 (set_attr "pool_range" "*,*,*,*,4096,*")
6210 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6214 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6215 (match_operand:SI 1 "const_int_operand" ""))]
6217 && (!(const_ok_for_arm (INTVAL (operands[1]))
6218 || const_ok_for_arm (~INTVAL (operands[1]))))"
6219 [(clobber (const_int 0))]
6221 arm_split_constant (SET, SImode, NULL_RTX,
6222 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6227 ;; Split symbol_refs at the later stage (after cprop), instead of generating
6228 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
6229 ;; and lo_sum would be merged back into memory load at cprop. However,
6230 ;; if the default is to prefer movt/movw rather than a load from the constant
6231 ;; pool, the performance is better.
6233 [(set (match_operand:SI 0 "arm_general_register_operand" "")
6234 (match_operand:SI 1 "general_operand" ""))]
6236 && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6237 && !flag_pic && !target_word_relocations
6238 && !arm_tls_referenced_p (operands[1])"
6239 [(clobber (const_int 0))]
6241 arm_emit_movpair (operands[0], operands[1]);
6245 (define_insn "*thumb1_movsi_insn"
6246 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
6247 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*l*h*k"))]
6249 && ( register_operand (operands[0], SImode)
6250 || register_operand (operands[1], SImode))"
6261 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
6262 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
6263 (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
6264 (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
6267 [(set (match_operand:SI 0 "register_operand" "")
6268 (match_operand:SI 1 "const_int_operand" ""))]
6269 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
6270 [(set (match_dup 2) (match_dup 1))
6271 (set (match_dup 0) (neg:SI (match_dup 2)))]
6274 operands[1] = GEN_INT (- INTVAL (operands[1]));
6275 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6280 [(set (match_operand:SI 0 "register_operand" "")
6281 (match_operand:SI 1 "const_int_operand" ""))]
6282 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
6283 [(set (match_dup 2) (match_dup 1))
6284 (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
6287 unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
6288 unsigned HOST_WIDE_INT mask = 0xff;
6291 for (i = 0; i < 25; i++)
6292 if ((val & (mask << i)) == val)
6295 /* Don't split if the shift is zero. */
6299 operands[1] = GEN_INT (val >> i);
6300 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6301 operands[3] = GEN_INT (i);
6305 ;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
6307 [(set (match_operand:SI 0 "register_operand" "")
6308 (match_operand:SI 1 "const_int_operand" ""))]
6309 "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
6310 [(set (match_dup 2) (match_dup 1))
6311 (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
6314 operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
6315 operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6316 operands[3] = GEN_INT (255);
6320 ;; When generating pic, we need to load the symbol offset into a register.
6321 ;; So that the optimizer does not confuse this with a normal symbol load
6322 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6323 ;; since that is the only type of relocation we can use.
6325 ;; Wrap calculation of the whole PIC address in a single pattern for the
6326 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6327 ;; a PIC address involves two loads from memory, so we want to CSE it
6328 ;; as often as possible.
6329 ;; This pattern will be split into one of the pic_load_addr_* patterns
6330 ;; and a move after GCSE optimizations.
6332 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
6333 (define_expand "calculate_pic_address"
6334 [(set (match_operand:SI 0 "register_operand" "")
6335 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6336 (unspec:SI [(match_operand:SI 2 "" "")]
6341 ;; Split calculate_pic_address into pic_load_addr_* and a move.
6343 [(set (match_operand:SI 0 "register_operand" "")
6344 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6345 (unspec:SI [(match_operand:SI 2 "" "")]
6348 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6349 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6350 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6353 ;; operand1 is the memory address to go into
6354 ;; pic_load_addr_32bit.
6355 ;; operand2 is the PIC label to be emitted
6356 ;; from pic_add_dot_plus_eight.
6357 ;; We do this to allow hoisting of the entire insn.
6358 (define_insn_and_split "pic_load_addr_unified"
6359 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6360 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6361 (match_operand:SI 2 "" "")]
6362 UNSPEC_PIC_UNIFIED))]
6365 "&& reload_completed"
6366 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6367 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6368 (match_dup 2)] UNSPEC_PIC_BASE))]
6369 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6370 [(set_attr "type" "load1,load1,load1")
6371 (set_attr "pool_range" "4096,4094,1022")
6372 (set_attr "neg_pool_range" "4084,0,0")
6373 (set_attr "arch" "a,t2,t1")
6374 (set_attr "length" "8,6,4")]
6377 ;; The rather odd constraints on the following are to force reload to leave
6378 ;; the insn alone, and to force the minipool generation pass to then move
6379 ;; the GOT symbol to memory.
6381 (define_insn "pic_load_addr_32bit"
6382 [(set (match_operand:SI 0 "s_register_operand" "=r")
6383 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6384 "TARGET_32BIT && flag_pic"
6386 [(set_attr "type" "load1")
6387 (set (attr "pool_range")
6388 (if_then_else (eq_attr "is_thumb" "no")
6391 (set (attr "neg_pool_range")
6392 (if_then_else (eq_attr "is_thumb" "no")
6397 (define_insn "pic_load_addr_thumb1"
6398 [(set (match_operand:SI 0 "s_register_operand" "=l")
6399 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6400 "TARGET_THUMB1 && flag_pic"
6402 [(set_attr "type" "load1")
6403 (set (attr "pool_range") (const_int 1018))]
6406 (define_insn "pic_add_dot_plus_four"
6407 [(set (match_operand:SI 0 "register_operand" "=r")
6408 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6410 (match_operand 2 "" "")]
6414 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6415 INTVAL (operands[2]));
6416 return \"add\\t%0, %|pc\";
6418 [(set_attr "length" "2")]
6421 (define_insn "pic_add_dot_plus_eight"
6422 [(set (match_operand:SI 0 "register_operand" "=r")
6423 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6425 (match_operand 2 "" "")]
6429 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6430 INTVAL (operands[2]));
6431 return \"add%?\\t%0, %|pc, %1\";
6433 [(set_attr "predicable" "yes")]
6436 (define_insn "tls_load_dot_plus_eight"
6437 [(set (match_operand:SI 0 "register_operand" "=r")
6438 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6440 (match_operand 2 "" "")]
6444 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6445 INTVAL (operands[2]));
6446 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6448 [(set_attr "predicable" "yes")]
6451 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6452 ;; followed by a load. These sequences can be crunched down to
6453 ;; tls_load_dot_plus_eight by a peephole.
6456 [(set (match_operand:SI 0 "register_operand" "")
6457 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6459 (match_operand 1 "" "")]
6461 (set (match_operand:SI 2 "arm_general_register_operand" "")
6462 (mem:SI (match_dup 0)))]
6463 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6465 (mem:SI (unspec:SI [(match_dup 3)
6472 (define_insn "pic_offset_arm"
6473 [(set (match_operand:SI 0 "register_operand" "=r")
6474 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6475 (unspec:SI [(match_operand:SI 2 "" "X")]
6476 UNSPEC_PIC_OFFSET))))]
6477 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6478 "ldr%?\\t%0, [%1,%2]"
6479 [(set_attr "type" "load1")]
6482 (define_expand "builtin_setjmp_receiver"
6483 [(label_ref (match_operand 0 "" ""))]
6487 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6489 if (arm_pic_register != INVALID_REGNUM)
6490 arm_load_pic_register (1UL << 3);
6494 ;; If copying one reg to another we can set the condition codes according to
6495 ;; its value. Such a move is common after a return from subroutine and the
6496 ;; result is being tested against zero.
6498 (define_insn "*movsi_compare0"
6499 [(set (reg:CC CC_REGNUM)
6500 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6502 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6508 [(set_attr "conds" "set")
6509 (set_attr "type" "simple_alu_imm,simple_alu_imm")]
6512 ;; Subroutine to store a half word from a register into memory.
6513 ;; Operand 0 is the source register (HImode)
6514 ;; Operand 1 is the destination address in a register (SImode)
6516 ;; In both this routine and the next, we must be careful not to spill
6517 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6518 ;; can generate unrecognizable rtl.
6520 (define_expand "storehi"
6521 [;; store the low byte
6522 (set (match_operand 1 "" "") (match_dup 3))
6523 ;; extract the high byte
6525 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6526 ;; store the high byte
6527 (set (match_dup 4) (match_dup 5))]
6531 rtx op1 = operands[1];
6532 rtx addr = XEXP (op1, 0);
6533 enum rtx_code code = GET_CODE (addr);
6535 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6537 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6539 operands[4] = adjust_address (op1, QImode, 1);
6540 operands[1] = adjust_address (operands[1], QImode, 0);
6541 operands[3] = gen_lowpart (QImode, operands[0]);
6542 operands[0] = gen_lowpart (SImode, operands[0]);
6543 operands[2] = gen_reg_rtx (SImode);
6544 operands[5] = gen_lowpart (QImode, operands[2]);
6548 (define_expand "storehi_bigend"
6549 [(set (match_dup 4) (match_dup 3))
6551 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6552 (set (match_operand 1 "" "") (match_dup 5))]
6556 rtx op1 = operands[1];
6557 rtx addr = XEXP (op1, 0);
6558 enum rtx_code code = GET_CODE (addr);
6560 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6562 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6564 operands[4] = adjust_address (op1, QImode, 1);
6565 operands[1] = adjust_address (operands[1], QImode, 0);
6566 operands[3] = gen_lowpart (QImode, operands[0]);
6567 operands[0] = gen_lowpart (SImode, operands[0]);
6568 operands[2] = gen_reg_rtx (SImode);
6569 operands[5] = gen_lowpart (QImode, operands[2]);
6573 ;; Subroutine to store a half word integer constant into memory.
6574 (define_expand "storeinthi"
6575 [(set (match_operand 0 "" "")
6576 (match_operand 1 "" ""))
6577 (set (match_dup 3) (match_dup 2))]
6581 HOST_WIDE_INT value = INTVAL (operands[1]);
6582 rtx addr = XEXP (operands[0], 0);
6583 rtx op0 = operands[0];
6584 enum rtx_code code = GET_CODE (addr);
6586 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6588 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6590 operands[1] = gen_reg_rtx (SImode);
6591 if (BYTES_BIG_ENDIAN)
6593 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6594 if ((value & 255) == ((value >> 8) & 255))
6595 operands[2] = operands[1];
6598 operands[2] = gen_reg_rtx (SImode);
6599 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6604 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6605 if ((value & 255) == ((value >> 8) & 255))
6606 operands[2] = operands[1];
6609 operands[2] = gen_reg_rtx (SImode);
6610 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6614 operands[3] = adjust_address (op0, QImode, 1);
6615 operands[0] = adjust_address (operands[0], QImode, 0);
6616 operands[2] = gen_lowpart (QImode, operands[2]);
6617 operands[1] = gen_lowpart (QImode, operands[1]);
6621 (define_expand "storehi_single_op"
6622 [(set (match_operand:HI 0 "memory_operand" "")
6623 (match_operand:HI 1 "general_operand" ""))]
6624 "TARGET_32BIT && arm_arch4"
6626 if (!s_register_operand (operands[1], HImode))
6627 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6631 (define_expand "movhi"
6632 [(set (match_operand:HI 0 "general_operand" "")
6633 (match_operand:HI 1 "general_operand" ""))]
6638 if (can_create_pseudo_p ())
6640 if (MEM_P (operands[0]))
6644 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6647 if (CONST_INT_P (operands[1]))
6648 emit_insn (gen_storeinthi (operands[0], operands[1]));
6651 if (MEM_P (operands[1]))
6652 operands[1] = force_reg (HImode, operands[1]);
6653 if (BYTES_BIG_ENDIAN)
6654 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6656 emit_insn (gen_storehi (operands[1], operands[0]));
6660 /* Sign extend a constant, and keep it in an SImode reg. */
6661 else if (CONST_INT_P (operands[1]))
6663 rtx reg = gen_reg_rtx (SImode);
6664 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6666 /* If the constant is already valid, leave it alone. */
6667 if (!const_ok_for_arm (val))
6669 /* If setting all the top bits will make the constant
6670 loadable in a single instruction, then set them.
6671 Otherwise, sign extend the number. */
6673 if (const_ok_for_arm (~(val | ~0xffff)))
6675 else if (val & 0x8000)
6679 emit_insn (gen_movsi (reg, GEN_INT (val)));
6680 operands[1] = gen_lowpart (HImode, reg);
6682 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6683 && MEM_P (operands[1]))
6685 rtx reg = gen_reg_rtx (SImode);
6687 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6688 operands[1] = gen_lowpart (HImode, reg);
6690 else if (!arm_arch4)
6692 if (MEM_P (operands[1]))
6695 rtx offset = const0_rtx;
6696 rtx reg = gen_reg_rtx (SImode);
6698 if ((REG_P (base = XEXP (operands[1], 0))
6699 || (GET_CODE (base) == PLUS
6700 && (CONST_INT_P (offset = XEXP (base, 1)))
6701 && ((INTVAL(offset) & 1) != 1)
6702 && REG_P (base = XEXP (base, 0))))
6703 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6707 new_rtx = widen_memory_access (operands[1], SImode,
6708 ((INTVAL (offset) & ~3)
6709 - INTVAL (offset)));
6710 emit_insn (gen_movsi (reg, new_rtx));
6711 if (((INTVAL (offset) & 2) != 0)
6712 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6714 rtx reg2 = gen_reg_rtx (SImode);
6716 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6721 emit_insn (gen_movhi_bytes (reg, operands[1]));
6723 operands[1] = gen_lowpart (HImode, reg);
6727 /* Handle loading a large integer during reload. */
6728 else if (CONST_INT_P (operands[1])
6729 && !const_ok_for_arm (INTVAL (operands[1]))
6730 && !const_ok_for_arm (~INTVAL (operands[1])))
6732 /* Writing a constant to memory needs a scratch, which should
6733 be handled with SECONDARY_RELOADs. */
6734 gcc_assert (REG_P (operands[0]));
6736 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6737 emit_insn (gen_movsi (operands[0], operands[1]));
6741 else if (TARGET_THUMB2)
6743 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6744 if (can_create_pseudo_p ())
6746 if (!REG_P (operands[0]))
6747 operands[1] = force_reg (HImode, operands[1]);
6748 /* Zero extend a constant, and keep it in an SImode reg. */
6749 else if (CONST_INT_P (operands[1]))
6751 rtx reg = gen_reg_rtx (SImode);
6752 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6754 emit_insn (gen_movsi (reg, GEN_INT (val)));
6755 operands[1] = gen_lowpart (HImode, reg);
6759 else /* TARGET_THUMB1 */
6761 if (can_create_pseudo_p ())
6763 if (CONST_INT_P (operands[1]))
6765 rtx reg = gen_reg_rtx (SImode);
6767 emit_insn (gen_movsi (reg, operands[1]));
6768 operands[1] = gen_lowpart (HImode, reg);
6771 /* ??? We shouldn't really get invalid addresses here, but this can
6772 happen if we are passed a SP (never OK for HImode/QImode) or
6773 virtual register (also rejected as illegitimate for HImode/QImode)
6774 relative address. */
6775 /* ??? This should perhaps be fixed elsewhere, for instance, in
6776 fixup_stack_1, by checking for other kinds of invalid addresses,
6777 e.g. a bare reference to a virtual register. This may confuse the
6778 alpha though, which must handle this case differently. */
6779 if (MEM_P (operands[0])
6780 && !memory_address_p (GET_MODE (operands[0]),
6781 XEXP (operands[0], 0)))
6783 = replace_equiv_address (operands[0],
6784 copy_to_reg (XEXP (operands[0], 0)));
6786 if (MEM_P (operands[1])
6787 && !memory_address_p (GET_MODE (operands[1]),
6788 XEXP (operands[1], 0)))
6790 = replace_equiv_address (operands[1],
6791 copy_to_reg (XEXP (operands[1], 0)));
6793 if (MEM_P (operands[1]) && optimize > 0)
6795 rtx reg = gen_reg_rtx (SImode);
6797 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6798 operands[1] = gen_lowpart (HImode, reg);
6801 if (MEM_P (operands[0]))
6802 operands[1] = force_reg (HImode, operands[1]);
6804 else if (CONST_INT_P (operands[1])
6805 && !satisfies_constraint_I (operands[1]))
6807 /* Handle loading a large integer during reload. */
6809 /* Writing a constant to memory needs a scratch, which should
6810 be handled with SECONDARY_RELOADs. */
6811 gcc_assert (REG_P (operands[0]));
6813 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6814 emit_insn (gen_movsi (operands[0], operands[1]));
6821 (define_insn "*thumb1_movhi_insn"
6822 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6823 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
6825 && ( register_operand (operands[0], HImode)
6826 || register_operand (operands[1], HImode))"
6828 switch (which_alternative)
6830 case 0: return \"add %0, %1, #0\";
6831 case 2: return \"strh %1, %0\";
6832 case 3: return \"mov %0, %1\";
6833 case 4: return \"mov %0, %1\";
6834 case 5: return \"mov %0, %1\";
6835 default: gcc_unreachable ();
6837 /* The stack pointer can end up being taken as an index register.
6838 Catch this case here and deal with it. */
6839 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6840 && REG_P (XEXP (XEXP (operands[1], 0), 0))
6841 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6844 ops[0] = operands[0];
6845 ops[1] = XEXP (XEXP (operands[1], 0), 0);
6847 output_asm_insn (\"mov %0, %1\", ops);
6849 XEXP (XEXP (operands[1], 0), 0) = operands[0];
6852 return \"ldrh %0, %1\";
6854 [(set_attr "length" "2,4,2,2,2,2")
6855 (set_attr "type" "*,load1,store1,*,*,*")
6856 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6859 (define_expand "movhi_bytes"
6860 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6862 (zero_extend:SI (match_dup 6)))
6863 (set (match_operand:SI 0 "" "")
6864 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6869 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6871 mem1 = change_address (operands[1], QImode, addr);
6872 mem2 = change_address (operands[1], QImode,
6873 plus_constant (Pmode, addr, 1));
6874 operands[0] = gen_lowpart (SImode, operands[0]);
6876 operands[2] = gen_reg_rtx (SImode);
6877 operands[3] = gen_reg_rtx (SImode);
6880 if (BYTES_BIG_ENDIAN)
6882 operands[4] = operands[2];
6883 operands[5] = operands[3];
6887 operands[4] = operands[3];
6888 operands[5] = operands[2];
6893 (define_expand "movhi_bigend"
6895 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6898 (ashiftrt:SI (match_dup 2) (const_int 16)))
6899 (set (match_operand:HI 0 "s_register_operand" "")
6903 operands[2] = gen_reg_rtx (SImode);
6904 operands[3] = gen_reg_rtx (SImode);
6905 operands[4] = gen_lowpart (HImode, operands[3]);
6909 ;; Pattern to recognize insn generated default case above
6910 (define_insn "*movhi_insn_arch4"
6911 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
6912 (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
6915 && (register_operand (operands[0], HImode)
6916 || register_operand (operands[1], HImode))"
6918 mov%?\\t%0, %1\\t%@ movhi
6919 mvn%?\\t%0, #%B1\\t%@ movhi
6920 str%(h%)\\t%1, %0\\t%@ movhi
6921 ldr%(h%)\\t%0, %1\\t%@ movhi"
6922 [(set_attr "predicable" "yes")
6923 (set_attr "insn" "mov,mvn,*,*")
6924 (set_attr "pool_range" "*,*,*,256")
6925 (set_attr "neg_pool_range" "*,*,*,244")
6926 (set_attr_alternative "type"
6927 [(if_then_else (match_operand 1 "const_int_operand" "")
6928 (const_string "simple_alu_imm" )
6930 (const_string "simple_alu_imm")
6931 (const_string "store1")
6932 (const_string "load1")])]
6935 (define_insn "*movhi_bytes"
6936 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6937 (match_operand:HI 1 "arm_rhs_operand" "I,r,K"))]
6940 mov%?\\t%0, %1\\t%@ movhi
6941 mov%?\\t%0, %1\\t%@ movhi
6942 mvn%?\\t%0, #%B1\\t%@ movhi"
6943 [(set_attr "predicable" "yes")
6944 (set_attr "insn" "mov, mov,mvn")
6945 (set_attr "type" "simple_alu_imm,*,simple_alu_imm")]
6948 (define_expand "thumb_movhi_clobber"
6949 [(set (match_operand:HI 0 "memory_operand" "")
6950 (match_operand:HI 1 "register_operand" ""))
6951 (clobber (match_operand:DI 2 "register_operand" ""))]
6954 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6955 && REGNO (operands[1]) <= LAST_LO_REGNUM)
6957 emit_insn (gen_movhi (operands[0], operands[1]));
6960 /* XXX Fixme, need to handle other cases here as well. */
6965 ;; We use a DImode scratch because we may occasionally need an additional
6966 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6967 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6968 (define_expand "reload_outhi"
6969 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6970 (match_operand:HI 1 "s_register_operand" "r")
6971 (match_operand:DI 2 "s_register_operand" "=&l")])]
6974 arm_reload_out_hi (operands);
6976 thumb_reload_out_hi (operands);
6981 (define_expand "reload_inhi"
6982 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6983 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6984 (match_operand:DI 2 "s_register_operand" "=&r")])]
6988 arm_reload_in_hi (operands);
6990 thumb_reload_out_hi (operands);
6994 (define_expand "movqi"
6995 [(set (match_operand:QI 0 "general_operand" "")
6996 (match_operand:QI 1 "general_operand" ""))]
6999 /* Everything except mem = const or mem = mem can be done easily */
7001 if (can_create_pseudo_p ())
7003 if (CONST_INT_P (operands[1]))
7005 rtx reg = gen_reg_rtx (SImode);
7007 /* For thumb we want an unsigned immediate, then we are more likely
7008 to be able to use a movs insn. */
7010 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7012 emit_insn (gen_movsi (reg, operands[1]));
7013 operands[1] = gen_lowpart (QImode, reg);
7018 /* ??? We shouldn't really get invalid addresses here, but this can
7019 happen if we are passed a SP (never OK for HImode/QImode) or
7020 virtual register (also rejected as illegitimate for HImode/QImode)
7021 relative address. */
7022 /* ??? This should perhaps be fixed elsewhere, for instance, in
7023 fixup_stack_1, by checking for other kinds of invalid addresses,
7024 e.g. a bare reference to a virtual register. This may confuse the
7025 alpha though, which must handle this case differently. */
7026 if (MEM_P (operands[0])
7027 && !memory_address_p (GET_MODE (operands[0]),
7028 XEXP (operands[0], 0)))
7030 = replace_equiv_address (operands[0],
7031 copy_to_reg (XEXP (operands[0], 0)));
7032 if (MEM_P (operands[1])
7033 && !memory_address_p (GET_MODE (operands[1]),
7034 XEXP (operands[1], 0)))
7036 = replace_equiv_address (operands[1],
7037 copy_to_reg (XEXP (operands[1], 0)));
7040 if (MEM_P (operands[1]) && optimize > 0)
7042 rtx reg = gen_reg_rtx (SImode);
7044 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7045 operands[1] = gen_lowpart (QImode, reg);
7048 if (MEM_P (operands[0]))
7049 operands[1] = force_reg (QImode, operands[1]);
7051 else if (TARGET_THUMB
7052 && CONST_INT_P (operands[1])
7053 && !satisfies_constraint_I (operands[1]))
7055 /* Handle loading a large integer during reload. */
7057 /* Writing a constant to memory needs a scratch, which should
7058 be handled with SECONDARY_RELOADs. */
7059 gcc_assert (REG_P (operands[0]));
7061 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7062 emit_insn (gen_movsi (operands[0], operands[1]));
7069 (define_insn "*arm_movqi_insn"
7070 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,Uu,r,m")
7071 (match_operand:QI 1 "general_operand" "r,I,K,Uu,l,m,r"))]
7073 && ( register_operand (operands[0], QImode)
7074 || register_operand (operands[1], QImode))"
7083 [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,load1, store1, load1, store1")
7084 (set_attr "insn" "mov,mov,mvn,*,*,*,*")
7085 (set_attr "predicable" "yes")
7086 (set_attr "arch" "any,any,any,t2,t2,any,any")
7087 (set_attr "length" "4,4,4,2,2,4,4")]
7090 (define_insn "*thumb1_movqi_insn"
7091 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
7092 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
7094 && ( register_operand (operands[0], QImode)
7095 || register_operand (operands[1], QImode))"
7103 [(set_attr "length" "2")
7104 (set_attr "type" "simple_alu_imm,load1,store1,*,*,simple_alu_imm")
7105 (set_attr "insn" "*,*,*,mov,mov,mov")
7106 (set_attr "pool_range" "*,32,*,*,*,*")
7107 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
7110 (define_expand "movhf"
7111 [(set (match_operand:HF 0 "general_operand" "")
7112 (match_operand:HF 1 "general_operand" ""))]
7117 if (MEM_P (operands[0]))
7118 operands[1] = force_reg (HFmode, operands[1]);
7120 else /* TARGET_THUMB1 */
7122 if (can_create_pseudo_p ())
7124 if (!REG_P (operands[0]))
7125 operands[1] = force_reg (HFmode, operands[1]);
7131 (define_insn "*arm32_movhf"
7132 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
7133 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
7134 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
7135 && ( s_register_operand (operands[0], HFmode)
7136 || s_register_operand (operands[1], HFmode))"
7138 switch (which_alternative)
7140 case 0: /* ARM register from memory */
7141 return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
7142 case 1: /* memory from ARM register */
7143 return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
7144 case 2: /* ARM register from ARM register */
7145 return \"mov%?\\t%0, %1\\t%@ __fp16\";
7146 case 3: /* ARM register from constant */
7152 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7153 bits = real_to_target (NULL, &r, HFmode);
7154 ops[0] = operands[0];
7155 ops[1] = GEN_INT (bits);
7156 ops[2] = GEN_INT (bits & 0xff00);
7157 ops[3] = GEN_INT (bits & 0x00ff);
7159 if (arm_arch_thumb2)
7160 output_asm_insn (\"movw%?\\t%0, %1\", ops);
7162 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7169 [(set_attr "conds" "unconditional")
7170 (set_attr "type" "load1,store1,*,*")
7171 (set_attr "insn" "*,*,mov,mov")
7172 (set_attr "length" "4,4,4,8")
7173 (set_attr "predicable" "yes")]
7176 (define_insn "*thumb1_movhf"
7177 [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
7178 (match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
7180 && ( s_register_operand (operands[0], HFmode)
7181 || s_register_operand (operands[1], HFmode))"
7183 switch (which_alternative)
7188 gcc_assert (MEM_P (operands[1]));
7189 addr = XEXP (operands[1], 0);
7190 if (GET_CODE (addr) == LABEL_REF
7191 || (GET_CODE (addr) == CONST
7192 && GET_CODE (XEXP (addr, 0)) == PLUS
7193 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
7194 && CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
7196 /* Constant pool entry. */
7197 return \"ldr\\t%0, %1\";
7199 return \"ldrh\\t%0, %1\";
7201 case 2: return \"strh\\t%1, %0\";
7202 default: return \"mov\\t%0, %1\";
7205 [(set_attr "length" "2")
7206 (set_attr "type" "*,load1,store1,*,*")
7207 (set_attr "insn" "mov,*,*,mov,mov")
7208 (set_attr "pool_range" "*,1018,*,*,*")
7209 (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
7211 (define_expand "movsf"
7212 [(set (match_operand:SF 0 "general_operand" "")
7213 (match_operand:SF 1 "general_operand" ""))]
7218 if (MEM_P (operands[0]))
7219 operands[1] = force_reg (SFmode, operands[1]);
7221 else /* TARGET_THUMB1 */
7223 if (can_create_pseudo_p ())
7225 if (!REG_P (operands[0]))
7226 operands[1] = force_reg (SFmode, operands[1]);
7232 ;; Transform a floating-point move of a constant into a core register into
7233 ;; an SImode operation.
7235 [(set (match_operand:SF 0 "arm_general_register_operand" "")
7236 (match_operand:SF 1 "immediate_operand" ""))]
7239 && CONST_DOUBLE_P (operands[1])"
7240 [(set (match_dup 2) (match_dup 3))]
7242 operands[2] = gen_lowpart (SImode, operands[0]);
7243 operands[3] = gen_lowpart (SImode, operands[1]);
7244 if (operands[2] == 0 || operands[3] == 0)
7249 (define_insn "*arm_movsf_soft_insn"
7250 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7251 (match_operand:SF 1 "general_operand" "r,mE,r"))]
7253 && TARGET_SOFT_FLOAT
7254 && (!MEM_P (operands[0])
7255 || register_operand (operands[1], SFmode))"
7258 ldr%?\\t%0, %1\\t%@ float
7259 str%?\\t%1, %0\\t%@ float"
7260 [(set_attr "predicable" "yes")
7261 (set_attr "type" "*,load1,store1")
7262 (set_attr "insn" "mov,*,*")
7263 (set_attr "arm_pool_range" "*,4096,*")
7264 (set_attr "thumb2_pool_range" "*,4094,*")
7265 (set_attr "arm_neg_pool_range" "*,4084,*")
7266 (set_attr "thumb2_neg_pool_range" "*,0,*")]
7269 ;;; ??? This should have alternatives for constants.
7270 (define_insn "*thumb1_movsf_insn"
7271 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
7272 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
7274 && ( register_operand (operands[0], SFmode)
7275 || register_operand (operands[1], SFmode))"
7284 [(set_attr "length" "2")
7285 (set_attr "type" "*,load1,store1,load1,store1,*,*")
7286 (set_attr "pool_range" "*,*,*,1018,*,*,*")
7287 (set_attr "insn" "*,*,*,*,*,mov,mov")
7288 (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
7291 (define_expand "movdf"
7292 [(set (match_operand:DF 0 "general_operand" "")
7293 (match_operand:DF 1 "general_operand" ""))]
7298 if (MEM_P (operands[0]))
7299 operands[1] = force_reg (DFmode, operands[1]);
7301 else /* TARGET_THUMB */
7303 if (can_create_pseudo_p ())
7305 if (!REG_P (operands[0]))
7306 operands[1] = force_reg (DFmode, operands[1]);
7312 ;; Reloading a df mode value stored in integer regs to memory can require a
7314 (define_expand "reload_outdf"
7315 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7316 (match_operand:DF 1 "s_register_operand" "r")
7317 (match_operand:SI 2 "s_register_operand" "=&r")]
7321 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7324 operands[2] = XEXP (operands[0], 0);
7325 else if (code == POST_INC || code == PRE_DEC)
7327 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7328 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7329 emit_insn (gen_movdi (operands[0], operands[1]));
7332 else if (code == PRE_INC)
7334 rtx reg = XEXP (XEXP (operands[0], 0), 0);
7336 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7339 else if (code == POST_DEC)
7340 operands[2] = XEXP (XEXP (operands[0], 0), 0);
7342 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7343 XEXP (XEXP (operands[0], 0), 1)));
7345 emit_insn (gen_rtx_SET (VOIDmode,
7346 replace_equiv_address (operands[0], operands[2]),
7349 if (code == POST_DEC)
7350 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7356 (define_insn "*movdf_soft_insn"
7357 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,q,m")
7358 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,q"))]
7359 "TARGET_32BIT && TARGET_SOFT_FLOAT
7360 && ( register_operand (operands[0], DFmode)
7361 || register_operand (operands[1], DFmode))"
7363 switch (which_alternative)
7370 return output_move_double (operands, true, NULL);
7373 [(set_attr "length" "8,12,16,8,8")
7374 (set_attr "type" "*,*,*,load2,store2")
7375 (set_attr "arm_pool_range" "*,*,*,1020,*")
7376 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7377 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7378 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7381 ;;; ??? This should have alternatives for constants.
7382 ;;; ??? This was originally identical to the movdi_insn pattern.
7383 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
7384 ;;; thumb_reorg with a memory reference.
7385 (define_insn "*thumb_movdf_insn"
7386 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
7387 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
7389 && ( register_operand (operands[0], DFmode)
7390 || register_operand (operands[1], DFmode))"
7392 switch (which_alternative)
7396 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7397 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
7398 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
7400 return \"ldmia\\t%1, {%0, %H0}\";
7402 return \"stmia\\t%0, {%1, %H1}\";
7404 return thumb_load_double_from_address (operands);
7406 operands[2] = gen_rtx_MEM (SImode,
7407 plus_constant (Pmode,
7408 XEXP (operands[0], 0), 4));
7409 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
7412 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
7413 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
7414 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
7417 [(set_attr "length" "4,2,2,6,4,4")
7418 (set_attr "type" "*,load2,store2,load2,store2,*")
7419 (set_attr "insn" "*,*,*,*,*,mov")
7420 (set_attr "pool_range" "*,*,*,1018,*,*")]
7424 ;; load- and store-multiple insns
7425 ;; The arm can load/store any set of registers, provided that they are in
7426 ;; ascending order, but these expanders assume a contiguous set.
7428 (define_expand "load_multiple"
7429 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7430 (match_operand:SI 1 "" ""))
7431 (use (match_operand:SI 2 "" ""))])]
7434 HOST_WIDE_INT offset = 0;
7436 /* Support only fixed point registers. */
7437 if (!CONST_INT_P (operands[2])
7438 || INTVAL (operands[2]) > 14
7439 || INTVAL (operands[2]) < 2
7440 || !MEM_P (operands[1])
7441 || !REG_P (operands[0])
7442 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7443 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7447 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7448 INTVAL (operands[2]),
7449 force_reg (SImode, XEXP (operands[1], 0)),
7450 FALSE, operands[1], &offset);
7453 (define_expand "store_multiple"
7454 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7455 (match_operand:SI 1 "" ""))
7456 (use (match_operand:SI 2 "" ""))])]
7459 HOST_WIDE_INT offset = 0;
7461 /* Support only fixed point registers. */
7462 if (!CONST_INT_P (operands[2])
7463 || INTVAL (operands[2]) > 14
7464 || INTVAL (operands[2]) < 2
7465 || !REG_P (operands[1])
7466 || !MEM_P (operands[0])
7467 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7468 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7472 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7473 INTVAL (operands[2]),
7474 force_reg (SImode, XEXP (operands[0], 0)),
7475 FALSE, operands[0], &offset);
7479 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7480 ;; We could let this apply for blocks of less than this, but it clobbers so
7481 ;; many registers that there is then probably a better way.
7483 (define_expand "movmemqi"
7484 [(match_operand:BLK 0 "general_operand" "")
7485 (match_operand:BLK 1 "general_operand" "")
7486 (match_operand:SI 2 "const_int_operand" "")
7487 (match_operand:SI 3 "const_int_operand" "")]
7492 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7493 && !optimize_function_for_size_p (cfun))
7495 if (gen_movmem_ldrd_strd (operands))
7500 if (arm_gen_movmemqi (operands))
7504 else /* TARGET_THUMB1 */
7506 if ( INTVAL (operands[3]) != 4
7507 || INTVAL (operands[2]) > 48)
7510 thumb_expand_movmemqi (operands);
7516 ;; Thumb block-move insns
7518 (define_insn "movmem12b"
7519 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7520 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7521 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7522 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7523 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
7524 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
7525 (set (match_operand:SI 0 "register_operand" "=l")
7526 (plus:SI (match_dup 2) (const_int 12)))
7527 (set (match_operand:SI 1 "register_operand" "=l")
7528 (plus:SI (match_dup 3) (const_int 12)))
7529 (clobber (match_scratch:SI 4 "=&l"))
7530 (clobber (match_scratch:SI 5 "=&l"))
7531 (clobber (match_scratch:SI 6 "=&l"))]
7533 "* return thumb_output_move_mem_multiple (3, operands);"
7534 [(set_attr "length" "4")
7535 ; This isn't entirely accurate... It loads as well, but in terms of
7536 ; scheduling the following insn it is better to consider it as a store
7537 (set_attr "type" "store3")]
7540 (define_insn "movmem8b"
7541 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
7542 (mem:SI (match_operand:SI 3 "register_operand" "1")))
7543 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
7544 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
7545 (set (match_operand:SI 0 "register_operand" "=l")
7546 (plus:SI (match_dup 2) (const_int 8)))
7547 (set (match_operand:SI 1 "register_operand" "=l")
7548 (plus:SI (match_dup 3) (const_int 8)))
7549 (clobber (match_scratch:SI 4 "=&l"))
7550 (clobber (match_scratch:SI 5 "=&l"))]
7552 "* return thumb_output_move_mem_multiple (2, operands);"
7553 [(set_attr "length" "4")
7554 ; This isn't entirely accurate... It loads as well, but in terms of
7555 ; scheduling the following insn it is better to consider it as a store
7556 (set_attr "type" "store2")]
7561 ;; Compare & branch insns
7562 ;; The range calculations are based as follows:
7563 ;; For forward branches, the address calculation returns the address of
7564 ;; the next instruction. This is 2 beyond the branch instruction.
7565 ;; For backward branches, the address calculation returns the address of
7566 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7567 ;; instruction for the shortest sequence, and 4 before the branch instruction
7568 ;; if we have to jump around an unconditional branch.
7569 ;; To the basic branch range the PC offset must be added (this is +4).
7570 ;; So for forward branches we have
7571 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7572 ;; And for backward branches we have
7573 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7575 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7576 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7578 (define_expand "cbranchsi4"
7579 [(set (pc) (if_then_else
7580 (match_operator 0 "expandable_comparison_operator"
7581 [(match_operand:SI 1 "s_register_operand" "")
7582 (match_operand:SI 2 "nonmemory_operand" "")])
7583 (label_ref (match_operand 3 "" ""))
7589 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7591 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7595 if (thumb1_cmpneg_operand (operands[2], SImode))
7597 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7598 operands[3], operands[0]));
7601 if (!thumb1_cmp_operand (operands[2], SImode))
7602 operands[2] = force_reg (SImode, operands[2]);
7605 ;; A pattern to recognize a special situation and optimize for it.
7606 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
7607 ;; due to the available addressing modes. Hence, convert a signed comparison
7608 ;; with zero into an unsigned comparison with 127 if possible.
7609 (define_expand "cbranchqi4"
7610 [(set (pc) (if_then_else
7611 (match_operator 0 "lt_ge_comparison_operator"
7612 [(match_operand:QI 1 "memory_operand" "")
7613 (match_operand:QI 2 "const0_operand" "")])
7614 (label_ref (match_operand 3 "" ""))
7619 xops[1] = gen_reg_rtx (SImode);
7620 emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
7621 xops[2] = GEN_INT (127);
7622 xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
7623 VOIDmode, xops[1], xops[2]);
7624 xops[3] = operands[3];
7625 emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
7629 (define_expand "cbranchsf4"
7630 [(set (pc) (if_then_else
7631 (match_operator 0 "expandable_comparison_operator"
7632 [(match_operand:SF 1 "s_register_operand" "")
7633 (match_operand:SF 2 "arm_float_compare_operand" "")])
7634 (label_ref (match_operand 3 "" ""))
7636 "TARGET_32BIT && TARGET_HARD_FLOAT"
7637 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7638 operands[3])); DONE;"
7641 (define_expand "cbranchdf4"
7642 [(set (pc) (if_then_else
7643 (match_operator 0 "expandable_comparison_operator"
7644 [(match_operand:DF 1 "s_register_operand" "")
7645 (match_operand:DF 2 "arm_float_compare_operand" "")])
7646 (label_ref (match_operand 3 "" ""))
7648 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7649 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7650 operands[3])); DONE;"
7653 (define_expand "cbranchdi4"
7654 [(set (pc) (if_then_else
7655 (match_operator 0 "expandable_comparison_operator"
7656 [(match_operand:DI 1 "s_register_operand" "")
7657 (match_operand:DI 2 "cmpdi_operand" "")])
7658 (label_ref (match_operand 3 "" ""))
7662 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7664 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7670 (define_insn "cbranchsi4_insn"
7671 [(set (pc) (if_then_else
7672 (match_operator 0 "arm_comparison_operator"
7673 [(match_operand:SI 1 "s_register_operand" "l,l*h")
7674 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
7675 (label_ref (match_operand 3 "" ""))
7679 rtx t = cfun->machine->thumb1_cc_insn;
7682 if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
7683 || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
7685 if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
7687 if (!noov_comparison_operator (operands[0], VOIDmode))
7690 else if (cfun->machine->thumb1_cc_mode != CCmode)
7695 output_asm_insn ("cmp\t%1, %2", operands);
7696 cfun->machine->thumb1_cc_insn = insn;
7697 cfun->machine->thumb1_cc_op0 = operands[1];
7698 cfun->machine->thumb1_cc_op1 = operands[2];
7699 cfun->machine->thumb1_cc_mode = CCmode;
7702 /* Ensure we emit the right type of condition code on the jump. */
7703 XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
7706 switch (get_attr_length (insn))
7708 case 4: return \"b%d0\\t%l3\";
7709 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7710 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7713 [(set (attr "far_jump")
7715 (eq_attr "length" "8")
7716 (const_string "yes")
7717 (const_string "no")))
7718 (set (attr "length")
7720 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7721 (le (minus (match_dup 3) (pc)) (const_int 256)))
7724 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7725 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7730 (define_insn "cbranchsi4_scratch"
7731 [(set (pc) (if_then_else
7732 (match_operator 4 "arm_comparison_operator"
7733 [(match_operand:SI 1 "s_register_operand" "l,0")
7734 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7735 (label_ref (match_operand 3 "" ""))
7737 (clobber (match_scratch:SI 0 "=l,l"))]
7740 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7742 switch (get_attr_length (insn))
7744 case 4: return \"b%d4\\t%l3\";
7745 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7746 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7749 [(set (attr "far_jump")
7751 (eq_attr "length" "8")
7752 (const_string "yes")
7753 (const_string "no")))
7754 (set (attr "length")
7756 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7757 (le (minus (match_dup 3) (pc)) (const_int 256)))
7760 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7761 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7766 (define_insn "*negated_cbranchsi4"
7769 (match_operator 0 "equality_operator"
7770 [(match_operand:SI 1 "s_register_operand" "l")
7771 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7772 (label_ref (match_operand 3 "" ""))
7776 output_asm_insn (\"cmn\\t%1, %2\", operands);
7777 switch (get_attr_length (insn))
7779 case 4: return \"b%d0\\t%l3\";
7780 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7781 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7784 [(set (attr "far_jump")
7786 (eq_attr "length" "8")
7787 (const_string "yes")
7788 (const_string "no")))
7789 (set (attr "length")
7791 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7792 (le (minus (match_dup 3) (pc)) (const_int 256)))
7795 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7796 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7801 (define_insn "*tbit_cbranch"
7804 (match_operator 0 "equality_operator"
7805 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7807 (match_operand:SI 2 "const_int_operand" "i"))
7809 (label_ref (match_operand 3 "" ""))
7811 (clobber (match_scratch:SI 4 "=l"))]
7816 op[0] = operands[4];
7817 op[1] = operands[1];
7818 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7820 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7821 switch (get_attr_length (insn))
7823 case 4: return \"b%d0\\t%l3\";
7824 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7825 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7828 [(set (attr "far_jump")
7830 (eq_attr "length" "8")
7831 (const_string "yes")
7832 (const_string "no")))
7833 (set (attr "length")
7835 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7836 (le (minus (match_dup 3) (pc)) (const_int 256)))
7839 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7840 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7845 (define_insn "*tlobits_cbranch"
7848 (match_operator 0 "equality_operator"
7849 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7850 (match_operand:SI 2 "const_int_operand" "i")
7853 (label_ref (match_operand 3 "" ""))
7855 (clobber (match_scratch:SI 4 "=l"))]
7860 op[0] = operands[4];
7861 op[1] = operands[1];
7862 op[2] = GEN_INT (32 - INTVAL (operands[2]));
7864 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7865 switch (get_attr_length (insn))
7867 case 4: return \"b%d0\\t%l3\";
7868 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7869 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7872 [(set (attr "far_jump")
7874 (eq_attr "length" "8")
7875 (const_string "yes")
7876 (const_string "no")))
7877 (set (attr "length")
7879 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7880 (le (minus (match_dup 3) (pc)) (const_int 256)))
7883 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7884 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7889 (define_insn "*tstsi3_cbranch"
7892 (match_operator 3 "equality_operator"
7893 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7894 (match_operand:SI 1 "s_register_operand" "l"))
7896 (label_ref (match_operand 2 "" ""))
7901 output_asm_insn (\"tst\\t%0, %1\", operands);
7902 switch (get_attr_length (insn))
7904 case 4: return \"b%d3\\t%l2\";
7905 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7906 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7909 [(set (attr "far_jump")
7911 (eq_attr "length" "8")
7912 (const_string "yes")
7913 (const_string "no")))
7914 (set (attr "length")
7916 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7917 (le (minus (match_dup 2) (pc)) (const_int 256)))
7920 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7921 (le (minus (match_dup 2) (pc)) (const_int 2048)))
7926 (define_insn "*cbranchne_decr1"
7928 (if_then_else (match_operator 3 "equality_operator"
7929 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7931 (label_ref (match_operand 4 "" ""))
7933 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7934 (plus:SI (match_dup 2) (const_int -1)))
7935 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7940 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7942 VOIDmode, operands[2], const1_rtx);
7943 cond[1] = operands[4];
7945 if (which_alternative == 0)
7946 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7947 else if (which_alternative == 1)
7949 /* We must provide an alternative for a hi reg because reload
7950 cannot handle output reloads on a jump instruction, but we
7951 can't subtract into that. Fortunately a mov from lo to hi
7952 does not clobber the condition codes. */
7953 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7954 output_asm_insn (\"mov\\t%0, %1\", operands);
7958 /* Similarly, but the target is memory. */
7959 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7960 output_asm_insn (\"str\\t%1, %0\", operands);
7963 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7966 output_asm_insn (\"b%d0\\t%l1\", cond);
7969 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7970 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7972 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7973 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7977 [(set (attr "far_jump")
7979 (ior (and (eq (symbol_ref ("which_alternative"))
7981 (eq_attr "length" "8"))
7982 (eq_attr "length" "10"))
7983 (const_string "yes")
7984 (const_string "no")))
7985 (set_attr_alternative "length"
7989 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7990 (le (minus (match_dup 4) (pc)) (const_int 256)))
7993 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7994 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7999 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8000 (le (minus (match_dup 4) (pc)) (const_int 256)))
8003 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8004 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8009 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8010 (le (minus (match_dup 4) (pc)) (const_int 256)))
8013 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8014 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8019 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
8020 (le (minus (match_dup 4) (pc)) (const_int 256)))
8023 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
8024 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8029 (define_insn "*addsi3_cbranch"
8032 (match_operator 4 "arm_comparison_operator"
8034 (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
8035 (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
8037 (label_ref (match_operand 5 "" ""))
8040 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
8041 (plus:SI (match_dup 2) (match_dup 3)))
8042 (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
8044 && (GET_CODE (operands[4]) == EQ
8045 || GET_CODE (operands[4]) == NE
8046 || GET_CODE (operands[4]) == GE
8047 || GET_CODE (operands[4]) == LT)"
8052 cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
8053 cond[1] = operands[2];
8054 cond[2] = operands[3];
8056 if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
8057 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
8059 output_asm_insn (\"add\\t%0, %1, %2\", cond);
8061 if (which_alternative >= 2
8062 && which_alternative < 4)
8063 output_asm_insn (\"mov\\t%0, %1\", operands);
8064 else if (which_alternative >= 4)
8065 output_asm_insn (\"str\\t%1, %0\", operands);
8067 switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
8070 return \"b%d4\\t%l5\";
8072 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
8074 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
8078 [(set (attr "far_jump")
8080 (ior (and (lt (symbol_ref ("which_alternative"))
8082 (eq_attr "length" "8"))
8083 (eq_attr "length" "10"))
8084 (const_string "yes")
8085 (const_string "no")))
8086 (set (attr "length")
8088 (lt (symbol_ref ("which_alternative"))
8091 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
8092 (le (minus (match_dup 5) (pc)) (const_int 256)))
8095 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
8096 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8100 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
8101 (le (minus (match_dup 5) (pc)) (const_int 256)))
8104 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
8105 (le (minus (match_dup 5) (pc)) (const_int 2048)))
8110 (define_insn "*addsi3_cbranch_scratch"
8113 (match_operator 3 "arm_comparison_operator"
8115 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
8116 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
8118 (label_ref (match_operand 4 "" ""))
8120 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
8122 && (GET_CODE (operands[3]) == EQ
8123 || GET_CODE (operands[3]) == NE
8124 || GET_CODE (operands[3]) == GE
8125 || GET_CODE (operands[3]) == LT)"
8128 switch (which_alternative)
8131 output_asm_insn (\"cmp\t%1, #%n2\", operands);
8134 output_asm_insn (\"cmn\t%1, %2\", operands);
8137 if (INTVAL (operands[2]) < 0)
8138 output_asm_insn (\"sub\t%0, %1, %2\", operands);
8140 output_asm_insn (\"add\t%0, %1, %2\", operands);
8143 if (INTVAL (operands[2]) < 0)
8144 output_asm_insn (\"sub\t%0, %0, %2\", operands);
8146 output_asm_insn (\"add\t%0, %0, %2\", operands);
8150 switch (get_attr_length (insn))
8153 return \"b%d3\\t%l4\";
8155 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
8157 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
8161 [(set (attr "far_jump")
8163 (eq_attr "length" "8")
8164 (const_string "yes")
8165 (const_string "no")))
8166 (set (attr "length")
8168 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
8169 (le (minus (match_dup 4) (pc)) (const_int 256)))
8172 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
8173 (le (minus (match_dup 4) (pc)) (const_int 2048)))
8179 ;; Comparison and test insns
8181 (define_insn "*arm_cmpsi_insn"
8182 [(set (reg:CC CC_REGNUM)
8183 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
8184 (match_operand:SI 1 "arm_add_operand" "Py,r,rI,L")))]
8191 [(set_attr "conds" "set")
8192 (set_attr "arch" "t2,t2,any,any")
8193 (set_attr "length" "2,2,4,4")
8194 (set_attr "predicable" "yes")
8195 (set_attr "type" "*,*,*,simple_alu_imm")]
8198 (define_insn "*cmpsi_shiftsi"
8199 [(set (reg:CC CC_REGNUM)
8200 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
8201 (match_operator:SI 3 "shift_operator"
8202 [(match_operand:SI 1 "s_register_operand" "r,r")
8203 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
8206 [(set_attr "conds" "set")
8207 (set_attr "shift" "1")
8208 (set_attr "arch" "32,a")
8209 (set_attr "type" "alu_shift,alu_shift_reg")])
8211 (define_insn "*cmpsi_shiftsi_swp"
8212 [(set (reg:CC_SWP CC_REGNUM)
8213 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
8214 [(match_operand:SI 1 "s_register_operand" "r,r")
8215 (match_operand:SI 2 "shift_amount_operand" "M,rM")])
8216 (match_operand:SI 0 "s_register_operand" "r,r")))]
8219 [(set_attr "conds" "set")
8220 (set_attr "shift" "1")
8221 (set_attr "arch" "32,a")
8222 (set_attr "type" "alu_shift,alu_shift_reg")])
8224 (define_insn "*arm_cmpsi_negshiftsi_si"
8225 [(set (reg:CC_Z CC_REGNUM)
8227 (neg:SI (match_operator:SI 1 "shift_operator"
8228 [(match_operand:SI 2 "s_register_operand" "r")
8229 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
8230 (match_operand:SI 0 "s_register_operand" "r")))]
8233 [(set_attr "conds" "set")
8234 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
8235 (const_string "alu_shift")
8236 (const_string "alu_shift_reg")))
8237 (set_attr "predicable" "yes")]
8240 ;; DImode comparisons. The generic code generates branches that
8241 ;; if-conversion can not reduce to a conditional compare, so we do
8244 (define_insn_and_split "*arm_cmpdi_insn"
8245 [(set (reg:CC_NCV CC_REGNUM)
8246 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
8247 (match_operand:DI 1 "arm_di_operand" "rDi")))
8248 (clobber (match_scratch:SI 2 "=r"))]
8250 "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
8251 "&& reload_completed"
8252 [(set (reg:CC CC_REGNUM)
8253 (compare:CC (match_dup 0) (match_dup 1)))
8254 (parallel [(set (reg:CC CC_REGNUM)
8255 (compare:CC (match_dup 3) (match_dup 4)))
8257 (minus:SI (match_dup 5)
8258 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))])]
8260 operands[3] = gen_highpart (SImode, operands[0]);
8261 operands[0] = gen_lowpart (SImode, operands[0]);
8262 if (CONST_INT_P (operands[1]))
8264 operands[4] = GEN_INT (~INTVAL (gen_highpart_mode (SImode,
8267 operands[5] = gen_rtx_PLUS (SImode, operands[3], operands[4]);
8271 operands[4] = gen_highpart (SImode, operands[1]);
8272 operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
8274 operands[1] = gen_lowpart (SImode, operands[1]);
8275 operands[2] = gen_lowpart (SImode, operands[2]);
8277 [(set_attr "conds" "set")
8278 (set_attr "length" "8")]
8281 (define_insn_and_split "*arm_cmpdi_unsigned"
8282 [(set (reg:CC_CZ CC_REGNUM)
8283 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r")
8284 (match_operand:DI 1 "arm_di_operand" "rDi")))]
8286 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
8287 "&& reload_completed"
8288 [(set (reg:CC CC_REGNUM)
8289 (compare:CC (match_dup 2) (match_dup 3)))
8290 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
8291 (set (reg:CC CC_REGNUM)
8292 (compare:CC (match_dup 0) (match_dup 1))))]
8294 operands[2] = gen_highpart (SImode, operands[0]);
8295 operands[0] = gen_lowpart (SImode, operands[0]);
8296 if (CONST_INT_P (operands[1]))
8297 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
8299 operands[3] = gen_highpart (SImode, operands[1]);
8300 operands[1] = gen_lowpart (SImode, operands[1]);
8302 [(set_attr "conds" "set")
8303 (set_attr "length" "8")]
8306 (define_insn "*arm_cmpdi_zero"
8307 [(set (reg:CC_Z CC_REGNUM)
8308 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
8310 (clobber (match_scratch:SI 1 "=r"))]
8312 "orr%.\\t%1, %Q0, %R0"
8313 [(set_attr "conds" "set")]
8316 (define_insn "*thumb_cmpdi_zero"
8317 [(set (reg:CC_Z CC_REGNUM)
8318 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
8320 (clobber (match_scratch:SI 1 "=l"))]
8322 "orr\\t%1, %Q0, %R0"
8323 [(set_attr "conds" "set")
8324 (set_attr "length" "2")]
8327 ; This insn allows redundant compares to be removed by cse, nothing should
8328 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
8329 ; is deleted later on. The match_dup will match the mode here, so that
8330 ; mode changes of the condition codes aren't lost by this even though we don't
8331 ; specify what they are.
8333 (define_insn "*deleted_compare"
8334 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
8336 "\\t%@ deleted compare"
8337 [(set_attr "conds" "set")
8338 (set_attr "length" "0")]
8342 ;; Conditional branch insns
8344 (define_expand "cbranch_cc"
8346 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
8347 (match_operand 2 "" "")])
8348 (label_ref (match_operand 3 "" ""))
8351 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
8352 operands[1], operands[2], NULL_RTX);
8353 operands[2] = const0_rtx;"
8357 ;; Patterns to match conditional branch insns.
8360 (define_insn "arm_cond_branch"
8362 (if_then_else (match_operator 1 "arm_comparison_operator"
8363 [(match_operand 2 "cc_register" "") (const_int 0)])
8364 (label_ref (match_operand 0 "" ""))
8368 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8370 arm_ccfsm_state += 2;
8373 return \"b%d1\\t%l0\";
8375 [(set_attr "conds" "use")
8376 (set_attr "type" "branch")
8377 (set (attr "length")
8379 (and (match_test "TARGET_THUMB2")
8380 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8381 (le (minus (match_dup 0) (pc)) (const_int 256))))
8386 (define_insn "*arm_cond_branch_reversed"
8388 (if_then_else (match_operator 1 "arm_comparison_operator"
8389 [(match_operand 2 "cc_register" "") (const_int 0)])
8391 (label_ref (match_operand 0 "" ""))))]
8394 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8396 arm_ccfsm_state += 2;
8399 return \"b%D1\\t%l0\";
8401 [(set_attr "conds" "use")
8402 (set_attr "type" "branch")
8403 (set (attr "length")
8405 (and (match_test "TARGET_THUMB2")
8406 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
8407 (le (minus (match_dup 0) (pc)) (const_int 256))))
8416 (define_expand "cstore_cc"
8417 [(set (match_operand:SI 0 "s_register_operand" "")
8418 (match_operator:SI 1 "" [(match_operand 2 "" "")
8419 (match_operand 3 "" "")]))]
8421 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
8422 operands[2], operands[3], NULL_RTX);
8423 operands[3] = const0_rtx;"
8426 (define_insn_and_split "*mov_scc"
8427 [(set (match_operand:SI 0 "s_register_operand" "=r")
8428 (match_operator:SI 1 "arm_comparison_operator"
8429 [(match_operand 2 "cc_register" "") (const_int 0)]))]
8431 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
8434 (if_then_else:SI (match_dup 1)
8438 [(set_attr "conds" "use")
8439 (set_attr "length" "8")]
8442 (define_insn_and_split "*mov_negscc"
8443 [(set (match_operand:SI 0 "s_register_operand" "=r")
8444 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
8445 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8447 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8450 (if_then_else:SI (match_dup 1)
8454 operands[3] = GEN_INT (~0);
8456 [(set_attr "conds" "use")
8457 (set_attr "length" "8")]
8460 (define_insn_and_split "*mov_notscc"
8461 [(set (match_operand:SI 0 "s_register_operand" "=r")
8462 (not:SI (match_operator:SI 1 "arm_comparison_operator"
8463 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8465 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8468 (if_then_else:SI (match_dup 1)
8472 operands[3] = GEN_INT (~1);
8473 operands[4] = GEN_INT (~0);
8475 [(set_attr "conds" "use")
8476 (set_attr "length" "8")]
8479 (define_expand "cstoresi4"
8480 [(set (match_operand:SI 0 "s_register_operand" "")
8481 (match_operator:SI 1 "expandable_comparison_operator"
8482 [(match_operand:SI 2 "s_register_operand" "")
8483 (match_operand:SI 3 "reg_or_int_operand" "")]))]
8484 "TARGET_32BIT || TARGET_THUMB1"
8486 rtx op3, scratch, scratch2;
8490 if (!arm_add_operand (operands[3], SImode))
8491 operands[3] = force_reg (SImode, operands[3]);
8492 emit_insn (gen_cstore_cc (operands[0], operands[1],
8493 operands[2], operands[3]));
8497 if (operands[3] == const0_rtx)
8499 switch (GET_CODE (operands[1]))
8502 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8506 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8510 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8511 NULL_RTX, 0, OPTAB_WIDEN);
8512 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8513 NULL_RTX, 0, OPTAB_WIDEN);
8514 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8515 operands[0], 1, OPTAB_WIDEN);
8519 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8521 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8522 NULL_RTX, 1, OPTAB_WIDEN);
8526 scratch = expand_binop (SImode, ashr_optab, operands[2],
8527 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8528 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8529 NULL_RTX, 0, OPTAB_WIDEN);
8530 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8534 /* LT is handled by generic code. No need for unsigned with 0. */
8541 switch (GET_CODE (operands[1]))
8544 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8545 NULL_RTX, 0, OPTAB_WIDEN);
8546 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8550 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8551 NULL_RTX, 0, OPTAB_WIDEN);
8552 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8556 op3 = force_reg (SImode, operands[3]);
8558 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8559 NULL_RTX, 1, OPTAB_WIDEN);
8560 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8561 NULL_RTX, 0, OPTAB_WIDEN);
8562 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8568 if (!thumb1_cmp_operand (op3, SImode))
8569 op3 = force_reg (SImode, op3);
8570 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8571 NULL_RTX, 0, OPTAB_WIDEN);
8572 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8573 NULL_RTX, 1, OPTAB_WIDEN);
8574 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8579 op3 = force_reg (SImode, operands[3]);
8580 scratch = force_reg (SImode, const0_rtx);
8581 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8587 if (!thumb1_cmp_operand (op3, SImode))
8588 op3 = force_reg (SImode, op3);
8589 scratch = force_reg (SImode, const0_rtx);
8590 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8596 if (!thumb1_cmp_operand (op3, SImode))
8597 op3 = force_reg (SImode, op3);
8598 scratch = gen_reg_rtx (SImode);
8599 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8603 op3 = force_reg (SImode, operands[3]);
8604 scratch = gen_reg_rtx (SImode);
8605 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8608 /* No good sequences for GT, LT. */
8615 (define_expand "cstoresf4"
8616 [(set (match_operand:SI 0 "s_register_operand" "")
8617 (match_operator:SI 1 "expandable_comparison_operator"
8618 [(match_operand:SF 2 "s_register_operand" "")
8619 (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8620 "TARGET_32BIT && TARGET_HARD_FLOAT"
8621 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8622 operands[2], operands[3])); DONE;"
8625 (define_expand "cstoredf4"
8626 [(set (match_operand:SI 0 "s_register_operand" "")
8627 (match_operator:SI 1 "expandable_comparison_operator"
8628 [(match_operand:DF 2 "s_register_operand" "")
8629 (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8630 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8631 "emit_insn (gen_cstore_cc (operands[0], operands[1],
8632 operands[2], operands[3])); DONE;"
8635 (define_expand "cstoredi4"
8636 [(set (match_operand:SI 0 "s_register_operand" "")
8637 (match_operator:SI 1 "expandable_comparison_operator"
8638 [(match_operand:DI 2 "s_register_operand" "")
8639 (match_operand:DI 3 "cmpdi_operand" "")]))]
8642 if (!arm_validize_comparison (&operands[1],
8646 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8652 (define_expand "cstoresi_eq0_thumb1"
8654 [(set (match_operand:SI 0 "s_register_operand" "")
8655 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8657 (clobber (match_dup:SI 2))])]
8659 "operands[2] = gen_reg_rtx (SImode);"
8662 (define_expand "cstoresi_ne0_thumb1"
8664 [(set (match_operand:SI 0 "s_register_operand" "")
8665 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8667 (clobber (match_dup:SI 2))])]
8669 "operands[2] = gen_reg_rtx (SImode);"
8672 (define_insn "*cstoresi_eq0_thumb1_insn"
8673 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8674 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8676 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8679 neg\\t%0, %1\;adc\\t%0, %0, %1
8680 neg\\t%2, %1\;adc\\t%0, %1, %2"
8681 [(set_attr "length" "4")]
8684 (define_insn "*cstoresi_ne0_thumb1_insn"
8685 [(set (match_operand:SI 0 "s_register_operand" "=l")
8686 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8688 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8690 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8691 [(set_attr "length" "4")]
8694 ;; Used as part of the expansion of thumb ltu and gtu sequences
8695 (define_insn "cstoresi_nltu_thumb1"
8696 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8697 (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8698 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8700 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8701 [(set_attr "length" "4")]
8704 (define_insn_and_split "cstoresi_ltu_thumb1"
8705 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8706 (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8707 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8712 (neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8713 (set (match_dup 0) (neg:SI (match_dup 3)))]
8714 "operands[3] = gen_reg_rtx (SImode);"
8715 [(set_attr "length" "4")]
8718 ;; Used as part of the expansion of thumb les sequence.
8719 (define_insn "thumb1_addsi3_addgeu"
8720 [(set (match_operand:SI 0 "s_register_operand" "=l")
8721 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8722 (match_operand:SI 2 "s_register_operand" "l"))
8723 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8724 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8726 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8727 [(set_attr "length" "4")]
8731 ;; Conditional move insns
8733 (define_expand "movsicc"
8734 [(set (match_operand:SI 0 "s_register_operand" "")
8735 (if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8736 (match_operand:SI 2 "arm_not_operand" "")
8737 (match_operand:SI 3 "arm_not_operand" "")))]
8744 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8745 &XEXP (operands[1], 1)))
8748 code = GET_CODE (operands[1]);
8749 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8750 XEXP (operands[1], 1), NULL_RTX);
8751 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8755 (define_expand "movsfcc"
8756 [(set (match_operand:SF 0 "s_register_operand" "")
8757 (if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8758 (match_operand:SF 2 "s_register_operand" "")
8759 (match_operand:SF 3 "s_register_operand" "")))]
8760 "TARGET_32BIT && TARGET_HARD_FLOAT"
8763 enum rtx_code code = GET_CODE (operands[1]);
8766 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8767 &XEXP (operands[1], 1)))
8770 code = GET_CODE (operands[1]);
8771 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8772 XEXP (operands[1], 1), NULL_RTX);
8773 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8777 (define_expand "movdfcc"
8778 [(set (match_operand:DF 0 "s_register_operand" "")
8779 (if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8780 (match_operand:DF 2 "s_register_operand" "")
8781 (match_operand:DF 3 "s_register_operand" "")))]
8782 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8785 enum rtx_code code = GET_CODE (operands[1]);
8788 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8789 &XEXP (operands[1], 1)))
8791 code = GET_CODE (operands[1]);
8792 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8793 XEXP (operands[1], 1), NULL_RTX);
8794 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8798 (define_insn "*cmov<mode>"
8799 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8800 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8801 [(match_operand 2 "cc_register" "") (const_int 0)])
8802 (match_operand:SDF 3 "s_register_operand"
8804 (match_operand:SDF 4 "s_register_operand"
8805 "<F_constraint>")))]
8806 "TARGET_HARD_FLOAT && TARGET_FPU_ARMV8 <vfp_double_cond>"
8809 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8816 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8821 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8827 [(set_attr "conds" "use")
8828 (set_attr "type" "f_sel<vfp_type>")]
8831 (define_insn_and_split "*movsicc_insn"
8832 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8834 (match_operator 3 "arm_comparison_operator"
8835 [(match_operand 4 "cc_register" "") (const_int 0)])
8836 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8837 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8848 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8849 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8850 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8851 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8852 "&& reload_completed"
8855 enum rtx_code rev_code;
8856 enum machine_mode mode;
8859 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8861 gen_rtx_SET (VOIDmode,
8865 rev_code = GET_CODE (operands[3]);
8866 mode = GET_MODE (operands[4]);
8867 if (mode == CCFPmode || mode == CCFPEmode)
8868 rev_code = reverse_condition_maybe_unordered (rev_code);
8870 rev_code = reverse_condition (rev_code);
8872 rev_cond = gen_rtx_fmt_ee (rev_code,
8876 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8878 gen_rtx_SET (VOIDmode,
8883 [(set_attr "length" "4,4,4,4,8,8,8,8")
8884 (set_attr "conds" "use")
8885 (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")
8886 (set_attr_alternative "type"
8887 [(if_then_else (match_operand 2 "const_int_operand" "")
8888 (const_string "simple_alu_imm")
8890 (const_string "simple_alu_imm")
8891 (if_then_else (match_operand 1 "const_int_operand" "")
8892 (const_string "simple_alu_imm")
8894 (const_string "simple_alu_imm")
8898 (const_string "*")])]
8901 (define_insn "*movsfcc_soft_insn"
8902 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8903 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8904 [(match_operand 4 "cc_register" "") (const_int 0)])
8905 (match_operand:SF 1 "s_register_operand" "0,r")
8906 (match_operand:SF 2 "s_register_operand" "r,0")))]
8907 "TARGET_ARM && TARGET_SOFT_FLOAT"
8911 [(set_attr "conds" "use")
8912 (set_attr "insn" "mov")]
8916 ;; Jump and linkage insns
8918 (define_expand "jump"
8920 (label_ref (match_operand 0 "" "")))]
8925 (define_insn "*arm_jump"
8927 (label_ref (match_operand 0 "" "")))]
8931 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8933 arm_ccfsm_state += 2;
8936 return \"b%?\\t%l0\";
8939 [(set_attr "predicable" "yes")
8940 (set (attr "length")
8942 (and (match_test "TARGET_THUMB2")
8943 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8944 (le (minus (match_dup 0) (pc)) (const_int 2048))))
8949 (define_insn "*thumb_jump"
8951 (label_ref (match_operand 0 "" "")))]
8954 if (get_attr_length (insn) == 2)
8956 return \"bl\\t%l0\\t%@ far jump\";
8958 [(set (attr "far_jump")
8960 (eq_attr "length" "4")
8961 (const_string "yes")
8962 (const_string "no")))
8963 (set (attr "length")
8965 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8966 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8971 (define_expand "call"
8972 [(parallel [(call (match_operand 0 "memory_operand" "")
8973 (match_operand 1 "general_operand" ""))
8974 (use (match_operand 2 "" ""))
8975 (clobber (reg:SI LR_REGNUM))])]
8981 /* In an untyped call, we can get NULL for operand 2. */
8982 if (operands[2] == NULL_RTX)
8983 operands[2] = const0_rtx;
8985 /* Decide if we should generate indirect calls by loading the
8986 32-bit address of the callee into a register before performing the
8988 callee = XEXP (operands[0], 0);
8989 if (GET_CODE (callee) == SYMBOL_REF
8990 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8992 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8994 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8995 arm_emit_call_insn (pat, XEXP (operands[0], 0));
9000 (define_expand "call_internal"
9001 [(parallel [(call (match_operand 0 "memory_operand" "")
9002 (match_operand 1 "general_operand" ""))
9003 (use (match_operand 2 "" ""))
9004 (clobber (reg:SI LR_REGNUM))])])
9006 (define_insn "*call_reg_armv5"
9007 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9008 (match_operand 1 "" ""))
9009 (use (match_operand 2 "" ""))
9010 (clobber (reg:SI LR_REGNUM))]
9011 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9013 [(set_attr "type" "call")]
9016 (define_insn "*call_reg_arm"
9017 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
9018 (match_operand 1 "" ""))
9019 (use (match_operand 2 "" ""))
9020 (clobber (reg:SI LR_REGNUM))]
9021 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9023 return output_call (operands);
9025 ;; length is worst case, normally it is only two
9026 [(set_attr "length" "12")
9027 (set_attr "type" "call")]
9031 ;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
9032 ;; considered a function call by the branch predictor of some cores (PR40887).
9033 ;; Falls back to blx rN (*call_reg_armv5).
9035 (define_insn "*call_mem"
9036 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
9037 (match_operand 1 "" ""))
9038 (use (match_operand 2 "" ""))
9039 (clobber (reg:SI LR_REGNUM))]
9040 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9042 return output_call_mem (operands);
9044 [(set_attr "length" "12")
9045 (set_attr "type" "call")]
9048 (define_insn "*call_reg_thumb1_v5"
9049 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9050 (match_operand 1 "" ""))
9051 (use (match_operand 2 "" ""))
9052 (clobber (reg:SI LR_REGNUM))]
9053 "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
9055 [(set_attr "length" "2")
9056 (set_attr "type" "call")]
9059 (define_insn "*call_reg_thumb1"
9060 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
9061 (match_operand 1 "" ""))
9062 (use (match_operand 2 "" ""))
9063 (clobber (reg:SI LR_REGNUM))]
9064 "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
9067 if (!TARGET_CALLER_INTERWORKING)
9068 return thumb_call_via_reg (operands[0]);
9069 else if (operands[1] == const0_rtx)
9070 return \"bl\\t%__interwork_call_via_%0\";
9071 else if (frame_pointer_needed)
9072 return \"bl\\t%__interwork_r7_call_via_%0\";
9074 return \"bl\\t%__interwork_r11_call_via_%0\";
9076 [(set_attr "type" "call")]
9079 (define_expand "call_value"
9080 [(parallel [(set (match_operand 0 "" "")
9081 (call (match_operand 1 "memory_operand" "")
9082 (match_operand 2 "general_operand" "")))
9083 (use (match_operand 3 "" ""))
9084 (clobber (reg:SI LR_REGNUM))])]
9090 /* In an untyped call, we can get NULL for operand 2. */
9091 if (operands[3] == 0)
9092 operands[3] = const0_rtx;
9094 /* Decide if we should generate indirect calls by loading the
9095 32-bit address of the callee into a register before performing the
9097 callee = XEXP (operands[1], 0);
9098 if (GET_CODE (callee) == SYMBOL_REF
9099 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
9101 XEXP (operands[1], 0) = force_reg (Pmode, callee);
9103 pat = gen_call_value_internal (operands[0], operands[1],
9104 operands[2], operands[3]);
9105 arm_emit_call_insn (pat, XEXP (operands[1], 0));
9110 (define_expand "call_value_internal"
9111 [(parallel [(set (match_operand 0 "" "")
9112 (call (match_operand 1 "memory_operand" "")
9113 (match_operand 2 "general_operand" "")))
9114 (use (match_operand 3 "" ""))
9115 (clobber (reg:SI LR_REGNUM))])])
9117 (define_insn "*call_value_reg_armv5"
9118 [(set (match_operand 0 "" "")
9119 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9120 (match_operand 2 "" "")))
9121 (use (match_operand 3 "" ""))
9122 (clobber (reg:SI LR_REGNUM))]
9123 "TARGET_ARM && arm_arch5 && !SIBLING_CALL_P (insn)"
9125 [(set_attr "type" "call")]
9128 (define_insn "*call_value_reg_arm"
9129 [(set (match_operand 0 "" "")
9130 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
9131 (match_operand 2 "" "")))
9132 (use (match_operand 3 "" ""))
9133 (clobber (reg:SI LR_REGNUM))]
9134 "TARGET_ARM && !arm_arch5 && !SIBLING_CALL_P (insn)"
9136 return output_call (&operands[1]);
9138 [(set_attr "length" "12")
9139 (set_attr "type" "call")]
9142 ;; Note: see *call_mem
9144 (define_insn "*call_value_mem"
9145 [(set (match_operand 0 "" "")
9146 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
9147 (match_operand 2 "" "")))
9148 (use (match_operand 3 "" ""))
9149 (clobber (reg:SI LR_REGNUM))]
9150 "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))
9151 && !SIBLING_CALL_P (insn)"
9153 return output_call_mem (&operands[1]);
9155 [(set_attr "length" "12")
9156 (set_attr "type" "call")]
9159 (define_insn "*call_value_reg_thumb1_v5"
9160 [(set (match_operand 0 "" "")
9161 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9162 (match_operand 2 "" "")))
9163 (use (match_operand 3 "" ""))
9164 (clobber (reg:SI LR_REGNUM))]
9165 "TARGET_THUMB1 && arm_arch5"
9167 [(set_attr "length" "2")
9168 (set_attr "type" "call")]
9171 (define_insn "*call_value_reg_thumb1"
9172 [(set (match_operand 0 "" "")
9173 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
9174 (match_operand 2 "" "")))
9175 (use (match_operand 3 "" ""))
9176 (clobber (reg:SI LR_REGNUM))]
9177 "TARGET_THUMB1 && !arm_arch5"
9180 if (!TARGET_CALLER_INTERWORKING)
9181 return thumb_call_via_reg (operands[1]);
9182 else if (operands[2] == const0_rtx)
9183 return \"bl\\t%__interwork_call_via_%1\";
9184 else if (frame_pointer_needed)
9185 return \"bl\\t%__interwork_r7_call_via_%1\";
9187 return \"bl\\t%__interwork_r11_call_via_%1\";
9189 [(set_attr "type" "call")]
9192 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
9193 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
9195 (define_insn "*call_symbol"
9196 [(call (mem:SI (match_operand:SI 0 "" ""))
9197 (match_operand 1 "" ""))
9198 (use (match_operand 2 "" ""))
9199 (clobber (reg:SI LR_REGNUM))]
9201 && !SIBLING_CALL_P (insn)
9202 && (GET_CODE (operands[0]) == SYMBOL_REF)
9203 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9206 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
9208 [(set_attr "type" "call")]
9211 (define_insn "*call_value_symbol"
9212 [(set (match_operand 0 "" "")
9213 (call (mem:SI (match_operand:SI 1 "" ""))
9214 (match_operand:SI 2 "" "")))
9215 (use (match_operand 3 "" ""))
9216 (clobber (reg:SI LR_REGNUM))]
9218 && !SIBLING_CALL_P (insn)
9219 && (GET_CODE (operands[1]) == SYMBOL_REF)
9220 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9223 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
9225 [(set_attr "type" "call")]
9228 (define_insn "*call_insn"
9229 [(call (mem:SI (match_operand:SI 0 "" ""))
9230 (match_operand:SI 1 "" ""))
9231 (use (match_operand 2 "" ""))
9232 (clobber (reg:SI LR_REGNUM))]
9234 && GET_CODE (operands[0]) == SYMBOL_REF
9235 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
9237 [(set_attr "length" "4")
9238 (set_attr "type" "call")]
9241 (define_insn "*call_value_insn"
9242 [(set (match_operand 0 "" "")
9243 (call (mem:SI (match_operand 1 "" ""))
9244 (match_operand 2 "" "")))
9245 (use (match_operand 3 "" ""))
9246 (clobber (reg:SI LR_REGNUM))]
9248 && GET_CODE (operands[1]) == SYMBOL_REF
9249 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
9251 [(set_attr "length" "4")
9252 (set_attr "type" "call")]
9255 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
9256 (define_expand "sibcall"
9257 [(parallel [(call (match_operand 0 "memory_operand" "")
9258 (match_operand 1 "general_operand" ""))
9260 (use (match_operand 2 "" ""))])]
9264 if (!REG_P (XEXP (operands[0], 0))
9265 && (GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF))
9266 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
9268 if (operands[2] == NULL_RTX)
9269 operands[2] = const0_rtx;
9273 (define_expand "sibcall_value"
9274 [(parallel [(set (match_operand 0 "" "")
9275 (call (match_operand 1 "memory_operand" "")
9276 (match_operand 2 "general_operand" "")))
9278 (use (match_operand 3 "" ""))])]
9282 if (!REG_P (XEXP (operands[1], 0)) &&
9283 (GET_CODE (XEXP (operands[1],0)) != SYMBOL_REF))
9284 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
9286 if (operands[3] == NULL_RTX)
9287 operands[3] = const0_rtx;
9291 (define_insn "*sibcall_insn"
9292 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs,Ss"))
9293 (match_operand 1 "" ""))
9295 (use (match_operand 2 "" ""))]
9296 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9298 if (which_alternative == 1)
9299 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
9302 if (arm_arch5 || arm_arch4t)
9303 return \" bx\\t%0\\t%@ indirect register sibling call\";
9305 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
9308 [(set_attr "type" "call")]
9311 (define_insn "*sibcall_value_insn"
9312 [(set (match_operand 0 "s_register_operand" "")
9313 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,Ss"))
9314 (match_operand 2 "" "")))
9316 (use (match_operand 3 "" ""))]
9317 "TARGET_32BIT && SIBLING_CALL_P (insn)"
9319 if (which_alternative == 1)
9320 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
9323 if (arm_arch5 || arm_arch4t)
9326 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
9329 [(set_attr "type" "call")]
9332 (define_expand "<return_str>return"
9334 "(TARGET_ARM || (TARGET_THUMB2
9335 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
9336 && !IS_STACKALIGN (arm_current_func_type ())))
9337 <return_cond_false>"
9342 thumb2_expand_return (<return_simple_p>);
9349 ;; Often the return insn will be the same as loading from memory, so set attr
9350 (define_insn "*arm_return"
9352 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
9355 if (arm_ccfsm_state == 2)
9357 arm_ccfsm_state += 2;
9360 return output_return_instruction (const_true_rtx, true, false, false);
9362 [(set_attr "type" "load1")
9363 (set_attr "length" "12")
9364 (set_attr "predicable" "yes")]
9367 (define_insn "*cond_<return_str>return"
9369 (if_then_else (match_operator 0 "arm_comparison_operator"
9370 [(match_operand 1 "cc_register" "") (const_int 0)])
9373 "TARGET_ARM <return_cond_true>"
9376 if (arm_ccfsm_state == 2)
9378 arm_ccfsm_state += 2;
9381 return output_return_instruction (operands[0], true, false,
9384 [(set_attr "conds" "use")
9385 (set_attr "length" "12")
9386 (set_attr "type" "load1")]
9389 (define_insn "*cond_<return_str>return_inverted"
9391 (if_then_else (match_operator 0 "arm_comparison_operator"
9392 [(match_operand 1 "cc_register" "") (const_int 0)])
9395 "TARGET_ARM <return_cond_true>"
9398 if (arm_ccfsm_state == 2)
9400 arm_ccfsm_state += 2;
9403 return output_return_instruction (operands[0], true, true,
9406 [(set_attr "conds" "use")
9407 (set_attr "length" "12")
9408 (set_attr "type" "load1")]
9411 (define_insn "*arm_simple_return"
9416 if (arm_ccfsm_state == 2)
9418 arm_ccfsm_state += 2;
9421 return output_return_instruction (const_true_rtx, true, false, true);
9423 [(set_attr "type" "branch")
9424 (set_attr "length" "4")
9425 (set_attr "predicable" "yes")]
9428 ;; Generate a sequence of instructions to determine if the processor is
9429 ;; in 26-bit or 32-bit mode, and return the appropriate return address
9432 (define_expand "return_addr_mask"
9434 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9436 (set (match_operand:SI 0 "s_register_operand" "")
9437 (if_then_else:SI (eq (match_dup 1) (const_int 0))
9439 (const_int 67108860)))] ; 0x03fffffc
9442 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
9445 (define_insn "*check_arch2"
9446 [(set (match_operand:CC_NOOV 0 "cc_register" "")
9447 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9450 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9451 [(set_attr "length" "8")
9452 (set_attr "conds" "set")]
9455 ;; Call subroutine returning any type.
9457 (define_expand "untyped_call"
9458 [(parallel [(call (match_operand 0 "" "")
9460 (match_operand 1 "" "")
9461 (match_operand 2 "" "")])]
9466 rtx par = gen_rtx_PARALLEL (VOIDmode,
9467 rtvec_alloc (XVECLEN (operands[2], 0)));
9468 rtx addr = gen_reg_rtx (Pmode);
9472 emit_move_insn (addr, XEXP (operands[1], 0));
9473 mem = change_address (operands[1], BLKmode, addr);
9475 for (i = 0; i < XVECLEN (operands[2], 0); i++)
9477 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9479 /* Default code only uses r0 as a return value, but we could
9480 be using anything up to 4 registers. */
9481 if (REGNO (src) == R0_REGNUM)
9482 src = gen_rtx_REG (TImode, R0_REGNUM);
9484 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9486 size += GET_MODE_SIZE (GET_MODE (src));
9489 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
9494 for (i = 0; i < XVECLEN (par, 0); i++)
9496 HOST_WIDE_INT offset = 0;
9497 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9500 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9502 mem = change_address (mem, GET_MODE (reg), NULL);
9503 if (REGNO (reg) == R0_REGNUM)
9505 /* On thumb we have to use a write-back instruction. */
9506 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9507 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9508 size = TARGET_ARM ? 16 : 0;
9512 emit_move_insn (mem, reg);
9513 size = GET_MODE_SIZE (GET_MODE (reg));
9517 /* The optimizer does not know that the call sets the function value
9518 registers we stored in the result block. We avoid problems by
9519 claiming that all hard registers are used and clobbered at this
9521 emit_insn (gen_blockage ());
9527 (define_expand "untyped_return"
9528 [(match_operand:BLK 0 "memory_operand" "")
9529 (match_operand 1 "" "")]
9534 rtx addr = gen_reg_rtx (Pmode);
9538 emit_move_insn (addr, XEXP (operands[0], 0));
9539 mem = change_address (operands[0], BLKmode, addr);
9541 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9543 HOST_WIDE_INT offset = 0;
9544 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9547 emit_move_insn (addr, plus_constant (Pmode, addr, size));
9549 mem = change_address (mem, GET_MODE (reg), NULL);
9550 if (REGNO (reg) == R0_REGNUM)
9552 /* On thumb we have to use a write-back instruction. */
9553 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9554 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9555 size = TARGET_ARM ? 16 : 0;
9559 emit_move_insn (reg, mem);
9560 size = GET_MODE_SIZE (GET_MODE (reg));
9564 /* Emit USE insns before the return. */
9565 for (i = 0; i < XVECLEN (operands[1], 0); i++)
9566 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9568 /* Construct the return. */
9569 expand_naked_return ();
9575 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9576 ;; all of memory. This blocks insns from being moved across this point.
9578 (define_insn "blockage"
9579 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9582 [(set_attr "length" "0")
9583 (set_attr "type" "block")]
9586 (define_expand "casesi"
9587 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
9588 (match_operand:SI 1 "const_int_operand" "") ; lower bound
9589 (match_operand:SI 2 "const_int_operand" "") ; total range
9590 (match_operand:SI 3 "" "") ; table label
9591 (match_operand:SI 4 "" "")] ; Out of range label
9592 "TARGET_32BIT || optimize_size || flag_pic"
9595 enum insn_code code;
9596 if (operands[1] != const0_rtx)
9598 rtx reg = gen_reg_rtx (SImode);
9600 emit_insn (gen_addsi3 (reg, operands[0],
9601 gen_int_mode (-INTVAL (operands[1]),
9607 code = CODE_FOR_arm_casesi_internal;
9608 else if (TARGET_THUMB1)
9609 code = CODE_FOR_thumb1_casesi_internal_pic;
9611 code = CODE_FOR_thumb2_casesi_internal_pic;
9613 code = CODE_FOR_thumb2_casesi_internal;
9615 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9616 operands[2] = force_reg (SImode, operands[2]);
9618 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9619 operands[3], operands[4]));
9624 ;; The USE in this pattern is needed to tell flow analysis that this is
9625 ;; a CASESI insn. It has no other purpose.
9626 (define_insn "arm_casesi_internal"
9627 [(parallel [(set (pc)
9629 (leu (match_operand:SI 0 "s_register_operand" "r")
9630 (match_operand:SI 1 "arm_rhs_operand" "rI"))
9631 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9632 (label_ref (match_operand 2 "" ""))))
9633 (label_ref (match_operand 3 "" ""))))
9634 (clobber (reg:CC CC_REGNUM))
9635 (use (label_ref (match_dup 2)))])]
9639 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9640 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9642 [(set_attr "conds" "clob")
9643 (set_attr "length" "12")]
9646 (define_expand "thumb1_casesi_internal_pic"
9647 [(match_operand:SI 0 "s_register_operand" "")
9648 (match_operand:SI 1 "thumb1_cmp_operand" "")
9649 (match_operand 2 "" "")
9650 (match_operand 3 "" "")]
9654 rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9655 emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9657 reg0 = gen_rtx_REG (SImode, 0);
9658 emit_move_insn (reg0, operands[0]);
9659 emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9664 (define_insn "thumb1_casesi_dispatch"
9665 [(parallel [(set (pc) (unspec [(reg:SI 0)
9666 (label_ref (match_operand 0 "" ""))
9667 ;; (label_ref (match_operand 1 "" ""))
9669 UNSPEC_THUMB1_CASESI))
9670 (clobber (reg:SI IP_REGNUM))
9671 (clobber (reg:SI LR_REGNUM))])]
9673 "* return thumb1_output_casesi(operands);"
9674 [(set_attr "length" "4")]
9677 (define_expand "indirect_jump"
9679 (match_operand:SI 0 "s_register_operand" ""))]
9682 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
9683 address and use bx. */
9687 tmp = gen_reg_rtx (SImode);
9688 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9694 ;; NB Never uses BX.
9695 (define_insn "*arm_indirect_jump"
9697 (match_operand:SI 0 "s_register_operand" "r"))]
9699 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9700 [(set_attr "predicable" "yes")]
9703 (define_insn "*load_indirect_jump"
9705 (match_operand:SI 0 "memory_operand" "m"))]
9707 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9708 [(set_attr "type" "load1")
9709 (set_attr "pool_range" "4096")
9710 (set_attr "neg_pool_range" "4084")
9711 (set_attr "predicable" "yes")]
9714 ;; NB Never uses BX.
9715 (define_insn "*thumb1_indirect_jump"
9717 (match_operand:SI 0 "register_operand" "l*r"))]
9720 [(set_attr "conds" "clob")
9721 (set_attr "length" "2")]
9731 if (TARGET_UNIFIED_ASM)
9734 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9735 return \"mov\\tr8, r8\";
9737 [(set (attr "length")
9738 (if_then_else (eq_attr "is_thumb" "yes")
9744 ;; Patterns to allow combination of arithmetic, cond code and shifts
9746 (define_insn "*arith_shiftsi"
9747 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9748 (match_operator:SI 1 "shiftable_operator"
9749 [(match_operator:SI 3 "shift_operator"
9750 [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
9751 (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
9752 (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
9754 "%i1%?\\t%0, %2, %4%S3"
9755 [(set_attr "predicable" "yes")
9756 (set_attr "shift" "4")
9757 (set_attr "arch" "a,t2,t2,a")
9758 ;; Thumb2 doesn't allow the stack pointer to be used for
9759 ;; operand1 for all operations other than add and sub. In this case
9760 ;; the minus operation is a candidate for an rsub and hence needs
9762 ;; We have to make sure to disable the fourth alternative if
9763 ;; the shift_operator is MULT, since otherwise the insn will
9764 ;; also match a multiply_accumulate pattern and validate_change
9765 ;; will allow a replacement of the constant with a register
9766 ;; despite the checks done in shift_operator.
9767 (set_attr_alternative "insn_enabled"
9768 [(const_string "yes")
9770 (match_operand:SI 1 "add_operator" "")
9771 (const_string "yes") (const_string "no"))
9772 (const_string "yes")
9774 (match_operand:SI 3 "mult_operator" "")
9775 (const_string "no") (const_string "yes"))])
9776 (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
9779 [(set (match_operand:SI 0 "s_register_operand" "")
9780 (match_operator:SI 1 "shiftable_operator"
9781 [(match_operator:SI 2 "shiftable_operator"
9782 [(match_operator:SI 3 "shift_operator"
9783 [(match_operand:SI 4 "s_register_operand" "")
9784 (match_operand:SI 5 "reg_or_int_operand" "")])
9785 (match_operand:SI 6 "s_register_operand" "")])
9786 (match_operand:SI 7 "arm_rhs_operand" "")]))
9787 (clobber (match_operand:SI 8 "s_register_operand" ""))]
9790 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9793 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9796 (define_insn "*arith_shiftsi_compare0"
9797 [(set (reg:CC_NOOV CC_REGNUM)
9799 (match_operator:SI 1 "shiftable_operator"
9800 [(match_operator:SI 3 "shift_operator"
9801 [(match_operand:SI 4 "s_register_operand" "r,r")
9802 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9803 (match_operand:SI 2 "s_register_operand" "r,r")])
9805 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9806 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9809 "%i1%.\\t%0, %2, %4%S3"
9810 [(set_attr "conds" "set")
9811 (set_attr "shift" "4")
9812 (set_attr "arch" "32,a")
9813 (set_attr "type" "alu_shift,alu_shift_reg")])
9815 (define_insn "*arith_shiftsi_compare0_scratch"
9816 [(set (reg:CC_NOOV CC_REGNUM)
9818 (match_operator:SI 1 "shiftable_operator"
9819 [(match_operator:SI 3 "shift_operator"
9820 [(match_operand:SI 4 "s_register_operand" "r,r")
9821 (match_operand:SI 5 "shift_amount_operand" "M,r")])
9822 (match_operand:SI 2 "s_register_operand" "r,r")])
9824 (clobber (match_scratch:SI 0 "=r,r"))]
9826 "%i1%.\\t%0, %2, %4%S3"
9827 [(set_attr "conds" "set")
9828 (set_attr "shift" "4")
9829 (set_attr "arch" "32,a")
9830 (set_attr "type" "alu_shift,alu_shift_reg")])
9832 (define_insn "*sub_shiftsi"
9833 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9834 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9835 (match_operator:SI 2 "shift_operator"
9836 [(match_operand:SI 3 "s_register_operand" "r,r")
9837 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9839 "sub%?\\t%0, %1, %3%S2"
9840 [(set_attr "predicable" "yes")
9841 (set_attr "shift" "3")
9842 (set_attr "arch" "32,a")
9843 (set_attr "type" "alu_shift,alu_shift_reg")])
9845 (define_insn "*sub_shiftsi_compare0"
9846 [(set (reg:CC_NOOV CC_REGNUM)
9848 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9849 (match_operator:SI 2 "shift_operator"
9850 [(match_operand:SI 3 "s_register_operand" "r,r")
9851 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9853 (set (match_operand:SI 0 "s_register_operand" "=r,r")
9854 (minus:SI (match_dup 1)
9855 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9857 "sub%.\\t%0, %1, %3%S2"
9858 [(set_attr "conds" "set")
9859 (set_attr "shift" "3")
9860 (set_attr "arch" "32,a")
9861 (set_attr "type" "alu_shift,alu_shift_reg")])
9863 (define_insn "*sub_shiftsi_compare0_scratch"
9864 [(set (reg:CC_NOOV CC_REGNUM)
9866 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9867 (match_operator:SI 2 "shift_operator"
9868 [(match_operand:SI 3 "s_register_operand" "r,r")
9869 (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9871 (clobber (match_scratch:SI 0 "=r,r"))]
9873 "sub%.\\t%0, %1, %3%S2"
9874 [(set_attr "conds" "set")
9875 (set_attr "shift" "3")
9876 (set_attr "arch" "32,a")
9877 (set_attr "type" "alu_shift,alu_shift_reg")])
9880 (define_insn_and_split "*and_scc"
9881 [(set (match_operand:SI 0 "s_register_operand" "=r")
9882 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9883 [(match_operand 2 "cc_register" "") (const_int 0)])
9884 (match_operand:SI 3 "s_register_operand" "r")))]
9886 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9887 "&& reload_completed"
9888 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9889 (cond_exec (match_dup 4) (set (match_dup 0)
9890 (and:SI (match_dup 3) (const_int 1))))]
9892 enum machine_mode mode = GET_MODE (operands[2]);
9893 enum rtx_code rc = GET_CODE (operands[1]);
9895 /* Note that operands[4] is the same as operands[1],
9896 but with VOIDmode as the result. */
9897 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9898 if (mode == CCFPmode || mode == CCFPEmode)
9899 rc = reverse_condition_maybe_unordered (rc);
9901 rc = reverse_condition (rc);
9902 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9904 [(set_attr "conds" "use")
9905 (set_attr "insn" "mov")
9906 (set_attr "length" "8")]
9909 (define_insn_and_split "*ior_scc"
9910 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9911 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9912 [(match_operand 2 "cc_register" "") (const_int 0)])
9913 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9918 "&& reload_completed
9919 && REGNO (operands [0]) != REGNO (operands[3])"
9920 ;; && which_alternative == 1
9921 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9922 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9923 (cond_exec (match_dup 4) (set (match_dup 0)
9924 (ior:SI (match_dup 3) (const_int 1))))]
9926 enum machine_mode mode = GET_MODE (operands[2]);
9927 enum rtx_code rc = GET_CODE (operands[1]);
9929 /* Note that operands[4] is the same as operands[1],
9930 but with VOIDmode as the result. */
9931 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9932 if (mode == CCFPmode || mode == CCFPEmode)
9933 rc = reverse_condition_maybe_unordered (rc);
9935 rc = reverse_condition (rc);
9936 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9938 [(set_attr "conds" "use")
9939 (set_attr "length" "4,8")]
9942 ; A series of splitters for the compare_scc pattern below. Note that
9943 ; order is important.
9945 [(set (match_operand:SI 0 "s_register_operand" "")
9946 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9948 (clobber (reg:CC CC_REGNUM))]
9949 "TARGET_32BIT && reload_completed"
9950 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9953 [(set (match_operand:SI 0 "s_register_operand" "")
9954 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9956 (clobber (reg:CC CC_REGNUM))]
9957 "TARGET_32BIT && reload_completed"
9958 [(set (match_dup 0) (not:SI (match_dup 1)))
9959 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9962 [(set (match_operand:SI 0 "s_register_operand" "")
9963 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9965 (clobber (reg:CC CC_REGNUM))]
9966 "TARGET_32BIT && reload_completed"
9968 [(set (reg:CC CC_REGNUM)
9969 (compare:CC (const_int 1) (match_dup 1)))
9971 (minus:SI (const_int 1) (match_dup 1)))])
9972 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9973 (set (match_dup 0) (const_int 0)))])
9976 [(set (match_operand:SI 0 "s_register_operand" "")
9977 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9978 (match_operand:SI 2 "const_int_operand" "")))
9979 (clobber (reg:CC CC_REGNUM))]
9980 "TARGET_32BIT && reload_completed"
9982 [(set (reg:CC CC_REGNUM)
9983 (compare:CC (match_dup 1) (match_dup 2)))
9984 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9985 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9986 (set (match_dup 0) (const_int 1)))]
9988 operands[3] = GEN_INT (-INTVAL (operands[2]));
9992 [(set (match_operand:SI 0 "s_register_operand" "")
9993 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9994 (match_operand:SI 2 "arm_add_operand" "")))
9995 (clobber (reg:CC CC_REGNUM))]
9996 "TARGET_32BIT && reload_completed"
9998 [(set (reg:CC_NOOV CC_REGNUM)
9999 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
10001 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
10002 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
10003 (set (match_dup 0) (const_int 1)))])
10005 (define_insn_and_split "*compare_scc"
10006 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10007 (match_operator:SI 1 "arm_comparison_operator"
10008 [(match_operand:SI 2 "s_register_operand" "r,r")
10009 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
10010 (clobber (reg:CC CC_REGNUM))]
10013 "&& reload_completed"
10014 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
10015 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
10016 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
10019 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10020 operands[2], operands[3]);
10021 enum rtx_code rc = GET_CODE (operands[1]);
10023 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
10025 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10026 if (mode == CCFPmode || mode == CCFPEmode)
10027 rc = reverse_condition_maybe_unordered (rc);
10029 rc = reverse_condition (rc);
10030 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
10033 ;; Attempt to improve the sequence generated by the compare_scc splitters
10034 ;; not to use conditional execution.
10036 [(set (reg:CC CC_REGNUM)
10037 (compare:CC (match_operand:SI 1 "register_operand" "")
10038 (match_operand:SI 2 "arm_rhs_operand" "")))
10039 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
10040 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
10041 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
10042 (set (match_dup 0) (const_int 1)))
10043 (match_scratch:SI 3 "r")]
10046 [(set (reg:CC CC_REGNUM)
10047 (compare:CC (match_dup 1) (match_dup 2)))
10048 (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
10050 [(set (reg:CC CC_REGNUM)
10051 (compare:CC (const_int 0) (match_dup 3)))
10052 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
10054 [(set (match_dup 0)
10055 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
10056 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
10057 (clobber (reg:CC CC_REGNUM))])])
10059 (define_insn "*cond_move"
10060 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10061 (if_then_else:SI (match_operator 3 "equality_operator"
10062 [(match_operator 4 "arm_comparison_operator"
10063 [(match_operand 5 "cc_register" "") (const_int 0)])
10065 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10066 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
10069 if (GET_CODE (operands[3]) == NE)
10071 if (which_alternative != 1)
10072 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
10073 if (which_alternative != 0)
10074 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
10077 if (which_alternative != 0)
10078 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10079 if (which_alternative != 1)
10080 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
10083 [(set_attr "conds" "use")
10084 (set_attr "insn" "mov")
10085 (set_attr "length" "4,4,8")]
10088 (define_insn "*cond_arith"
10089 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090 (match_operator:SI 5 "shiftable_operator"
10091 [(match_operator:SI 4 "arm_comparison_operator"
10092 [(match_operand:SI 2 "s_register_operand" "r,r")
10093 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10094 (match_operand:SI 1 "s_register_operand" "0,?r")]))
10095 (clobber (reg:CC CC_REGNUM))]
10098 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
10099 return \"%i5\\t%0, %1, %2, lsr #31\";
10101 output_asm_insn (\"cmp\\t%2, %3\", operands);
10102 if (GET_CODE (operands[5]) == AND)
10103 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
10104 else if (GET_CODE (operands[5]) == MINUS)
10105 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
10106 else if (which_alternative != 0)
10107 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10108 return \"%i5%d4\\t%0, %1, #1\";
10110 [(set_attr "conds" "clob")
10111 (set_attr "length" "12")]
10114 (define_insn "*cond_sub"
10115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10116 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
10117 (match_operator:SI 4 "arm_comparison_operator"
10118 [(match_operand:SI 2 "s_register_operand" "r,r")
10119 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10120 (clobber (reg:CC CC_REGNUM))]
10123 output_asm_insn (\"cmp\\t%2, %3\", operands);
10124 if (which_alternative != 0)
10125 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
10126 return \"sub%d4\\t%0, %1, #1\";
10128 [(set_attr "conds" "clob")
10129 (set_attr "length" "8,12")]
10132 (define_insn "*cmp_ite0"
10133 [(set (match_operand 6 "dominant_cc_register" "")
10136 (match_operator 4 "arm_comparison_operator"
10137 [(match_operand:SI 0 "s_register_operand"
10138 "l,l,l,r,r,r,r,r,r")
10139 (match_operand:SI 1 "arm_add_operand"
10140 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10141 (match_operator:SI 5 "arm_comparison_operator"
10142 [(match_operand:SI 2 "s_register_operand"
10143 "l,r,r,l,l,r,r,r,r")
10144 (match_operand:SI 3 "arm_add_operand"
10145 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10151 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10153 {\"cmp%d5\\t%0, %1\",
10154 \"cmp%d4\\t%2, %3\"},
10155 {\"cmn%d5\\t%0, #%n1\",
10156 \"cmp%d4\\t%2, %3\"},
10157 {\"cmp%d5\\t%0, %1\",
10158 \"cmn%d4\\t%2, #%n3\"},
10159 {\"cmn%d5\\t%0, #%n1\",
10160 \"cmn%d4\\t%2, #%n3\"}
10162 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10167 \"cmn\\t%0, #%n1\"},
10168 {\"cmn\\t%2, #%n3\",
10170 {\"cmn\\t%2, #%n3\",
10171 \"cmn\\t%0, #%n1\"}
10173 static const char * const ite[2] =
10178 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10179 CMP_CMP, CMN_CMP, CMP_CMP,
10180 CMN_CMP, CMP_CMN, CMN_CMN};
10182 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10184 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10185 if (TARGET_THUMB2) {
10186 output_asm_insn (ite[swap], operands);
10188 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10191 [(set_attr "conds" "set")
10192 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10193 (set_attr_alternative "length"
10199 (if_then_else (eq_attr "is_thumb" "no")
10202 (if_then_else (eq_attr "is_thumb" "no")
10205 (if_then_else (eq_attr "is_thumb" "no")
10208 (if_then_else (eq_attr "is_thumb" "no")
10213 (define_insn "*cmp_ite1"
10214 [(set (match_operand 6 "dominant_cc_register" "")
10217 (match_operator 4 "arm_comparison_operator"
10218 [(match_operand:SI 0 "s_register_operand"
10219 "l,l,l,r,r,r,r,r,r")
10220 (match_operand:SI 1 "arm_add_operand"
10221 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10222 (match_operator:SI 5 "arm_comparison_operator"
10223 [(match_operand:SI 2 "s_register_operand"
10224 "l,r,r,l,l,r,r,r,r")
10225 (match_operand:SI 3 "arm_add_operand"
10226 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10232 static const char * const cmp1[NUM_OF_COND_CMP][2] =
10236 {\"cmn\\t%0, #%n1\",
10239 \"cmn\\t%2, #%n3\"},
10240 {\"cmn\\t%0, #%n1\",
10241 \"cmn\\t%2, #%n3\"}
10243 static const char * const cmp2[NUM_OF_COND_CMP][2] =
10245 {\"cmp%d4\\t%2, %3\",
10246 \"cmp%D5\\t%0, %1\"},
10247 {\"cmp%d4\\t%2, %3\",
10248 \"cmn%D5\\t%0, #%n1\"},
10249 {\"cmn%d4\\t%2, #%n3\",
10250 \"cmp%D5\\t%0, %1\"},
10251 {\"cmn%d4\\t%2, #%n3\",
10252 \"cmn%D5\\t%0, #%n1\"}
10254 static const char * const ite[2] =
10259 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10260 CMP_CMP, CMN_CMP, CMP_CMP,
10261 CMN_CMP, CMP_CMN, CMN_CMN};
10263 comparison_dominates_p (GET_CODE (operands[5]),
10264 reverse_condition (GET_CODE (operands[4])));
10266 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10267 if (TARGET_THUMB2) {
10268 output_asm_insn (ite[swap], operands);
10270 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10273 [(set_attr "conds" "set")
10274 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10275 (set_attr_alternative "length"
10281 (if_then_else (eq_attr "is_thumb" "no")
10284 (if_then_else (eq_attr "is_thumb" "no")
10287 (if_then_else (eq_attr "is_thumb" "no")
10290 (if_then_else (eq_attr "is_thumb" "no")
10295 (define_insn "*cmp_and"
10296 [(set (match_operand 6 "dominant_cc_register" "")
10299 (match_operator 4 "arm_comparison_operator"
10300 [(match_operand:SI 0 "s_register_operand"
10301 "l,l,l,r,r,r,r,r,r")
10302 (match_operand:SI 1 "arm_add_operand"
10303 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10304 (match_operator:SI 5 "arm_comparison_operator"
10305 [(match_operand:SI 2 "s_register_operand"
10306 "l,r,r,l,l,r,r,r,r")
10307 (match_operand:SI 3 "arm_add_operand"
10308 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10313 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10315 {\"cmp%d5\\t%0, %1\",
10316 \"cmp%d4\\t%2, %3\"},
10317 {\"cmn%d5\\t%0, #%n1\",
10318 \"cmp%d4\\t%2, %3\"},
10319 {\"cmp%d5\\t%0, %1\",
10320 \"cmn%d4\\t%2, #%n3\"},
10321 {\"cmn%d5\\t%0, #%n1\",
10322 \"cmn%d4\\t%2, #%n3\"}
10324 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10329 \"cmn\\t%0, #%n1\"},
10330 {\"cmn\\t%2, #%n3\",
10332 {\"cmn\\t%2, #%n3\",
10333 \"cmn\\t%0, #%n1\"}
10335 static const char *const ite[2] =
10340 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10341 CMP_CMP, CMN_CMP, CMP_CMP,
10342 CMN_CMP, CMP_CMN, CMN_CMN};
10344 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10346 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10347 if (TARGET_THUMB2) {
10348 output_asm_insn (ite[swap], operands);
10350 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10353 [(set_attr "conds" "set")
10354 (set_attr "predicable" "no")
10355 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10356 (set_attr_alternative "length"
10362 (if_then_else (eq_attr "is_thumb" "no")
10365 (if_then_else (eq_attr "is_thumb" "no")
10368 (if_then_else (eq_attr "is_thumb" "no")
10371 (if_then_else (eq_attr "is_thumb" "no")
10376 (define_insn "*cmp_ior"
10377 [(set (match_operand 6 "dominant_cc_register" "")
10380 (match_operator 4 "arm_comparison_operator"
10381 [(match_operand:SI 0 "s_register_operand"
10382 "l,l,l,r,r,r,r,r,r")
10383 (match_operand:SI 1 "arm_add_operand"
10384 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10385 (match_operator:SI 5 "arm_comparison_operator"
10386 [(match_operand:SI 2 "s_register_operand"
10387 "l,r,r,l,l,r,r,r,r")
10388 (match_operand:SI 3 "arm_add_operand"
10389 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
10394 static const char *const cmp1[NUM_OF_COND_CMP][2] =
10398 {\"cmn\\t%0, #%n1\",
10401 \"cmn\\t%2, #%n3\"},
10402 {\"cmn\\t%0, #%n1\",
10403 \"cmn\\t%2, #%n3\"}
10405 static const char *const cmp2[NUM_OF_COND_CMP][2] =
10407 {\"cmp%D4\\t%2, %3\",
10408 \"cmp%D5\\t%0, %1\"},
10409 {\"cmp%D4\\t%2, %3\",
10410 \"cmn%D5\\t%0, #%n1\"},
10411 {\"cmn%D4\\t%2, #%n3\",
10412 \"cmp%D5\\t%0, %1\"},
10413 {\"cmn%D4\\t%2, #%n3\",
10414 \"cmn%D5\\t%0, #%n1\"}
10416 static const char *const ite[2] =
10421 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10422 CMP_CMP, CMN_CMP, CMP_CMP,
10423 CMN_CMP, CMP_CMN, CMN_CMN};
10425 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10427 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10428 if (TARGET_THUMB2) {
10429 output_asm_insn (ite[swap], operands);
10431 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10435 [(set_attr "conds" "set")
10436 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10437 (set_attr_alternative "length"
10443 (if_then_else (eq_attr "is_thumb" "no")
10446 (if_then_else (eq_attr "is_thumb" "no")
10449 (if_then_else (eq_attr "is_thumb" "no")
10452 (if_then_else (eq_attr "is_thumb" "no")
10457 (define_insn_and_split "*ior_scc_scc"
10458 [(set (match_operand:SI 0 "s_register_operand" "=r")
10459 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10460 [(match_operand:SI 1 "s_register_operand" "r")
10461 (match_operand:SI 2 "arm_add_operand" "rIL")])
10462 (match_operator:SI 6 "arm_comparison_operator"
10463 [(match_operand:SI 4 "s_register_operand" "r")
10464 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10465 (clobber (reg:CC CC_REGNUM))]
10467 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10470 "TARGET_32BIT && reload_completed"
10471 [(set (match_dup 7)
10474 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10475 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10477 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10479 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10482 [(set_attr "conds" "clob")
10483 (set_attr "length" "16")])
10485 ; If the above pattern is followed by a CMP insn, then the compare is
10486 ; redundant, since we can rework the conditional instruction that follows.
10487 (define_insn_and_split "*ior_scc_scc_cmp"
10488 [(set (match_operand 0 "dominant_cc_register" "")
10489 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10490 [(match_operand:SI 1 "s_register_operand" "r")
10491 (match_operand:SI 2 "arm_add_operand" "rIL")])
10492 (match_operator:SI 6 "arm_comparison_operator"
10493 [(match_operand:SI 4 "s_register_operand" "r")
10494 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10496 (set (match_operand:SI 7 "s_register_operand" "=r")
10497 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10498 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10501 "TARGET_32BIT && reload_completed"
10502 [(set (match_dup 0)
10505 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10506 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10508 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10510 [(set_attr "conds" "set")
10511 (set_attr "length" "16")])
10513 (define_insn_and_split "*and_scc_scc"
10514 [(set (match_operand:SI 0 "s_register_operand" "=r")
10515 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10516 [(match_operand:SI 1 "s_register_operand" "r")
10517 (match_operand:SI 2 "arm_add_operand" "rIL")])
10518 (match_operator:SI 6 "arm_comparison_operator"
10519 [(match_operand:SI 4 "s_register_operand" "r")
10520 (match_operand:SI 5 "arm_add_operand" "rIL")])))
10521 (clobber (reg:CC CC_REGNUM))]
10523 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10526 "TARGET_32BIT && reload_completed
10527 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10529 [(set (match_dup 7)
10532 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10533 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10535 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10537 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10540 [(set_attr "conds" "clob")
10541 (set_attr "length" "16")])
10543 ; If the above pattern is followed by a CMP insn, then the compare is
10544 ; redundant, since we can rework the conditional instruction that follows.
10545 (define_insn_and_split "*and_scc_scc_cmp"
10546 [(set (match_operand 0 "dominant_cc_register" "")
10547 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10548 [(match_operand:SI 1 "s_register_operand" "r")
10549 (match_operand:SI 2 "arm_add_operand" "rIL")])
10550 (match_operator:SI 6 "arm_comparison_operator"
10551 [(match_operand:SI 4 "s_register_operand" "r")
10552 (match_operand:SI 5 "arm_add_operand" "rIL")]))
10554 (set (match_operand:SI 7 "s_register_operand" "=r")
10555 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10556 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10559 "TARGET_32BIT && reload_completed"
10560 [(set (match_dup 0)
10563 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10564 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10566 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10568 [(set_attr "conds" "set")
10569 (set_attr "length" "16")])
10571 ;; If there is no dominance in the comparison, then we can still save an
10572 ;; instruction in the AND case, since we can know that the second compare
10573 ;; need only zero the value if false (if true, then the value is already
10575 (define_insn_and_split "*and_scc_scc_nodom"
10576 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
10577 (and:SI (match_operator:SI 3 "arm_comparison_operator"
10578 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10579 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10580 (match_operator:SI 6 "arm_comparison_operator"
10581 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10582 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10583 (clobber (reg:CC CC_REGNUM))]
10585 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10588 "TARGET_32BIT && reload_completed"
10589 [(parallel [(set (match_dup 0)
10590 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10591 (clobber (reg:CC CC_REGNUM))])
10592 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10594 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10597 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10598 operands[4], operands[5]),
10600 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10602 [(set_attr "conds" "clob")
10603 (set_attr "length" "20")])
10606 [(set (reg:CC_NOOV CC_REGNUM)
10607 (compare:CC_NOOV (ior:SI
10608 (and:SI (match_operand:SI 0 "s_register_operand" "")
10610 (match_operator:SI 1 "arm_comparison_operator"
10611 [(match_operand:SI 2 "s_register_operand" "")
10612 (match_operand:SI 3 "arm_add_operand" "")]))
10614 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10616 [(set (match_dup 4)
10617 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10619 (set (reg:CC_NOOV CC_REGNUM)
10620 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10625 [(set (reg:CC_NOOV CC_REGNUM)
10626 (compare:CC_NOOV (ior:SI
10627 (match_operator:SI 1 "arm_comparison_operator"
10628 [(match_operand:SI 2 "s_register_operand" "")
10629 (match_operand:SI 3 "arm_add_operand" "")])
10630 (and:SI (match_operand:SI 0 "s_register_operand" "")
10633 (clobber (match_operand:SI 4 "s_register_operand" ""))]
10635 [(set (match_dup 4)
10636 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10638 (set (reg:CC_NOOV CC_REGNUM)
10639 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
10642 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10644 (define_insn_and_split "*negscc"
10645 [(set (match_operand:SI 0 "s_register_operand" "=r")
10646 (neg:SI (match_operator 3 "arm_comparison_operator"
10647 [(match_operand:SI 1 "s_register_operand" "r")
10648 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10649 (clobber (reg:CC CC_REGNUM))]
10652 "&& reload_completed"
10655 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10657 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10659 /* Emit mov\\t%0, %1, asr #31 */
10660 emit_insn (gen_rtx_SET (VOIDmode,
10662 gen_rtx_ASHIFTRT (SImode,
10667 else if (GET_CODE (operands[3]) == NE)
10669 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10670 if (CONST_INT_P (operands[2]))
10671 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10672 GEN_INT (- INTVAL (operands[2]))));
10674 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10676 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10677 gen_rtx_NE (SImode,
10680 gen_rtx_SET (SImode,
10687 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10688 emit_insn (gen_rtx_SET (VOIDmode,
10690 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10691 enum rtx_code rc = GET_CODE (operands[3]);
10693 rc = reverse_condition (rc);
10694 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10695 gen_rtx_fmt_ee (rc,
10699 gen_rtx_SET (VOIDmode, operands[0], const0_rtx)));
10700 rc = GET_CODE (operands[3]);
10701 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10702 gen_rtx_fmt_ee (rc,
10706 gen_rtx_SET (VOIDmode,
10713 [(set_attr "conds" "clob")
10714 (set_attr "length" "12")]
10717 (define_insn "movcond"
10718 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10720 (match_operator 5 "arm_comparison_operator"
10721 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10722 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10723 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10724 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10725 (clobber (reg:CC CC_REGNUM))]
10728 if (GET_CODE (operands[5]) == LT
10729 && (operands[4] == const0_rtx))
10731 if (which_alternative != 1 && REG_P (operands[1]))
10733 if (operands[2] == const0_rtx)
10734 return \"and\\t%0, %1, %3, asr #31\";
10735 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10737 else if (which_alternative != 0 && REG_P (operands[2]))
10739 if (operands[1] == const0_rtx)
10740 return \"bic\\t%0, %2, %3, asr #31\";
10741 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10743 /* The only case that falls through to here is when both ops 1 & 2
10747 if (GET_CODE (operands[5]) == GE
10748 && (operands[4] == const0_rtx))
10750 if (which_alternative != 1 && REG_P (operands[1]))
10752 if (operands[2] == const0_rtx)
10753 return \"bic\\t%0, %1, %3, asr #31\";
10754 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10756 else if (which_alternative != 0 && REG_P (operands[2]))
10758 if (operands[1] == const0_rtx)
10759 return \"and\\t%0, %2, %3, asr #31\";
10760 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10762 /* The only case that falls through to here is when both ops 1 & 2
10765 if (CONST_INT_P (operands[4])
10766 && !const_ok_for_arm (INTVAL (operands[4])))
10767 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10769 output_asm_insn (\"cmp\\t%3, %4\", operands);
10770 if (which_alternative != 0)
10771 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10772 if (which_alternative != 1)
10773 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10776 [(set_attr "conds" "clob")
10777 (set_attr "length" "8,8,12")]
10780 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10782 (define_insn "*ifcompare_plus_move"
10783 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10784 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10785 [(match_operand:SI 4 "s_register_operand" "r,r")
10786 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10788 (match_operand:SI 2 "s_register_operand" "r,r")
10789 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10790 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10791 (clobber (reg:CC CC_REGNUM))]
10794 [(set_attr "conds" "clob")
10795 (set_attr "length" "8,12")]
10798 (define_insn "*if_plus_move"
10799 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10801 (match_operator 4 "arm_comparison_operator"
10802 [(match_operand 5 "cc_register" "") (const_int 0)])
10804 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10805 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10806 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10809 add%d4\\t%0, %2, %3
10810 sub%d4\\t%0, %2, #%n3
10811 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10812 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10813 [(set_attr "conds" "use")
10814 (set_attr "length" "4,4,8,8")
10815 (set_attr_alternative "type"
10816 [(if_then_else (match_operand 3 "const_int_operand" "")
10817 (const_string "simple_alu_imm" )
10818 (const_string "*"))
10819 (const_string "simple_alu_imm")
10821 (const_string "*")])]
10824 (define_insn "*ifcompare_move_plus"
10825 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10826 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10827 [(match_operand:SI 4 "s_register_operand" "r,r")
10828 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10829 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10831 (match_operand:SI 2 "s_register_operand" "r,r")
10832 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10833 (clobber (reg:CC CC_REGNUM))]
10836 [(set_attr "conds" "clob")
10837 (set_attr "length" "8,12")]
10840 (define_insn "*if_move_plus"
10841 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10843 (match_operator 4 "arm_comparison_operator"
10844 [(match_operand 5 "cc_register" "") (const_int 0)])
10845 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10847 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10848 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10851 add%D4\\t%0, %2, %3
10852 sub%D4\\t%0, %2, #%n3
10853 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10854 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10855 [(set_attr "conds" "use")
10856 (set_attr "length" "4,4,8,8")
10857 (set_attr_alternative "type"
10858 [(if_then_else (match_operand 3 "const_int_operand" "")
10859 (const_string "simple_alu_imm" )
10860 (const_string "*"))
10861 (const_string "simple_alu_imm")
10863 (const_string "*")])]
10866 (define_insn "*ifcompare_arith_arith"
10867 [(set (match_operand:SI 0 "s_register_operand" "=r")
10868 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10869 [(match_operand:SI 5 "s_register_operand" "r")
10870 (match_operand:SI 6 "arm_add_operand" "rIL")])
10871 (match_operator:SI 8 "shiftable_operator"
10872 [(match_operand:SI 1 "s_register_operand" "r")
10873 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10874 (match_operator:SI 7 "shiftable_operator"
10875 [(match_operand:SI 3 "s_register_operand" "r")
10876 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10877 (clobber (reg:CC CC_REGNUM))]
10880 [(set_attr "conds" "clob")
10881 (set_attr "length" "12")]
10884 (define_insn "*if_arith_arith"
10885 [(set (match_operand:SI 0 "s_register_operand" "=r")
10886 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10887 [(match_operand 8 "cc_register" "") (const_int 0)])
10888 (match_operator:SI 6 "shiftable_operator"
10889 [(match_operand:SI 1 "s_register_operand" "r")
10890 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10891 (match_operator:SI 7 "shiftable_operator"
10892 [(match_operand:SI 3 "s_register_operand" "r")
10893 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10895 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10896 [(set_attr "conds" "use")
10897 (set_attr "length" "8")]
10900 (define_insn "*ifcompare_arith_move"
10901 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10902 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10903 [(match_operand:SI 2 "s_register_operand" "r,r")
10904 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10905 (match_operator:SI 7 "shiftable_operator"
10906 [(match_operand:SI 4 "s_register_operand" "r,r")
10907 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10908 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10909 (clobber (reg:CC CC_REGNUM))]
10912 /* If we have an operation where (op x 0) is the identity operation and
10913 the conditional operator is LT or GE and we are comparing against zero and
10914 everything is in registers then we can do this in two instructions. */
10915 if (operands[3] == const0_rtx
10916 && GET_CODE (operands[7]) != AND
10917 && REG_P (operands[5])
10918 && REG_P (operands[1])
10919 && REGNO (operands[1]) == REGNO (operands[4])
10920 && REGNO (operands[4]) != REGNO (operands[0]))
10922 if (GET_CODE (operands[6]) == LT)
10923 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10924 else if (GET_CODE (operands[6]) == GE)
10925 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10927 if (CONST_INT_P (operands[3])
10928 && !const_ok_for_arm (INTVAL (operands[3])))
10929 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10931 output_asm_insn (\"cmp\\t%2, %3\", operands);
10932 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10933 if (which_alternative != 0)
10934 return \"mov%D6\\t%0, %1\";
10937 [(set_attr "conds" "clob")
10938 (set_attr "length" "8,12")]
10941 (define_insn "*if_arith_move"
10942 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10943 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10944 [(match_operand 6 "cc_register" "") (const_int 0)])
10945 (match_operator:SI 5 "shiftable_operator"
10946 [(match_operand:SI 2 "s_register_operand" "r,r")
10947 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10948 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10951 %I5%d4\\t%0, %2, %3
10952 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10953 [(set_attr "conds" "use")
10954 (set_attr "length" "4,8")
10955 (set_attr "type" "*,*")]
10958 (define_insn "*ifcompare_move_arith"
10959 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10960 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10961 [(match_operand:SI 4 "s_register_operand" "r,r")
10962 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10963 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10964 (match_operator:SI 7 "shiftable_operator"
10965 [(match_operand:SI 2 "s_register_operand" "r,r")
10966 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10967 (clobber (reg:CC CC_REGNUM))]
10970 /* If we have an operation where (op x 0) is the identity operation and
10971 the conditional operator is LT or GE and we are comparing against zero and
10972 everything is in registers then we can do this in two instructions */
10973 if (operands[5] == const0_rtx
10974 && GET_CODE (operands[7]) != AND
10975 && REG_P (operands[3])
10976 && REG_P (operands[1])
10977 && REGNO (operands[1]) == REGNO (operands[2])
10978 && REGNO (operands[2]) != REGNO (operands[0]))
10980 if (GET_CODE (operands[6]) == GE)
10981 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10982 else if (GET_CODE (operands[6]) == LT)
10983 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10986 if (CONST_INT_P (operands[5])
10987 && !const_ok_for_arm (INTVAL (operands[5])))
10988 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10990 output_asm_insn (\"cmp\\t%4, %5\", operands);
10992 if (which_alternative != 0)
10993 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10994 return \"%I7%D6\\t%0, %2, %3\";
10996 [(set_attr "conds" "clob")
10997 (set_attr "length" "8,12")]
11000 (define_insn "*if_move_arith"
11001 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11003 (match_operator 4 "arm_comparison_operator"
11004 [(match_operand 6 "cc_register" "") (const_int 0)])
11005 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11006 (match_operator:SI 5 "shiftable_operator"
11007 [(match_operand:SI 2 "s_register_operand" "r,r")
11008 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
11011 %I5%D4\\t%0, %2, %3
11012 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
11013 [(set_attr "conds" "use")
11014 (set_attr "length" "4,8")
11015 (set_attr "type" "*,*")]
11018 (define_insn "*ifcompare_move_not"
11019 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11021 (match_operator 5 "arm_comparison_operator"
11022 [(match_operand:SI 3 "s_register_operand" "r,r")
11023 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11024 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11026 (match_operand:SI 2 "s_register_operand" "r,r"))))
11027 (clobber (reg:CC CC_REGNUM))]
11030 [(set_attr "conds" "clob")
11031 (set_attr "length" "8,12")]
11034 (define_insn "*if_move_not"
11035 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11037 (match_operator 4 "arm_comparison_operator"
11038 [(match_operand 3 "cc_register" "") (const_int 0)])
11039 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11040 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11044 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
11045 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
11046 [(set_attr "conds" "use")
11047 (set_attr "insn" "mvn")
11048 (set_attr "length" "4,8,8")]
11051 (define_insn "*ifcompare_not_move"
11052 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11054 (match_operator 5 "arm_comparison_operator"
11055 [(match_operand:SI 3 "s_register_operand" "r,r")
11056 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11058 (match_operand:SI 2 "s_register_operand" "r,r"))
11059 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11060 (clobber (reg:CC CC_REGNUM))]
11063 [(set_attr "conds" "clob")
11064 (set_attr "length" "8,12")]
11067 (define_insn "*if_not_move"
11068 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11070 (match_operator 4 "arm_comparison_operator"
11071 [(match_operand 3 "cc_register" "") (const_int 0)])
11072 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11073 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11077 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11078 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11079 [(set_attr "conds" "use")
11080 (set_attr "insn" "mvn")
11081 (set_attr "length" "4,8,8")]
11084 (define_insn "*ifcompare_shift_move"
11085 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11087 (match_operator 6 "arm_comparison_operator"
11088 [(match_operand:SI 4 "s_register_operand" "r,r")
11089 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11090 (match_operator:SI 7 "shift_operator"
11091 [(match_operand:SI 2 "s_register_operand" "r,r")
11092 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11093 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11094 (clobber (reg:CC CC_REGNUM))]
11097 [(set_attr "conds" "clob")
11098 (set_attr "length" "8,12")]
11101 (define_insn "*if_shift_move"
11102 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11104 (match_operator 5 "arm_comparison_operator"
11105 [(match_operand 6 "cc_register" "") (const_int 0)])
11106 (match_operator:SI 4 "shift_operator"
11107 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11108 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11109 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11113 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11114 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11115 [(set_attr "conds" "use")
11116 (set_attr "shift" "2")
11117 (set_attr "length" "4,8,8")
11118 (set_attr "insn" "mov")
11119 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11120 (const_string "alu_shift")
11121 (const_string "alu_shift_reg")))]
11124 (define_insn "*ifcompare_move_shift"
11125 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11127 (match_operator 6 "arm_comparison_operator"
11128 [(match_operand:SI 4 "s_register_operand" "r,r")
11129 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11130 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11131 (match_operator:SI 7 "shift_operator"
11132 [(match_operand:SI 2 "s_register_operand" "r,r")
11133 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11134 (clobber (reg:CC CC_REGNUM))]
11137 [(set_attr "conds" "clob")
11138 (set_attr "length" "8,12")]
11141 (define_insn "*if_move_shift"
11142 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11144 (match_operator 5 "arm_comparison_operator"
11145 [(match_operand 6 "cc_register" "") (const_int 0)])
11146 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11147 (match_operator:SI 4 "shift_operator"
11148 [(match_operand:SI 2 "s_register_operand" "r,r,r")
11149 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11153 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11154 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11155 [(set_attr "conds" "use")
11156 (set_attr "shift" "2")
11157 (set_attr "length" "4,8,8")
11158 (set_attr "insn" "mov")
11159 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
11160 (const_string "alu_shift")
11161 (const_string "alu_shift_reg")))]
11164 (define_insn "*ifcompare_shift_shift"
11165 [(set (match_operand:SI 0 "s_register_operand" "=r")
11167 (match_operator 7 "arm_comparison_operator"
11168 [(match_operand:SI 5 "s_register_operand" "r")
11169 (match_operand:SI 6 "arm_add_operand" "rIL")])
11170 (match_operator:SI 8 "shift_operator"
11171 [(match_operand:SI 1 "s_register_operand" "r")
11172 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11173 (match_operator:SI 9 "shift_operator"
11174 [(match_operand:SI 3 "s_register_operand" "r")
11175 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11176 (clobber (reg:CC CC_REGNUM))]
11179 [(set_attr "conds" "clob")
11180 (set_attr "length" "12")]
11183 (define_insn "*if_shift_shift"
11184 [(set (match_operand:SI 0 "s_register_operand" "=r")
11186 (match_operator 5 "arm_comparison_operator"
11187 [(match_operand 8 "cc_register" "") (const_int 0)])
11188 (match_operator:SI 6 "shift_operator"
11189 [(match_operand:SI 1 "s_register_operand" "r")
11190 (match_operand:SI 2 "arm_rhs_operand" "rM")])
11191 (match_operator:SI 7 "shift_operator"
11192 [(match_operand:SI 3 "s_register_operand" "r")
11193 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11195 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11196 [(set_attr "conds" "use")
11197 (set_attr "shift" "1")
11198 (set_attr "length" "8")
11199 (set_attr "insn" "mov")
11200 (set (attr "type") (if_then_else
11201 (and (match_operand 2 "const_int_operand" "")
11202 (match_operand 4 "const_int_operand" ""))
11203 (const_string "alu_shift")
11204 (const_string "alu_shift_reg")))]
11207 (define_insn "*ifcompare_not_arith"
11208 [(set (match_operand:SI 0 "s_register_operand" "=r")
11210 (match_operator 6 "arm_comparison_operator"
11211 [(match_operand:SI 4 "s_register_operand" "r")
11212 (match_operand:SI 5 "arm_add_operand" "rIL")])
11213 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11214 (match_operator:SI 7 "shiftable_operator"
11215 [(match_operand:SI 2 "s_register_operand" "r")
11216 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11217 (clobber (reg:CC CC_REGNUM))]
11220 [(set_attr "conds" "clob")
11221 (set_attr "length" "12")]
11224 (define_insn "*if_not_arith"
11225 [(set (match_operand:SI 0 "s_register_operand" "=r")
11227 (match_operator 5 "arm_comparison_operator"
11228 [(match_operand 4 "cc_register" "") (const_int 0)])
11229 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11230 (match_operator:SI 6 "shiftable_operator"
11231 [(match_operand:SI 2 "s_register_operand" "r")
11232 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11234 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11235 [(set_attr "conds" "use")
11236 (set_attr "insn" "mvn")
11237 (set_attr "length" "8")]
11240 (define_insn "*ifcompare_arith_not"
11241 [(set (match_operand:SI 0 "s_register_operand" "=r")
11243 (match_operator 6 "arm_comparison_operator"
11244 [(match_operand:SI 4 "s_register_operand" "r")
11245 (match_operand:SI 5 "arm_add_operand" "rIL")])
11246 (match_operator:SI 7 "shiftable_operator"
11247 [(match_operand:SI 2 "s_register_operand" "r")
11248 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11249 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11250 (clobber (reg:CC CC_REGNUM))]
11253 [(set_attr "conds" "clob")
11254 (set_attr "length" "12")]
11257 (define_insn "*if_arith_not"
11258 [(set (match_operand:SI 0 "s_register_operand" "=r")
11260 (match_operator 5 "arm_comparison_operator"
11261 [(match_operand 4 "cc_register" "") (const_int 0)])
11262 (match_operator:SI 6 "shiftable_operator"
11263 [(match_operand:SI 2 "s_register_operand" "r")
11264 (match_operand:SI 3 "arm_rhs_operand" "rI")])
11265 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11267 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11268 [(set_attr "conds" "use")
11269 (set_attr "insn" "mvn")
11270 (set_attr "length" "8")]
11273 (define_insn "*ifcompare_neg_move"
11274 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11276 (match_operator 5 "arm_comparison_operator"
11277 [(match_operand:SI 3 "s_register_operand" "r,r")
11278 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11279 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11280 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11281 (clobber (reg:CC CC_REGNUM))]
11284 [(set_attr "conds" "clob")
11285 (set_attr "length" "8,12")]
11288 (define_insn "*if_neg_move"
11289 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11291 (match_operator 4 "arm_comparison_operator"
11292 [(match_operand 3 "cc_register" "") (const_int 0)])
11293 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11294 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11297 rsb%d4\\t%0, %2, #0
11298 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
11299 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
11300 [(set_attr "conds" "use")
11301 (set_attr "length" "4,8,8")]
11304 (define_insn "*ifcompare_move_neg"
11305 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11307 (match_operator 5 "arm_comparison_operator"
11308 [(match_operand:SI 3 "s_register_operand" "r,r")
11309 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11310 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11311 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11312 (clobber (reg:CC CC_REGNUM))]
11315 [(set_attr "conds" "clob")
11316 (set_attr "length" "8,12")]
11319 (define_insn "*if_move_neg"
11320 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11322 (match_operator 4 "arm_comparison_operator"
11323 [(match_operand 3 "cc_register" "") (const_int 0)])
11324 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11325 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
11328 rsb%D4\\t%0, %2, #0
11329 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
11330 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
11331 [(set_attr "conds" "use")
11332 (set_attr "length" "4,8,8")]
11335 (define_insn "*arith_adjacentmem"
11336 [(set (match_operand:SI 0 "s_register_operand" "=r")
11337 (match_operator:SI 1 "shiftable_operator"
11338 [(match_operand:SI 2 "memory_operand" "m")
11339 (match_operand:SI 3 "memory_operand" "m")]))
11340 (clobber (match_scratch:SI 4 "=r"))]
11341 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11347 HOST_WIDE_INT val1 = 0, val2 = 0;
11349 if (REGNO (operands[0]) > REGNO (operands[4]))
11351 ldm[1] = operands[4];
11352 ldm[2] = operands[0];
11356 ldm[1] = operands[0];
11357 ldm[2] = operands[4];
11360 base_reg = XEXP (operands[2], 0);
11362 if (!REG_P (base_reg))
11364 val1 = INTVAL (XEXP (base_reg, 1));
11365 base_reg = XEXP (base_reg, 0);
11368 if (!REG_P (XEXP (operands[3], 0)))
11369 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11371 arith[0] = operands[0];
11372 arith[3] = operands[1];
11386 if (val1 !=0 && val2 != 0)
11390 if (val1 == 4 || val2 == 4)
11391 /* Other val must be 8, since we know they are adjacent and neither
11393 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
11394 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11396 ldm[0] = ops[0] = operands[4];
11398 ops[2] = GEN_INT (val1);
11399 output_add_immediate (ops);
11401 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11403 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11407 /* Offset is out of range for a single add, so use two ldr. */
11410 ops[2] = GEN_INT (val1);
11411 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11413 ops[2] = GEN_INT (val2);
11414 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11417 else if (val1 != 0)
11420 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11422 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11427 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
11429 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
11431 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11434 [(set_attr "length" "12")
11435 (set_attr "predicable" "yes")
11436 (set_attr "type" "load1")]
11439 ; This pattern is never tried by combine, so do it as a peephole
11442 [(set (match_operand:SI 0 "arm_general_register_operand" "")
11443 (match_operand:SI 1 "arm_general_register_operand" ""))
11444 (set (reg:CC CC_REGNUM)
11445 (compare:CC (match_dup 1) (const_int 0)))]
11447 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11448 (set (match_dup 0) (match_dup 1))])]
11453 [(set (match_operand:SI 0 "s_register_operand" "")
11454 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11456 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
11457 [(match_operand:SI 3 "s_register_operand" "")
11458 (match_operand:SI 4 "arm_rhs_operand" "")]))))
11459 (clobber (match_operand:SI 5 "s_register_operand" ""))]
11461 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11462 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11467 ;; This split can be used because CC_Z mode implies that the following
11468 ;; branch will be an equality, or an unsigned inequality, so the sign
11469 ;; extension is not needed.
11472 [(set (reg:CC_Z CC_REGNUM)
11474 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11476 (match_operand 1 "const_int_operand" "")))
11477 (clobber (match_scratch:SI 2 ""))]
11479 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
11480 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
11481 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11482 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11484 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11487 ;; ??? Check the patterns above for Thumb-2 usefulness
11489 (define_expand "prologue"
11490 [(clobber (const_int 0))]
11493 arm_expand_prologue ();
11495 thumb1_expand_prologue ();
11500 (define_expand "epilogue"
11501 [(clobber (const_int 0))]
11504 if (crtl->calls_eh_return)
11505 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11508 thumb1_expand_epilogue ();
11509 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11510 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11512 else if (HAVE_return)
11514 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
11515 no need for explicit testing again. */
11516 emit_jump_insn (gen_return ());
11518 else if (TARGET_32BIT)
11520 arm_expand_epilogue (true);
11526 (define_insn "prologue_thumb1_interwork"
11527 [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
11529 "* return thumb1_output_interwork ();"
11530 [(set_attr "length" "8")]
11533 ;; Note - although unspec_volatile's USE all hard registers,
11534 ;; USEs are ignored after relaod has completed. Thus we need
11535 ;; to add an unspec of the link register to ensure that flow
11536 ;; does not think that it is unused by the sibcall branch that
11537 ;; will replace the standard function epilogue.
11538 (define_expand "sibcall_epilogue"
11539 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11540 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11543 arm_expand_epilogue (false);
11548 (define_insn "*epilogue_insns"
11549 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
11552 return thumb1_unexpanded_epilogue ();
11554 ; Length is absolute worst case
11555 [(set_attr "length" "44")
11556 (set_attr "type" "block")
11557 ;; We don't clobber the conditions, but the potential length of this
11558 ;; operation is sufficient to make conditionalizing the sequence
11559 ;; unlikely to be profitable.
11560 (set_attr "conds" "clob")]
11563 (define_expand "eh_epilogue"
11564 [(use (match_operand:SI 0 "register_operand" ""))
11565 (use (match_operand:SI 1 "register_operand" ""))
11566 (use (match_operand:SI 2 "register_operand" ""))]
11570 cfun->machine->eh_epilogue_sp_ofs = operands[1];
11571 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11573 rtx ra = gen_rtx_REG (Pmode, 2);
11575 emit_move_insn (ra, operands[2]);
11578 /* This is a hack -- we may have crystalized the function type too
11580 cfun->machine->func_type = 0;
11584 ;; This split is only used during output to reduce the number of patterns
11585 ;; that need assembler instructions adding to them. We allowed the setting
11586 ;; of the conditions to be implicit during rtl generation so that
11587 ;; the conditional compare patterns would work. However this conflicts to
11588 ;; some extent with the conditional data operations, so we have to split them
11591 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
11592 ;; conditional execution sufficient?
11595 [(set (match_operand:SI 0 "s_register_operand" "")
11596 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11597 [(match_operand 2 "" "") (match_operand 3 "" "")])
11599 (match_operand 4 "" "")))
11600 (clobber (reg:CC CC_REGNUM))]
11601 "TARGET_ARM && reload_completed"
11602 [(set (match_dup 5) (match_dup 6))
11603 (cond_exec (match_dup 7)
11604 (set (match_dup 0) (match_dup 4)))]
11607 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11608 operands[2], operands[3]);
11609 enum rtx_code rc = GET_CODE (operands[1]);
11611 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11612 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11613 if (mode == CCFPmode || mode == CCFPEmode)
11614 rc = reverse_condition_maybe_unordered (rc);
11616 rc = reverse_condition (rc);
11618 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11623 [(set (match_operand:SI 0 "s_register_operand" "")
11624 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11625 [(match_operand 2 "" "") (match_operand 3 "" "")])
11626 (match_operand 4 "" "")
11628 (clobber (reg:CC CC_REGNUM))]
11629 "TARGET_ARM && reload_completed"
11630 [(set (match_dup 5) (match_dup 6))
11631 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11632 (set (match_dup 0) (match_dup 4)))]
11635 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11636 operands[2], operands[3]);
11638 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11639 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11644 [(set (match_operand:SI 0 "s_register_operand" "")
11645 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11646 [(match_operand 2 "" "") (match_operand 3 "" "")])
11647 (match_operand 4 "" "")
11648 (match_operand 5 "" "")))
11649 (clobber (reg:CC CC_REGNUM))]
11650 "TARGET_ARM && reload_completed"
11651 [(set (match_dup 6) (match_dup 7))
11652 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11653 (set (match_dup 0) (match_dup 4)))
11654 (cond_exec (match_dup 8)
11655 (set (match_dup 0) (match_dup 5)))]
11658 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11659 operands[2], operands[3]);
11660 enum rtx_code rc = GET_CODE (operands[1]);
11662 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11663 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11664 if (mode == CCFPmode || mode == CCFPEmode)
11665 rc = reverse_condition_maybe_unordered (rc);
11667 rc = reverse_condition (rc);
11669 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11674 [(set (match_operand:SI 0 "s_register_operand" "")
11675 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
11676 [(match_operand:SI 2 "s_register_operand" "")
11677 (match_operand:SI 3 "arm_add_operand" "")])
11678 (match_operand:SI 4 "arm_rhs_operand" "")
11680 (match_operand:SI 5 "s_register_operand" ""))))
11681 (clobber (reg:CC CC_REGNUM))]
11682 "TARGET_ARM && reload_completed"
11683 [(set (match_dup 6) (match_dup 7))
11684 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11685 (set (match_dup 0) (match_dup 4)))
11686 (cond_exec (match_dup 8)
11687 (set (match_dup 0) (not:SI (match_dup 5))))]
11690 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11691 operands[2], operands[3]);
11692 enum rtx_code rc = GET_CODE (operands[1]);
11694 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11695 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11696 if (mode == CCFPmode || mode == CCFPEmode)
11697 rc = reverse_condition_maybe_unordered (rc);
11699 rc = reverse_condition (rc);
11701 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11705 (define_insn "*cond_move_not"
11706 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11707 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11708 [(match_operand 3 "cc_register" "") (const_int 0)])
11709 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11711 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11715 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11716 [(set_attr "conds" "use")
11717 (set_attr "insn" "mvn")
11718 (set_attr "length" "4,8")]
11721 ;; The next two patterns occur when an AND operation is followed by a
11722 ;; scc insn sequence
11724 (define_insn "*sign_extract_onebit"
11725 [(set (match_operand:SI 0 "s_register_operand" "=r")
11726 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11728 (match_operand:SI 2 "const_int_operand" "n")))
11729 (clobber (reg:CC CC_REGNUM))]
11732 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11733 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11734 return \"mvnne\\t%0, #0\";
11736 [(set_attr "conds" "clob")
11737 (set_attr "length" "8")]
11740 (define_insn "*not_signextract_onebit"
11741 [(set (match_operand:SI 0 "s_register_operand" "=r")
11743 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11745 (match_operand:SI 2 "const_int_operand" "n"))))
11746 (clobber (reg:CC CC_REGNUM))]
11749 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11750 output_asm_insn (\"tst\\t%1, %2\", operands);
11751 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11752 return \"movne\\t%0, #0\";
11754 [(set_attr "conds" "clob")
11755 (set_attr "length" "12")]
11757 ;; ??? The above patterns need auditing for Thumb-2
11759 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11760 ;; expressions. For simplicity, the first register is also in the unspec
11762 ;; To avoid the usage of GNU extension, the length attribute is computed
11763 ;; in a C function arm_attr_length_push_multi.
11764 (define_insn "*push_multi"
11765 [(match_parallel 2 "multi_register_push"
11766 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11767 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11768 UNSPEC_PUSH_MULT))])]
11772 int num_saves = XVECLEN (operands[2], 0);
11774 /* For the StrongARM at least it is faster to
11775 use STR to store only a single register.
11776 In Thumb mode always use push, and the assembler will pick
11777 something appropriate. */
11778 if (num_saves == 1 && TARGET_ARM)
11779 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11786 strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
11787 else if (TARGET_THUMB2)
11788 strcpy (pattern, \"push%?\\t{%1\");
11790 strcpy (pattern, \"push\\t{%1\");
11792 for (i = 1; i < num_saves; i++)
11794 strcat (pattern, \", %|\");
11796 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11799 strcat (pattern, \"}\");
11800 output_asm_insn (pattern, operands);
11805 [(set_attr "type" "store4")
11806 (set (attr "length")
11807 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11810 (define_insn "stack_tie"
11811 [(set (mem:BLK (scratch))
11812 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11813 (match_operand:SI 1 "s_register_operand" "rk")]
11817 [(set_attr "length" "0")]
11820 ;; Pop (as used in epilogue RTL)
11822 (define_insn "*load_multiple_with_writeback"
11823 [(match_parallel 0 "load_multiple_operation"
11824 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11825 (plus:SI (match_dup 1)
11826 (match_operand:SI 2 "const_int_operand" "I")))
11827 (set (match_operand:SI 3 "s_register_operand" "=rk")
11828 (mem:SI (match_dup 1)))
11830 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11833 arm_output_multireg_pop (operands, /*return_pc=*/false,
11834 /*cond=*/const_true_rtx,
11840 [(set_attr "type" "load4")
11841 (set_attr "predicable" "yes")]
11844 ;; Pop with return (as used in epilogue RTL)
11846 ;; This instruction is generated when the registers are popped at the end of
11847 ;; epilogue. Here, instead of popping the value into LR and then generating
11848 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
11850 (define_insn "*pop_multiple_with_writeback_and_return"
11851 [(match_parallel 0 "pop_multiple_return"
11853 (set (match_operand:SI 1 "s_register_operand" "+rk")
11854 (plus:SI (match_dup 1)
11855 (match_operand:SI 2 "const_int_operand" "I")))
11856 (set (match_operand:SI 3 "s_register_operand" "=rk")
11857 (mem:SI (match_dup 1)))
11859 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11862 arm_output_multireg_pop (operands, /*return_pc=*/true,
11863 /*cond=*/const_true_rtx,
11869 [(set_attr "type" "load4")
11870 (set_attr "predicable" "yes")]
11873 (define_insn "*pop_multiple_with_return"
11874 [(match_parallel 0 "pop_multiple_return"
11876 (set (match_operand:SI 2 "s_register_operand" "=rk")
11877 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11879 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11882 arm_output_multireg_pop (operands, /*return_pc=*/true,
11883 /*cond=*/const_true_rtx,
11889 [(set_attr "type" "load4")
11890 (set_attr "predicable" "yes")]
11893 ;; Load into PC and return
11894 (define_insn "*ldr_with_return"
11896 (set (reg:SI PC_REGNUM)
11897 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11898 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11899 "ldr%?\t%|pc, [%0], #4"
11900 [(set_attr "type" "load1")
11901 (set_attr "predicable" "yes")]
11903 ;; Pop for floating point registers (as used in epilogue RTL)
11904 (define_insn "*vfp_pop_multiple_with_writeback"
11905 [(match_parallel 0 "pop_multiple_fp"
11906 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11907 (plus:SI (match_dup 1)
11908 (match_operand:SI 2 "const_int_operand" "I")))
11909 (set (match_operand:DF 3 "arm_hard_register_operand" "")
11910 (mem:DF (match_dup 1)))])]
11911 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
11914 int num_regs = XVECLEN (operands[0], 0);
11917 strcpy (pattern, \"fldmfdd\\t\");
11918 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11919 strcat (pattern, \"!, {\");
11920 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11921 strcat (pattern, \"%P0\");
11922 if ((num_regs - 1) > 1)
11924 strcat (pattern, \"-%P1\");
11925 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11928 strcat (pattern, \"}\");
11929 output_asm_insn (pattern, op_list);
11933 [(set_attr "type" "load4")
11934 (set_attr "conds" "unconditional")
11935 (set_attr "predicable" "no")]
11938 ;; Special patterns for dealing with the constant pool
11940 (define_insn "align_4"
11941 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11944 assemble_align (32);
11949 (define_insn "align_8"
11950 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11953 assemble_align (64);
11958 (define_insn "consttable_end"
11959 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11962 making_const_table = FALSE;
11967 (define_insn "consttable_1"
11968 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11971 making_const_table = TRUE;
11972 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11973 assemble_zeros (3);
11976 [(set_attr "length" "4")]
11979 (define_insn "consttable_2"
11980 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11983 making_const_table = TRUE;
11984 gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
11985 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11986 assemble_zeros (2);
11989 [(set_attr "length" "4")]
11992 (define_insn "consttable_4"
11993 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11997 rtx x = operands[0];
11998 making_const_table = TRUE;
11999 switch (GET_MODE_CLASS (GET_MODE (x)))
12002 if (GET_MODE (x) == HFmode)
12003 arm_emit_fp16_const (x);
12007 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
12008 assemble_real (r, GET_MODE (x), BITS_PER_WORD);
12012 /* XXX: Sometimes gcc does something really dumb and ends up with
12013 a HIGH in a constant pool entry, usually because it's trying to
12014 load into a VFP register. We know this will always be used in
12015 combination with a LO_SUM which ignores the high bits, so just
12016 strip off the HIGH. */
12017 if (GET_CODE (x) == HIGH)
12019 assemble_integer (x, 4, BITS_PER_WORD, 1);
12020 mark_symbol_refs_as_used (x);
12025 [(set_attr "length" "4")]
12028 (define_insn "consttable_8"
12029 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
12033 making_const_table = TRUE;
12034 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12039 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12040 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12044 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12049 [(set_attr "length" "8")]
12052 (define_insn "consttable_16"
12053 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12057 making_const_table = TRUE;
12058 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
12063 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
12064 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
12068 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12073 [(set_attr "length" "16")]
12076 ;; Miscellaneous Thumb patterns
12078 (define_expand "tablejump"
12079 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
12080 (use (label_ref (match_operand 1 "" "")))])]
12085 /* Hopefully, CSE will eliminate this copy. */
12086 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
12087 rtx reg2 = gen_reg_rtx (SImode);
12089 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
12090 operands[0] = reg2;
12095 ;; NB never uses BX.
12096 (define_insn "*thumb1_tablejump"
12097 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
12098 (use (label_ref (match_operand 1 "" "")))]
12101 [(set_attr "length" "2")]
12104 ;; V5 Instructions,
12106 (define_insn "clzsi2"
12107 [(set (match_operand:SI 0 "s_register_operand" "=r")
12108 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12109 "TARGET_32BIT && arm_arch5"
12111 [(set_attr "predicable" "yes")
12112 (set_attr "insn" "clz")])
12114 (define_insn "rbitsi2"
12115 [(set (match_operand:SI 0 "s_register_operand" "=r")
12116 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12117 "TARGET_32BIT && arm_arch_thumb2"
12119 [(set_attr "predicable" "yes")
12120 (set_attr "insn" "clz")])
12122 (define_expand "ctzsi2"
12123 [(set (match_operand:SI 0 "s_register_operand" "")
12124 (ctz:SI (match_operand:SI 1 "s_register_operand" "")))]
12125 "TARGET_32BIT && arm_arch_thumb2"
12128 rtx tmp = gen_reg_rtx (SImode);
12129 emit_insn (gen_rbitsi2 (tmp, operands[1]));
12130 emit_insn (gen_clzsi2 (operands[0], tmp));
12136 ;; V5E instructions.
12138 (define_insn "prefetch"
12139 [(prefetch (match_operand:SI 0 "address_operand" "p")
12140 (match_operand:SI 1 "" "")
12141 (match_operand:SI 2 "" ""))]
12142 "TARGET_32BIT && arm_arch5e"
12145 ;; General predication pattern
12148 [(match_operator 0 "arm_comparison_operator"
12149 [(match_operand 1 "cc_register" "")
12153 [(set_attr "predicated" "yes")]
12156 (define_insn "force_register_use"
12157 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12160 [(set_attr "length" "0")]
12164 ;; Patterns for exception handling
12166 (define_expand "eh_return"
12167 [(use (match_operand 0 "general_operand" ""))]
12172 emit_insn (gen_arm_eh_return (operands[0]));
12174 emit_insn (gen_thumb_eh_return (operands[0]));
12179 ;; We can't expand this before we know where the link register is stored.
12180 (define_insn_and_split "arm_eh_return"
12181 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12183 (clobber (match_scratch:SI 1 "=&r"))]
12186 "&& reload_completed"
12190 arm_set_return_address (operands[0], operands[1]);
12195 (define_insn_and_split "thumb_eh_return"
12196 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
12198 (clobber (match_scratch:SI 1 "=&l"))]
12201 "&& reload_completed"
12205 thumb_set_return_address (operands[0], operands[1]);
12213 (define_insn "load_tp_hard"
12214 [(set (match_operand:SI 0 "register_operand" "=r")
12215 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
12217 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12218 [(set_attr "predicable" "yes")]
12221 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
12222 (define_insn "load_tp_soft"
12223 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12224 (clobber (reg:SI LR_REGNUM))
12225 (clobber (reg:SI IP_REGNUM))
12226 (clobber (reg:CC CC_REGNUM))]
12228 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12229 [(set_attr "conds" "clob")]
12232 ;; tls descriptor call
12233 (define_insn "tlscall"
12234 [(set (reg:SI R0_REGNUM)
12235 (unspec:SI [(reg:SI R0_REGNUM)
12236 (match_operand:SI 0 "" "X")
12237 (match_operand 1 "" "")] UNSPEC_TLS))
12238 (clobber (reg:SI R1_REGNUM))
12239 (clobber (reg:SI LR_REGNUM))
12240 (clobber (reg:SI CC_REGNUM))]
12243 targetm.asm_out.internal_label (asm_out_file, "LPIC",
12244 INTVAL (operands[1]));
12245 return "bl\\t%c0(tlscall)";
12247 [(set_attr "conds" "clob")
12248 (set_attr "length" "4")]
12251 ;; For thread pointer builtin
12252 (define_expand "get_thread_pointersi"
12253 [(match_operand:SI 0 "s_register_operand" "=r")]
12257 arm_load_tp (operands[0]);
12263 ;; We only care about the lower 16 bits of the constant
12264 ;; being inserted into the upper 16 bits of the register.
12265 (define_insn "*arm_movtas_ze"
12266 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
12269 (match_operand:SI 1 "const_int_operand" ""))]
12272 [(set_attr "predicable" "yes")
12273 (set_attr "length" "4")]
12276 (define_insn "*arm_rev"
12277 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12278 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12284 [(set_attr "arch" "t1,t2,32")
12285 (set_attr "length" "2,2,4")]
12288 (define_expand "arm_legacy_rev"
12289 [(set (match_operand:SI 2 "s_register_operand" "")
12290 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
12294 (lshiftrt:SI (match_dup 2)
12296 (set (match_operand:SI 3 "s_register_operand" "")
12297 (rotatert:SI (match_dup 1)
12300 (and:SI (match_dup 2)
12301 (const_int -65281)))
12302 (set (match_operand:SI 0 "s_register_operand" "")
12303 (xor:SI (match_dup 3)
12309 ;; Reuse temporaries to keep register pressure down.
12310 (define_expand "thumb_legacy_rev"
12311 [(set (match_operand:SI 2 "s_register_operand" "")
12312 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
12314 (set (match_operand:SI 3 "s_register_operand" "")
12315 (lshiftrt:SI (match_dup 1)
12318 (ior:SI (match_dup 3)
12320 (set (match_operand:SI 4 "s_register_operand" "")
12322 (set (match_operand:SI 5 "s_register_operand" "")
12323 (rotatert:SI (match_dup 1)
12326 (ashift:SI (match_dup 5)
12329 (lshiftrt:SI (match_dup 5)
12332 (ior:SI (match_dup 5)
12335 (rotatert:SI (match_dup 5)
12337 (set (match_operand:SI 0 "s_register_operand" "")
12338 (ior:SI (match_dup 5)
12344 (define_expand "bswapsi2"
12345 [(set (match_operand:SI 0 "s_register_operand" "=r")
12346 (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
12347 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
12351 rtx op2 = gen_reg_rtx (SImode);
12352 rtx op3 = gen_reg_rtx (SImode);
12356 rtx op4 = gen_reg_rtx (SImode);
12357 rtx op5 = gen_reg_rtx (SImode);
12359 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12360 op2, op3, op4, op5));
12364 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12373 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
12374 ;; and unsigned variants, respectively. For rev16, expose
12375 ;; byte-swapping in the lower 16 bits only.
12376 (define_insn "*arm_revsh"
12377 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12378 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12384 [(set_attr "arch" "t1,t2,32")
12385 (set_attr "length" "2,2,4")]
12388 (define_insn "*arm_rev16"
12389 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12390 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12396 [(set_attr "arch" "t1,t2,32")
12397 (set_attr "length" "2,2,4")]
12400 (define_expand "bswaphi2"
12401 [(set (match_operand:HI 0 "s_register_operand" "=r")
12402 (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
12407 ;; Patterns for LDRD/STRD in Thumb2 mode
12409 (define_insn "*thumb2_ldrd"
12410 [(set (match_operand:SI 0 "s_register_operand" "=r")
12411 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12412 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12413 (set (match_operand:SI 3 "s_register_operand" "=r")
12414 (mem:SI (plus:SI (match_dup 1)
12415 (match_operand:SI 4 "const_int_operand" ""))))]
12416 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12417 && current_tune->prefer_ldrd_strd
12418 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12419 && (operands_ok_ldrd_strd (operands[0], operands[3],
12420 operands[1], INTVAL (operands[2]),
12422 "ldrd%?\t%0, %3, [%1, %2]"
12423 [(set_attr "type" "load2")
12424 (set_attr "predicable" "yes")])
12426 (define_insn "*thumb2_ldrd_base"
12427 [(set (match_operand:SI 0 "s_register_operand" "=r")
12428 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12429 (set (match_operand:SI 2 "s_register_operand" "=r")
12430 (mem:SI (plus:SI (match_dup 1)
12432 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12433 && current_tune->prefer_ldrd_strd
12434 && (operands_ok_ldrd_strd (operands[0], operands[2],
12435 operands[1], 0, false, true))"
12436 "ldrd%?\t%0, %2, [%1]"
12437 [(set_attr "type" "load2")
12438 (set_attr "predicable" "yes")])
12440 (define_insn "*thumb2_ldrd_base_neg"
12441 [(set (match_operand:SI 0 "s_register_operand" "=r")
12442 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12444 (set (match_operand:SI 2 "s_register_operand" "=r")
12445 (mem:SI (match_dup 1)))]
12446 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12447 && current_tune->prefer_ldrd_strd
12448 && (operands_ok_ldrd_strd (operands[0], operands[2],
12449 operands[1], -4, false, true))"
12450 "ldrd%?\t%0, %2, [%1, #-4]"
12451 [(set_attr "type" "load2")
12452 (set_attr "predicable" "yes")])
12454 (define_insn "*thumb2_strd"
12455 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12456 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12457 (match_operand:SI 2 "s_register_operand" "r"))
12458 (set (mem:SI (plus:SI (match_dup 0)
12459 (match_operand:SI 3 "const_int_operand" "")))
12460 (match_operand:SI 4 "s_register_operand" "r"))]
12461 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12462 && current_tune->prefer_ldrd_strd
12463 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12464 && (operands_ok_ldrd_strd (operands[2], operands[4],
12465 operands[0], INTVAL (operands[1]),
12467 "strd%?\t%2, %4, [%0, %1]"
12468 [(set_attr "type" "store2")
12469 (set_attr "predicable" "yes")])
12471 (define_insn "*thumb2_strd_base"
12472 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12473 (match_operand:SI 1 "s_register_operand" "r"))
12474 (set (mem:SI (plus:SI (match_dup 0)
12476 (match_operand:SI 2 "s_register_operand" "r"))]
12477 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12478 && current_tune->prefer_ldrd_strd
12479 && (operands_ok_ldrd_strd (operands[1], operands[2],
12480 operands[0], 0, false, false))"
12481 "strd%?\t%1, %2, [%0]"
12482 [(set_attr "type" "store2")
12483 (set_attr "predicable" "yes")])
12485 (define_insn "*thumb2_strd_base_neg"
12486 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12488 (match_operand:SI 1 "s_register_operand" "r"))
12489 (set (mem:SI (match_dup 0))
12490 (match_operand:SI 2 "s_register_operand" "r"))]
12491 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12492 && current_tune->prefer_ldrd_strd
12493 && (operands_ok_ldrd_strd (operands[1], operands[2],
12494 operands[0], -4, false, false))"
12495 "strd%?\t%1, %2, [%0, #-4]"
12496 [(set_attr "type" "store2")
12497 (set_attr "predicable" "yes")])
12500 ;; Load the load/store double peephole optimizations.
12501 (include "ldrdstrd.md")
12503 ;; Load the load/store multiple patterns
12504 (include "ldmstm.md")
12506 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
12507 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
12508 (define_insn "*load_multiple"
12509 [(match_parallel 0 "load_multiple_operation"
12510 [(set (match_operand:SI 2 "s_register_operand" "=rk")
12511 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12516 arm_output_multireg_pop (operands, /*return_pc=*/false,
12517 /*cond=*/const_true_rtx,
12523 [(set_attr "predicable" "yes")]
12526 ;; Vector bits common to IWMMXT and Neon
12527 (include "vec-common.md")
12528 ;; Load the Intel Wireless Multimedia Extension patterns
12529 (include "iwmmxt.md")
12530 ;; Load the VFP co-processor patterns
12532 ;; Thumb-2 patterns
12533 (include "thumb2.md")
12535 (include "neon.md")
12536 ;; Synchronization Primitives
12537 (include "sync.md")
12538 ;; Fixed-point patterns
12539 (include "arm-fixed.md")