1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (LAST_ARM_REGNUM 15) ;
39 (CC_REGNUM 100) ; Condition code pseudo register
40 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
43 ;; 3rd operand to select_dominance_cc_mode
50 ;; conditional compare combination
61 ;;---------------------------------------------------------------------------
64 ;; Processor type. This is created automatically from arm-cores.def.
65 (include "arm-tune.md")
67 ;; Instruction classification types
70 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
71 ; generating ARM code. This is used to control the length of some insn
72 ; patterns that share the same RTL in both ARM and Thumb code.
; The (const ...) wrapper marks the value as fixed for the whole function:
; it depends only on the global TARGET_THUMB setting, not on the insn.
73 (define_attr "is_thumb" "yes,no"
74 (const (if_then_else (symbol_ref "TARGET_THUMB")
75 (const_string "yes") (const_string "no"))))
77 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
; Mirrors the backend flag arm_arch6 directly (constant per compilation).
78 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
80 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
; Constant for the compilation: follows the global TARGET_THUMB1 flag.
81 (define_attr "is_thumb1" "yes,no"
82 (const (if_then_else (symbol_ref "TARGET_THUMB1")
83 (const_string "yes") (const_string "no"))))
85 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
86 ; The arm_restrict_it flag enables the "short IT" feature which
87 ; restricts IT blocks to a single 16-bit instruction.
88 ; This attribute should only be used on 16-bit Thumb-2 instructions
89 ; which may be predicated (the "predicable" attribute must be set).
; Defaults to "no"; qualifying patterns override it per-alternative.
90 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
92 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
93 ; This attribute should only be used on instructions which may emit
94 ; an IT block in their expansion which is not a short IT.
; Defaults to "yes"; set to "no" on patterns that must be disabled when
; arm_restrict_it is in effect (see the "enabled" attribute below).
95 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
97 ;; Operand number of an input operand that is shifted. Zero if the
98 ;; given instruction does not shift one of its input operands.
; Numeric attribute (empty value list); default 0 = "no shifted operand".
99 (define_attr "shift" "" (const_int 0))
101 ;; [For compatibility with AArch64 in pipeline models]
102 ;; Attribute that specifies whether or not the instruction touches fp
;; registers (comment appears cut short in this copy).  Default "no".
104 (define_attr "fp" "no,yes" (const_string "no"))
106 ; Floating Point Unit. If we only have floating point emulation, then there
107 ; is no point in scheduling the floating point insns. (Well, for best
108 ; performance we should try and group them together).
; Value comes from the backend variable arm_fpu_attr, constant per function.
109 (define_attr "fpu" "none,vfp"
110 (const (symbol_ref "arm_fpu_attr")))
112 ; Predicated means that the insn form is conditionally executed based on a
113 ; predicate. We default to 'no' because no Thumb patterns match this rule
114 ; and not all ARM insns do.
; Note this is distinct from "predicable" below: "predicated" describes an
; insn form that IS conditional, "predicable" one that MAY be made so.
115 (define_attr "predicated" "yes,no" (const_string "no"))
117 ; LENGTH of an instruction (in bytes)
; NOTE(review): the default-value expression for this attribute appears to be
; truncated in this copy -- verify against the upstream arm.md before relying
; on it (upstream defaults the length to 4 bytes).
118 (define_attr "length" ""
121 ; The architecture which supports the instruction (or alternative).
122 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
123 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
124 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
125 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
126 ; Baseline. This attribute is used to compute attribute "enabled",
127 ; use type "any" to enable an alternative in all cases.
; Additional values: "iwmmxt"/"iwmmxt2" for the Intel wireless MMX targets,
; "armv6_or_vfpv3" and "neon" for the corresponding extensions (see
; "arch_enabled" below for the exact tests applied to each value).
128 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
129 (const_string "any"))
; Map each value of the "arch" attribute to whether it is available on the
; current target, by testing the matching TARGET_*/arm_arch* flags.
; NOTE(review): the per-case "yes" result lines (and the "iwmmxt" case)
; appear to be missing from this copy -- verify against upstream arm.md.
131 (define_attr "arch_enabled" "no,yes"
132 (cond [(eq_attr "arch" "any")
135 (and (eq_attr "arch" "a")
136 (match_test "TARGET_ARM"))
139 (and (eq_attr "arch" "t")
140 (match_test "TARGET_THUMB"))
143 (and (eq_attr "arch" "t1")
144 (match_test "TARGET_THUMB1"))
147 (and (eq_attr "arch" "t2")
148 (match_test "TARGET_THUMB2"))
151 (and (eq_attr "arch" "32")
152 (match_test "TARGET_32BIT"))
155 (and (eq_attr "arch" "v6")
156 (match_test "TARGET_32BIT && arm_arch6"))
159 (and (eq_attr "arch" "nov6")
160 (match_test "TARGET_32BIT && !arm_arch6"))
163 (and (eq_attr "arch" "v6t2")
164 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
167 (and (eq_attr "arch" "v8mb")
168 (match_test "TARGET_THUMB1 && arm_arch8"))
171 (and (eq_attr "arch" "iwmmxt2")
172 (match_test "TARGET_REALLY_IWMMXT2"))
175 (and (eq_attr "arch" "armv6_or_vfpv3")
176 (match_test "arm_arch6 || TARGET_VFP3"))
179 (and (eq_attr "arch" "neon")
180 (match_test "TARGET_NEON"))
184 (const_string "no")))
; Restrict an alternative to speed- or size-optimized compilation;
; "any" (the default) enables it regardless.  Consumed by "opt_enabled".
186 (define_attr "opt" "any,speed,size"
187 (const_string "any"))
; Whether the "opt" restriction is satisfied for the current function,
; using optimize_function_for_{speed,size}_p on cfun.
; NOTE(review): the "yes" result lines for the first cases appear to be
; missing from this copy -- verify against upstream arm.md.
189 (define_attr "opt_enabled" "no,yes"
190 (cond [(eq_attr "opt" "any")
193 (and (eq_attr "opt" "speed")
194 (match_test "optimize_function_for_speed_p (cfun)"))
197 (and (eq_attr "opt" "size")
198 (match_test "optimize_function_for_size_p (cfun)"))
199 (const_string "yes")]
200 (const_string "no")))
; "yes" for FP single/double loads (f_loads, f_loadd) whose source operand
; is a constant, i.e. loads that will be satisfied from the literal pool.
202 (define_attr "use_literal_pool" "no,yes"
203 (cond [(and (eq_attr "type" "f_loads,f_loadd")
204 (match_test "CONSTANT_P (operands[1])"))
205 (const_string "yes")]
206 (const_string "no")))
208 ; Enable all alternatives that are both arch_enabled and insn_enabled.
209 ; FIXME:: opt_enabled has been temporarily removed till the time we have
210 ; an attribute that allows the use of such alternatives.
211 ; This depends on caching of speed_p, size_p on a per
212 ; alternative basis. The problem is that the enabled attribute
213 ; cannot depend on any state that is not cached or is not constant
214 ; for a compilation unit. We probably need a generic "hot/cold"
215 ; alternative which if implemented can help with this. We disable this
216 ; until such a time as this is implemented and / or the improvements or
217 ; regressions with removing this attribute are double checked.
218 ; See ashldi3_neon and <shift>di3_neon in neon.md.
; Disabled when: a predicated alternative is not short-IT-safe while
; arm_restrict_it is on, when the pattern is excluded for short IT, or when
; the "arch" requirement is not met.  NOTE(review): the per-case "no" result
; lines appear to be missing from this copy -- verify against upstream.
220 (define_attr "enabled" "no,yes"
221 (cond [(and (eq_attr "predicable_short_it" "no")
222 (and (eq_attr "predicated" "yes")
223 (match_test "arm_restrict_it")))
226 (and (eq_attr "enabled_for_short_it" "no")
227 (match_test "arm_restrict_it"))
230 (eq_attr "arch_enabled" "no")
232 (const_string "yes")))
234 ; POOL_RANGE is how far away from a constant pool entry that this insn
235 ; can be placed. If the distance is zero, then this insn will never
236 ; reference the pool.
237 ; Note that for Thumb constant pools the PC value is rounded down to the
238 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
239 ; Thumb insns) should be set to <max_range> - 2.
240 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
241 ; before its address. It is set to <max_range> - (8 + <data_size>).
; All four per-mode attributes default to 0 (no pool reference); patterns
; that load from the pool override them with their encoding's reach.
242 (define_attr "arm_pool_range" "" (const_int 0))
243 (define_attr "thumb2_pool_range" "" (const_int 0))
244 (define_attr "arm_neg_pool_range" "" (const_int 0))
245 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Select the applicable forward range: Thumb code uses thumb2_pool_range,
; ARM code uses arm_pool_range (dispatch on the is_thumb attribute).
247 (define_attr "pool_range" ""
248 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
249 (attr "arm_pool_range")))
; Same dispatch for the backward (negative-offset) range.
250 (define_attr "neg_pool_range" ""
251 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
252 (attr "arm_neg_pool_range")))
254 ; An assembler sequence may clobber the condition codes without us knowing.
255 ; If such an insn references the pool, then we have no way of knowing how,
256 ; so use the most conservative value for pool_range.
; Default attribute values applied to inline asm statements.
257 (define_asm_attributes
258 [(set_attr "conds" "clob")
259 (set_attr "length" "4")
260 (set_attr "pool_range" "250")])
262 ; Load scheduling, set from the arm_ld_sched variable
263 ; initialized by arm_option_override()
; "yes" when the tuned-for core benefits from scheduling loads apart from
; their uses; constant per compilation.
264 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
266 ; condition codes: this one is used by final_prescan_insn to speed up
267 ; conditionalizing instructions. It saves having to scan the rtl to see if
268 ; it uses or alters the condition codes.
270 ; USE means that the condition codes are used by the insn in the process of
271 ; outputting code, this means (at present) that we can't use the insn in
274 ; SET means that the purpose of the insn is to set the condition codes in a
275 ; well defined manner.
277 ; CLOB means that the condition codes are altered in an undefined manner, if
278 ; they are altered at all
280 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
281 ; that the instruction does not use or alter the condition codes.
283 ; NOCOND means that the instruction does not use or alter the condition
284 ; codes but can be converted into a conditionally executed instruction.
; Default: Thumb-1 insns and calls clobber the flags; otherwise non-NEON
; insns default to "nocond" and NEON-typed insns to "unconditional".
; NOTE(review): the (if_then_else ...) opening line of the default
; expression appears to be missing from this copy -- verify upstream.
286 (define_attr "conds" "use,set,clob,unconditional,nocond"
288 (ior (eq_attr "is_thumb1" "yes")
289 (eq_attr "type" "call"))
290 (const_string "clob")
291 (if_then_else (eq_attr "is_neon_type" "no")
292 (const_string "nocond")
293 (const_string "unconditional"))))
295 ; Predicable means that the insn can be conditionally executed based on
296 ; an automatically added predicate (additional patterns are generated by
297 ; gen...). We default to 'no' because no Thumb patterns match this rule
298 ; and not all ARM patterns do.
; Compare "predicated" above, which marks a form that already is conditional.
299 (define_attr "predicable" "no,yes" (const_string "no"))
301 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
302 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
303 ; suffer blockages enough to warrant modelling this (and it can adversely
304 ; affect the schedule).
; Follows the tuning flag arm_tune_wbuf; constant per compilation.
305 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
307 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
308 ; to stall the processor. Used with model_wbuf above.
; NOTE(review): the "type" value list and "yes" arm of this if_then_else
; appear to be missing from this copy -- verify against upstream arm.md.
309 (define_attr "write_conflict" "no,yes"
310 (if_then_else (eq_attr "type"
313 (const_string "no")))
315 ; Classify the insns into those that take one cycle and those that take more
316 ; than one on the main cpu execution unit.
; Simple ALU ops, shifts, and most iWMMXt ops count as "single"; everything
; else (multiplies, loads/stores, etc.) is "multi".
317 (define_attr "core_cycles" "single,multi"
318 (if_then_else (eq_attr "type"
319 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
320 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
321 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
322 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
323 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
324 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
325 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
326 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
327 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
328 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
329 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
330 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
331 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
332 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
333 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
334 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
335 (const_string "single")
336 (const_string "multi")))
338 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
339 ;; distant label. Only applicable to Thumb code.
340 (define_attr "far_jump" "yes,no" (const_string "no"))
343 ;; The number of machine instructions this pattern expands to.
344 ;; Used for Thumb-2 conditional execution.
; Numeric attribute; default 1 (a single machine instruction).
345 (define_attr "ce_count" "" (const_int 1))
347 ;;---------------------------------------------------------------------------
350 (include "unspecs.md")
352 ;;---------------------------------------------------------------------------
355 (include "iterators.md")
357 ;;---------------------------------------------------------------------------
360 (include "predicates.md")
361 (include "constraints.md")
363 ;;---------------------------------------------------------------------------
364 ;; Pipeline descriptions
366 (define_attr "tune_cortexr4" "yes,no"
368 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
370 (const_string "no"))))
372 ;; True if the generic scheduling description should be used.
374 (define_attr "generic_sched" "yes,no"
376 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
377 arm926ejs,arm10e,arm1026ejs,arm1136js,\
378 arm1136jfs,cortexa5,cortexa7,cortexa8,\
379 cortexa9,cortexa12,cortexa15,cortexa17,\
380 cortexa53,cortexa57,cortexm4,cortexm7,\
381 exynosm1,marvell_pj4,xgene1")
382 (eq_attr "tune_cortexr4" "yes"))
384 (const_string "yes"))))
386 (define_attr "generic_vfp" "yes,no"
388 (and (eq_attr "fpu" "vfp")
389 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
390 cortexa8,cortexa9,cortexa53,cortexm4,\
391 cortexm7,marvell_pj4,xgene1")
392 (eq_attr "tune_cortexr4" "no"))
394 (const_string "no"))))
396 (include "marvell-f-iwmmxt.md")
397 (include "arm-generic.md")
398 (include "arm926ejs.md")
399 (include "arm1020e.md")
400 (include "arm1026ejs.md")
401 (include "arm1136jfs.md")
403 (include "fa606te.md")
404 (include "fa626te.md")
405 (include "fmp626.md")
406 (include "fa726te.md")
407 (include "cortex-a5.md")
408 (include "cortex-a7.md")
409 (include "cortex-a8.md")
410 (include "cortex-a9.md")
411 (include "cortex-a15.md")
412 (include "cortex-a17.md")
413 (include "cortex-a53.md")
414 (include "cortex-a57.md")
415 (include "cortex-r4.md")
416 (include "cortex-r4f.md")
417 (include "cortex-m7.md")
418 (include "cortex-m4.md")
419 (include "cortex-m4-fpu.md")
420 (include "exynos-m1.md")
422 (include "marvell-pj4.md")
423 (include "xgene1.md")
426 ;;---------------------------------------------------------------------------
431 ;; Note: For DImode insns, there is normally no reason why operands should
432 ;; not be in the same register, what we don't want is for something being
433 ;; written to partially overlap something that is an input.
;; 64-bit add expander.  For Thumb-1, a non-register operand 2 is forced
;; into a register before expansion (see the C body below).
;; NOTE(review): parts of the template/condition appear missing in this copy.
435 (define_expand "adddi3"
437 [(set (match_operand:DI 0 "s_register_operand")
438 (plus:DI (match_operand:DI 1 "s_register_operand")
439 (match_operand:DI 2 "arm_adddi_operand")))
440 (clobber (reg:CC CC_REGNUM))])]
443 if (TARGET_THUMB1 && !REG_P (operands[2]))
444 operands[2] = force_reg (DImode, operands[2]);
; 64-bit add: split (after reload) into a low-word add that sets the carry
; (compare:CC_C) followed by a high-word add that consumes it via
; (ltu:SI (reg:CC_C ...)), i.e. an ADDS/ADC pair.  Operands 3/4/5 are the
; high parts synthesized in the C block; 0/1/2 are narrowed to the low parts.
448 (define_insn_and_split "*arm_adddi3"
449 [(set (match_operand:DI 0 "arm_general_register_operand" "=&r,&r,&r,&r,&r")
450 (plus:DI (match_operand:DI 1 "arm_general_register_operand" "%0, 0, r, 0, r")
451 (match_operand:DI 2 "arm_general_adddi_operand" "r, 0, r, Dd, Dd")))
452 (clobber (reg:CC CC_REGNUM))]
456 [(parallel [(set (reg:CC_C CC_REGNUM)
457 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
459 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
460 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
461 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
464 operands[3] = gen_highpart (SImode, operands[0]);
465 operands[0] = gen_lowpart (SImode, operands[0]);
466 operands[4] = gen_highpart (SImode, operands[1]);
467 operands[1] = gen_lowpart (SImode, operands[1]);
468 operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
469 operands[2] = gen_lowpart (SImode, operands[2]);
471 [(set_attr "conds" "clob")
472 (set_attr "length" "8")
473 (set_attr "type" "multiple")]
; DI = sign_extend(SI) + DI.  After reload, split into a carry-setting
; low-word add, then a high-word add-with-carry whose second input is the
; sign bits of operand 2 (the ashiftrt in the split pattern).
476 (define_insn_and_split "*adddi_sesidi_di"
477 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
478 (plus:DI (sign_extend:DI
479 (match_operand:SI 2 "s_register_operand" "r,r"))
480 (match_operand:DI 1 "s_register_operand" "0,r")))
481 (clobber (reg:CC CC_REGNUM))]
484 "TARGET_32BIT && reload_completed"
485 [(parallel [(set (reg:CC_C CC_REGNUM)
486 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
488 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
489 (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
492 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
495 operands[3] = gen_highpart (SImode, operands[0]);
496 operands[0] = gen_lowpart (SImode, operands[0]);
497 operands[4] = gen_highpart (SImode, operands[1]);
498 operands[1] = gen_lowpart (SImode, operands[1]);
499 operands[2] = gen_lowpart (SImode, operands[2]);
501 [(set_attr "conds" "clob")
502 (set_attr "length" "8")
503 (set_attr "type" "multiple")]
; DI = zero_extend(SI) + DI.  As above, but the high-word add uses a zero
; contribution (const_int 0) plus the carry instead of the sign bits.
506 (define_insn_and_split "*adddi_zesidi_di"
507 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
508 (plus:DI (zero_extend:DI
509 (match_operand:SI 2 "s_register_operand" "r,r"))
510 (match_operand:DI 1 "s_register_operand" "0,r")))
511 (clobber (reg:CC CC_REGNUM))]
514 "TARGET_32BIT && reload_completed"
515 [(parallel [(set (reg:CC_C CC_REGNUM)
516 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
518 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
519 (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
520 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
523 operands[3] = gen_highpart (SImode, operands[0]);
524 operands[0] = gen_lowpart (SImode, operands[0]);
525 operands[4] = gen_highpart (SImode, operands[1]);
526 operands[1] = gen_lowpart (SImode, operands[1]);
527 operands[2] = gen_lowpart (SImode, operands[2]);
529 [(set_attr "conds" "clob")
530 (set_attr "length" "8")
531 (set_attr "type" "multiple")]
; Signed add with overflow check: emit the flag-setting add, then branch to
; the label in operand 3 when the V flag is set (CC_Vmode, NE).
534 (define_expand "addv<mode>4"
535 [(match_operand:SIDI 0 "register_operand")
536 (match_operand:SIDI 1 "register_operand")
537 (match_operand:SIDI 2 "register_operand")
538 (match_operand 3 "")]
541 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
542 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
; Unsigned add with overflow check: same shape, but branches on the carry
; flag (CC_Cmode) via the compareC variant.
547 (define_expand "uaddv<mode>4"
548 [(match_operand:SIDI 0 "register_operand")
549 (match_operand:SIDI 1 "register_operand")
550 (match_operand:SIDI 2 "register_operand")
551 (match_operand 3 "")]
554 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
555 arm_gen_unlikely_cbranch (NE, CC_Cmode, operands[3]);
; 32-bit add expander.  On TARGET_32BIT a constant addend is synthesized
; through arm_split_constant (possibly as multiple instructions) instead of
; being left as an invalid immediate.
560 (define_expand "addsi3"
561 [(set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (match_operand:SI 1 "s_register_operand")
563 (match_operand:SI 2 "reg_or_int_operand")))]
566 if (TARGET_32BIT && CONST_INT_P (operands[2]))
568 arm_split_constant (PLUS, SImode, NULL_RTX,
569 INTVAL (operands[2]), operands[0], operands[1],
570 optimize && can_create_pseudo_p ());
576 ; If there is a scratch available, this will be faster than synthesizing the
; constant in the destination: when neither the constant nor its negation is
; a valid ARM immediate but its bitwise complement is (presumably loadable
; with a single MVN -- confirm), load it into the scratch and add registers.
579 [(match_scratch:SI 3 "r")
580 (set (match_operand:SI 0 "arm_general_register_operand" "")
581 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
582 (match_operand:SI 2 "const_int_operand" "")))]
584 !(const_ok_for_arm (INTVAL (operands[2]))
585 || const_ok_for_arm (-INTVAL (operands[2])))
586 && const_ok_for_arm (~INTVAL (operands[2]))"
587 [(set (match_dup 3) (match_dup 2))
588 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
592 ;; The r/r/k alternative is required when reloading the address
593 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
594 ;; put the duplicated register first, and not try the commutative version.
;; Main 32-bit add insn.  A constant addend that is not a valid PLUS
;; immediate is split via arm_split_constant (see the split condition below).
;; NOTE(review): several output-template alternatives appear to be missing
;; from this copy -- verify against upstream arm.md.
595 (define_insn_and_split "*arm_addsi3"
596 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
597 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
598 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
614 subw%?\\t%0, %1, #%n2
615 subw%?\\t%0, %1, #%n2
618 && CONST_INT_P (operands[2])
619 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
620 && (reload_completed || !arm_eliminable_register (operands[1]))"
621 [(clobber (const_int 0))]
623 arm_split_constant (PLUS, SImode, curr_insn,
624 INTVAL (operands[2]), operands[0],
628 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
629 (set_attr "predicable" "yes")
630 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
631 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
632 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
633 (const_string "alu_imm")
634 (const_string "alu_sreg")))
; 64-bit add that also sets the V (signed-overflow) flag: the TI-mode
; sign-extended sum is compared against the sign-extended DI sum.  After
; reload this splits into a carry-setting low-word add plus a high-word
; add-with-carry that performs the same V-flag comparison in DI mode.
638 (define_insn_and_split "adddi3_compareV"
639 [(set (reg:CC_V CC_REGNUM)
642 (sign_extend:TI (match_operand:DI 1 "register_operand" "r"))
643 (sign_extend:TI (match_operand:DI 2 "register_operand" "r")))
644 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
645 (set (match_operand:DI 0 "register_operand" "=&r")
646 (plus:DI (match_dup 1) (match_dup 2)))]
649 "&& reload_completed"
650 [(parallel [(set (reg:CC_C CC_REGNUM)
651 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
653 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
654 (parallel [(set (reg:CC_V CC_REGNUM)
657 (sign_extend:DI (match_dup 4))
658 (sign_extend:DI (match_dup 5)))
659 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
660 (plus:DI (sign_extend:DI
661 (plus:SI (match_dup 4) (match_dup 5)))
662 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
663 (set (match_dup 3) (plus:SI (plus:SI
664 (match_dup 4) (match_dup 5))
665 (ltu:SI (reg:CC_C CC_REGNUM)
669 operands[3] = gen_highpart (SImode, operands[0]);
670 operands[0] = gen_lowpart (SImode, operands[0]);
671 operands[4] = gen_highpart (SImode, operands[1]);
672 operands[1] = gen_lowpart (SImode, operands[1]);
673 operands[5] = gen_highpart (SImode, operands[2]);
674 operands[2] = gen_lowpart (SImode, operands[2]);
676 [(set_attr "conds" "set")
677 (set_attr "length" "8")
678 (set_attr "type" "multiple")]
; 32-bit ADDS that sets the V flag: compares the DI-mode sign-extended sum
; with the sign-extension of the SI-mode sum.
681 (define_insn "addsi3_compareV"
682 [(set (reg:CC_V CC_REGNUM)
685 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
686 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
687 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
688 (set (match_operand:SI 0 "register_operand" "=r")
689 (plus:SI (match_dup 1) (match_dup 2)))]
691 "adds%?\\t%0, %1, %2"
692 [(set_attr "conds" "set")
693 (set_attr "type" "alus_sreg")]
; High-word ADCS for the V-setting DI add: adds the incoming carry
; (ltu of CC_C) and sets V for the combined result.
696 (define_insn "*addsi3_compareV_upper"
697 [(set (reg:CC_V CC_REGNUM)
701 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
702 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
703 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
704 (plus:DI (sign_extend:DI
705 (plus:SI (match_dup 1) (match_dup 2)))
706 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
707 (set (match_operand:SI 0 "register_operand" "=r")
709 (plus:SI (match_dup 1) (match_dup 2))
710 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
712 "adcs%?\\t%0, %1, %2"
713 [(set_attr "conds" "set")
714 (set_attr "type" "adcs_reg")]
; 64-bit add that also sets the C (unsigned-overflow) flag, using TI-mode
; zero-extensions.  Split after reload into ADDS on the low words followed
; by a high-word add-with-carry carrying the same C-flag comparison.
717 (define_insn_and_split "adddi3_compareC"
718 [(set (reg:CC_C CC_REGNUM)
721 (zero_extend:TI (match_operand:DI 1 "register_operand" "r"))
722 (zero_extend:TI (match_operand:DI 2 "register_operand" "r")))
723 (zero_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
724 (set (match_operand:DI 0 "register_operand" "=&r")
725 (plus:DI (match_dup 1) (match_dup 2)))]
728 "&& reload_completed"
729 [(parallel [(set (reg:CC_C CC_REGNUM)
730 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
732 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
733 (parallel [(set (reg:CC_C CC_REGNUM)
736 (zero_extend:DI (match_dup 4))
737 (zero_extend:DI (match_dup 5)))
738 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
739 (plus:DI (zero_extend:DI
740 (plus:SI (match_dup 4) (match_dup 5)))
741 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
742 (set (match_dup 3) (plus:SI
743 (plus:SI (match_dup 4) (match_dup 5))
744 (ltu:SI (reg:CC_C CC_REGNUM)
748 operands[3] = gen_highpart (SImode, operands[0]);
749 operands[0] = gen_lowpart (SImode, operands[0]);
750 operands[4] = gen_highpart (SImode, operands[1]);
751 operands[5] = gen_highpart (SImode, operands[2]);
752 operands[1] = gen_lowpart (SImode, operands[1]);
753 operands[2] = gen_lowpart (SImode, operands[2]);
755 [(set_attr "conds" "set")
756 (set_attr "length" "8")
757 (set_attr "type" "multiple")]
; High-word ADCS for the C-setting DI add: zero-extended analogue of
; *addsi3_compareV_upper above.
760 (define_insn "*addsi3_compareC_upper"
761 [(set (reg:CC_C CC_REGNUM)
765 (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
766 (zero_extend:DI (match_operand:SI 2 "register_operand" "r")))
767 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))
768 (plus:DI (zero_extend:DI
769 (plus:SI (match_dup 1) (match_dup 2)))
770 (ltu:DI (reg:CC_C CC_REGNUM) (const_int 0)))))
771 (set (match_operand:SI 0 "register_operand" "=r")
773 (plus:SI (match_dup 1) (match_dup 2))
774 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
776 "adcs%?\\t%0, %1, %2"
777 [(set_attr "conds" "set")
778 (set_attr "type" "adcs_reg")]
; 32-bit ADDS that sets the carry flag: compares the DI-mode zero-extended
; sum against the SI-mode sum.
781 (define_insn "addsi3_compareC"
782 [(set (reg:CC_C CC_REGNUM)
785 (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
786 (zero_extend:DI (match_operand:SI 2 "register_operand" "r")))
788 (plus:SI (match_dup 1) (match_dup 2)))))
789 (set (match_operand:SI 0 "register_operand" "=r")
790 (plus:SI (match_dup 1) (match_dup 2)))]
792 "adds%?\\t%0, %1, %2"
793 [(set_attr "conds" "set")
794 (set_attr "type" "alus_sreg")]
; ADDS/SUBS comparing the 32-bit sum with zero (CC_NOOV: overflow not
; meaningful to the consumer).  Alternative 2 uses SUBS with the negated
; immediate (#%n2) when the "L" constraint matched.
797 (define_insn "addsi3_compare0"
798 [(set (reg:CC_NOOV CC_REGNUM)
800 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
801 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
803 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
804 (plus:SI (match_dup 1) (match_dup 2)))]
808 subs%?\\t%0, %1, #%n2
810 [(set_attr "conds" "set")
811 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
; Same comparison when only the flags are needed (result discarded);
; emitted as CMN/CMP-style forms.  NOTE(review): the output templates for
; this pattern appear to be missing from this copy.
814 (define_insn "*addsi3_compare0_scratch"
815 [(set (reg:CC_NOOV CC_REGNUM)
817 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
818 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
825 [(set_attr "conds" "set")
826 (set_attr "predicable" "yes")
827 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
; Compare the negation of operand 0 against operand 1, setting only the Z
; flag (CC_Z).  NOTE(review): the output template appears to be missing
; from this copy (upstream emits a CMN) -- verify before editing.
830 (define_insn "*compare_negsi_si"
831 [(set (reg:CC_Z CC_REGNUM)
833 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
834 (match_operand:SI 1 "s_register_operand" "l,r")))]
837 [(set_attr "conds" "set")
838 (set_attr "predicable" "yes")
839 (set_attr "arch" "t2,*")
840 (set_attr "length" "2,4")
841 (set_attr "predicable_short_it" "yes,no")
842 (set_attr "type" "alus_sreg")]
845 ;; This is the canonicalization of subsi3_compare when the
846 ;; addend is a constant.
; Compare operand 1 with a constant while also producing operand 1 plus the
; negated constant: the insn condition requires operand 2 == -operand 3
; (mod 2^32), so SUBS #%n3 and ADDS %3 compute the same value.
847 (define_insn "cmpsi2_addneg"
848 [(set (reg:CC CC_REGNUM)
850 (match_operand:SI 1 "s_register_operand" "r,r")
851 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
852 (set (match_operand:SI 0 "s_register_operand" "=r,r")
853 (plus:SI (match_dup 1)
854 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
856 && (INTVAL (operands[2])
857 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
859 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
860 in different condition codes (like cmn rather than like cmp), so that
861 alternative comes first. Both alternatives can match for any 0x??000000
862 where except for 0 and INT_MIN it doesn't matter what we choose, and also
863 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
865 if (which_alternative == 0 && operands[3] != const1_rtx)
866 return "subs%?\\t%0, %1, #%n3";
868 return "adds%?\\t%0, %1, %3";
870 [(set_attr "conds" "set")
871 (set_attr "type" "alus_sreg")]
874 ;; Convert the sequence
876 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
880 ;; bcs dest ((unsigned)rn >= 1)
881 ;; similarly for the beq variant using bcc.
882 ;; This is a common looping idiom (while (n--))
; Peephole2: fold the separate decrement and compare-with-minus-one into a
; single flag-setting SUBS #1, rewriting the branch condition (NE/EQ on the
; old compare) into the matching carry-based test on the SUBS result.
; Requires the CC register to be dead after the branch (peep2_reg_dead_p).
884 [(set (match_operand:SI 0 "arm_general_register_operand" "")
885 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
887 (set (match_operand 2 "cc_register" "")
888 (compare (match_dup 0) (const_int -1)))
890 (if_then_else (match_operator 3 "equality_operator"
891 [(match_dup 2) (const_int 0)])
892 (match_operand 4 "" "")
893 (match_operand 5 "" "")))]
894 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
898 (match_dup 1) (const_int 1)))
899 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
901 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
904 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
905 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
908 operands[2], const0_rtx);"
911 ;; The next four insns work because they compare the result with one of
912 ;; the operands, and we know that the use of the condition code is
913 ;; either GEU or LTU, so we can use the carry flag from the addition
914 ;; instead of doing the compare a second time.
; ADDS/SUBS where the flags are compared against operand 1.
915 (define_insn "*addsi3_compare_op1"
916 [(set (reg:CC_C CC_REGNUM)
918 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
919 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
921 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
922 (plus:SI (match_dup 1) (match_dup 2)))]
927 subs%?\\t%0, %1, #%n2
928 subs%?\\t%0, %0, #%n2
930 subs%?\\t%0, %1, #%n2
932 [(set_attr "conds" "set")
933 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
934 (set_attr "length" "2,2,2,2,4,4,4")
936 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
; As above, but the flags are compared against operand 2.
939 (define_insn "*addsi3_compare_op2"
940 [(set (reg:CC_C CC_REGNUM)
942 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
943 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
945 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
946 (plus:SI (match_dup 1) (match_dup 2)))]
951 subs%?\\t%0, %1, #%n2
952 subs%?\\t%0, %0, #%n2
954 subs%?\\t%0, %1, #%n2
956 [(set_attr "conds" "set")
957 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
958 (set_attr "length" "2,2,2,2,4,4,4")
960 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
; Flags-only variants: compare the sum against operand 0 / operand 1
; without keeping the result.  NOTE(review): output templates for these two
; patterns appear to be missing from this copy.
963 (define_insn "*compare_addsi2_op0"
964 [(set (reg:CC_C CC_REGNUM)
966 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
967 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
976 [(set_attr "conds" "set")
977 (set_attr "predicable" "yes")
978 (set_attr "arch" "t2,t2,*,*,*")
979 (set_attr "predicable_short_it" "yes,yes,no,no,no")
980 (set_attr "length" "2,2,4,4,4")
981 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
984 (define_insn "*compare_addsi2_op1"
985 [(set (reg:CC_C CC_REGNUM)
987 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
988 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
997 [(set_attr "conds" "set")
998 (set_attr "predicable" "yes")
999 (set_attr "arch" "t2,t2,*,*,*")
1000 (set_attr "predicable_short_it" "yes,yes,no,no,no")
1001 (set_attr "length" "2,2,4,4,4")
1002 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
; Add-with-carry family: each pattern adds the carry flag (LTUGEU on the
; CC register) into a 32-bit add.  The "K" alternative uses SBC with the
; complemented immediate (#%B2).
1005 (define_insn "*addsi3_carryin_<optab>"
1006 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1007 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1008 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1009 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1014 sbc%?\\t%0, %1, #%B2"
1015 [(set_attr "conds" "use")
1016 (set_attr "predicable" "yes")
1017 (set_attr "arch" "t2,*,*")
1018 (set_attr "length" "4")
1019 (set_attr "predicable_short_it" "yes,no,no")
1020 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
; Same operation with the carry term written first in the RTL (alternate
; canonical form produced by combine).
1023 (define_insn "*addsi3_carryin_alt2_<optab>"
1024 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1025 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
1026 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1027 (match_operand:SI 2 "arm_rhs_operand" "l,rI,K")))]
1032 sbc%?\\t%0, %1, #%B2"
1033 [(set_attr "conds" "use")
1034 (set_attr "predicable" "yes")
1035 (set_attr "arch" "t2,*,*")
1036 (set_attr "length" "4")
1037 (set_attr "predicable_short_it" "yes,no,no")
1038 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
; ADC where one addend is a shifted register (%S2 prints the shift from
; operator 2 applied to operands 3 and 4).
1041 (define_insn "*addsi3_carryin_shift_<optab>"
1042 [(set (match_operand:SI 0 "s_register_operand" "=r")
1044 (match_operator:SI 2 "shift_operator"
1045 [(match_operand:SI 3 "s_register_operand" "r")
1046 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1047 (match_operand:SI 1 "s_register_operand" "r"))
1048 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1050 "adc%?\\t%0, %1, %3%S2"
1051 [(set_attr "conds" "use")
1052 (set_attr "predicable" "yes")
1053 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1054 (const_string "alu_shift_imm")
1055 (const_string "alu_shift_reg")))]
; ADCS variant: consumes the incoming carry AND clobbers/sets the flags.
1058 (define_insn "*addsi3_carryin_clobercc_<optab>"
1059 [(set (match_operand:SI 0 "s_register_operand" "=r")
1060 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1061 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1062 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1063 (clobber (reg:CC CC_REGNUM))]
1065 "adcs%?\\t%0, %1, %2"
1066 [(set_attr "conds" "set")
1067 (set_attr "type" "adcs_reg")]
;; Overflow-checked subtraction expanders (used by __builtin_sub_overflow
;; and friends).  Both emit the flag-setting subtract, then an unlikely
;; conditional branch to the label in operands[3]:
;;   subv<mode>4  -- signed:   branch on overflow   (NE in CC_Vmode)
;;   usubv<mode>4 -- unsigned: branch on borrow     (LTU in CCmode)
;; SIDI iterates over SImode and DImode.
1070 (define_expand "subv<mode>4"
1071 [(match_operand:SIDI 0 "register_operand")
1072 (match_operand:SIDI 1 "register_operand")
1073 (match_operand:SIDI 2 "register_operand")
1074 (match_operand 3 "")]
1077 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
1078 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);

1083 (define_expand "usubv<mode>4"
1084 [(match_operand:SIDI 0 "register_operand")
1085 (match_operand:SIDI 1 "register_operand")
1086 (match_operand:SIDI 2 "register_operand")
1087 (match_operand 3 "")]
1090 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
1091 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
;; 64-bit flag-setting subtract.  After reload this splits into two
;; 32-bit halves: a SUBS on the low words followed by a compare/subtract
;; with borrow (ltu of the CC set by the first insn) on the high words.
;; operands[3..5] are the high parts; operands[0..2] are rewritten in
;; place to the low parts via gen_lowpart.
;; NOTE(review): the compare rtx heading the first SET and the insn
;; condition are missing from this extraction -- verify upstream.
1096 (define_insn_and_split "subdi3_compare1"
1097 [(set (reg:CC CC_REGNUM)
1099 (match_operand:DI 1 "register_operand" "r")
1100 (match_operand:DI 2 "register_operand" "r")))
1101 (set (match_operand:DI 0 "register_operand" "=&r")
1102 (minus:DI (match_dup 1) (match_dup 2)))]
1105 "&& reload_completed"
1106 [(parallel [(set (reg:CC CC_REGNUM)
1107 (compare:CC (match_dup 1) (match_dup 2)))
1108 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1109 (parallel [(set (reg:CC CC_REGNUM)
1110 (compare:CC (match_dup 4) (match_dup 5)))
1111 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1112 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))])]
1114 operands[3] = gen_highpart (SImode, operands[0]);
1115 operands[0] = gen_lowpart (SImode, operands[0]);
1116 operands[4] = gen_highpart (SImode, operands[1]);
1117 operands[1] = gen_lowpart (SImode, operands[1]);
1118 operands[5] = gen_highpart (SImode, operands[2]);
1119 operands[2] = gen_lowpart (SImode, operands[2]);
1121 [(set_attr "conds" "set")
1122 (set_attr "length" "8")
1123 (set_attr "type" "multiple")]

;; 32-bit flag-setting subtract (SUBS): sets CC from the comparison of
;; the two inputs and writes the difference to operand 0.
1126 (define_insn "subsi3_compare1"
1127 [(set (reg:CC CC_REGNUM)
1129 (match_operand:SI 1 "register_operand" "r")
1130 (match_operand:SI 2 "register_operand" "r")))
1131 (set (match_operand:SI 0 "register_operand" "=r")
1132 (minus:SI (match_dup 1) (match_dup 2)))]
1134 "subs%?\\t%0, %1, %2"
1135 [(set_attr "conds" "set")
1136 (set_attr "type" "alus_sreg")]
;; Subtract-with-borrow patterns.  operand 3 (arm_borrow_operation)
;; matches the borrow term computed from the flags; the instructions
;; consume the carry ("conds" "use") and emit SBC variants.
;; NOTE(review): insn conditions and leading output-template lines are
;; missing from this extraction throughout -- verify upstream arm.md.
1139 (define_insn "*subsi3_carryin"
1140 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1141 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1142 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1143 (match_operand:SI 3 "arm_borrow_operation" "")))]
1148 sbc%?\\t%0, %2, %2, lsl #1"
1149 [(set_attr "conds" "use")
1150 (set_attr "arch" "*,a,t2")
1151 (set_attr "predicable" "yes")
1152 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]

;; SBC with a negative immediate, emitted as SBC of the negated
;; constant (#%n2).
1155 (define_insn "*subsi3_carryin_const"
1156 [(set (match_operand:SI 0 "s_register_operand" "=r")
1158 (match_operand:SI 1 "s_register_operand" "r")
1159 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1160 (match_operand:SI 3 "arm_borrow_operation" "")))]
1162 "sbc\\t%0, %1, #%n2"
1163 [(set_attr "conds" "use")
1164 (set_attr "type" "adc_imm")]

;; Degenerate form: subtract only the borrow from a register
;; (no other subtrahend).
1167 (define_insn "*subsi3_carryin_const0"
1168 [(set (match_operand:SI 0 "s_register_operand" "=r")
1169 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1170 (match_operand:SI 2 "arm_borrow_operation" "")))]
1173 [(set_attr "conds" "use")
1174 (set_attr "type" "adc_imm")]

;; Flag-setting subtract-with-borrow (SBCS): consumes the incoming
;; borrow and sets CC from the result, hence "conds" "set".
1177 (define_insn "*subsi3_carryin_compare"
1178 [(set (reg:CC CC_REGNUM)
1179 (compare:CC (match_operand:SI 1 "s_register_operand" "r")
1180 (match_operand:SI 2 "s_register_operand" "r")))
1181 (set (match_operand:SI 0 "s_register_operand" "=r")
1182 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
1183 (match_operand:SI 3 "arm_borrow_operation" "")))]
1186 [(set_attr "conds" "set")
1187 (set_attr "type" "adcs_reg")]

;; SBCS with an immediate: only valid when the compare constant
;; (operand 2) is exactly the negation of the subtracted constant
;; (operand 3), checked by the trunc_int_for_mode condition below.
1190 (define_insn "*subsi3_carryin_compare_const"
1191 [(set (reg:CC CC_REGNUM)
1192 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1193 (match_operand:SI 2 "const_int_I_operand" "I")))
1194 (set (match_operand:SI 0 "s_register_operand" "=r")
1197 (match_operand:SI 3 "arm_neg_immediate_operand" "L"))
1198 (match_operand:SI 4 "arm_borrow_operation" "")))]
1200 && (INTVAL (operands[2])
1201 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1202 "sbcs\\t%0, %1, #%n3"
1203 [(set_attr "conds" "set")
1204 (set_attr "type" "adcs_imm")]

;; As above with a zero constant: subtract just the borrow and set CC.
1207 (define_insn "*subsi3_carryin_compare_const0"
1208 [(set (reg:CC CC_REGNUM)
1209 (compare:CC (match_operand:SI 1 "reg_or_int_operand" "r")
1211 (set (match_operand:SI 0 "s_register_operand" "=r")
1212 (minus:SI (match_dup 1)
1213 (match_operand:SI 2 "arm_borrow_operation" "")))]
1216 [(set_attr "conds" "set")
1217 (set_attr "type" "adcs_imm")]
;; Subtract-with-borrow where the subtrahend is a shifted register:
;; SBC %0, %1, %3 <shift>.  The %S2 modifier prints the shift taken from
;; operator 2 with amount operand 4; type is alu_shift_imm for a
;; constant shift amount, alu_shift_reg for a register amount.
1220 (define_insn "*subsi3_carryin_shift"
1221 [(set (match_operand:SI 0 "s_register_operand" "=r")
1223 (match_operand:SI 1 "s_register_operand" "r")
1224 (match_operator:SI 2 "shift_operator"
1225 [(match_operand:SI 3 "s_register_operand" "r")
1226 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1227 (match_operand:SI 5 "arm_borrow_operation" "")))]
1229 "sbc%?\\t%0, %1, %3%S2"
1230 [(set_attr "conds" "use")
1231 (set_attr "predicable" "yes")
1232 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1233 (const_string "alu_shift_imm")
1234 (const_string "alu_shift_reg")))]

;; Reverse-subtract-with-carry variant: the shifted register is the
;; minuend, so the ARM-only RSC instruction is used.
1237 (define_insn "*rsbsi3_carryin_shift"
1238 [(set (match_operand:SI 0 "s_register_operand" "=r")
1240 (match_operator:SI 2 "shift_operator"
1241 [(match_operand:SI 3 "s_register_operand" "r")
1242 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1243 (match_operand:SI 1 "s_register_operand" "r"))
1244 (match_operand:SI 5 "arm_borrow_operation" "")))]
1246 "rsc%?\\t%0, %1, %3%S2"
1247 [(set_attr "conds" "use")
1248 (set_attr "predicable" "yes")
1249 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1250 (const_string "alu_shift_imm")
1251 (const_string "alu_shift_reg")))]
;; Split: transform ((x << y) - 1) into ~(~(x - 1) << y), where x is a
;; constant.  The scratch (operand 3) is loaded with ~(x - 1) and the
;; result is then a single MVN-with-shift, avoiding a separate subtract.
1254 ; transform ((x << y) - 1) to ~(~(x - 1) << y) where x is a constant.
1256 [(set (match_operand:SI 0 "s_register_operand" "")
1257 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1258 (match_operand:SI 2 "s_register_operand" ""))
1260 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1262 [(set (match_dup 3) (match_dup 1))
1263 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1265 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; Floating-point add expanders: require hard-float; the DFmode form
;; additionally requires double-precision VFP (!TARGET_VFP_SINGLE).
1268 (define_expand "addsf3"
1269 [(set (match_operand:SF 0 "s_register_operand")
1270 (plus:SF (match_operand:SF 1 "s_register_operand")
1271 (match_operand:SF 2 "s_register_operand")))]
1272 "TARGET_32BIT && TARGET_HARD_FLOAT"

1276 (define_expand "adddf3"
1277 [(set (match_operand:DF 0 "s_register_operand")
1278 (plus:DF (match_operand:DF 1 "s_register_operand")
1279 (match_operand:DF 2 "s_register_operand")))]
1280 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

;; 64-bit subtract expander: emits the DImode minus wrapped in a
;; parallel that clobbers CC, matched by the insn_and_splits below.
1284 (define_expand "subdi3"
1286 [(set (match_operand:DI 0 "s_register_operand")
1287 (minus:DI (match_operand:DI 1 "s_register_operand")
1288 (match_operand:DI 2 "s_register_operand")))
1289 (clobber (reg:CC CC_REGNUM))])]
;; 64-bit subtract patterns.  Each emits "#" and splits after reload
;; into a SUBS on the low words followed by an SBC-style operation on
;; the high words, expressed as (minus ... (ltu CC 0)); the commented
;; strings after "#" show the intended assembly.  All clobber CC
;; ("conds" "clob", length 8, type multiple).
1294 (define_insn_and_split "*arm_subdi3"
1295 [(set (match_operand:DI 0 "arm_general_register_operand" "=&r,&r,&r")
1296 (minus:DI (match_operand:DI 1 "arm_general_register_operand" "0,r,0")
1297 (match_operand:DI 2 "arm_general_register_operand" "r,0,0")))
1298 (clobber (reg:CC CC_REGNUM))]
1300 "#" ; "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1302 [(parallel [(set (reg:CC CC_REGNUM)
1303 (compare:CC (match_dup 1) (match_dup 2)))
1304 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1305 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4) (match_dup 5))
1306 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
1308 operands[3] = gen_highpart (SImode, operands[0]);
1309 operands[0] = gen_lowpart (SImode, operands[0]);
1310 operands[4] = gen_highpart (SImode, operands[1]);
1311 operands[1] = gen_lowpart (SImode, operands[1]);
1312 operands[5] = gen_highpart (SImode, operands[2]);
1313 operands[2] = gen_lowpart (SImode, operands[2]);
1315 [(set_attr "conds" "clob")
1316 (set_attr "length" "8")
1317 (set_attr "type" "multiple")]

;; DI minus zero-extended SI: the high word only needs the borrow
;; subtracted (sbc %R0, %R1, #0).
1320 (define_insn_and_split "*subdi_di_zesidi"
1321 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1322 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1324 (match_operand:SI 2 "s_register_operand" "r,r"))))
1325 (clobber (reg:CC CC_REGNUM))]
1327 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1328 "&& reload_completed"
1329 [(parallel [(set (reg:CC CC_REGNUM)
1330 (compare:CC (match_dup 1) (match_dup 2)))
1331 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1332 (set (match_dup 3) (minus:SI (match_dup 4)
1333 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
1335 operands[3] = gen_highpart (SImode, operands[0]);
1336 operands[0] = gen_lowpart (SImode, operands[0]);
1337 operands[4] = gen_highpart (SImode, operands[1]);
1338 operands[1] = gen_lowpart (SImode, operands[1]);
1340 [(set_attr "conds" "clob")
1341 (set_attr "length" "8")
1342 (set_attr "type" "multiple")]

;; DI minus sign-extended SI: the high word subtracts the sign bits of
;; operand 2 (arithmetic shift right, presumably by 31 -- the shift
;; amount line is missing from this extraction, TODO confirm) plus the
;; borrow.
1345 (define_insn_and_split "*subdi_di_sesidi"
1346 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1347 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r")
1349 (match_operand:SI 2 "s_register_operand" "r,r"))))
1350 (clobber (reg:CC CC_REGNUM))]
1352 "#" ; "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1353 "&& reload_completed"
1354 [(parallel [(set (reg:CC CC_REGNUM)
1355 (compare:CC (match_dup 1) (match_dup 2)))
1356 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1357 (set (match_dup 3) (minus:SI (minus:SI (match_dup 4)
1358 (ashiftrt:SI (match_dup 2)
1360 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
1362 operands[3] = gen_highpart (SImode, operands[0]);
1363 operands[0] = gen_lowpart (SImode, operands[0]);
1364 operands[4] = gen_highpart (SImode, operands[1]);
1365 operands[1] = gen_lowpart (SImode, operands[1]);
1367 [(set_attr "conds" "clob")
1368 (set_attr "length" "8")
1369 (set_attr "type" "multiple")]

;; Zero-extended SI minus DI: low words reverse-subtracted, high word is
;; 0 minus the high part of operand 1 minus the borrow (RSC on ARM).
1372 (define_insn_and_split "*subdi_zesidi_di"
1373 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1374 (minus:DI (zero_extend:DI
1375 (match_operand:SI 2 "s_register_operand" "r,r"))
1376 (match_operand:DI 1 "s_register_operand" "0,r")))
1377 (clobber (reg:CC CC_REGNUM))]
1379 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1381 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, #0"
1382 "&& reload_completed"
1383 [(parallel [(set (reg:CC CC_REGNUM)
1384 (compare:CC (match_dup 2) (match_dup 1)))
1385 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1386 (set (match_dup 3) (minus:SI (minus:SI (const_int 0) (match_dup 4))
1387 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
1389 operands[3] = gen_highpart (SImode, operands[0]);
1390 operands[0] = gen_lowpart (SImode, operands[0]);
1391 operands[4] = gen_highpart (SImode, operands[1]);
1392 operands[1] = gen_lowpart (SImode, operands[1]);
1394 [(set_attr "conds" "clob")
1395 (set_attr "length" "8")
1396 (set_attr "type" "multiple")]

;; Sign-extended SI minus DI: like the previous pattern but the high
;; word starts from the sign bits of operand 2 instead of zero.
1399 (define_insn_and_split "*subdi_sesidi_di"
1400 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1401 (minus:DI (sign_extend:DI
1402 (match_operand:SI 2 "s_register_operand" "r,r"))
1403 (match_operand:DI 1 "s_register_operand" "0,r")))
1404 (clobber (reg:CC CC_REGNUM))]
1406 "#" ; "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1408 ; "subs\\t%Q0, %2, %Q1\;rsc\\t%R0, %R1, %2, asr #31"
1409 "&& reload_completed"
1410 [(parallel [(set (reg:CC CC_REGNUM)
1411 (compare:CC (match_dup 2) (match_dup 1)))
1412 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))])
1413 (set (match_dup 3) (minus:SI (minus:SI
1414 (ashiftrt:SI (match_dup 2)
1417 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
1419 operands[3] = gen_highpart (SImode, operands[0]);
1420 operands[0] = gen_lowpart (SImode, operands[0]);
1421 operands[4] = gen_highpart (SImode, operands[1]);
1422 operands[1] = gen_lowpart (SImode, operands[1]);
1424 [(set_attr "conds" "clob")
1425 (set_attr "length" "8")
1426 (set_attr "type" "multiple")]

;; Zero-extended SI minus zero-extended SI: the high word reduces to
;; (r1 - r1) - borrow, i.e. "sbc %R0, %1, %1" -- 0 or -1 depending on
;; the borrow.
1429 (define_insn_and_split "*subdi_zesidi_zesidi"
1430 [(set (match_operand:DI 0 "s_register_operand" "=r")
1431 (minus:DI (zero_extend:DI
1432 (match_operand:SI 1 "s_register_operand" "r"))
1434 (match_operand:SI 2 "s_register_operand" "r"))))
1435 (clobber (reg:CC CC_REGNUM))]
1437 "#" ; "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1438 "&& reload_completed"
1439 [(parallel [(set (reg:CC CC_REGNUM)
1440 (compare:CC (match_dup 1) (match_dup 2)))
1441 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
1442 (set (match_dup 3) (minus:SI (minus:SI (match_dup 1) (match_dup 1))
1443 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
1445 operands[3] = gen_highpart (SImode, operands[0]);
1446 operands[0] = gen_lowpart (SImode, operands[0]);
1448 [(set_attr "conds" "clob")
1449 (set_attr "length" "8")
1450 (set_attr "type" "multiple")]
;; 32-bit subtract expander.  A constant minuend (operand 1) is either
;; kept for the insn to split later (DONT_EARLY_SPLIT_CONSTANT) or
;; expanded immediately via arm_split_constant; Thumb-1 always forces
;; the constant into a register.
1453 (define_expand "subsi3"
1454 [(set (match_operand:SI 0 "s_register_operand")
1455 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1456 (match_operand:SI 2 "s_register_operand")))]
1459 if (CONST_INT_P (operands[1]))
1463 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1464 operands[1] = force_reg (SImode, operands[1]);
1467 arm_split_constant (MINUS, SImode, NULL_RTX,
1468 INTVAL (operands[1]), operands[0],
1470 optimize && can_create_pseudo_p ());
1474 else /* TARGET_THUMB1 */
1475 operands[1] = force_reg (SImode, operands[1]);

;; Main 32-bit subtract insn.  Alternatives cover Thumb-2 short forms,
;; ARM SUB/RSB with immediates, and a ?n alternative holding a constant
;; minuend that cannot be encoded; the latter is split into a multi-insn
;; sequence by arm_split_constant (length 16).
1480 ; ??? Check Thumb-2 split length
1481 (define_insn_and_split "*arm_subsi3_insn"
1482 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1483 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1484 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1496 "&& (CONST_INT_P (operands[1])
1497 && !const_ok_for_arm (INTVAL (operands[1])))"
1498 [(clobber (const_int 0))]
1500 arm_split_constant (MINUS, SImode, curr_insn,
1501 INTVAL (operands[1]), operands[0], operands[2], 0);
1504 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1505 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1506 (set_attr "predicable" "yes")
1507 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1508 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]

;; Peephole-style split: when the constant minuend is not encodable but
;; its complement is, load ~constant into a scratch first.
;; NOTE(review): the condition line naming the target test is missing
;; from this extraction -- verify upstream.
1512 [(match_scratch:SI 3 "r")
1513 (set (match_operand:SI 0 "arm_general_register_operand" "")
1514 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1515 (match_operand:SI 2 "arm_general_register_operand" "")))]
1517 && !const_ok_for_arm (INTVAL (operands[1]))
1518 && const_ok_for_arm (~INTVAL (operands[1]))"
1519 [(set (match_dup 3) (match_dup 1))
1520 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]

;; Flag-setting subtracts.  subsi3_compare0 uses CC_NOOV (comparison of
;; the result against 0); subsi3_compare uses full CC (comparison of
;; the operands).  Both emit SUBS or RSBS depending on which operand is
;; the immediate.
1524 (define_insn "subsi3_compare0"
1525 [(set (reg:CC_NOOV CC_REGNUM)
1527 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1528 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1530 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1531 (minus:SI (match_dup 1) (match_dup 2)))]
1536 rsbs%?\\t%0, %2, %1"
1537 [(set_attr "conds" "set")
1538 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]

1541 (define_insn "subsi3_compare"
1542 [(set (reg:CC CC_REGNUM)
1543 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1544 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1545 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1546 (minus:SI (match_dup 1) (match_dup 2)))]
1551 rsbs%?\\t%0, %2, %1"
1552 [(set_attr "conds" "set")
1553 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;; Floating-point subtract expanders, mirroring addsf3/adddf3: hard
;; float required; DFmode also needs double-precision VFP.
1556 (define_expand "subsf3"
1557 [(set (match_operand:SF 0 "s_register_operand")
1558 (minus:SF (match_operand:SF 1 "s_register_operand")
1559 (match_operand:SF 2 "s_register_operand")))]
1560 "TARGET_32BIT && TARGET_HARD_FLOAT"

1564 (define_expand "subdf3"
1565 [(set (match_operand:DF 0 "s_register_operand")
1566 (minus:DF (match_operand:DF 1 "s_register_operand")
1567 (match_operand:DF 2 "s_register_operand")))]
1568 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1573 ;; Multiplication insns

;; HImode multiply: performed as a DSP 16x16 multiply (mulhisi3) into an
;; SImode temporary, then the low half is moved to the HImode result.
1575 (define_expand "mulhi3"
1576 [(set (match_operand:HI 0 "s_register_operand")
1577 (mult:HI (match_operand:HI 1 "s_register_operand")
1578 (match_operand:HI 2 "s_register_operand")))]
1579 "TARGET_DSP_MULTIPLY"
1582 rtx result = gen_reg_rtx (SImode);
1583 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1584 emit_move_insn (operands[0], gen_lowpart (HImode, result));

1589 (define_expand "mulsi3"
1590 [(set (match_operand:SI 0 "s_register_operand")
1591 (mult:SI (match_operand:SI 2 "s_register_operand")
1592 (match_operand:SI 1 "s_register_operand")))]

;; Pre-v6 MUL: the destination must not overlap the first source
;; register, hence the early-clobber (&) plus the `0' tie below.
1597 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1598 (define_insn "*arm_mulsi3"
1599 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1600 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1601 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1602 "TARGET_32BIT && !arm_arch6"
1603 "mul%?\\t%0, %2, %1"
1604 [(set_attr "type" "mul")
1605 (set_attr "predicable" "yes")]

;; v6+ MUL: no operand-overlap restriction; adds Thumb-2 16-bit forms.
1608 (define_insn "*arm_mulsi3_v6"
1609 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
1610 (mult:SI (match_operand:SI 1 "s_register_operand" "0,l,r")
1611 (match_operand:SI 2 "s_register_operand" "l,0,r")))]
1612 "TARGET_32BIT && arm_arch6"
1613 "mul%?\\t%0, %1, %2"
1614 [(set_attr "type" "mul")
1615 (set_attr "predicable" "yes")
1616 (set_attr "arch" "t2,t2,*")
1617 (set_attr "length" "4")
1618 (set_attr "predicable_short_it" "yes,yes,no")]

;; MULS: multiply that also sets the flags (CC_NOOV -- only N and Z are
;; meaningful).  ARM-only; the v6 variant is restricted to -Os since
;; flag-setting multiplies can be slower on later cores.
1621 (define_insn "*mulsi3_compare0"
1622 [(set (reg:CC_NOOV CC_REGNUM)
1623 (compare:CC_NOOV (mult:SI
1624 (match_operand:SI 2 "s_register_operand" "r,r")
1625 (match_operand:SI 1 "s_register_operand" "%0,r"))
1627 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1628 (mult:SI (match_dup 2) (match_dup 1)))]
1629 "TARGET_ARM && !arm_arch6"
1630 "muls%?\\t%0, %2, %1"
1631 [(set_attr "conds" "set")
1632 (set_attr "type" "muls")]

1635 (define_insn "*mulsi3_compare0_v6"
1636 [(set (reg:CC_NOOV CC_REGNUM)
1637 (compare:CC_NOOV (mult:SI
1638 (match_operand:SI 2 "s_register_operand" "r")
1639 (match_operand:SI 1 "s_register_operand" "r"))
1641 (set (match_operand:SI 0 "s_register_operand" "=r")
1642 (mult:SI (match_dup 2) (match_dup 1)))]
1643 "TARGET_ARM && arm_arch6 && optimize_size"
1644 "muls%?\\t%0, %2, %1"
1645 [(set_attr "conds" "set")
1646 (set_attr "type" "muls")]

;; MULS where only the flags are wanted: the product goes to a scratch.
1649 (define_insn "*mulsi_compare0_scratch"
1650 [(set (reg:CC_NOOV CC_REGNUM)
1651 (compare:CC_NOOV (mult:SI
1652 (match_operand:SI 2 "s_register_operand" "r,r")
1653 (match_operand:SI 1 "s_register_operand" "%0,r"))
1655 (clobber (match_scratch:SI 0 "=&r,&r"))]
1656 "TARGET_ARM && !arm_arch6"
1657 "muls%?\\t%0, %2, %1"
1658 [(set_attr "conds" "set")
1659 (set_attr "type" "muls")]

1662 (define_insn "*mulsi_compare0_scratch_v6"
1663 [(set (reg:CC_NOOV CC_REGNUM)
1664 (compare:CC_NOOV (mult:SI
1665 (match_operand:SI 2 "s_register_operand" "r")
1666 (match_operand:SI 1 "s_register_operand" "r"))
1668 (clobber (match_scratch:SI 0 "=r"))]
1669 "TARGET_ARM && arm_arch6 && optimize_size"
1670 "muls%?\\t%0, %2, %1"
1671 [(set_attr "conds" "set")
1672 (set_attr "type" "muls")]
1675 ;; Unnamed templates to match MLA instruction.

;; Multiply-accumulate: %0 = %2 * %1 + %3.  Pre-v6 forms carry the
;; early-clobber restriction on the destination; the v6 forms do not.
1677 (define_insn "*mulsi3addsi"
1678 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1680 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1681 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1682 (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1683 "TARGET_32BIT && !arm_arch6"
1684 "mla%?\\t%0, %2, %1, %3"
1685 [(set_attr "type" "mla")
1686 (set_attr "predicable" "yes")]

1689 (define_insn "*mulsi3addsi_v6"
1690 [(set (match_operand:SI 0 "s_register_operand" "=r")
1692 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1693 (match_operand:SI 1 "s_register_operand" "r"))
1694 (match_operand:SI 3 "s_register_operand" "r")))]
1695 "TARGET_32BIT && arm_arch6"
1696 "mla%?\\t%0, %2, %1, %3"
1697 [(set_attr "type" "mla")
1698 (set_attr "predicable" "yes")]

;; Flag-setting MLA (MLAS).  ARM-only; the _v6 form is -Os only.
;; NOTE(review): the compare rtx lines are missing from this
;; extraction, and the !arm_arch6/arm_arch6 conditions on the first
;; pair look inconsistent with the neighbouring patterns -- verify
;; against upstream arm.md.
1701 (define_insn "*mulsi3addsi_compare0"
1702 [(set (reg:CC_NOOV CC_REGNUM)
1705 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1706 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1707 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1709 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1710 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1712 "TARGET_ARM && arm_arch6"
1713 "mlas%?\\t%0, %2, %1, %3"
1714 [(set_attr "conds" "set")
1715 (set_attr "type" "mlas")]

1718 (define_insn "*mulsi3addsi_compare0_v6"
1719 [(set (reg:CC_NOOV CC_REGNUM)
1722 (match_operand:SI 2 "s_register_operand" "r")
1723 (match_operand:SI 1 "s_register_operand" "r"))
1724 (match_operand:SI 3 "s_register_operand" "r"))
1726 (set (match_operand:SI 0 "s_register_operand" "=r")
1727 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1729 "TARGET_ARM && arm_arch6 && optimize_size"
1730 "mlas%?\\t%0, %2, %1, %3"
1731 [(set_attr "conds" "set")
1732 (set_attr "type" "mlas")]

;; MLAS where only the flags are wanted: result goes to a scratch.
1735 (define_insn "*mulsi3addsi_compare0_scratch"
1736 [(set (reg:CC_NOOV CC_REGNUM)
1739 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1740 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1741 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1743 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1744 "TARGET_ARM && !arm_arch6"
1745 "mlas%?\\t%0, %2, %1, %3"
1746 [(set_attr "conds" "set")
1747 (set_attr "type" "mlas")]

1750 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1751 [(set (reg:CC_NOOV CC_REGNUM)
1754 (match_operand:SI 2 "s_register_operand" "r")
1755 (match_operand:SI 1 "s_register_operand" "r"))
1756 (match_operand:SI 3 "s_register_operand" "r"))
1758 (clobber (match_scratch:SI 0 "=r"))]
1759 "TARGET_ARM && arm_arch6 && optimize_size"
1760 "mlas%?\\t%0, %2, %1, %3"
1761 [(set_attr "conds" "set")
1762 (set_attr "type" "mlas")]

;; Multiply-subtract (MLS): %0 = %3 - %2 * %1.  Thumb-2-era (v6T2+)
;; instruction, gated on arm_arch_thumb2.
1765 (define_insn "*mulsi3subsi"
1766 [(set (match_operand:SI 0 "s_register_operand" "=r")
1768 (match_operand:SI 3 "s_register_operand" "r")
1769 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1770 (match_operand:SI 1 "s_register_operand" "r"))))]
1771 "TARGET_32BIT && arm_arch_thumb2"
1772 "mls%?\\t%0, %2, %1, %3"
1773 [(set_attr "type" "mla")
1774 (set_attr "predicable" "yes")]
;; 64-bit multiply-accumulate: signed (SMLAL) and, further below,
;; unsigned (UMLAL).  %Q0/%R0 print the low/high words of the DImode
;; accumulator, which is tied to operand 1 (constraint "0").
1777 (define_expand "maddsidi4"
1778 [(set (match_operand:DI 0 "s_register_operand")
1781 (sign_extend:DI (match_operand:SI 1 "s_register_operand"))
1782 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
1783 (match_operand:DI 3 "s_register_operand")))]

1787 (define_insn "*mulsidi3adddi"
1788 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1791 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1792 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1793 (match_operand:DI 1 "s_register_operand" "0")))]
1794 "TARGET_32BIT && !arm_arch6"
1795 "smlal%?\\t%Q0, %R0, %3, %2"
1796 [(set_attr "type" "smlal")
1797 (set_attr "predicable" "yes")]

1800 (define_insn "*mulsidi3adddi_v6"
1801 [(set (match_operand:DI 0 "s_register_operand" "=r")
1804 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1805 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1806 (match_operand:DI 1 "s_register_operand" "0")))]
1807 "TARGET_32BIT && arm_arch6"
1808 "smlal%?\\t%Q0, %R0, %3, %2"
1809 [(set_attr "type" "smlal")
1810 (set_attr "predicable" "yes")]

1813 ;; 32x32->64 widening multiply.
1814 ;; As with mulsi3, the only difference between the v3-5 and v6+
1815 ;; versions of these patterns is the requirement that the output not
1816 ;; overlap the inputs, but that still means we have to have a named
1817 ;; expander and two different starred insns.
1819 (define_expand "mulsidi3"
1820 [(set (match_operand:DI 0 "s_register_operand")
1822 (sign_extend:DI (match_operand:SI 1 "s_register_operand"))
1823 (sign_extend:DI (match_operand:SI 2 "s_register_operand"))))]

;; Signed widening multiply (SMULL).
1828 (define_insn "*mulsidi3_nov6"
1829 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1831 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1832 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1833 "TARGET_32BIT && !arm_arch6"
1834 "smull%?\\t%Q0, %R0, %1, %2"
1835 [(set_attr "type" "smull")
1836 (set_attr "predicable" "yes")]

1839 (define_insn "*mulsidi3_v6"
1840 [(set (match_operand:DI 0 "s_register_operand" "=r")
1842 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1843 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1844 "TARGET_32BIT && arm_arch6"
1845 "smull%?\\t%Q0, %R0, %1, %2"
1846 [(set_attr "type" "smull")
1847 (set_attr "predicable" "yes")]

;; Unsigned widening multiply (UMULL).
1850 (define_expand "umulsidi3"
1851 [(set (match_operand:DI 0 "s_register_operand")
1853 (zero_extend:DI (match_operand:SI 1 "s_register_operand"))
1854 (zero_extend:DI (match_operand:SI 2 "s_register_operand"))))]

1859 (define_insn "*umulsidi3_nov6"
1860 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1862 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1863 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1864 "TARGET_32BIT && !arm_arch6"
1865 "umull%?\\t%Q0, %R0, %1, %2"
1866 [(set_attr "type" "umull")
1867 (set_attr "predicable" "yes")]

1870 (define_insn "*umulsidi3_v6"
1871 [(set (match_operand:DI 0 "s_register_operand" "=r")
1873 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1874 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1875 "TARGET_32BIT && arm_arch6"
1876 "umull%?\\t%Q0, %R0, %1, %2"
1877 [(set_attr "type" "umull")
1878 (set_attr "predicable" "yes")]

;; Unsigned 64-bit multiply-accumulate (UMLAL), mirroring maddsidi4.
1881 (define_expand "umaddsidi4"
1882 [(set (match_operand:DI 0 "s_register_operand")
1885 (zero_extend:DI (match_operand:SI 1 "s_register_operand"))
1886 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
1887 (match_operand:DI 3 "s_register_operand")))]

1891 (define_insn "*umulsidi3adddi"
1892 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1895 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1896 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1897 (match_operand:DI 1 "s_register_operand" "0")))]
1898 "TARGET_32BIT && !arm_arch6"
1899 "umlal%?\\t%Q0, %R0, %3, %2"
1900 [(set_attr "type" "umlal")
1901 (set_attr "predicable" "yes")]

1904 (define_insn "*umulsidi3adddi_v6"
1905 [(set (match_operand:DI 0 "s_register_operand" "=r")
1908 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1909 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1910 (match_operand:DI 1 "s_register_operand" "0")))]
1911 "TARGET_32BIT && arm_arch6"
1912 "umlal%?\\t%Q0, %R0, %3, %2"
1913 [(set_attr "type" "umlal")
1914 (set_attr "predicable" "yes")]
;; High-word of a 32x32->64 multiply: implemented with SMULL/UMULL,
;; with the unwanted low word going to a scratch register (operand 3).
;; NOTE(review): the truncate/lshiftrt rtx lines of these patterns are
;; missing from this extraction -- verify upstream arm.md.
1917 (define_expand "smulsi3_highpart"
1919 [(set (match_operand:SI 0 "s_register_operand")
1923 (sign_extend:DI (match_operand:SI 1 "s_register_operand"))
1924 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
1926 (clobber (match_scratch:SI 3 ""))])]

1931 (define_insn "*smulsi3_highpart_nov6"
1932 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1936 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1937 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1939 (clobber (match_scratch:SI 3 "=&r,&r"))]
1940 "TARGET_32BIT && !arm_arch6"
1941 "smull%?\\t%3, %0, %2, %1"
1942 [(set_attr "type" "smull")
1943 (set_attr "predicable" "yes")]

1946 (define_insn "*smulsi3_highpart_v6"
1947 [(set (match_operand:SI 0 "s_register_operand" "=r")
1951 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1952 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1954 (clobber (match_scratch:SI 3 "=r"))]
1955 "TARGET_32BIT && arm_arch6"
1956 "smull%?\\t%3, %0, %2, %1"
1957 [(set_attr "type" "smull")
1958 (set_attr "predicable" "yes")]

;; Unsigned highpart multiply (UMULL), same structure as the signed
;; patterns above.
1961 (define_expand "umulsi3_highpart"
1963 [(set (match_operand:SI 0 "s_register_operand")
1967 (zero_extend:DI (match_operand:SI 1 "s_register_operand"))
1968 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
1970 (clobber (match_scratch:SI 3 ""))])]

1975 (define_insn "*umulsi3_highpart_nov6"
1976 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1980 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1981 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1983 (clobber (match_scratch:SI 3 "=&r,&r"))]
1984 "TARGET_32BIT && !arm_arch6"
1985 "umull%?\\t%3, %0, %2, %1"
1986 [(set_attr "type" "umull")
1987 (set_attr "predicable" "yes")]

1990 (define_insn "*umulsi3_highpart_v6"
1991 [(set (match_operand:SI 0 "s_register_operand" "=r")
1995 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1996 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1998 (clobber (match_scratch:SI 3 "=r"))]
1999 "TARGET_32BIT && arm_arch6"
2000 "umull%?\\t%3, %0, %2, %1"
2001 [(set_attr "type" "umull")
2002 (set_attr "predicable" "yes")]
;; DSP 16x16->32 signed multiplies (SMULxy) and multiply-accumulates
;; (SMLAxy).  The b/t suffixes select the bottom/top halfword of each
;; source: a sign_extend of an HImode reg is the bottom half; an
;; ashiftrt of an SImode reg (by 16 -- the shift-amount lines are
;; missing from this extraction, TODO confirm) is the top half.
;; All require TARGET_DSP_MULTIPLY.
2005 (define_insn "mulhisi3"
2006 [(set (match_operand:SI 0 "s_register_operand" "=r")
2007 (mult:SI (sign_extend:SI
2008 (match_operand:HI 1 "s_register_operand" "%r"))
2010 (match_operand:HI 2 "s_register_operand" "r"))))]
2011 "TARGET_DSP_MULTIPLY"
2012 "smulbb%?\\t%0, %1, %2"
2013 [(set_attr "type" "smulxy")
2014 (set_attr "predicable" "yes")]

2017 (define_insn "*mulhisi3tb"
2018 [(set (match_operand:SI 0 "s_register_operand" "=r")
2019 (mult:SI (ashiftrt:SI
2020 (match_operand:SI 1 "s_register_operand" "r")
2023 (match_operand:HI 2 "s_register_operand" "r"))))]
2024 "TARGET_DSP_MULTIPLY"
2025 "smultb%?\\t%0, %1, %2"
2026 [(set_attr "type" "smulxy")
2027 (set_attr "predicable" "yes")]

2030 (define_insn "*mulhisi3bt"
2031 [(set (match_operand:SI 0 "s_register_operand" "=r")
2032 (mult:SI (sign_extend:SI
2033 (match_operand:HI 1 "s_register_operand" "r"))
2035 (match_operand:SI 2 "s_register_operand" "r")
2037 "TARGET_DSP_MULTIPLY"
2038 "smulbt%?\\t%0, %1, %2"
2039 [(set_attr "type" "smulxy")
2040 (set_attr "predicable" "yes")]

2043 (define_insn "*mulhisi3tt"
2044 [(set (match_operand:SI 0 "s_register_operand" "=r")
2045 (mult:SI (ashiftrt:SI
2046 (match_operand:SI 1 "s_register_operand" "r")
2049 (match_operand:SI 2 "s_register_operand" "r")
2051 "TARGET_DSP_MULTIPLY"
2052 "smultt%?\\t%0, %1, %2"
2053 [(set_attr "type" "smulxy")
2054 (set_attr "predicable" "yes")]

;; 16x16+32 multiply-accumulate into a 32-bit register (SMLAxy).
2057 (define_insn "maddhisi4"
2058 [(set (match_operand:SI 0 "s_register_operand" "=r")
2059 (plus:SI (mult:SI (sign_extend:SI
2060 (match_operand:HI 1 "s_register_operand" "r"))
2062 (match_operand:HI 2 "s_register_operand" "r")))
2063 (match_operand:SI 3 "s_register_operand" "r")))]
2064 "TARGET_DSP_MULTIPLY"
2065 "smlabb%?\\t%0, %1, %2, %3"
2066 [(set_attr "type" "smlaxy")
2067 (set_attr "predicable" "yes")]

2070 ;; Note: there is no maddhisi4ibt because this one is canonical form
2071 (define_insn "*maddhisi4tb"
2072 [(set (match_operand:SI 0 "s_register_operand" "=r")
2073 (plus:SI (mult:SI (ashiftrt:SI
2074 (match_operand:SI 1 "s_register_operand" "r")
2077 (match_operand:HI 2 "s_register_operand" "r")))
2078 (match_operand:SI 3 "s_register_operand" "r")))]
2079 "TARGET_DSP_MULTIPLY"
2080 "smlatb%?\\t%0, %1, %2, %3"
2081 [(set_attr "type" "smlaxy")
2082 (set_attr "predicable" "yes")]

2085 (define_insn "*maddhisi4tt"
2086 [(set (match_operand:SI 0 "s_register_operand" "=r")
2087 (plus:SI (mult:SI (ashiftrt:SI
2088 (match_operand:SI 1 "s_register_operand" "r")
2091 (match_operand:SI 2 "s_register_operand" "r")
2093 (match_operand:SI 3 "s_register_operand" "r")))]
2094 "TARGET_DSP_MULTIPLY"
2095 "smlatt%?\\t%0, %1, %2, %3"
2096 [(set_attr "type" "smlaxy")
2097 (set_attr "predicable" "yes")]
;; 16x16 multiply with 64-bit accumulate (SMLAL<x><y>): operand 0 (DI) =
;; widened halfword product + operand 3; the "0" constraint ties the
;; accumulator input to the output register pair (%Q0/%R0 = low/high words).
2100 (define_insn "maddhidi4"
2101 [(set (match_operand:DI 0 "s_register_operand" "=r")
2103 (mult:DI (sign_extend:DI
2104 (match_operand:HI 1 "s_register_operand" "r"))
2106 (match_operand:HI 2 "s_register_operand" "r")))
2107 (match_operand:DI 3 "s_register_operand" "0")))]
2108 "TARGET_DSP_MULTIPLY"
2109 "smlalbb%?\\t%Q0, %R0, %1, %2"
2110 [(set_attr "type" "smlalxy")
2111 (set_attr "predicable" "yes")])
2113 ;; Note: there is no maddhidi4ibt because this one is canonical form
;; Top halfword of operand 1 times bottom halfword of operand 2,
;; accumulated into the 64-bit operand 3.
2114 (define_insn "*maddhidi4tb"
2115 [(set (match_operand:DI 0 "s_register_operand" "=r")
2117 (mult:DI (sign_extend:DI
2119 (match_operand:SI 1 "s_register_operand" "r")
2122 (match_operand:HI 2 "s_register_operand" "r")))
2123 (match_operand:DI 3 "s_register_operand" "0")))]
2124 "TARGET_DSP_MULTIPLY"
2125 "smlaltb%?\\t%Q0, %R0, %1, %2"
2126 [(set_attr "type" "smlalxy")
2127 (set_attr "predicable" "yes")])
;; Top halfword of both sources, 64-bit accumulate.
2129 (define_insn "*maddhidi4tt"
2130 [(set (match_operand:DI 0 "s_register_operand" "=r")
2132 (mult:DI (sign_extend:DI
2134 (match_operand:SI 1 "s_register_operand" "r")
2138 (match_operand:SI 2 "s_register_operand" "r")
2140 (match_operand:DI 3 "s_register_operand" "0")))]
2141 "TARGET_DSP_MULTIPLY"
2142 "smlaltt%?\\t%Q0, %R0, %1, %2"
2143 [(set_attr "type" "smlalxy")
2144 (set_attr "predicable" "yes")])
;; Floating-point multiply/divide expanders.  These only establish the
;; standard named patterns and their enabling conditions; the actual insns
;; are provided elsewhere (VFP patterns).  Double-precision forms are
;; disabled on single-precision-only FPUs.
2146 (define_expand "mulsf3"
2147 [(set (match_operand:SF 0 "s_register_operand")
2148 (mult:SF (match_operand:SF 1 "s_register_operand")
2149 (match_operand:SF 2 "s_register_operand")))]
2150 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision multiply; requires a double-capable FPU.
2154 (define_expand "muldf3"
2155 [(set (match_operand:DF 0 "s_register_operand")
2156 (mult:DF (match_operand:DF 1 "s_register_operand")
2157 (match_operand:DF 2 "s_register_operand")))]
2158 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; Single-precision divide.
2164 (define_expand "divsf3"
2165 [(set (match_operand:SF 0 "s_register_operand")
2166 (div:SF (match_operand:SF 1 "s_register_operand")
2167 (match_operand:SF 2 "s_register_operand")))]
2168 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision divide; requires a double-capable FPU.
2171 (define_expand "divdf3"
2172 [(set (match_operand:DF 0 "s_register_operand")
2173 (div:DF (match_operand:DF 1 "s_register_operand")
2174 (match_operand:DF 2 "s_register_operand")))]
2175 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2179 ;; Split DImode and, ior, xor operations. Simply perform the logical
2180 ;; operation on the upper and lower halves of the registers.
2181 ;; This is needed for atomic operations in arm_split_atomic_op.
2182 ;; Avoid splitting IWMMXT instructions.
;; NOTE(review): the "(define_split" header line appears to be missing from
;; this extract; the pattern below splits a 64-bit logical op into two
;; 32-bit ops on the low/high register halves after reload.
2184 [(set (match_operand:DI 0 "s_register_operand" "")
2185 (match_operator:DI 6 "logical_binary_operator"
2186 [(match_operand:DI 1 "s_register_operand" "")
2187 (match_operand:DI 2 "s_register_operand" "")]))]
2188 "TARGET_32BIT && reload_completed
2189 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2190 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2191 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2194 operands[3] = gen_highpart (SImode, operands[0]);
2195 operands[0] = gen_lowpart (SImode, operands[0]);
2196 operands[4] = gen_highpart (SImode, operands[1]);
2197 operands[1] = gen_lowpart (SImode, operands[1]);
2198 operands[5] = gen_highpart (SImode, operands[2]);
2199 operands[2] = gen_lowpart (SImode, operands[2]);
2203 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2204 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
;; NOTE(review): the "(define_split" header line is also missing here.
2206 [(set (match_operand:DI 0 "s_register_operand")
2207 (not:DI (match_operand:DI 1 "s_register_operand")))]
2209 [(set (match_dup 0) (not:SI (match_dup 1)))
2210 (set (match_dup 2) (not:SI (match_dup 3)))]
2213 operands[2] = gen_highpart (SImode, operands[0]);
2214 operands[0] = gen_lowpart (SImode, operands[0]);
2215 operands[3] = gen_highpart (SImode, operands[1]);
2216 operands[1] = gen_lowpart (SImode, operands[1]);
;; SImode AND expander.  Constant second operands are handled specially:
;; AND with 255 on ARMv6 becomes a zero-extend, otherwise the constant is
;; either forced into a register or synthesised via arm_split_constant.
;; The Thumb-1 path additionally recognises BIC-able constants and
;; low-bit masks implementable with a shift pair.
;; NOTE(review): several C lines are missing from this extract (dropped
;; braces/else arms); do not edit without consulting upstream arm.md.
2220 (define_expand "andsi3"
2221 [(set (match_operand:SI 0 "s_register_operand")
2222 (and:SI (match_operand:SI 1 "s_register_operand")
2223 (match_operand:SI 2 "reg_or_int_operand")))]
2228 if (CONST_INT_P (operands[2]))
2230 if (INTVAL (operands[2]) == 255 && arm_arch6)
2232 operands[1] = convert_to_mode (QImode, operands[1], 1);
2233 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2237 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2238 operands[2] = force_reg (SImode, operands[2]);
2241 arm_split_constant (AND, SImode, NULL_RTX,
2242 INTVAL (operands[2]), operands[0],
2244 optimize && can_create_pseudo_p ());
2250 else /* TARGET_THUMB1 */
2252 if (!CONST_INT_P (operands[2]))
2254 rtx tmp = force_reg (SImode, operands[2]);
2255 if (rtx_equal_p (operands[0], operands[1]))
2259 operands[2] = operands[1];
2267 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2269 operands[2] = force_reg (SImode,
2270 GEN_INT (~INTVAL (operands[2])));
2272 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2277 for (i = 9; i <= 31; i++)
2279 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2281 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2285 else if ((HOST_WIDE_INT_1 << i) - 1
2286 == ~INTVAL (operands[2]))
2288 rtx shift = GEN_INT (i);
2289 rtx reg = gen_reg_rtx (SImode);
2291 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2292 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2298 operands[2] = force_reg (SImode, operands[2]);
2304 ; ??? Check split length for Thumb-2
;; AND insn with immediate/register alternatives.  A constant that fits
;; neither AND nor BIC immediate encoding is kept until after the insn is
;; matched, then split via arm_split_constant (hence the 16-byte worst-case
;; length in the final alternative).
2305 (define_insn_and_split "*arm_andsi3_insn"
2306 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2307 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2308 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2313 bic%?\\t%0, %1, #%B2
2317 && CONST_INT_P (operands[2])
2318 && !(const_ok_for_arm (INTVAL (operands[2]))
2319 || const_ok_for_arm (~INTVAL (operands[2])))"
2320 [(clobber (const_int 0))]
2322 arm_split_constant (AND, SImode, curr_insn,
2323 INTVAL (operands[2]), operands[0], operands[1], 0);
2326 [(set_attr "length" "4,4,4,4,16")
2327 (set_attr "predicable" "yes")
2328 (set_attr "predicable_short_it" "no,yes,no,no,no")
2329 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
;; AND that also sets the condition codes (CC_NOOV: overflow not valid).
;; The "K" alternative uses BICS with the inverted immediate (#%B2).
2332 (define_insn "*andsi3_compare0"
2333 [(set (reg:CC_NOOV CC_REGNUM)
2335 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2336 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2338 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2339 (and:SI (match_dup 1) (match_dup 2)))]
2343 bics%?\\t%0, %1, #%B2
2344 ands%?\\t%0, %1, %2"
2345 [(set_attr "conds" "set")
2346 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; As above, but only the flags are wanted; the AND result goes to a
;; scratch (or nowhere for the TST-style alternatives).
2349 (define_insn "*andsi3_compare0_scratch"
2350 [(set (reg:CC_NOOV CC_REGNUM)
2352 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2353 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2355 (clobber (match_scratch:SI 2 "=X,r,X"))]
2359 bics%?\\t%2, %0, #%B1
2361 [(set_attr "conds" "set")
2362 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Test a bitfield against zero with TST: the (width, start) pair is
;; converted at output time into a contiguous mask immediate.  The insn
;; condition limits the field so the mask is encodable as an ARM immediate
;; (see the width+start <= 32 / parity checks).
2365 (define_insn "*zeroextractsi_compare0_scratch"
2366 [(set (reg:CC_NOOV CC_REGNUM)
2367 (compare:CC_NOOV (zero_extract:SI
2368 (match_operand:SI 0 "s_register_operand" "r")
2369 (match_operand 1 "const_int_operand" "n")
2370 (match_operand 2 "const_int_operand" "n"))
2373 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2374 && INTVAL (operands[1]) > 0
2375 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2376 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2378 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2379 << INTVAL (operands[2]))
2380 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2383 [(set_attr "conds" "set")
2384 (set_attr "predicable" "yes")
2385 (set_attr "type" "logics_imm")]
;; Set operand 0 to (bitfield != 0).  Split into ANDS with the field mask
;; followed by a conditional move of 1, clobbering the flags.  The split
;; condition repeats the match condition, as required for
;; define_insn_and_split.
2388 (define_insn_and_split "*ne_zeroextractsi"
2389 [(set (match_operand:SI 0 "s_register_operand" "=r")
2390 (ne:SI (zero_extract:SI
2391 (match_operand:SI 1 "s_register_operand" "r")
2392 (match_operand:SI 2 "const_int_operand" "n")
2393 (match_operand:SI 3 "const_int_operand" "n"))
2395 (clobber (reg:CC CC_REGNUM))]
2397 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2398 && INTVAL (operands[2]) > 0
2399 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2400 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2403 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2404 && INTVAL (operands[2]) > 0
2405 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2406 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2407 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2408 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2410 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2412 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2413 (match_dup 0) (const_int 1)))]
2415 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2416 << INTVAL (operands[3]));
2418 [(set_attr "conds" "clob")
2419 (set (attr "length")
2420 (if_then_else (eq_attr "is_thumb" "yes")
2423 (set_attr "type" "multiple")]
;; Variant for a bitfield that ends at bit 31: the extract is done with a
;; single left shift (operand 2 is rewritten to 32 - width in the split).
2426 (define_insn_and_split "*ne_zeroextractsi_shifted"
2427 [(set (match_operand:SI 0 "s_register_operand" "=r")
2428 (ne:SI (zero_extract:SI
2429 (match_operand:SI 1 "s_register_operand" "r")
2430 (match_operand:SI 2 "const_int_operand" "n")
2433 (clobber (reg:CC CC_REGNUM))]
2437 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2438 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2440 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2442 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2443 (match_dup 0) (const_int 1)))]
2445 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2447 [(set_attr "conds" "clob")
2448 (set_attr "length" "8")
2449 (set_attr "type" "multiple")]
;; if_then_else on (bitfield != 0): split into ANDS with the field mask,
;; then a conditional move selecting operand 4 when the field is zero.
;; Operand 0 must not overlap operand 4 because the AND result is written
;; to operand 0 before the conditional move reads operand 4.
2452 (define_insn_and_split "*ite_ne_zeroextractsi"
2453 [(set (match_operand:SI 0 "s_register_operand" "=r")
2454 (if_then_else:SI (ne (zero_extract:SI
2455 (match_operand:SI 1 "s_register_operand" "r")
2456 (match_operand:SI 2 "const_int_operand" "n")
2457 (match_operand:SI 3 "const_int_operand" "n"))
2459 (match_operand:SI 4 "arm_not_operand" "rIK")
2461 (clobber (reg:CC CC_REGNUM))]
2463 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2464 && INTVAL (operands[2]) > 0
2465 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2466 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2467 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2470 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2471 && INTVAL (operands[2]) > 0
2472 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2473 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2474 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2475 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2476 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2478 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2480 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2481 (match_dup 0) (match_dup 4)))]
2483 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2484 << INTVAL (operands[3]));
2486 [(set_attr "conds" "clob")
2487 (set_attr "length" "8")
2488 (set_attr "type" "multiple")]
;; As above for a field ending at bit 31, using a left shift instead of a
;; mask (operand 2 becomes 32 - width).
2491 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2492 [(set (match_operand:SI 0 "s_register_operand" "=r")
2493 (if_then_else:SI (ne (zero_extract:SI
2494 (match_operand:SI 1 "s_register_operand" "r")
2495 (match_operand:SI 2 "const_int_operand" "n")
2498 (match_operand:SI 3 "arm_not_operand" "rIK")
2500 (clobber (reg:CC CC_REGNUM))]
2501 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2503 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2504 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2505 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2507 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2509 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2510 (match_dup 0) (match_dup 3)))]
2512 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2514 [(set_attr "conds" "clob")
2515 (set_attr "length" "8")
2516 (set_attr "type" "multiple")]
2519 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
;; Rewrite (op (zero_extract ...) x) / (op (sign_extract ...) x) as a
;; shift-left into the scratch followed by a logical/arithmetic
;; shift-right combined into the operation, avoiding a separate extract.
;; NOTE(review): the "(define_split" header lines are missing from this
;; extract; verify against upstream arm.md.
2521 [(set (match_operand:SI 0 "s_register_operand" "")
2522 (match_operator:SI 1 "shiftable_operator"
2523 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2524 (match_operand:SI 3 "const_int_operand" "")
2525 (match_operand:SI 4 "const_int_operand" ""))
2526 (match_operand:SI 5 "s_register_operand" "")]))
2527 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2529 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2532 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2535 HOST_WIDE_INT temp = INTVAL (operands[3]);
2537 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2538 operands[4] = GEN_INT (32 - temp);
;; Sign-extract variant: same shifts, but the right shift is arithmetic
;; so the field is sign-extended.
2543 [(set (match_operand:SI 0 "s_register_operand" "")
2544 (match_operator:SI 1 "shiftable_operator"
2545 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2546 (match_operand:SI 3 "const_int_operand" "")
2547 (match_operand:SI 4 "const_int_operand" ""))
2548 (match_operand:SI 5 "s_register_operand" "")]))
2549 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2551 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2554 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2557 HOST_WIDE_INT temp = INTVAL (operands[3]);
2559 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2560 operands[4] = GEN_INT (32 - temp);
2564 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2565 ;;; represented by the bitfield, then this will produce incorrect results.
2566 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2567 ;;; which have a real bit-field insert instruction, the truncation happens
2568 ;;; in the bit-field insert instruction itself. Since arm does not have a
2569 ;;; bit-field insert instruction, we would have to emit code here to truncate
2570 ;;; the value before we insert. This loses some of the advantage of having
2571 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Strategy, in order of preference:
;;   - Thumb-2 with unaligned access: store 16/32-bit fields straight to
;;     memory with unaligned store insns;
;;   - Thumb-2 register destination: BFC (insv_zero) for a zero constant,
;;     a single ORR when the field is all-ones and encodable, else BFI
;;     (insv_t2);
;;   - otherwise synthesise the insert with AND/ORR/shift sequences.
;; NOTE(review): many C lines (braces, else arms, FAIL/DONE statements)
;; are missing from this extract; consult upstream arm.md before editing.
2573 (define_expand "insv"
2574 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2575 (match_operand 1 "general_operand")
2576 (match_operand 2 "general_operand"))
2577 (match_operand 3 "reg_or_int_operand"))]
2578 "TARGET_ARM || arm_arch_thumb2"
2581 int start_bit = INTVAL (operands[2]);
2582 int width = INTVAL (operands[1]);
2583 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2584 rtx target, subtarget;
2586 if (arm_arch_thumb2)
2588 if (unaligned_access && MEM_P (operands[0])
2589 && s_register_operand (operands[3], GET_MODE (operands[3]))
2590 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2594 if (BYTES_BIG_ENDIAN)
2595 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2600 base_addr = adjust_address (operands[0], SImode,
2601 start_bit / BITS_PER_UNIT);
2602 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2606 rtx tmp = gen_reg_rtx (HImode);
2608 base_addr = adjust_address (operands[0], HImode,
2609 start_bit / BITS_PER_UNIT);
2610 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2611 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2615 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2617 bool use_bfi = TRUE;
2619 if (CONST_INT_P (operands[3]))
2621 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2625 emit_insn (gen_insv_zero (operands[0], operands[1],
2630 /* See if the set can be done with a single orr instruction.  */
2631 if (val == mask && const_ok_for_arm (val << start_bit))
2637 if (!REG_P (operands[3]))
2638 operands[3] = force_reg (SImode, operands[3]);
2640 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2649 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2652 target = copy_rtx (operands[0]);
2653 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2654 subreg as the final target.  */
2655 if (GET_CODE (target) == SUBREG)
2657 subtarget = gen_reg_rtx (SImode);
2658 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2659 < GET_MODE_SIZE (SImode))
2660 target = SUBREG_REG (target);
2665 if (CONST_INT_P (operands[3]))
2667 /* Since we are inserting a known constant, we may be able to
2668 reduce the number of bits that we have to clear so that
2669 the mask becomes simple.  */
2670 /* ??? This code does not check to see if the new mask is actually
2671 simpler.  It may not be.  */
2672 rtx op1 = gen_reg_rtx (SImode);
2673 /* ??? Truncate operand3 to fit in the bitfield.  See comment before
2674 start of this pattern.  */
2675 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2676 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2678 emit_insn (gen_andsi3 (op1, operands[0],
2679 gen_int_mode (~mask2, SImode)));
2680 emit_insn (gen_iorsi3 (subtarget, op1,
2681 gen_int_mode (op3_value << start_bit, SImode)));
2683 else if (start_bit == 0
2684 && !(const_ok_for_arm (mask)
2685 || const_ok_for_arm (~mask)))
2687 /* A Trick, since we are setting the bottom bits in the word,
2688 we can shift operand[3] up, operand[0] down, OR them together
2689 and rotate the result back again.  This takes 3 insns, and
2690 the third might be mergeable into another op.  */
2691 /* The shift up copes with the possibility that operand[3] is
2692 wider than the bitfield.  */
2693 rtx op0 = gen_reg_rtx (SImode);
2694 rtx op1 = gen_reg_rtx (SImode);
2696 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2697 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2698 emit_insn (gen_iorsi3 (op1, op1, op0));
2699 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2701 else if ((width + start_bit == 32)
2702 && !(const_ok_for_arm (mask)
2703 || const_ok_for_arm (~mask)))
2705 /* Similar trick, but slightly less efficient.  */
2707 rtx op0 = gen_reg_rtx (SImode);
2708 rtx op1 = gen_reg_rtx (SImode);
2710 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2711 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2712 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2713 emit_insn (gen_iorsi3 (subtarget, op1, op0));
/* General case: clear the field with AND(~mask<<start), mask the new
   value, shift it into place and OR it in.  */
2717 rtx op0 = gen_int_mode (mask, SImode);
2718 rtx op1 = gen_reg_rtx (SImode);
2719 rtx op2 = gen_reg_rtx (SImode);
2721 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2723 rtx tmp = gen_reg_rtx (SImode);
2725 emit_insn (gen_movsi (tmp, op0));
2729 /* Mask out any bits in operand[3] that are not needed.  */
2730 emit_insn (gen_andsi3 (op1, operands[3], op0));
2732 if (CONST_INT_P (op0)
2733 && (const_ok_for_arm (mask << start_bit)
2734 || const_ok_for_arm (~(mask << start_bit))))
2736 op0 = gen_int_mode (~(mask << start_bit), SImode);
2737 emit_insn (gen_andsi3 (op2, operands[0], op0));
2741 if (CONST_INT_P (op0))
2743 rtx tmp = gen_reg_rtx (SImode);
2745 emit_insn (gen_movsi (tmp, op0));
2750 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2752 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2756 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2758 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2761 if (subtarget != target)
2763 /* If TARGET is still a SUBREG, then it must be wider than a word,
2764 so we must be careful only to set the subword we were asked to.  */
2765 if (GET_CODE (target) == SUBREG)
2766 emit_move_insn (target, subtarget);
2768 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bitfield to zero (maps to BFC on Thumb-2 -- presumably; the
;; template line is missing from this extract, verify upstream).
2775 (define_insn "insv_zero"
2776 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2777 (match_operand:SI 1 "const_int_M_operand" "M")
2778 (match_operand:SI 2 "const_int_M_operand" "M"))
2782 [(set_attr "length" "4")
2783 (set_attr "predicable" "yes")
2784 (set_attr "type" "bfm")]
;; Insert register operand 3 into the (width, start) field of operand 0
;; with the BFI instruction.
2787 (define_insn "insv_t2"
2788 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2789 (match_operand:SI 1 "const_int_M_operand" "M")
2790 (match_operand:SI 2 "const_int_M_operand" "M"))
2791 (match_operand:SI 3 "s_register_operand" "r"))]
2793 "bfi%?\t%0, %3, %2, %1"
2794 [(set_attr "length" "4")
2795 (set_attr "predicable" "yes")
2796 (set_attr "type" "bfm")]
;; AND-NOT: operand 0 = operand 1 & ~operand 2, i.e. the BIC instruction.
;; Note the swapped operand numbering (operand 2 is the complemented one).
2799 (define_insn "andsi_notsi_si"
2800 [(set (match_operand:SI 0 "s_register_operand" "=r")
2801 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2802 (match_operand:SI 1 "s_register_operand" "r")))]
2804 "bic%?\\t%0, %1, %2"
2805 [(set_attr "predicable" "yes")
2806 (set_attr "type" "logic_reg")]
;; BIC with a shifted second source: operand 0 = operand 1 & ~(op2 SHIFT op3),
;; using the shifter operand (%S4 prints the shift).
2809 (define_insn "andsi_not_shiftsi_si"
2810 [(set (match_operand:SI 0 "s_register_operand" "=r")
2811 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2812 [(match_operand:SI 2 "s_register_operand" "r")
2813 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2814 (match_operand:SI 1 "s_register_operand" "r")))]
2816 "bic%?\\t%0, %1, %2%S4"
2817 [(set_attr "predicable" "yes")
2818 (set_attr "shift" "2")
2819 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2820 (const_string "logic_shift_imm")
2821 (const_string "logic_shift_reg")))]
2824 ;; Shifted bics pattern used to set up CC status register and not reusing
2825 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
2826 ;; does not support shift by register.
;; Flags-only variant: the BICS result goes to a scratch register; only
;; the condition codes are live afterwards.
2827 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2828 [(set (reg:CC_NOOV CC_REGNUM)
2830 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2831 [(match_operand:SI 1 "s_register_operand" "r")
2832 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2833 (match_operand:SI 3 "s_register_operand" "r"))
2835 (clobber (match_scratch:SI 4 "=r"))]
2836 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2837 "bics%?\\t%4, %3, %1%S0"
2838 [(set_attr "predicable" "yes")
2839 (set_attr "conds" "set")
2840 (set_attr "shift" "1")
2841 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2842 (const_string "logic_shift_imm")
2843 (const_string "logic_shift_reg")))]
2846 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2847 ;; getting reused later.
2848 (define_insn "andsi_not_shiftsi_si_scc"
2849 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2851 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2852 [(match_operand:SI 1 "s_register_operand" "r")
2853 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2854 (match_operand:SI 3 "s_register_operand" "r"))
2856 (set (match_operand:SI 4 "s_register_operand" "=r")
2857 (and:SI (not:SI (match_op_dup 0
2861 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2862 "bics%?\\t%4, %3, %1%S0"
2863 [(set_attr "predicable" "yes")
2864 (set_attr "conds" "set")
2865 (set_attr "shift" "1")
2866 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2867 (const_string "logic_shift_imm")
2868 (const_string "logic_shift_reg")))]
;; BICS setting flags and keeping the result in operand 0.
;; NOTE(review): the assembler template line is missing from this extract.
2871 (define_insn "*andsi_notsi_si_compare0"
2872 [(set (reg:CC_NOOV CC_REGNUM)
2874 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2875 (match_operand:SI 1 "s_register_operand" "r"))
2877 (set (match_operand:SI 0 "s_register_operand" "=r")
2878 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2881 [(set_attr "conds" "set")
2882 (set_attr "type" "logics_shift_reg")]
;; Flags-only BICS; the result register is a dead scratch.
2885 (define_insn "*andsi_notsi_si_compare0_scratch"
2886 [(set (reg:CC_NOOV CC_REGNUM)
2888 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2889 (match_operand:SI 1 "s_register_operand" "r"))
2891 (clobber (match_scratch:SI 0 "=r"))]
2894 [(set_attr "conds" "set")
2895 (set_attr "type" "logics_shift_reg")]
;; SImode inclusive-OR expander.  Constants are either forced to a
;; register (when early splitting is not wanted) or synthesised via
;; arm_split_constant; Thumb-1 always uses a register operand and keeps
;; the two-address form by reusing operand 1 when it equals operand 0.
;; NOTE(review): C lines are missing from this extract (dropped braces /
;; else arms); consult upstream arm.md before editing.
2898 (define_expand "iorsi3"
2899 [(set (match_operand:SI 0 "s_register_operand")
2900 (ior:SI (match_operand:SI 1 "s_register_operand")
2901 (match_operand:SI 2 "reg_or_int_operand")))]
2904 if (CONST_INT_P (operands[2]))
2908 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2909 operands[2] = force_reg (SImode, operands[2]);
2912 arm_split_constant (IOR, SImode, NULL_RTX,
2913 INTVAL (operands[2]), operands[0],
2915 optimize && can_create_pseudo_p ());
2919 else /* TARGET_THUMB1 */
2921 rtx tmp = force_reg (SImode, operands[2]);
2922 if (rtx_equal_p (operands[0], operands[1]))
2926 operands[2] = operands[1];
;; ORR insn with immediate/register alternatives; a constant encodable by
;; neither ORR nor (on Thumb-2) ORN is kept and split into a sequence via
;; arm_split_constant, giving the 16-byte worst-case length.
2934 (define_insn_and_split "*iorsi3_insn"
2935 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2936 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2937 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2942 orn%?\\t%0, %1, #%B2
2946 && CONST_INT_P (operands[2])
2947 && !(const_ok_for_arm (INTVAL (operands[2]))
2948 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2949 [(clobber (const_int 0))]
2951 arm_split_constant (IOR, SImode, curr_insn,
2952 INTVAL (operands[2]), operands[0], operands[1], 0);
2955 [(set_attr "length" "4,4,4,4,16")
2956 (set_attr "arch" "32,t2,t2,32,32")
2957 (set_attr "predicable" "yes")
2958 (set_attr "predicable_short_it" "no,yes,no,no,no")
2959 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole-style split: when the OR constant is only encodable inverted,
;; materialise it in a scratch register first, then OR with the register.
;; NOTE(review): the peephole/split header line is missing from this extract.
2963 [(match_scratch:SI 3 "r")
2964 (set (match_operand:SI 0 "arm_general_register_operand" "")
2965 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2966 (match_operand:SI 2 "const_int_operand" "")))]
2968 && !const_ok_for_arm (INTVAL (operands[2]))
2969 && const_ok_for_arm (~INTVAL (operands[2]))"
2970 [(set (match_dup 3) (match_dup 2))
2971 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: inclusive-OR that also sets the condition codes, result kept.
2975 (define_insn "*iorsi3_compare0"
2976 [(set (reg:CC_NOOV CC_REGNUM)
2978 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2979 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2981 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2982 (ior:SI (match_dup 1) (match_dup 2)))]
2984 "orrs%?\\t%0, %1, %2"
2985 [(set_attr "conds" "set")
2986 (set_attr "arch" "*,t2,*")
2987 (set_attr "length" "4,2,4")
2988 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; Flags-only ORRS; the result register is a dead scratch.
2991 (define_insn "*iorsi3_compare0_scratch"
2992 [(set (reg:CC_NOOV CC_REGNUM)
2994 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2995 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2997 (clobber (match_scratch:SI 0 "=r,l,r"))]
2999 "orrs%?\\t%0, %1, %2"
3000 [(set_attr "conds" "set")
3001 (set_attr "arch" "*,t2,*")
3002 (set_attr "length" "4,2,4")
3003 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; SImode exclusive-OR expander; constant handling mirrors iorsi3
;; (force to register or synthesise via arm_split_constant; Thumb-1
;; keeps the two-address form).
;; NOTE(review): C lines are missing from this extract; consult upstream.
3006 (define_expand "xorsi3"
3007 [(set (match_operand:SI 0 "s_register_operand")
3008 (xor:SI (match_operand:SI 1 "s_register_operand")
3009 (match_operand:SI 2 "reg_or_int_operand")))]
3011 "if (CONST_INT_P (operands[2]))
3015 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3016 operands[2] = force_reg (SImode, operands[2]);
3019 arm_split_constant (XOR, SImode, NULL_RTX,
3020 INTVAL (operands[2]), operands[0],
3022 optimize && can_create_pseudo_p ());
3026 else /* TARGET_THUMB1 */
3028 rtx tmp = force_reg (SImode, operands[2]);
3029 if (rtx_equal_p (operands[0], operands[1]))
3033 operands[2] = operands[1];
;; EOR insn; constants not encodable as an EOR immediate are split via
;; arm_split_constant (16-byte worst-case final alternative).
3040 (define_insn_and_split "*arm_xorsi3"
3041 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3042 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3043 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3051 && CONST_INT_P (operands[2])
3052 && !const_ok_for_arm (INTVAL (operands[2]))"
3053 [(clobber (const_int 0))]
3055 arm_split_constant (XOR, SImode, curr_insn,
3056 INTVAL (operands[2]), operands[0], operands[1], 0);
3059 [(set_attr "length" "4,4,4,16")
3060 (set_attr "predicable" "yes")
3061 (set_attr "predicable_short_it" "no,yes,no,no")
3062 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; EORS: exclusive-OR that also sets the condition codes, result kept.
3065 (define_insn "*xorsi3_compare0"
3066 [(set (reg:CC_NOOV CC_REGNUM)
3067 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3068 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3070 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3071 (xor:SI (match_dup 1) (match_dup 2)))]
3073 "eors%?\\t%0, %1, %2"
3074 [(set_attr "conds" "set")
3075 (set_attr "type" "logics_imm,logics_reg")]
;; Flags-only variant (TEQ-style -- the template line is missing from
;; this extract; verify upstream).
3078 (define_insn "*xorsi3_compare0_scratch"
3079 [(set (reg:CC_NOOV CC_REGNUM)
3080 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3081 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3085 [(set_attr "conds" "set")
3086 (set_attr "type" "logics_imm,logics_reg")]
3089 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3090 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; Split applying the De Morgan rewrite described above; the scratch
;; operand 4 holds the intermediate D.
;; NOTE(review): the "(define_split" header line is missing from this
;; extract.
3094 [(set (match_operand:SI 0 "s_register_operand" "")
3095 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3096 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3097 (match_operand:SI 3 "arm_rhs_operand" "")))
3098 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3100 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3101 (not:SI (match_dup 3))))
3102 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C as ORR followed by BIC; split after reload.  The NOT is
;; folded into operand 3 when it is a constant so no (not (const_int))
;; rtx is created.
3106 (define_insn_and_split "*andsi_iorsi3_notsi"
3107 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3108 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3109 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3110 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3112 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3113 "&& reload_completed"
3114 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3115 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3117 /* If operands[3] is a constant make sure to fold the NOT into it
3118 to avoid creating a NOT of a CONST_INT.  */
3119 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3120 if (CONST_INT_P (not_rtx))
3122 operands[4] = operands[0];
3123 operands[5] = not_rtx;
3127 operands[5] = operands[0];
3128 operands[4] = not_rtx;
3131 [(set_attr "length" "8")
3132 (set_attr "ce_count" "2")
3133 (set_attr "predicable" "yes")
3134 (set_attr "type" "multiple")]
3137 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3138 ; insns are available?
;; Combine a bitfield extract with a matching shifted logical op: both the
;; zero_extract and the lshiftrt are re-expressed as a shift-left into the
;; scratch (operand 8) followed by a shift-right merged into the logical
;; operation.  Requires both operators to be the same code and the field
;; width to match the shift count (width == 32 - shift).
;; NOTE(review): the "(define_split" header lines are missing from this
;; extract throughout; verify against upstream arm.md.
3140 [(set (match_operand:SI 0 "s_register_operand" "")
3141 (match_operator:SI 1 "logical_binary_operator"
3142 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3143 (match_operand:SI 3 "const_int_operand" "")
3144 (match_operand:SI 4 "const_int_operand" ""))
3145 (match_operator:SI 9 "logical_binary_operator"
3146 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3147 (match_operand:SI 6 "const_int_operand" ""))
3148 (match_operand:SI 7 "s_register_operand" "")])]))
3149 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3151 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3152 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3155 [(ashift:SI (match_dup 2) (match_dup 4))
3159 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3162 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Mirror-image of the previous split with the extract as the second
;; operand of the outer operator.
3166 [(set (match_operand:SI 0 "s_register_operand" "")
3167 (match_operator:SI 1 "logical_binary_operator"
3168 [(match_operator:SI 9 "logical_binary_operator"
3169 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3170 (match_operand:SI 6 "const_int_operand" ""))
3171 (match_operand:SI 7 "s_register_operand" "")])
3172 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3173 (match_operand:SI 3 "const_int_operand" "")
3174 (match_operand:SI 4 "const_int_operand" ""))]))
3175 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3177 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3178 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3181 [(ashift:SI (match_dup 2) (match_dup 4))
3185 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3188 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Sign-extract variant: same rewrite with arithmetic right shifts.
3192 [(set (match_operand:SI 0 "s_register_operand" "")
3193 (match_operator:SI 1 "logical_binary_operator"
3194 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3195 (match_operand:SI 3 "const_int_operand" "")
3196 (match_operand:SI 4 "const_int_operand" ""))
3197 (match_operator:SI 9 "logical_binary_operator"
3198 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3199 (match_operand:SI 6 "const_int_operand" ""))
3200 (match_operand:SI 7 "s_register_operand" "")])]))
3201 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3203 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3204 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3207 [(ashift:SI (match_dup 2) (match_dup 4))
3211 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3214 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3218 [(set (match_operand:SI 0 "s_register_operand" "")
3219 (match_operator:SI 1 "logical_binary_operator"
3220 [(match_operator:SI 9 "logical_binary_operator"
3221 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3222 (match_operand:SI 6 "const_int_operand" ""))
3223 (match_operand:SI 7 "s_register_operand" "")])
3224 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3225 (match_operand:SI 3 "const_int_operand" "")
3226 (match_operand:SI 4 "const_int_operand" ""))]))
3227 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3229 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3230 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3233 [(ashift:SI (match_dup 2) (match_dup 4))
3237 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3240 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3244 ;; Minimum and maximum insns
;; Expand signed SImode max.  The general case needs the condition
;; register, so the set is wrapped in a parallel with a CC clobber.
;; When the second operand is 0 or -1 the result can be formed without
;; touching CC (see the *smax_0 / *smax_m1 insns), so a plain SET is
;; emitted instead.
3246 (define_expand "smaxsi3"
3248   (set (match_operand:SI 0 "s_register_operand")
3249 (smax:SI (match_operand:SI 1 "s_register_operand")
3250 (match_operand:SI 2 "arm_rhs_operand")))
3251 (clobber (reg:CC CC_REGNUM))])]
3254 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3256 /* No need for a clobber of the condition code register here. */
3257 emit_insn (gen_rtx_SET (operands[0],
3258 gen_rtx_SMAX (SImode, operands[1],
;; smax (x, 0) without using the condition register: x asr #31 is
;; all-ones exactly when x is negative, so BIC-ing x with its own sign
;; mask yields 0 for negative x and x unchanged otherwise.
3264 (define_insn "*smax_0"
3265 [(set (match_operand:SI 0 "s_register_operand" "=r")
3266 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3269 "bic%?\\t%0, %1, %1, asr #31"
3270 [(set_attr "predicable" "yes")
3271 (set_attr "type" "logic_shift_reg")]
;; smax (x, -1) without using the condition register: ORR-ing x with
;; its sign mask (x asr #31) gives -1 when x is negative and x
;; unchanged otherwise.
3274 (define_insn "*smax_m1"
3275 [(set (match_operand:SI 0 "s_register_operand" "=r")
3276 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3279 "orr%?\\t%0, %1, %1, asr #31"
3280 [(set_attr "predicable" "yes")
3281 (set_attr "type" "logic_shift_reg")]
3284 (define_insn_and_split "*arm_smax_insn"
3285 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3286 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3287 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3288 (clobber (reg:CC CC_REGNUM))]
3291 ; cmp\\t%1, %2\;movlt\\t%0, %2
3292 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3294 [(set (reg:CC CC_REGNUM)
3295 (compare:CC (match_dup 1) (match_dup 2)))
3297 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3301 [(set_attr "conds" "clob")
3302 (set_attr "length" "8,12")
3303 (set_attr "type" "multiple")]
;; Expand signed SImode min.  Mirrors smaxsi3 above: the general case
;; clobbers CC, but smin (x, 0) has a CC-free form (*smin_0), so that
;; special case is emitted as a plain SET.
3306 (define_expand "sminsi3"
3308   (set (match_operand:SI 0 "s_register_operand")
3309 (smin:SI (match_operand:SI 1 "s_register_operand")
3310 (match_operand:SI 2 "arm_rhs_operand")))
3311 (clobber (reg:CC CC_REGNUM))])]
3314 if (operands[2] == const0_rtx)
3316 /* No need for a clobber of the condition code register here. */
3317 emit_insn (gen_rtx_SET (operands[0],
3318 gen_rtx_SMIN (SImode, operands[1],
;; smin (x, 0) without using the condition register: AND x with its
;; sign mask (x asr #31), giving x when negative and 0 otherwise.
3324 (define_insn "*smin_0"
3325 [(set (match_operand:SI 0 "s_register_operand" "=r")
3326 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3329 "and%?\\t%0, %1, %1, asr #31"
3330 [(set_attr "predicable" "yes")
3331 (set_attr "type" "logic_shift_reg")]
3334 (define_insn_and_split "*arm_smin_insn"
3335 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3336 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3337 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3338 (clobber (reg:CC CC_REGNUM))]
3341 ; cmp\\t%1, %2\;movge\\t%0, %2
3342 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3344 [(set (reg:CC CC_REGNUM)
3345 (compare:CC (match_dup 1) (match_dup 2)))
3347 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3351 [(set_attr "conds" "clob")
3352 (set_attr "length" "8,12")
3353 (set_attr "type" "multiple,multiple")]
3356 (define_expand "umaxsi3"
3358 (set (match_operand:SI 0 "s_register_operand")
3359 (umax:SI (match_operand:SI 1 "s_register_operand")
3360 (match_operand:SI 2 "arm_rhs_operand")))
3361 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned SImode max: a compare followed by conditional moves
;; (movcs/movcc), split after reload into an explicit CC compare and a
;; GEU if_then_else.  The three alternatives handle operand 0 tied to
;; operand 1, tied to operand 2, or neither (the 12-byte form).
;; NOTE(review): "type" "store_4" looks wrong for a compare +
;; conditional-move sequence — "multiple" appears intended; confirm
;; against scheduling descriptions before relying on it.
3366 (define_insn_and_split "*arm_umaxsi3"
3367 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3368 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3369 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3370 (clobber (reg:CC CC_REGNUM))]
3373 ; cmp\\t%1, %2\;movcc\\t%0, %2
3374 ; cmp\\t%1, %2\;movcs\\t%0, %1
3375 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3377 [(set (reg:CC CC_REGNUM)
3378 (compare:CC (match_dup 1) (match_dup 2)))
3380 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3384 [(set_attr "conds" "clob")
3385 (set_attr "length" "8,8,12")
3386 (set_attr "type" "store_4")
3389 (define_expand "uminsi3"
3391 (set (match_operand:SI 0 "s_register_operand")
3392 (umin:SI (match_operand:SI 1 "s_register_operand")
3393 (match_operand:SI 2 "arm_rhs_operand")))
3394 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned SImode min: compare plus conditional moves, split after
;; reload into a CC compare and an LTU if_then_else.  Alternatives
;; mirror *arm_umaxsi3 above.
;; NOTE(review): "type" "store_4" looks wrong for a compare +
;; conditional-move sequence — "multiple" appears intended; confirm.
3399 (define_insn_and_split "*arm_uminsi3"
3400 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3401 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3402 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3403 (clobber (reg:CC CC_REGNUM))]
3406 ; cmp\\t%1, %2\;movcs\\t%0, %2
3407 ; cmp\\t%1, %2\;movcc\\t%0, %1
3408 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3410 [(set (reg:CC CC_REGNUM)
3411 (compare:CC (match_dup 1) (match_dup 2)))
3413 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3417 [(set_attr "conds" "clob")
3418 (set_attr "length" "8,8,12")
3419 (set_attr "type" "store_4")]
3422 (define_insn "*store_minmaxsi"
3423 [(set (match_operand:SI 0 "memory_operand" "=m")
3424 (match_operator:SI 3 "minmax_operator"
3425 [(match_operand:SI 1 "s_register_operand" "r")
3426 (match_operand:SI 2 "s_register_operand" "r")]))
3427 (clobber (reg:CC CC_REGNUM))]
3428 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3430 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3431 operands[1], operands[2]);
3432 output_asm_insn (\"cmp\\t%1, %2\", operands);
3434 output_asm_insn (\"ite\t%d3\", operands);
3435 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3436 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3439 [(set_attr "conds" "clob")
3440 (set (attr "length")
3441 (if_then_else (eq_attr "is_thumb" "yes")
3444 (set_attr "type" "store_4")]
3447 ; Reject the frame pointer in operand[1], since reloading this after
3448 ; it has been eliminated can cause carnage.
3449 (define_insn "*minmax_arithsi"
3450 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3451 (match_operator:SI 4 "shiftable_operator"
3452 [(match_operator:SI 5 "minmax_operator"
3453 [(match_operand:SI 2 "s_register_operand" "r,r")
3454 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3455 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3456 (clobber (reg:CC CC_REGNUM))]
3457 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3460 enum rtx_code code = GET_CODE (operands[4]);
3463 if (which_alternative != 0 || operands[3] != const0_rtx
3464 || (code != PLUS && code != IOR && code != XOR))
3469 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3470 operands[2], operands[3]);
3471 output_asm_insn (\"cmp\\t%2, %3\", operands);
3475 output_asm_insn (\"ite\\t%d5\", operands);
3477 output_asm_insn (\"it\\t%d5\", operands);
3479 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3481 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3484 [(set_attr "conds" "clob")
3485 (set (attr "length")
3486 (if_then_else (eq_attr "is_thumb" "yes")
3489 (set_attr "type" "multiple")]
3492 ; Reject the frame pointer in operand[1], since reloading this after
3493 ; it has been eliminated can cause carnage.
3494 (define_insn_and_split "*minmax_arithsi_non_canon"
3495 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3497 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3498 (match_operator:SI 4 "minmax_operator"
3499 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3500 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3501 (clobber (reg:CC CC_REGNUM))]
3502 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3503 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3505 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3506 [(set (reg:CC CC_REGNUM)
3507 (compare:CC (match_dup 2) (match_dup 3)))
3509 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3511 (minus:SI (match_dup 1)
3513 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3517 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3518 operands[2], operands[3]);
3519 enum rtx_code rc = minmax_code (operands[4]);
3520 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3521 operands[2], operands[3]);
3523 if (mode == CCFPmode || mode == CCFPEmode)
3524 rc = reverse_condition_maybe_unordered (rc);
3526 rc = reverse_condition (rc);
3527 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3528 if (CONST_INT_P (operands[3]))
3529 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3531 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3533 [(set_attr "conds" "clob")
3534 (set (attr "length")
3535 (if_then_else (eq_attr "is_thumb" "yes")
3538 (set_attr "type" "multiple")]
;; Iterators/attributes for the saturation (ssat/usat) patterns below.
;; SAT walks {smin, smax}; SATrev maps each code to its opposite, so a
;; pattern can match either nesting order of the min/max pair.
;; SATlo/SAThi give the operand index that holds the lower/upper
;; saturation bound for each ordering.
3541 (define_code_iterator SAT [smin smax])
3542 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3543 (define_code_attr SATlo [(smin "1") (smax "2")])
3544 (define_code_attr SAThi [(smin "2") (smax "1")])
3546 (define_insn "*satsi_<SAT:code>"
3547 [(set (match_operand:SI 0 "s_register_operand" "=r")
3548 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3549 (match_operand:SI 1 "const_int_operand" "i"))
3550 (match_operand:SI 2 "const_int_operand" "i")))]
3551 "TARGET_32BIT && arm_arch6
3552 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3556 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3557 &mask, &signed_sat))
3560 operands[1] = GEN_INT (mask);
3562 return "ssat%?\t%0, %1, %3";
3564 return "usat%?\t%0, %1, %3";
3566 [(set_attr "predicable" "yes")
3567 (set_attr "type" "alus_imm")]
3570 (define_insn "*satsi_<SAT:code>_shift"
3571 [(set (match_operand:SI 0 "s_register_operand" "=r")
3572 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3573 [(match_operand:SI 4 "s_register_operand" "r")
3574 (match_operand:SI 5 "const_int_operand" "i")])
3575 (match_operand:SI 1 "const_int_operand" "i"))
3576 (match_operand:SI 2 "const_int_operand" "i")))]
3577 "TARGET_32BIT && arm_arch6
3578 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3582 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3583 &mask, &signed_sat))
3586 operands[1] = GEN_INT (mask);
3588 return "ssat%?\t%0, %1, %4%S3";
3590 return "usat%?\t%0, %1, %4%S3";
3592 [(set_attr "predicable" "yes")
3593 (set_attr "shift" "3")
3594 (set_attr "type" "logic_shift_reg")])
3596 ;; Shift and rotation insns
;; 64-bit left shift: lowered onto core-register pairs by
;; arm_emit_coreregs_64bit_shift, which is handed two fresh SImode
;; scratch registers for the intermediate values.
3598 (define_expand "ashldi3"
3599 [(set (match_operand:DI 0 "s_register_operand")
3600 (ashift:DI (match_operand:DI 1 "s_register_operand")
3601 (match_operand:SI 2 "reg_or_int_operand")))]
3604 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3605 operands[2], gen_reg_rtx (SImode),
3606 gen_reg_rtx (SImode));
;; 32-bit left shift.  A constant shift amount greater than 31 always
;; produces zero, so fold that case to a plain move of #0 up front.
3610 (define_expand "ashlsi3"
3611 [(set (match_operand:SI 0 "s_register_operand")
3612 (ashift:SI (match_operand:SI 1 "s_register_operand")
3613 (match_operand:SI 2 "arm_rhs_operand")))]
3616 if (CONST_INT_P (operands[2])
3617 && (UINTVAL (operands[2])) > 31)
3619 emit_insn (gen_movsi (operands[0], const0_rtx));
3625 (define_expand "ashrdi3"
3626 [(set (match_operand:DI 0 "s_register_operand")
3627 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3628 (match_operand:SI 2 "reg_or_int_operand")))]
3631 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3632 operands[2], gen_reg_rtx (SImode),
3633 gen_reg_rtx (SImode));
;; 32-bit arithmetic right shift.  Constant amounts above 31 are
;; clamped to 31: after 31 the result is already all sign bits, so 31
;; gives the same value for every larger amount.
3637 (define_expand "ashrsi3"
3638 [(set (match_operand:SI 0 "s_register_operand")
3639 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3640 (match_operand:SI 2 "arm_rhs_operand")))]
3643 if (CONST_INT_P (operands[2])
3644 && UINTVAL (operands[2]) > 31)
3645 operands[2] = GEN_INT (31);
3649 (define_expand "lshrdi3"
3650 [(set (match_operand:DI 0 "s_register_operand")
3651 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3652 (match_operand:SI 2 "reg_or_int_operand")))]
3655 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3656 operands[2], gen_reg_rtx (SImode),
3657 gen_reg_rtx (SImode));
;; 32-bit logical right shift.  As with ashlsi3, a constant amount
;; greater than 31 always yields zero, so emit a move of #0 directly.
3661 (define_expand "lshrsi3"
3662 [(set (match_operand:SI 0 "s_register_operand")
3663 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3664 (match_operand:SI 2 "arm_rhs_operand")))]
3667 if (CONST_INT_P (operands[2])
3668 && (UINTVAL (operands[2])) > 31)
3670 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Rotate left, implemented as rotate right by the complement amount
;; (the pattern body is rotatert).  Constant amounts become
;; (32 - n) % 32; for a register amount the complement is computed
;; into a scratch register with a subtract from 32.
3676 (define_expand "rotlsi3"
3677 [(set (match_operand:SI 0 "s_register_operand")
3678 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3679 (match_operand:SI 2 "reg_or_int_operand")))]
3682 if (CONST_INT_P (operands[2]))
3683 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3686 rtx reg = gen_reg_rtx (SImode);
3687 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right.  Constant amounts are reduced mod 32; on Thumb-1 a
;; constant amount is forced into a register (the comment in the body
;; marks that branch as the TARGET_THUMB1 case).
3693 (define_expand "rotrsi3"
3694 [(set (match_operand:SI 0 "s_register_operand")
3695 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3696 (match_operand:SI 2 "arm_rhs_operand")))]
3701 if (CONST_INT_P (operands[2])
3702 && UINTVAL (operands[2]) > 31)
3703 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3705 else /* TARGET_THUMB1 */
3707 if (CONST_INT_P (operands [2]))
3708 operands [2] = force_reg (SImode, operands[2]);
3713 (define_insn "*arm_shiftsi3"
3714 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3715 (match_operator:SI 3 "shift_operator"
3716 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3717 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3719 "* return arm_output_shift(operands, 0);"
3720 [(set_attr "predicable" "yes")
3721 (set_attr "arch" "t2,t2,*,*")
3722 (set_attr "predicable_short_it" "yes,yes,no,no")
3723 (set_attr "length" "4")
3724 (set_attr "shift" "1")
3725 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3728 (define_insn "*shiftsi3_compare0"
3729 [(set (reg:CC_NOOV CC_REGNUM)
3730 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3731 [(match_operand:SI 1 "s_register_operand" "r,r")
3732 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3734 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3735 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3737 "* return arm_output_shift(operands, 1);"
3738 [(set_attr "conds" "set")
3739 (set_attr "shift" "1")
3740 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3743 (define_insn "*shiftsi3_compare0_scratch"
3744 [(set (reg:CC_NOOV CC_REGNUM)
3745 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3746 [(match_operand:SI 1 "s_register_operand" "r,r")
3747 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3749 (clobber (match_scratch:SI 0 "=r,r"))]
3751 "* return arm_output_shift(operands, 1);"
3752 [(set_attr "conds" "set")
3753 (set_attr "shift" "1")
3754 (set_attr "type" "shift_imm,shift_reg")]
3757 (define_insn "*not_shiftsi"
3758 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3759 (not:SI (match_operator:SI 3 "shift_operator"
3760 [(match_operand:SI 1 "s_register_operand" "r,r")
3761 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3764 [(set_attr "predicable" "yes")
3765 (set_attr "shift" "1")
3766 (set_attr "arch" "32,a")
3767 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3769 (define_insn "*not_shiftsi_compare0"
3770 [(set (reg:CC_NOOV CC_REGNUM)
3772 (not:SI (match_operator:SI 3 "shift_operator"
3773 [(match_operand:SI 1 "s_register_operand" "r,r")
3774 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3776 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3777 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3779 "mvns%?\\t%0, %1%S3"
3780 [(set_attr "conds" "set")
3781 (set_attr "shift" "1")
3782 (set_attr "arch" "32,a")
3783 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3785 (define_insn "*not_shiftsi_compare0_scratch"
3786 [(set (reg:CC_NOOV CC_REGNUM)
3788 (not:SI (match_operator:SI 3 "shift_operator"
3789 [(match_operand:SI 1 "s_register_operand" "r,r")
3790 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3792 (clobber (match_scratch:SI 0 "=r,r"))]
3794 "mvns%?\\t%0, %1%S3"
3795 [(set_attr "conds" "set")
3796 (set_attr "shift" "1")
3797 (set_attr "arch" "32,a")
3798 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3800 ;; We don't really have extzv, but defining this using shifts helps
3801 ;; to reduce register pressure later on.
3803 (define_expand "extzv"
3804 [(set (match_operand 0 "s_register_operand")
3805 (zero_extract (match_operand 1 "nonimmediate_operand")
3806 (match_operand 2 "const_int_operand")
3807 (match_operand 3 "const_int_operand")))]
3808 "TARGET_THUMB1 || arm_arch_thumb2"
3811 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3812 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3814 if (arm_arch_thumb2)
3816 HOST_WIDE_INT width = INTVAL (operands[2]);
3817 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3819 if (unaligned_access && MEM_P (operands[1])
3820 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3824 if (BYTES_BIG_ENDIAN)
3825 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3830 base_addr = adjust_address (operands[1], SImode,
3831 bitpos / BITS_PER_UNIT);
3832 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3836 rtx dest = operands[0];
3837 rtx tmp = gen_reg_rtx (SImode);
3839 /* We may get a paradoxical subreg here. Strip it off. */
3840 if (GET_CODE (dest) == SUBREG
3841 && GET_MODE (dest) == SImode
3842 && GET_MODE (SUBREG_REG (dest)) == HImode)
3843 dest = SUBREG_REG (dest);
3845 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3848 base_addr = adjust_address (operands[1], HImode,
3849 bitpos / BITS_PER_UNIT);
3850 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3851 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3855 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3857 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3865 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3868 operands[3] = GEN_INT (rshift);
3872 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3876 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3877 operands[3], gen_reg_rtx (SImode)));
3882 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3884 (define_expand "extzv_t1"
3885 [(set (match_operand:SI 4 "s_register_operand")
3886 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3887 (match_operand:SI 2 "const_int_operand")))
3888 (set (match_operand:SI 0 "s_register_operand")
3889 (lshiftrt:SI (match_dup 4)
3890 (match_operand:SI 3 "const_int_operand")))]
3894 (define_expand "extv"
3895 [(set (match_operand 0 "s_register_operand")
3896 (sign_extract (match_operand 1 "nonimmediate_operand")
3897 (match_operand 2 "const_int_operand")
3898 (match_operand 3 "const_int_operand")))]
3901 HOST_WIDE_INT width = INTVAL (operands[2]);
3902 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3904 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3905 && (bitpos % BITS_PER_UNIT) == 0)
3909 if (BYTES_BIG_ENDIAN)
3910 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3914 base_addr = adjust_address (operands[1], SImode,
3915 bitpos / BITS_PER_UNIT);
3916 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3920 rtx dest = operands[0];
3921 rtx tmp = gen_reg_rtx (SImode);
3923 /* We may get a paradoxical subreg here. Strip it off. */
3924 if (GET_CODE (dest) == SUBREG
3925 && GET_MODE (dest) == SImode
3926 && GET_MODE (SUBREG_REG (dest)) == HImode)
3927 dest = SUBREG_REG (dest);
3929 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3932 base_addr = adjust_address (operands[1], HImode,
3933 bitpos / BITS_PER_UNIT);
3934 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3935 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3940 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3942 else if (GET_MODE (operands[0]) == SImode
3943 && GET_MODE (operands[1]) == SImode)
3945 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3953 ; Helper to expand register forms of extv with the proper modes.
3955 (define_expand "extv_regsi"
3956 [(set (match_operand:SI 0 "s_register_operand")
3957 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3958 (match_operand 2 "const_int_operand")
3959 (match_operand 3 "const_int_operand")))]
3964 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3966 (define_insn "unaligned_loaddi"
3967 [(set (match_operand:DI 0 "s_register_operand" "=r")
3968 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3969 UNSPEC_UNALIGNED_LOAD))]
3970 "TARGET_32BIT && TARGET_LDRD"
3972 return output_move_double (operands, true, NULL);
3974 [(set_attr "length" "8")
3975 (set_attr "type" "load_8")])
3977 (define_insn "unaligned_loadsi"
3978 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3979 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3980 UNSPEC_UNALIGNED_LOAD))]
3983 ldr\t%0, %1\t@ unaligned
3984 ldr%?\t%0, %1\t@ unaligned
3985 ldr%?\t%0, %1\t@ unaligned"
3986 [(set_attr "arch" "t1,t2,32")
3987 (set_attr "length" "2,2,4")
3988 (set_attr "predicable" "no,yes,yes")
3989 (set_attr "predicable_short_it" "no,yes,no")
3990 (set_attr "type" "load_4")])
3992 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3993 ;; address (there's no immediate format). That's tricky to support
3994 ;; here and we don't really need this pattern for that case, so only
3995 ;; enable for 32-bit ISAs.
3996 (define_insn "unaligned_loadhis"
3997 [(set (match_operand:SI 0 "s_register_operand" "=r")
3999 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4000 UNSPEC_UNALIGNED_LOAD)))]
4001 "unaligned_access && TARGET_32BIT"
4002 "ldrsh%?\t%0, %1\t@ unaligned"
4003 [(set_attr "predicable" "yes")
4004 (set_attr "type" "load_byte")])
4006 (define_insn "unaligned_loadhiu"
4007 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4009 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4010 UNSPEC_UNALIGNED_LOAD)))]
4013 ldrh\t%0, %1\t@ unaligned
4014 ldrh%?\t%0, %1\t@ unaligned
4015 ldrh%?\t%0, %1\t@ unaligned"
4016 [(set_attr "arch" "t1,t2,32")
4017 (set_attr "length" "2,2,4")
4018 (set_attr "predicable" "no,yes,yes")
4019 (set_attr "predicable_short_it" "no,yes,no")
4020 (set_attr "type" "load_byte")])
4022 (define_insn "unaligned_storedi"
4023 [(set (match_operand:DI 0 "memory_operand" "=m")
4024 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4025 UNSPEC_UNALIGNED_STORE))]
4026 "TARGET_32BIT && TARGET_LDRD"
4028 return output_move_double (operands, true, NULL);
4030 [(set_attr "length" "8")
4031 (set_attr "type" "store_8")])
4033 (define_insn "unaligned_storesi"
4034 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4035 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4036 UNSPEC_UNALIGNED_STORE))]
4039 str\t%1, %0\t@ unaligned
4040 str%?\t%1, %0\t@ unaligned
4041 str%?\t%1, %0\t@ unaligned"
4042 [(set_attr "arch" "t1,t2,32")
4043 (set_attr "length" "2,2,4")
4044 (set_attr "predicable" "no,yes,yes")
4045 (set_attr "predicable_short_it" "no,yes,no")
4046 (set_attr "type" "store_4")])
4048 (define_insn "unaligned_storehi"
4049 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4050 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4051 UNSPEC_UNALIGNED_STORE))]
4054 strh\t%1, %0\t@ unaligned
4055 strh%?\t%1, %0\t@ unaligned
4056 strh%?\t%1, %0\t@ unaligned"
4057 [(set_attr "arch" "t1,t2,32")
4058 (set_attr "length" "2,2,4")
4059 (set_attr "predicable" "no,yes,yes")
4060 (set_attr "predicable_short_it" "no,yes,no")
4061 (set_attr "type" "store_4")])
;; Signed bit-field extract as a single SBFX.  Operand 3 is the start
;; bit (must be 0..31) and operand 2 the field width (1 ..
;; 32 - start), matching the encoding limits of sbfx.
4064 (define_insn "*extv_reg"
4065 [(set (match_operand:SI 0 "s_register_operand" "=r")
4066 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4067 (match_operand:SI 2 "const_int_operand" "n")
4068 (match_operand:SI 3 "const_int_operand" "n")))]
4070 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4071 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4072 "sbfx%?\t%0, %1, %3, %2"
4073 [(set_attr "length" "4")
4074 (set_attr "predicable" "yes")
4075 (set_attr "type" "bfm")]
;; Unsigned bit-field extract as a single UBFX.  Same start/width
;; encoding limits as the sbfx pattern: start 0..31, width
;; 1 .. 32 - start.
4078 (define_insn "extzv_t2"
4079 [(set (match_operand:SI 0 "s_register_operand" "=r")
4080 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4081 (match_operand:SI 2 "const_int_operand" "n")
4082 (match_operand:SI 3 "const_int_operand" "n")))]
4084 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4085 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4086 "ubfx%?\t%0, %1, %3, %2"
4087 [(set_attr "length" "4")
4088 (set_attr "predicable" "yes")
4089 (set_attr "type" "bfm")]
4093 ;; Division instructions
;; Hardware signed divide (type "sdiv").  Two alternatives selected by
;; the "arch" attribute: generic 32-bit ("32") and ARMv8-M Baseline
;; ("v8mb").
4094 (define_insn "divsi3"
4095 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4096 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4097 (match_operand:SI 2 "s_register_operand" "r,r")))]
4102 [(set_attr "arch" "32,v8mb")
4103 (set_attr "predicable" "yes")
4104 (set_attr "type" "sdiv")]
;; Hardware unsigned divide (type "udiv").  Alternatives mirror
;; divsi3: generic 32-bit and ARMv8-M Baseline.
4107 (define_insn "udivsi3"
4108 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4109 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4110 (match_operand:SI 2 "s_register_operand" "r,r")))]
4115 [(set_attr "arch" "32,v8mb")
4116 (set_attr "predicable" "yes")
4117 (set_attr "type" "udiv")]
4121 ;; Unary arithmetic insns
4123 (define_expand "negvsi3"
4124 [(match_operand:SI 0 "register_operand")
4125 (match_operand:SI 1 "register_operand")
4126 (match_operand 2 "")]
4129 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
4130 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4135 (define_expand "negvdi3"
4136 [(match_operand:DI 0 "register_operand")
4137 (match_operand:DI 1 "register_operand")
4138 (match_operand 2 "")]
4141 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
4142 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4148 (define_insn_and_split "negdi2_compare"
4149 [(set (reg:CC CC_REGNUM)
4152 (match_operand:DI 1 "register_operand" "0,r")))
4153 (set (match_operand:DI 0 "register_operand" "=r,&r")
4154 (minus:DI (const_int 0) (match_dup 1)))]
4157 "&& reload_completed"
4158 [(parallel [(set (reg:CC CC_REGNUM)
4159 (compare:CC (const_int 0) (match_dup 1)))
4160 (set (match_dup 0) (minus:SI (const_int 0)
4162 (parallel [(set (reg:CC CC_REGNUM)
4163 (compare:CC (const_int 0) (match_dup 3)))
4166 (minus:SI (const_int 0) (match_dup 3))
4167 (ltu:SI (reg:CC CC_REGNUM)
4170 operands[2] = gen_highpart (SImode, operands[0]);
4171 operands[0] = gen_lowpart (SImode, operands[0]);
4172 operands[3] = gen_highpart (SImode, operands[1]);
4173 operands[1] = gen_lowpart (SImode, operands[1]);
4175 [(set_attr "conds" "set")
4176 (set_attr "length" "8")
4177 (set_attr "type" "multiple")]
4180 (define_expand "negdi2"
4182 [(set (match_operand:DI 0 "s_register_operand")
4183 (neg:DI (match_operand:DI 1 "s_register_operand")))
4184 (clobber (reg:CC CC_REGNUM))])]
4188 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4189 ;; The first alternative allows the common case of a *full* overlap.
4190 (define_insn_and_split "*negdi2_insn"
4191 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4192 (neg:DI (match_operand:DI 1 "s_register_operand" "0,r")))
4193 (clobber (reg:CC CC_REGNUM))]
4195 "#" ; rsbs %Q0, %Q1, #0; rsc %R0, %R1, #0 (ARM)
4196 ; negs %Q0, %Q1 ; sbc %R0, %R1, %R1, lsl #1 (Thumb-2)
4198 [(parallel [(set (reg:CC CC_REGNUM)
4199 (compare:CC (const_int 0) (match_dup 1)))
4200 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4201 (set (match_dup 2) (minus:SI (minus:SI (const_int 0) (match_dup 3))
4202 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
4204 operands[2] = gen_highpart (SImode, operands[0]);
4205 operands[0] = gen_lowpart (SImode, operands[0]);
4206 operands[3] = gen_highpart (SImode, operands[1]);
4207 operands[1] = gen_lowpart (SImode, operands[1]);
4209 [(set_attr "conds" "clob")
4210 (set_attr "length" "8")
4211 (set_attr "type" "multiple")]
4214 (define_insn "*negsi2_carryin_compare"
4215 [(set (reg:CC CC_REGNUM)
4216 (compare:CC (const_int 0)
4217 (match_operand:SI 1 "s_register_operand" "r")))
4218 (set (match_operand:SI 0 "s_register_operand" "=r")
4219 (minus:SI (minus:SI (const_int 0)
4221 (match_operand:SI 2 "arm_borrow_operation" "")))]
4224 [(set_attr "conds" "set")
4225 (set_attr "type" "alus_imm")]
4228 (define_expand "negsi2"
4229 [(set (match_operand:SI 0 "s_register_operand")
4230 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; SImode negate as RSB %0, %1, #0.  Two alternatives: a low-register
;; Thumb-2 form (arch "t2", short-IT predicable) and the generic form.
4235 (define_insn "*arm_negsi2"
4236 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4237 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4239 "rsb%?\\t%0, %1, #0"
4240 [(set_attr "predicable" "yes")
4241 (set_attr "predicable_short_it" "yes,no")
4242 (set_attr "arch" "t2,*")
4243 (set_attr "length" "4")
4244 (set_attr "type" "alu_sreg")]
4247 (define_expand "negsf2"
4248 [(set (match_operand:SF 0 "s_register_operand")
4249 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4250 "TARGET_32BIT && TARGET_HARD_FLOAT"
4254 (define_expand "negdf2"
4255 [(set (match_operand:DF 0 "s_register_operand")
4256 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4257 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4260 (define_insn_and_split "*zextendsidi_negsi"
4261 [(set (match_operand:DI 0 "s_register_operand" "=r")
4262 (zero_extend:DI (neg:SI (match_operand:SI 1 "s_register_operand" "r"))))]
4267 (neg:SI (match_dup 1)))
4271 operands[2] = gen_lowpart (SImode, operands[0]);
4272 operands[3] = gen_highpart (SImode, operands[0]);
4274 [(set_attr "length" "8")
4275 (set_attr "type" "multiple")]
;; Negate a sign-extended 32-bit value to produce a DImode result.
;; Split after reload into explicit low/high SImode operations, choosing a
;; sequence based on whether the input register overlaps the low word of
;; the output:
;;  - overlap case: derive the high word from an arithmetic shift of the
;;    input, negate the low word with a flag-setting subtract from zero,
;;    then fold the borrow into the high word (RSC on ARM; the Thumb-2
;;    variant uses the shifted-high form, per the comment at 4297);
;;  - no-overlap case: plain negate of the low word, then compute the high
;;    word without needing the condition flags.
;; NOTE(review): many interior RTL-construction lines are elided in this
;; extract, so the exact emitted expressions cannot be fully verified here.
4278 ;; Negate an extended 32-bit value.
4279 (define_insn_and_split "*negdi_extendsidi"
4280 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
4281 (neg:DI (sign_extend:DI
4282 (match_operand:SI 1 "s_register_operand" "l,r"))))
4283 (clobber (reg:CC CC_REGNUM))]
4286 "&& reload_completed"
4289 rtx low = gen_lowpart (SImode, operands[0]);
4290 rtx high = gen_highpart (SImode, operands[0]);
4292 if (reg_overlap_mentioned_p (low, operands[1]))
4294 /* Input overlaps the low word of the output.  Use:
4297 rsc Rhi, Rhi, #0 (thumb2: sbc Rhi, Rhi, Rhi, lsl #1).  */
4298 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
4300 emit_insn (gen_rtx_SET (high,
4301 gen_rtx_ASHIFTRT (SImode, operands[1],
4304 emit_insn (gen_subsi3_compare (low, const0_rtx, operands[1]));
4306 emit_insn (gen_rtx_SET (high,
4307 gen_rtx_MINUS (SImode,
4308 gen_rtx_MINUS (SImode,
4311 gen_rtx_LTU (SImode,
4316 rtx two_x = gen_rtx_ASHIFT (SImode, high, GEN_INT (1));
4317 emit_insn (gen_rtx_SET (high,
4318 gen_rtx_MINUS (SImode,
4319 gen_rtx_MINUS (SImode,
4322 gen_rtx_LTU (SImode,
4329 /* No overlap, or overlap on high word.  Use:
4333 Flags not needed for this sequence.  */
4334 emit_insn (gen_rtx_SET (low, gen_rtx_NEG (SImode, operands[1])));
4335 emit_insn (gen_rtx_SET (high,
4336 gen_rtx_AND (SImode,
4337 gen_rtx_NOT (SImode, operands[1]),
4339 emit_insn (gen_rtx_SET (high,
4340 gen_rtx_ASHIFTRT (SImode, high,
4345 [(set_attr "length" "12")
4346 (set_attr "arch" "t2,*")
4347 (set_attr "type" "multiple")]
;; Negate a zero-extended 32-bit value to produce a DImode result.
;; Splits after reload into RSBS (low = 0 - op1, setting flags) followed by
;; SBC of a register with itself, which yields 0 or -1 in the high word
;; depending on the borrow.  As the comment notes, the SBC source register
;; is irrelevant — only the propagated carry matters, hence the
;; (minus (minus r r) (ltu ...)) form on the high part.
4350 (define_insn_and_split "*negdi_zero_extendsidi"
4351 [(set (match_operand:DI 0 "s_register_operand" "=r,&r")
4352 (neg:DI (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))))
4353 (clobber (reg:CC CC_REGNUM))]
4355 "#" ; "rsbs\\t%Q0, %1, #0\;sbc\\t%R0,%R0,%R0"
4356 ;; Don't care what register is input to sbc,
4357 ;; since we just need to propagate the carry.
4358 "&& reload_completed"
4359 [(parallel [(set (reg:CC CC_REGNUM)
4360 (compare:CC (const_int 0) (match_dup 1)))
4361 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1)))])
4362 (set (match_dup 2) (minus:SI (minus:SI (match_dup 2) (match_dup 2))
4363 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))]
4365 operands[2] = gen_highpart (SImode, operands[0]);
4366 operands[0] = gen_lowpart (SImode, operands[0]);
4368 [(set_attr "conds" "clob")
4369 (set_attr "length" "8")
4370 (set_attr "type" "multiple")] ;; length in thumb is 4
;; abssi2 expander.  Conservatively models a clobber: depending on a
;; condition elided from this extract, operands[2] is either a mere
;; SImode scratch or the real CC register, matching the comment above
;; about pretending the flags are clobbered during RTL manipulation.
4373 ;; abssi2 doesn't really clobber the condition codes if a different register
4374 ;; is being set.  To keep things simple, assume during rtl manipulations that
4375 ;; it does, but tell the final scan operator the truth.  Similarly for
4378 (define_expand "abssi2"
4380 [(set (match_operand:SI 0 "s_register_operand")
4381 (abs:SI (match_operand:SI 1 "s_register_operand")))
4382 (clobber (match_dup 2))])]
4386 operands[2] = gen_rtx_SCRATCH (SImode);
4388 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; Absolute value of an SImode register.  Split after reload into one of
;; two sequences chosen by whether source and destination are the same
;; register (the REGNO test mirrors which_alternative == 0):
;;  - same reg:  cmp %0, #0 ; rsblt %0, %0, #0  (conditional negate,
;;    clobbers the flags — alternative 0, conds "clob");
;;  - different: eor %0, %1, %1, asr #31 ; sub %0, %0, %1, asr #31
;;    (branch-free abs via sign mask, flags untouched, predicable —
;;    alternative 1).
;; NOTE(review): some nested gen_rtx_* argument lines are elided in this
;; extract; the inline RTL sketches in the comments (4405-4408, 4427-4431)
;; document the intended patterns.
4391 (define_insn_and_split "*arm_abssi2"
4392 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4393 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4394 (clobber (reg:CC CC_REGNUM))]
4397 "&& reload_completed"
4400 /* if (which_alternative == 0) */
4401 if (REGNO(operands[0]) == REGNO(operands[1]))
4403 /* Emit the pattern:
4404 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4405 [(set (reg:CC CC_REGNUM)
4406 (compare:CC (match_dup 0) (const_int 0)))
4407 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4408 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4410 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4411 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4412 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4413 (gen_rtx_LT (SImode,
4414 gen_rtx_REG (CCmode, CC_REGNUM),
4416 (gen_rtx_SET (operands[0],
4417 (gen_rtx_MINUS (SImode,
4424 /* Emit the pattern:
4425 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4427 (xor:SI (match_dup 1)
4428 (ashiftrt:SI (match_dup 1) (const_int 31))))
4430 (minus:SI (match_dup 0)
4431 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4433 emit_insn (gen_rtx_SET (operands[0],
4434 gen_rtx_XOR (SImode,
4435 gen_rtx_ASHIFTRT (SImode,
4439 emit_insn (gen_rtx_SET (operands[0],
4440 gen_rtx_MINUS (SImode,
4442 gen_rtx_ASHIFTRT (SImode,
4448 [(set_attr "conds" "clob,*")
4449 (set_attr "shift" "1")
4450 (set_attr "predicable" "no, yes")
4451 (set_attr "length" "8")
4452 (set_attr "type" "multiple")]
;; Negated absolute value, -abs(x).  Structure parallels *arm_abssi2 above
;; but with the comparison sense flipped:
;;  - same reg:  cmp %0, #0 ; rsbgt %0, %0, #0 (negate when positive);
;;  - different: eor %0, %1, %1, asr #31 ; rsb %0, %0, %1, asr #31.
;; NOTE(review): nested gen_rtx_* argument lines are elided in this
;; extract — confirm the emitted RTL against the full arm.md.
4455 (define_insn_and_split "*arm_neg_abssi2"
4456 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4457 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4458 (clobber (reg:CC CC_REGNUM))]
4461 "&& reload_completed"
4464 /* if (which_alternative == 0) */
4465 if (REGNO (operands[0]) == REGNO (operands[1]))
4467 /* Emit the pattern:
4468 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4470 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4471 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4472 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4474 gen_rtx_REG (CCmode, CC_REGNUM),
4476 gen_rtx_SET (operands[0],
4477 (gen_rtx_MINUS (SImode,
4483 /* Emit the pattern:
4484 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4486 emit_insn (gen_rtx_SET (operands[0],
4487 gen_rtx_XOR (SImode,
4488 gen_rtx_ASHIFTRT (SImode,
4492 emit_insn (gen_rtx_SET (operands[0],
4493 gen_rtx_MINUS (SImode,
4494 gen_rtx_ASHIFTRT (SImode,
4501 [(set_attr "conds" "clob,*")
4502 (set_attr "shift" "1")
4503 (set_attr "predicable" "no, yes")
4504 (set_attr "length" "8")
4505 (set_attr "type" "multiple")]
;; Floating-point abs and sqrt expanders.  All require hard float; the
;; DFmode variants also require double-precision VFP (note absdf2 is gated
;; on !TARGET_VFP_SINGLE while sqrtdf2 uses TARGET_VFP_DOUBLE — presumably
;; equivalent conditions here; the matching insns live in vfp.md).
4508 (define_expand "abssf2"
4509 [(set (match_operand:SF 0 "s_register_operand")
4510 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4511 "TARGET_32BIT && TARGET_HARD_FLOAT"
4514 (define_expand "absdf2"
4515 [(set (match_operand:DF 0 "s_register_operand")
4516 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4517 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4520 (define_expand "sqrtsf2"
4521 [(set (match_operand:SF 0 "s_register_operand")
4522 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4523 "TARGET_32BIT && TARGET_HARD_FLOAT"
4526 (define_expand "sqrtdf2"
4527 [(set (match_operand:DF 0 "s_register_operand")
4528 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4529 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; Bitwise NOT (one's complement) of an SImode value.
;; one_cmplsi2: standard-named expander; *arm_one_cmplsi2 is the plain MVN
;; form (16-bit Thumb-2 alternative available via arch t2).
;; *notsi_compare0 / *notsi_compare0_scratch are flag-setting MVNS variants
;; comparing the complemented value against zero, with and without keeping
;; the result (the latter only clobbers a scratch).
;; NOTE(review): output templates and some condition lines are elided in
;; this extract.
4532 (define_expand "one_cmplsi2"
4533 [(set (match_operand:SI 0 "s_register_operand")
4534 (not:SI (match_operand:SI 1 "s_register_operand")))]
4539 (define_insn "*arm_one_cmplsi2"
4540 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4541 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4544 [(set_attr "predicable" "yes")
4545 (set_attr "predicable_short_it" "yes,no")
4546 (set_attr "arch" "t2,*")
4547 (set_attr "length" "4")
4548 (set_attr "type" "mvn_reg")]
4551 (define_insn "*notsi_compare0"
4552 [(set (reg:CC_NOOV CC_REGNUM)
4553 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4555 (set (match_operand:SI 0 "s_register_operand" "=r")
4556 (not:SI (match_dup 1)))]
4559 [(set_attr "conds" "set")
4560 (set_attr "type" "mvn_reg")]
4563 (define_insn "*notsi_compare0_scratch"
4564 [(set (reg:CC_NOOV CC_REGNUM)
4565 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4567 (clobber (match_scratch:SI 0 "=r"))]
4570 [(set_attr "conds" "set")
4571 (set_attr "type" "mvn_reg")]
;; Integer -> half-float conversions.  No direct SI/DI -> HF instruction is
;; used here: both expanders convert to SFmode first (expand_float), then
;; narrow SF -> HF with convert_to_mode and move the result into place.
4574 ;; Fixed <--> Floating conversion insns
4576 (define_expand "floatsihf2"
4577 [(set (match_operand:HF 0 "general_operand")
4578 (float:HF (match_operand:SI 1 "general_operand")))]
4582 rtx op1 = gen_reg_rtx (SFmode);
4583 expand_float (op1, operands[1], 0);
4584 op1 = convert_to_mode (HFmode, op1, 0);
4585 emit_move_insn (operands[0], op1);
4590 (define_expand "floatdihf2"
4591 [(set (match_operand:HF 0 "general_operand")
4592 (float:HF (match_operand:DI 1 "general_operand")))]
4596 rtx op1 = gen_reg_rtx (SFmode);
4597 expand_float (op1, operands[1], 0);
4598 op1 = convert_to_mode (HFmode, op1, 0);
4599 emit_move_insn (operands[0], op1);
;; SImode -> SF/DF float conversions and the reverse fix-truncations.
;; The SF/DF expanders are hard-float gated (DF additionally requires
;; double-precision hardware); the HFmode truncations go via SFmode with
;; convert_to_mode before calling expand_fix, mirroring floatsihf2 above.
4604 (define_expand "floatsisf2"
4605 [(set (match_operand:SF 0 "s_register_operand")
4606 (float:SF (match_operand:SI 1 "s_register_operand")))]
4607 "TARGET_32BIT && TARGET_HARD_FLOAT"
4611 (define_expand "floatsidf2"
4612 [(set (match_operand:DF 0 "s_register_operand")
4613 (float:DF (match_operand:SI 1 "s_register_operand")))]
4614 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4618 (define_expand "fix_trunchfsi2"
4619 [(set (match_operand:SI 0 "general_operand")
4620 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4624 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4625 expand_fix (operands[0], op1, 0);
4630 (define_expand "fix_trunchfdi2"
4631 [(set (match_operand:DI 0 "general_operand")
4632 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4636 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4637 expand_fix (operands[0], op1, 0);
4642 (define_expand "fix_truncsfsi2"
4643 [(set (match_operand:SI 0 "s_register_operand")
4644 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4645 "TARGET_32BIT && TARGET_HARD_FLOAT"
4649 (define_expand "fix_truncdfsi2"
4650 [(set (match_operand:SI 0 "s_register_operand")
4651 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4652 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; Float narrowing: DF -> SF, and the DF -> HF case.
;; truncdfhf2 is only enabled when either unsafe-math allows the
;; double-rounding DF -> SF -> HF route, or the target has a single-step
;; DF -> HF instruction (TARGET_FP16_TO_DOUBLE), as the comment explains.
;; The C body handles the via-SFmode route explicitly; otherwise expansion
;; falls through to the single-instruction pattern.
4658 (define_expand "truncdfsf2"
4659 [(set (match_operand:SF 0 "s_register_operand")
4661 (match_operand:DF 1 "s_register_operand")))]
4662 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4666 ;; DFmode to HFmode conversions on targets without a single-step hardware
4667 ;; instruction for it would have to go through SFmode.  This is dangerous
4668 ;; as it introduces double rounding.
4670 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4671 ;; a single-step instruction.
4673 (define_expand "truncdfhf2"
4674 [(set (match_operand:HF 0 "s_register_operand")
4676 (match_operand:DF 1 "s_register_operand")))]
4677 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4678 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4680 /* We don't have a direct instruction for this, so we must be in
4681 an unsafe math mode, and going via SFmode.  */
4683 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4686 op1 = convert_to_mode (SFmode, operands[1], 0);
4687 op1 = convert_to_mode (HFmode, op1, 0);
4688 emit_move_insn (operands[0], op1);
4691 /* Otherwise, we will pick this up as a single instruction with
4692 no intermediary rounding.  */
;; Extension of QI/HI/SI to DImode, parameterized over the QHSI mode
;; iterator; operand predicates/constraints and the enabling condition come
;; from per-mode attributes (<qhs_zextenddi_op> etc.).  Both patterns emit
;; "#" style multi-insn sequences handled by the splits that follow.
;; NOTE(review): output template lines are elided in this extract.
4696 ;; Zero and sign extension instructions.
4698 (define_insn "zero_extend<mode>di2"
4699 [(set (match_operand:DI 0 "s_register_operand" "=r,?r")
4700 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
4701 "<qhs_zextenddi_cstr>")))]
4702 "TARGET_32BIT <qhs_zextenddi_cond>"
4704 [(set_attr "length" "4,8")
4705 (set_attr "arch" "*,*")
4706 (set_attr "ce_count" "2")
4707 (set_attr "predicable" "yes")
4708 (set_attr "type" "mov_reg,multiple")]
4711 (define_insn "extend<mode>di2"
4712 [(set (match_operand:DI 0 "s_register_operand" "=r,?r,?r")
4713 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4714 "<qhs_extenddi_cstr>")))]
4715 "TARGET_32BIT <qhs_sextenddi_cond>"
4717 [(set_attr "length" "4,8,8")
4718 (set_attr "ce_count" "2")
4719 (set_attr "shift" "1")
4720 (set_attr "predicable" "yes")
4721 (set_attr "arch" "*,a,t")
4722 (set_attr "type" "mov_reg,multiple,multiple")]
;; Splits for all extensions to DImode.
;; Zero-extend: emit the low-word move/extend in the C body, then the
;; template stores const 0 into the high word (operands rewritten to the
;; high part and const0_rtx).
;; Sign-extend: emit the low-word move/extend, then the template fills the
;; high word with the low word shifted arithmetically right by 31.
;; An SImode source needs only a plain move for the low part; narrower
;; modes go through an SImode-level extend.
4725 ;; Splits for all extensions to DImode
4727 [(set (match_operand:DI 0 "s_register_operand" "")
4728 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4730 [(set (match_dup 0) (match_dup 1))]
4732 rtx lo_part = gen_lowpart (SImode, operands[0]);
4733 machine_mode src_mode = GET_MODE (operands[1]);
4735 if (src_mode == SImode)
4736 emit_move_insn (lo_part, operands[1]);
4738 emit_insn (gen_rtx_SET (lo_part,
4739 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4740 operands[0] = gen_highpart (SImode, operands[0]);
4741 operands[1] = const0_rtx;
4745 [(set (match_operand:DI 0 "s_register_operand" "")
4746 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4748 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4750 rtx lo_part = gen_lowpart (SImode, operands[0]);
4751 machine_mode src_mode = GET_MODE (operands[1]);
4753 if (src_mode == SImode)
4754 emit_move_insn (lo_part, operands[1]);
4756 emit_insn (gen_rtx_SET (lo_part,
4757 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4758 operands[1] = lo_part;
4759 operands[0] = gen_highpart (SImode, operands[0])
;; HImode -> SImode zero extension.
;; Expander fallbacks: pre-ARMv4 ARM loads HImode from memory byte-wise
;; (movhi_bytes); pre-v6 register sources use a shift-left-16 /
;; logical-shift-right-16 pair (also the shape of the split that follows).
;; Insns: pre-v6 ARM uses the two-insn shift form or LDRH from memory;
;; v6+ has single-insn UXTH / LDRH, and UXTAH for the extend-plus-add
;; combination.
;; NOTE(review): output templates and some conditions are elided in this
;; extract.
4762 (define_expand "zero_extendhisi2"
4763 [(set (match_operand:SI 0 "s_register_operand")
4764 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4767 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4769 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4772 if (!arm_arch6 && !MEM_P (operands[1]))
4774 rtx t = gen_lowpart (SImode, operands[1]);
4775 rtx tmp = gen_reg_rtx (SImode);
4776 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4777 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4783 [(set (match_operand:SI 0 "s_register_operand" "")
4784 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4785 "!TARGET_THUMB2 && !arm_arch6"
4786 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4787 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4789 operands[2] = gen_lowpart (SImode, operands[1]);
4792 (define_insn "*arm_zero_extendhisi2"
4793 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4794 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4795 "TARGET_ARM && arm_arch4 && !arm_arch6"
4799 [(set_attr "type" "alu_shift_reg,load_byte")
4800 (set_attr "predicable" "yes")]
4803 (define_insn "*arm_zero_extendhisi2_v6"
4804 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4805 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4806 "TARGET_ARM && arm_arch6"
4810 [(set_attr "predicable" "yes")
4811 (set_attr "type" "extend,load_byte")]
4814 (define_insn "*arm_zero_extendhisi2addsi"
4815 [(set (match_operand:SI 0 "s_register_operand" "=r")
4816 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4817 (match_operand:SI 2 "s_register_operand" "r")))]
4819 "uxtah%?\\t%0, %2, %1"
4820 [(set_attr "type" "alu_shift_reg")
4821 (set_attr "predicable" "yes")]
;; QImode -> SImode zero extension.
;; Expander fallbacks: pre-v6 ARM register sources AND with 255 (constant
;; elided from this extract at 4832's continuation); other pre-v6 register
;; sources use the shift-left-24 / logical-shift-right-24 pair.
;; The split offers the same shift pair, with an AND-255 alternative in its
;; C body.  Insns: pre-v6 two-insn form or LDRB; v6+ single-insn UXTB /
;; LDRB, plus UXTAB for extend-plus-add.
4824 (define_expand "zero_extendqisi2"
4825 [(set (match_operand:SI 0 "s_register_operand")
4826 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4829 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4831 emit_insn (gen_andsi3 (operands[0],
4832 gen_lowpart (SImode, operands[1]),
4836 if (!arm_arch6 && !MEM_P (operands[1]))
4838 rtx t = gen_lowpart (SImode, operands[1]);
4839 rtx tmp = gen_reg_rtx (SImode);
4840 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4841 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4847 [(set (match_operand:SI 0 "s_register_operand" "")
4848 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4850 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4851 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4853 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4856 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4861 (define_insn "*arm_zero_extendqisi2"
4862 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4863 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4864 "TARGET_ARM && !arm_arch6"
4867 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4868 [(set_attr "length" "8,4")
4869 (set_attr "type" "alu_shift_reg,load_byte")
4870 (set_attr "predicable" "yes")]
4873 (define_insn "*arm_zero_extendqisi2_v6"
4874 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4875 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4876 "TARGET_ARM && arm_arch6"
4879 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4880 [(set_attr "type" "extend,load_byte")
4881 (set_attr "predicable" "yes")]
4884 (define_insn "*arm_zero_extendqisi2addsi"
4885 [(set (match_operand:SI 0 "s_register_operand" "=r")
4886 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4887 (match_operand:SI 2 "s_register_operand" "r")))]
4889 "uxtab%?\\t%0, %2, %1"
4890 [(set_attr "predicable" "yes")
4891 (set_attr "type" "alu_shift_reg")]
;; Peephole-style splits.
;; The first two rewrite a zero-extend of a QImode subreg of an SI register
;; as move + AND 255; subreg byte 0 for little-endian, byte 3 for
;; big-endian (the low byte in each layout).
;; The third distributes an IOR/XOR over an (and (ashift ...)) plus a
;; lowpart-subreg operand, provided the AND mask matches the mode mask of
;; the subreg shifted into position, turning it into shift-op + extend.
4895 [(set (match_operand:SI 0 "s_register_operand" "")
4896 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4897 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4898 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4899 [(set (match_dup 2) (match_dup 1))
4900 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4905 [(set (match_operand:SI 0 "s_register_operand" "")
4906 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4907 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4908 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4909 [(set (match_dup 2) (match_dup 1))
4910 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4916 [(set (match_operand:SI 0 "s_register_operand" "")
4917 (IOR_XOR:SI (and:SI (ashift:SI
4918 (match_operand:SI 1 "s_register_operand" "")
4919 (match_operand:SI 2 "const_int_operand" ""))
4920 (match_operand:SI 3 "const_int_operand" ""))
4922 (match_operator 5 "subreg_lowpart_operator"
4923 [(match_operand:SI 4 "s_register_operand" "")]))))]
4925 && (UINTVAL (operands[3])
4926 == (GET_MODE_MASK (GET_MODE (operands[5]))
4927 & (GET_MODE_MASK (GET_MODE (operands[5]))
4928 << (INTVAL (operands[2])))))"
4929 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4931 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4932 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Test a QImode register for equality with zero (CC_Z mode: only the Z
;; flag result is meaningful).  Presumably implemented with TST against the
;; low byte — the output template is elided in this extract.
4935 (define_insn "*compareqi_eq0"
4936 [(set (reg:CC_Z CC_REGNUM)
4937 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4941 [(set_attr "conds" "set")
4942 (set_attr "predicable" "yes")
4943 (set_attr "type" "logic_imm")]
;; HImode -> SImode sign extension expander plus pre-v6 split.
;; Fallback routes: Thumb-1 has its own expander; pre-ARMv4 ARM memory
;; sources go through extendhisi2_mem (no LDRSH available); pre-v6
;; register sources use the shift-left-16 / arithmetic-shift-right-16
;; pair, which is also the shape of the split below.
4946 (define_expand "extendhisi2"
4947 [(set (match_operand:SI 0 "s_register_operand")
4948 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4953 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4956 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4958 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4962 if (!arm_arch6 && !MEM_P (operands[1]))
4964 rtx t = gen_lowpart (SImode, operands[1]);
4965 rtx tmp = gen_reg_rtx (SImode);
4966 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4967 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4974 [(set (match_operand:SI 0 "register_operand" "")
4975 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4976 (clobber (match_scratch:SI 2 ""))])]
4978 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4979 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4981 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; Sign-extend a halfword from memory without LDRSH (pre-ARMv4):
;; load the two bytes separately (addr and addr+1), shift the high byte
;; left 24 then arithmetic-shift right 16, and OR in the low byte.
;; operands[4]/[5] select which loaded byte is high/low according to
;; BYTES_BIG_ENDIAN.  Followed by another pre-v6 shift-pair split.
4984 ;; This pattern will only be used when ldsh is not available
4985 (define_expand "extendhisi2_mem"
4986 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4988 (zero_extend:SI (match_dup 7)))
4989 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4990 (set (match_operand:SI 0 "" "")
4991 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4996 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4998 mem1 = change_address (operands[1], QImode, addr);
4999 mem2 = change_address (operands[1], QImode,
5000 plus_constant (Pmode, addr, 1));
5001 operands[0] = gen_lowpart (SImode, operands[0]);
5003 operands[2] = gen_reg_rtx (SImode);
5004 operands[3] = gen_reg_rtx (SImode);
5005 operands[6] = gen_reg_rtx (SImode);
5008 if (BYTES_BIG_ENDIAN)
5010 operands[4] = operands[2];
5011 operands[5] = operands[3];
5015 operands[4] = operands[3];
5016 operands[5] = operands[2];
5022 [(set (match_operand:SI 0 "register_operand" "")
5023 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5025 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5026 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5028 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; HImode sign-extension insns: ARMv4 (shift pair or LDRSH, pre-v6),
;; v6+ single-insn SXTH / LDRSH, and SXTAH for extend-plus-add.
;; NOTE(review): output templates are elided in this extract.
5031 (define_insn "*arm_extendhisi2"
5032 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5033 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5034 "TARGET_ARM && arm_arch4 && !arm_arch6"
5038 [(set_attr "length" "8,4")
5039 (set_attr "type" "alu_shift_reg,load_byte")
5040 (set_attr "predicable" "yes")]
5043 ;; ??? Check Thumb-2 pool range
5044 (define_insn "*arm_extendhisi2_v6"
5045 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5046 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5047 "TARGET_32BIT && arm_arch6"
5051 [(set_attr "type" "extend,load_byte")
5052 (set_attr "predicable" "yes")]
5055 (define_insn "*arm_extendhisi2addsi"
5056 [(set (match_operand:SI 0 "s_register_operand" "=r")
5057 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5058 (match_operand:SI 2 "s_register_operand" "r")))]
5060 "sxtah%?\\t%0, %2, %1"
5061 [(set_attr "type" "alu_shift_reg")]
;; QImode -> HImode sign extension.  ARMv4 memory sources go straight to
;; the LDRSB insn below ("*arm_extendqihi_insn", Uq memory constraint);
;; otherwise the operand is forced into a register and extended via the
;; SImode shift pair (operands rewritten to SImode lowparts).
5064 (define_expand "extendqihi2"
5066 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5068 (set (match_operand:HI 0 "s_register_operand")
5069 (ashiftrt:SI (match_dup 2)
5074 if (arm_arch4 && MEM_P (operands[1]))
5076 emit_insn (gen_rtx_SET (operands[0],
5077 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5080 if (!s_register_operand (operands[1], QImode))
5081 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5082 operands[0] = gen_lowpart (SImode, operands[0]);
5083 operands[1] = gen_lowpart (SImode, operands[1]);
5084 operands[2] = gen_reg_rtx (SImode);
5088 (define_insn "*arm_extendqihi_insn"
5089 [(set (match_operand:HI 0 "s_register_operand" "=r")
5090 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5091 "TARGET_ARM && arm_arch4"
5093 [(set_attr "type" "load_byte")
5094 (set_attr "predicable" "yes")]
;; QImode -> SImode sign extension.
;; Expander: pre-ARMv4 memory sources are forced into a register; pre-v6
;; register sources use the shift-left-24 / arithmetic-shift-right-24
;; pair (also the split below).  Insns: pre-v6 two-insn form or LDRSB;
;; v6+ single-insn SXTB / LDRSB, plus SXTAB for extend-plus-add.
5097 (define_expand "extendqisi2"
5098 [(set (match_operand:SI 0 "s_register_operand")
5099 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5102 if (!arm_arch4 && MEM_P (operands[1]))
5103 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5105 if (!arm_arch6 && !MEM_P (operands[1]))
5107 rtx t = gen_lowpart (SImode, operands[1]);
5108 rtx tmp = gen_reg_rtx (SImode);
5109 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5110 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5116 [(set (match_operand:SI 0 "register_operand" "")
5117 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5119 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5120 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5122 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5125 (define_insn "*arm_extendqisi"
5126 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5127 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5128 "TARGET_ARM && arm_arch4 && !arm_arch6"
5132 [(set_attr "length" "8,4")
5133 (set_attr "type" "alu_shift_reg,load_byte")
5134 (set_attr "predicable" "yes")]
5137 (define_insn "*arm_extendqisi_v6"
5138 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5140 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5141 "TARGET_ARM && arm_arch6"
5145 [(set_attr "type" "extend,load_byte")
5146 (set_attr "predicable" "yes")]
5149 (define_insn "*arm_extendqisi2addsi"
5150 [(set (match_operand:SI 0 "s_register_operand" "=r")
5151 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5152 (match_operand:SI 2 "s_register_operand" "r")))]
5154 "sxtab%?\\t%0, %2, %1"
5155 [(set_attr "type" "alu_shift_reg")
5156 (set_attr "predicable" "yes")]
;; Float widening.  SF -> DF needs double-precision hardware.
;; HF -> DF is always value-safe (widening cannot double-round), so when
;; no single-step instruction exists (TARGET_FP16_TO_DOUBLE unset) the
;; expander routes through SFmode with convert_to_mode and is done;
;; otherwise expansion falls through to the one-instruction pattern.
5159 (define_expand "extendsfdf2"
5160 [(set (match_operand:DF 0 "s_register_operand")
5161 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5162 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5166 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5167 ;; must go through SFmode.
5169 ;; This is always safe for an extend.
5171 (define_expand "extendhfdf2"
5172 [(set (match_operand:DF 0 "s_register_operand")
5173 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5176 /* We don't have a direct instruction for this, so go via SFmode.  */
5177 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5180 op1 = convert_to_mode (SFmode, operands[1], 0);
5181 op1 = convert_to_mode (DFmode, op1, 0);
5182 emit_insn (gen_movdf (operands[0], op1));
5185 /* Otherwise, we're done producing RTL and will pick up the correct
5186 pattern to do this with one rounding-step in a single instruction.  */
5190 ;; Move insns (including loads and stores)
5192 ;; XXX Just some ideas about movti.
5193 ;; I don't think these are a good idea on the arm, there just aren't enough
5195 ;;(define_expand "loadti"
5196 ;; [(set (match_operand:TI 0 "s_register_operand")
5197 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5200 ;;(define_expand "storeti"
5201 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5202 ;; (match_operand:TI 1 "s_register_operand"))]
5205 ;;(define_expand "movti"
5206 ;; [(set (match_operand:TI 0 "general_operand")
5207 ;; (match_operand:TI 1 "general_operand"))]
5213 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5214 ;; operands[1] = copy_to_reg (operands[1]);
5215 ;; if (MEM_P (operands[0]))
5216 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5217 ;; else if (MEM_P (operands[1]))
5218 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5222 ;; emit_insn (insn);
5226 ;; Recognize garbage generated above.
5229 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5230 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5234 ;; register mem = (which_alternative < 3);
5235 ;; register const char *template;
5237 ;; operands[mem] = XEXP (operands[mem], 0);
5238 ;; switch (which_alternative)
5240 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5241 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5242 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5243 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5244 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5245 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5247 ;; output_asm_insn (template, operands);
;; DImode move expander.  Beyond the usual force-to-register when pseudos
;; are available, it works around LDRD/STRD register-pair restrictions:
;; if either side is a hard core register whose number is not valid for
;; DImode (odd-numbered pair in ARM state, per hard_regno_mode_ok), the
;; move is split into two SImode half moves.  Volatile memory is handled
;; by staging through a fresh legal DImode pseudo so the access itself
;; stays a single DImode operation.
5251 (define_expand "movdi"
5252 [(set (match_operand:DI 0 "general_operand")
5253 (match_operand:DI 1 "general_operand"))]
5256 gcc_checking_assert (aligned_operand (operands[0], DImode));
5257 gcc_checking_assert (aligned_operand (operands[1], DImode));
5258 if (can_create_pseudo_p ())
5260 if (!REG_P (operands[0]))
5261 operands[1] = force_reg (DImode, operands[1]);
5263 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5264 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5266 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5267 when expanding function calls.  */
5268 gcc_assert (can_create_pseudo_p ());
5269 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5271 /* Perform load into legal reg pair first, then move.  */
5272 rtx reg = gen_reg_rtx (DImode);
5273 emit_insn (gen_movdi (reg, operands[1]));
5276 emit_move_insn (gen_lowpart (SImode, operands[0]),
5277 gen_lowpart (SImode, operands[1]));
5278 emit_move_insn (gen_highpart (SImode, operands[0]),
5279 gen_highpart (SImode, operands[1]));
5282 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5283 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5285 /* Avoid STRD's from an odd-numbered register pair in ARM state
5286 when expanding function prologue.  */
5287 gcc_assert (can_create_pseudo_p ());
5288 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5289 ? gen_reg_rtx (DImode)
5291 emit_move_insn (gen_lowpart (SImode, split_dest),
5292 gen_lowpart (SImode, operands[1]));
5293 emit_move_insn (gen_highpart (SImode, split_dest),
5294 gen_highpart (SImode, operands[1]));
5295 if (split_dest != operands[0])
5296 emit_insn (gen_movdi (operands[0], split_dest));
;; Core-register DImode move insn (soft-float path; requires at least one
;; register operand).  The C output body dispatches on the alternative:
;; constant alternatives may need a MOV/MOVT split when the literal pool
;; is disabled; the general case defers to output_move_double.  Pool-range
;; attributes bound the PC-relative load distance per sub-target.
5302 (define_insn "*arm_movdi"
5303 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5304 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5306 && !(TARGET_HARD_FLOAT)
5308 && (   register_operand (operands[0], DImode)
5309 || register_operand (operands[1], DImode))"
5311 switch (which_alternative)
5318 /* Cannot load it directly, split to load it via MOV / MOVT.  */
5319 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5323 return output_move_double (operands, true, NULL);
5326 [(set_attr "length" "8,12,16,8,8")
5327 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5328 (set_attr "arm_pool_range" "*,*,*,1020,*")
5329 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5330 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5331 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Splits for 64-bit (ANY64: DI/DF) moves in core registers.
;; 1) immediate -> register: synthesize each 32-bit half with
;;    arm_split_constant when the pool is disabled or inline construction
;;    is cheap enough.
;; 2) const_double by parts: one SET per half, chosen when split 1 did not
;;    match (see the comment block above it).
;; 3) register -> register after reload: two SImode moves, with the order
;;    swapped when the first destination half would clobber the source's
;;    high half (partial-overlap check via rtx_equal_p).
5335 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5336 (match_operand:ANY64 1 "immediate_operand" ""))]
5339 && (arm_disable_literal_pool
5340 || (arm_const_double_inline_cost (operands[1])
5341 <= arm_max_const_double_inline_cost ()))"
5344 arm_split_constant (SET, SImode, curr_insn,
5345 INTVAL (gen_lowpart (SImode, operands[1])),
5346 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5347 arm_split_constant (SET, SImode, curr_insn,
5348 INTVAL (gen_highpart_mode (SImode,
5349 GET_MODE (operands[0]),
5351 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5356 ; If optimizing for size, or if we have load delay slots, then
5357 ; we want to split the constant into two separate operations.
5358 ; In both cases this may split a trivial part into a single data op
5359 ; leaving a single complex constant to load.  We can also get longer
5360 ; offsets in a LDR which means we get better chances of sharing the pool
5361 ; entries.  Finally, we can normally do a better job of scheduling
5362 ; LDR instructions than we can with LDM.
5363 ; This pattern will only match if the one above did not.
5365 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5366 (match_operand:ANY64 1 "const_double_operand" ""))]
5367 "TARGET_ARM && reload_completed
5368 && arm_const_double_by_parts (operands[1])"
5369 [(set (match_dup 0) (match_dup 1))
5370 (set (match_dup 2) (match_dup 3))]
5372 operands[2] = gen_highpart (SImode, operands[0]);
5373 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5375 operands[0] = gen_lowpart (SImode, operands[0]);
5376 operands[1] = gen_lowpart (SImode, operands[1]);
5381 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5382 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5383 "TARGET_EITHER && reload_completed"
5384 [(set (match_dup 0) (match_dup 1))
5385 (set (match_dup 2) (match_dup 3))]
5387 operands[2] = gen_highpart (SImode, operands[0]);
5388 operands[3] = gen_highpart (SImode, operands[1]);
5389 operands[0] = gen_lowpart (SImode, operands[0]);
5390 operands[1] = gen_lowpart (SImode, operands[1]);
5392 /* Handle a partial overlap.  */
5393 if (rtx_equal_p (operands[0], operands[3]))
5395 rtx tmp0 = operands[0];
5396 rtx tmp1 = operands[1];
5398 operands[0] = operands[2];
5399 operands[1] = operands[3];
;; Base+index DImode loads are illegal when the destination overlaps both
;; index registers; split early so the address sum is computed into a
;; register first (operands[4] aliases the low half of the destination),
;; giving the compiler a chance to recover.
5406 ;; We can't actually do base+index doubleword loads if the index and
5407 ;; destination overlap.  Split here so that we at least have chance to
5410 [(set (match_operand:DI 0 "s_register_operand" "")
5411 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5412 (match_operand:SI 2 "s_register_operand" ""))))]
5414 && reg_overlap_mentioned_p (operands[0], operands[1])
5415 && reg_overlap_mentioned_p (operands[0], operands[2])"
5417 (plus:SI (match_dup 1)
5420 (mem:DI (match_dup 4)))]
5422 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
;; SImode move expander.  Main cases:
;;  - 32-bit targets (or MOVT-capable): force mem=const / mem=mem into
;;    registers, and split constants not encodable as an immediate (or its
;;    complement) via arm_split_constant, unless DONT_EARLY_SPLIT_CONSTANT
;;    says to keep the constant whole for later optimization.
;;  - constants with a symbolic base + nonzero offset that cannot go in the
;;    constant pool: materialize base then ADD the offset.
;;  - TLS references are legitimized via legitimize_tls_address; PIC
;;    symbolic operands go through legitimize_pic_address.
5426 (define_expand "movsi"
5427 [(set (match_operand:SI 0 "general_operand")
5428 (match_operand:SI 1 "general_operand"))]
5432 rtx base, offset, tmp;
5434 gcc_checking_assert (aligned_operand (operands[0], SImode));
5435 gcc_checking_assert (aligned_operand (operands[1], SImode));
5436 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5438 /* Everything except mem = const or mem = mem can be done easily.  */
5439 if (MEM_P (operands[0]))
5440 operands[1] = force_reg (SImode, operands[1]);
5441 if (arm_general_register_operand (operands[0], SImode)
5442 && CONST_INT_P (operands[1])
5443 && !(const_ok_for_arm (INTVAL (operands[1]))
5444 || const_ok_for_arm (~INTVAL (operands[1]))))
5446 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5448 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5453 arm_split_constant (SET, SImode, NULL_RTX,
5454 INTVAL (operands[1]), operands[0], NULL_RTX,
5455 optimize && can_create_pseudo_p ());
5460 else /* Target doesn't have MOVT...  */
5462 if (can_create_pseudo_p ())
5464 if (!REG_P (operands[0]))
5465 operands[1] = force_reg (SImode, operands[1]);
5469 split_const (operands[1], &base, &offset);
5470 if (INTVAL (offset) != 0
5471 && targetm.cannot_force_const_mem (SImode, operands[1]))
5473 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5474 emit_move_insn (tmp, base);
5475 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5479 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5481 /* Recognize the case where operand[1] is a reference to thread-local
5482 data and load its address to a register.  Offsets have been split off
5484 if (arm_tls_referenced_p (operands[1]))
5485 operands[1] = legitimize_tls_address (operands[1], tmp);
5487 && (CONSTANT_P (operands[1])
5488 || symbol_mentioned_p (operands[1])
5489 || label_mentioned_p (operands[1])))
5491 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5496 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5497 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5498 ;; so this does not matter.
;; *arm_movt: emits MOVT to set the upper 16 bits of a register whose low
;; half (operand 1, tied to operand 0 by the "0,0" constraint) already holds
;; the lower 16 bits.  Alternative 0 is 32-bit predicable ARM/Thumb-2,
;; alternative 1 is ARMv8-M Baseline (v8mb, not predicable form).
;; NOTE(review): the "@" line opening the multi-alternative template appears
;; elided in this listing (prefix 5504 missing).
5499 (define_insn "*arm_movt"
5500 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5501 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5502 (match_operand:SI 2 "general_operand" "i,i")))]
5503 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5505 movt%?\t%0, #:upper16:%c2
5506 movt\t%0, #:upper16:%c2"
5507 [(set_attr "arch" "32,v8mb")
5508 (set_attr "predicable" "yes")
5509 (set_attr "length" "4")
5510 (set_attr "type" "alu_sreg")]
;; *arm_movsi_insn: basic ARM-state SImode move (reg/imm/mvn-imm/movw/
;; load/store alternatives, per the "type" attribute below).  Requires at
;; least one register operand so mem=mem never matches.
;; NOTE(review): the output-template lines (prefixes 5519-5525) are elided
;; in this listing; only constraints, condition and attributes are visible.
5513 (define_insn "*arm_movsi_insn"
5514 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5515 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5516 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5517 && ( register_operand (operands[0], SImode)
5518 || register_operand (operands[1], SImode))"
5526 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5527 (set_attr "predicable" "yes")
5528 (set_attr "arch" "*,*,*,v6t2,*,*")
5529 (set_attr "pool_range" "*,*,*,*,4096,*")
5530 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5534 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5535 (match_operand:SI 1 "const_int_operand" ""))]
5536 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5537 && (!(const_ok_for_arm (INTVAL (operands[1]))
5538 || const_ok_for_arm (~INTVAL (operands[1]))))"
5539 [(clobber (const_int 0))]
5541 arm_split_constant (SET, SImode, NULL_RTX,
5542 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5547 ;; A normal way to do (symbol + offset) requires three instructions at least
5548 ;; (depends on how big the offset is) as below:
5549 ;; movw r0, #:lower16:g
5550 ;; movt r0, #:upper16:g
5553 ;; A better way would be:
5554 ;; movw r0, #:lower16:g+4
5555 ;; movt r0, #:upper16:g+4
5557 ;; The limitation of this way is that the length of offset should be a 16-bit
5558 ;; signed value, because current assembler only supports REL type relocation for
5559 ;; such case. If the more powerful RELA type is supported in future, we should
5560 ;; update this pattern to go with better way.
;; Split (symbol_ref + const offset) into a movw/movt pair (see the comment
;; block above).  Offsets outside the signed-16-bit range fall back to
;; loading the bare symbol and adding the offset separately, because the
;; assembler's REL relocations only carry 16-bit addends.
;; NOTE(review): the (define_split header and parts of the condition
;; (prefixes 5561, 5565-5566, 5568) are elided in this listing.
5562 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5563 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5564 (match_operand:SI 2 "const_int_operand" ""))))]
5567 && arm_disable_literal_pool
5569 && GET_CODE (operands[1]) == SYMBOL_REF"
5570 [(clobber (const_int 0))]
5572 int offset = INTVAL (operands[2]);
5574 if (offset < -0x8000 || offset > 0x7fff)
5576 arm_emit_movpair (operands[0], operands[1]);
5577 emit_insn (gen_rtx_SET (operands[0],
5578 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5582 rtx op = gen_rtx_CONST (SImode,
5583 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5584 arm_emit_movpair (operands[0], op);
5589 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5590 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5591 ;; and lo_sum would be merged back into memory load at cprop. However,
5592 ;; if the default is to prefer movt/movw rather than a load from the constant
5593 ;; pool, the performance is better.
;; Split a plain SYMBOL_REF load into a movw/movt pair after cprop (see the
;; comment block above for why this is done late).  Skipped for TLS symbols
;; and when per-word relocations are requested.
;; NOTE(review): the (define_split header line (prefix 5594) is elided.
5595 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5596 (match_operand:SI 1 "general_operand" ""))]
5597 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5598 && !target_word_relocations
5599 && !arm_tls_referenced_p (operands[1])"
5600 [(clobber (const_int 0))]
5602 arm_emit_movpair (operands[0], operands[1]);
5606 ;; When generating pic, we need to load the symbol offset into a register.
5607 ;; So that the optimizer does not confuse this with a normal symbol load
5608 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5609 ;; since that is the only type of relocation we can use.
5611 ;; Wrap calculation of the whole PIC address in a single pattern for the
5612 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5613 ;; a PIC address involves two loads from memory, so we want to CSE it
5614 ;; as often as possible.
5615 ;; This pattern will be split into one of the pic_load_addr_* patterns
5616 ;; and a move after GCSE optimizations.
5618 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
;; calculate_pic_address: single pattern wrapping the two-load PIC address
;; computation so PRE/HOIST can CSE it (see comment block above).  The split
;; below breaks it into a pic_load_addr-style UNSPEC_PIC_SYM load followed
;; by the GOT-relative memory load, using a fresh pseudo for the offset when
;; pseudos are still available (otherwise reusing operand 0 as scratch).
;; NOTE(review): the tail of the expander pattern and the split's header/
;; condition lines (prefixes 5623-5626, 5628, 5632-5633) are elided here.
5619 (define_expand "calculate_pic_address"
5620 [(set (match_operand:SI 0 "register_operand")
5621 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5622 (unspec:SI [(match_operand:SI 2 "" "")]
5627 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5629 [(set (match_operand:SI 0 "register_operand" "")
5630 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5631 (unspec:SI [(match_operand:SI 2 "" "")]
5634 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5635 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5636 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5639 ;; operand1 is the memory address to go into
5640 ;; pic_load_addr_32bit.
5641 ;; operand2 is the PIC label to be emitted
5642 ;; from pic_add_dot_plus_eight.
5643 ;; We do this to allow hoisting of the entire insn.
;; pic_load_addr_unified: keeps the PIC literal load and the pc-relative add
;; together as one hoistable insn (see comment above), then splits after
;; reload into UNSPEC_PIC_SYM + UNSPEC_PIC_BASE.  The pc offset (match_dup 3)
;; is 4 in Thumb state and 8 in ARM state, matching the respective pipeline
;; pc-read offsets.  Alternatives: ARM (a), Thumb-2 (t2), Thumb-1 (t1).
;; NOTE(review): the insn condition and output template lines (prefixes
;; 5649-5650) are elided in this listing.
5644 (define_insn_and_split "pic_load_addr_unified"
5645 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5646 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5647 (match_operand:SI 2 "" "")]
5648 UNSPEC_PIC_UNIFIED))]
5651 "&& reload_completed"
5652 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5653 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5654 (match_dup 2)] UNSPEC_PIC_BASE))]
5655 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5656 [(set_attr "type" "load_4,load_4,load_4")
5657 (set_attr "pool_range" "4096,4094,1022")
5658 (set_attr "neg_pool_range" "4084,0,0")
5659 (set_attr "arch" "a,t2,t1")
5660 (set_attr "length" "8,6,4")]
5663 ;; The rather odd constraints on the following are to force reload to leave
5664 ;; the insn alone, and to force the minipool generation pass to then move
5665 ;; the GOT symbol to memory.
;; pic_load_addr_32bit: load a GOT symbol's offset from the literal pool
;; (UNSPEC_PIC_SYM).  The "mX" constraint forces the minipool pass to place
;; the symbol in memory (see the comment above).  pool_range/neg_pool_range
;; differ between ARM and Thumb-2, hence the is_thumb conditionals.
;; NOTE(review): the "ldr..." template line and the range constants
;; (prefixes 5671, 5675-5676, 5679-5680) are elided in this listing.
5667 (define_insn "pic_load_addr_32bit"
5668 [(set (match_operand:SI 0 "s_register_operand" "=r")
5669 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5670 "TARGET_32BIT && flag_pic"
5672 [(set_attr "type" "load_4")
5673 (set (attr "pool_range")
5674 (if_then_else (eq_attr "is_thumb" "no")
5677 (set (attr "neg_pool_range")
5678 (if_then_else (eq_attr "is_thumb" "no")
;; pic_load_addr_thumb1: Thumb-1 variant of the PIC literal load; low
;; registers only ("=l"), forward-only pool range of 1018 bytes.
;; NOTE(review): the output template line (prefix 5687) is elided here.
5683 (define_insn "pic_load_addr_thumb1"
5684 [(set (match_operand:SI 0 "s_register_operand" "=l")
5685 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5686 "TARGET_THUMB1 && flag_pic"
5688 [(set_attr "type" "load_4")
5689 (set (attr "pool_range") (const_int 1018))]
;; pic_add_dot_plus_four: Thumb pc-relative PIC base add.  Emits the local
;; "LPICn" label (numbered by operand 2) then "add %0, pc"; the +4 accounts
;; for the Thumb pc-read offset.  2-byte encoding (length attr).
;; NOTE(review): condition and parts of the unspec body (prefixes 5695,
;; 5697-5699, 5703) are elided in this listing.
5692 (define_insn "pic_add_dot_plus_four"
5693 [(set (match_operand:SI 0 "register_operand" "=r")
5694 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5696 (match_operand 2 "" "")]
5700 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5701 INTVAL (operands[2]));
5702 return \"add\\t%0, %|pc\";
5704 [(set_attr "length" "2")
5705 (set_attr "type" "alu_sreg")]
;; pic_add_dot_plus_eight: ARM-state pc-relative PIC base add.  Emits the
;; local "LPICn" label then "add %0, pc, %1"; the +8 is the ARM pc-read
;; offset.  Predicable.
;; NOTE(review): condition and part of the unspec body (prefixes 5711,
;; 5713-5715, 5719) are elided in this listing.
5708 (define_insn "pic_add_dot_plus_eight"
5709 [(set (match_operand:SI 0 "register_operand" "=r")
5710 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5712 (match_operand 2 "" "")]
5716 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5717 INTVAL (operands[2]));
5718 return \"add%?\\t%0, %|pc, %1\";
5720 [(set_attr "predicable" "yes")
5721 (set_attr "type" "alu_sreg")]
;; tls_load_dot_plus_eight: fused form of pic_add_dot_plus_eight followed by
;; a load — emits the "LPICn" label then a single pc-relative ldr.  Produced
;; by the peephole below.
;; NOTE(review): condition and part of the unspec body (prefixes 5727,
;; 5729-5731, 5735) are elided in this listing.
5724 (define_insn "tls_load_dot_plus_eight"
5725 [(set (match_operand:SI 0 "register_operand" "=r")
5726 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5728 (match_operand 2 "" "")]
5732 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5733 INTVAL (operands[2]));
5734 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5736 [(set_attr "predicable" "yes")
5737 (set_attr "type" "load_4")]
5740 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5741 ;; followed by a load. These sequences can be crunched down to
5742 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole2: crunch pic_add_dot_plus_eight + dependent load into
;; tls_load_dot_plus_eight when the intermediate address register dies
;; (see comment above).  ARM state only.
;; NOTE(review): the (define_peephole2 header and the replacement tail
;; (prefixes 5743-5744, 5747, 5749, 5753, 5755-5760) are elided here.
5745 [(set (match_operand:SI 0 "register_operand" "")
5746 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5748 (match_operand 1 "" "")]
5750 (set (match_operand:SI 2 "arm_general_register_operand" "")
5751 (mem:SI (match_dup 0)))]
5752 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5754 (mem:SI (unspec:SI [(match_dup 3)
;; pic_offset_arm: VxWorks RTP-specific PIC offset load — a plain register-
;; plus-UNSPEC_PIC_OFFSET indexed ldr.  ARM state with PIC only.
5761 (define_insn "pic_offset_arm"
5762 [(set (match_operand:SI 0 "register_operand" "=r")
5763 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5764 (unspec:SI [(match_operand:SI 2 "" "X")]
5765 UNSPEC_PIC_OFFSET))))]
5766 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5767 "ldr%?\\t%0, [%1,%2]"
5768 [(set_attr "type" "load_4")]
;; builtin_setjmp_receiver: re-establish the PIC register after a longjmp,
;; using r3 (1UL << 3) as scratch since set/longjmp clobbers it anyway.
;; NOTE(review): the condition line and expander tail (prefixes 5773-5775,
;; 5777, 5780-5782) are elided in this listing.
5771 (define_expand "builtin_setjmp_receiver"
5772 [(label_ref (match_operand 0 "" ""))]
5776 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5778 if (arm_pic_register != INVALID_REGNUM)
5779 arm_load_pic_register (1UL << 3, NULL_RTX);
5783 ;; If copying one reg to another we can set the condition codes according to
5784 ;; its value. Such a move is common after a return from subroutine and the
5785 ;; result is being tested against zero.
;; *movsi_compare0: combined reg-to-reg move + compare against zero, setting
;; the condition codes ("conds" "set"); used after subroutine returns where
;; the result is immediately tested (see comment above).  The second
;; alternative's "subs %0, %1, #0" both copies and sets flags.
;; NOTE(review): the compare's zero operand, the template's first
;; alternative and the closing lines (prefixes 5790, 5792-5795) are elided.
5787 (define_insn "*movsi_compare0"
5788 [(set (reg:CC CC_REGNUM)
5789 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5791 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5796 subs%?\\t%0, %1, #0"
5797 [(set_attr "conds" "set")
5798 (set_attr "type" "alus_imm,alus_imm")]
5801 ;; Subroutine to store a half word from a register into memory.
5802 ;; Operand 0 is the source register (HImode)
5803 ;; Operand 1 is the destination address in a register (SImode)
5805 ;; In both this routine and the next, we must be careful not to spill
5806 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5807 ;; can generate unrecognizable rtl.
;; storehi: store an HImode register to memory as two byte stores
;; (little-endian layout: low byte at offset 0, high byte at offset 1).
;; Non-constant address offsets are forced into a register first so reload
;; never sees reg+large_const (see comment above).  operands[2]/[5] hold the
;; shifted-out high byte.
;; NOTE(review): interior lines (prefixes 5813, 5817-5819, 5823, 5825, 5827)
;; are elided in this listing.
5809 (define_expand "storehi"
5810 [;; store the low byte
5811 (set (match_operand 1 "" "") (match_dup 3))
5812 ;; extract the high byte
5814 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5815 ;; store the high byte
5816 (set (match_dup 4) (match_dup 5))]
5820 rtx op1 = operands[1];
5821 rtx addr = XEXP (op1, 0);
5822 enum rtx_code code = GET_CODE (addr);
5824 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5826 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5828 operands[4] = adjust_address (op1, QImode, 1);
5829 operands[1] = adjust_address (operands[1], QImode, 0);
5830 operands[3] = gen_lowpart (QImode, operands[0]);
5831 operands[0] = gen_lowpart (SImode, operands[0]);
5832 operands[2] = gen_reg_rtx (SImode);
5833 operands[5] = gen_lowpart (QImode, operands[2]);
;; storehi_bigend: big-endian counterpart of storehi — same two byte stores
;; with the byte order swapped in the RTL template.  Preparation code
;; mirrors storehi: register-ify non-constant address offsets, then build
;; the byte-sized sub-operands.
;; NOTE(review): interior lines (prefixes 5839, 5842-5844, 5848, 5850, 5852)
;; are elided in this listing.
5837 (define_expand "storehi_bigend"
5838 [(set (match_dup 4) (match_dup 3))
5840 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5841 (set (match_operand 1 "" "") (match_dup 5))]
5845 rtx op1 = operands[1];
5846 rtx addr = XEXP (op1, 0);
5847 enum rtx_code code = GET_CODE (addr);
5849 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5851 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5853 operands[4] = adjust_address (op1, QImode, 1);
5854 operands[1] = adjust_address (operands[1], QImode, 0);
5855 operands[3] = gen_lowpart (QImode, operands[0]);
5856 operands[0] = gen_lowpart (SImode, operands[0]);
5857 operands[2] = gen_reg_rtx (SImode);
5858 operands[5] = gen_lowpart (QImode, operands[2]);
;; storeinthi: store a halfword CONSTANT to memory as two byte stores.
;; Loads each byte of the constant into an SImode register (sharing one
;; register when both bytes are equal), swapping which byte goes first for
;; big-endian targets, then stores QImode lowparts at offsets 0 and 1.
;; NOTE(review): interior lines (prefixes 5867-5869, 5874, 5876, 5878, 5881,
;; 5885-5886, 5889-5892, 5896-5897, 5900-5902) are elided in this listing.
5862 ;; Subroutine to store a half word integer constant into memory.
5863 (define_expand "storeinthi"
5864 [(set (match_operand 0 "" "")
5865 (match_operand 1 "" ""))
5866 (set (match_dup 3) (match_dup 2))]
5870 HOST_WIDE_INT value = INTVAL (operands[1]);
5871 rtx addr = XEXP (operands[0], 0);
5872 rtx op0 = operands[0];
5873 enum rtx_code code = GET_CODE (addr);
5875 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5877 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5879 operands[1] = gen_reg_rtx (SImode);
5880 if (BYTES_BIG_ENDIAN)
5882 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5883 if ((value & 255) == ((value >> 8) & 255))
5884 operands[2] = operands[1];
5887 operands[2] = gen_reg_rtx (SImode);
5888 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5893 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5894 if ((value & 255) == ((value >> 8) & 255))
5895 operands[2] = operands[1];
5898 operands[2] = gen_reg_rtx (SImode);
5899 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5903 operands[3] = adjust_address (op0, QImode, 1);
5904 operands[0] = adjust_address (operands[0], QImode, 0);
5905 operands[2] = gen_lowpart (QImode, operands[2]);
5906 operands[1] = gen_lowpart (QImode, operands[1]);
;; storehi_single_op: on ARMv4+ 32-bit targets a halfword can be stored with
;; a single strh, so just force the source into a register if needed.
5910 (define_expand "storehi_single_op"
5911 [(set (match_operand:HI 0 "memory_operand")
5912 (match_operand:HI 1 "general_operand"))]
5913 "TARGET_32BIT && arm_arch4"
5915 if (!s_register_operand (operands[1], HImode))
5916 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; movhi expander: canonicalize HImode moves for the four target families.
;; Visible structure: 32-bit/ARM path (stores via storehi_single_op /
;; storeinthi / storehi[_bigend], constants sign-extended into SImode
;; pseudos, pre-ARMv4 byte-wise loads via movhi_bytes or a widened aligned
;; SImode load), Thumb-2 path, and Thumb-1 path (address fix-ups for SP/
;; virtual-register-relative mems, large constants via SImode subregs
;; during reload).
;; NOTE(review): many interior lines of this C fragment are elided in this
;; listing (numeric prefixes jump throughout, e.g. 5923-5924, 5927-5928,
;; 5930-5934, 5995-5997, 6008-6011, 6106-6111) — brace structure is
;; incomplete as shown.
5920 (define_expand "movhi"
5921 [(set (match_operand:HI 0 "general_operand")
5922 (match_operand:HI 1 "general_operand"))]
5925 gcc_checking_assert (aligned_operand (operands[0], HImode));
5926 gcc_checking_assert (aligned_operand (operands[1], HImode));
5929 if (can_create_pseudo_p ())
5931 if (MEM_P (operands[0]))
5935 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5938 if (CONST_INT_P (operands[1]))
5939 emit_insn (gen_storeinthi (operands[0], operands[1]));
5942 if (MEM_P (operands[1]))
5943 operands[1] = force_reg (HImode, operands[1]);
5944 if (BYTES_BIG_ENDIAN)
5945 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5947 emit_insn (gen_storehi (operands[1], operands[0]));
5951 /* Sign extend a constant, and keep it in an SImode reg. */
5952 else if (CONST_INT_P (operands[1]))
5954 rtx reg = gen_reg_rtx (SImode);
5955 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5957 /* If the constant is already valid, leave it alone. */
5958 if (!const_ok_for_arm (val))
5960 /* If setting all the top bits will make the constant
5961 loadable in a single instruction, then set them.
5962 Otherwise, sign extend the number. */
5964 if (const_ok_for_arm (~(val | ~0xffff)))
5966 else if (val & 0x8000)
5970 emit_insn (gen_movsi (reg, GEN_INT (val)));
5971 operands[1] = gen_lowpart (HImode, reg);
5973 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5974 && MEM_P (operands[1]))
5976 rtx reg = gen_reg_rtx (SImode);
5978 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5979 operands[1] = gen_lowpart (HImode, reg);
5981 else if (!arm_arch4)
5983 if (MEM_P (operands[1]))
5986 rtx offset = const0_rtx;
5987 rtx reg = gen_reg_rtx (SImode);
5989 if ((REG_P (base = XEXP (operands[1], 0))
5990 || (GET_CODE (base) == PLUS
5991 && (CONST_INT_P (offset = XEXP (base, 1)))
5992 && ((INTVAL(offset) & 1) != 1)
5993 && REG_P (base = XEXP (base, 0))))
5994 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5998 new_rtx = widen_memory_access (operands[1], SImode,
5999 ((INTVAL (offset) & ~3)
6000 - INTVAL (offset)));
6001 emit_insn (gen_movsi (reg, new_rtx));
6002 if (((INTVAL (offset) & 2) != 0)
6003 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6005 rtx reg2 = gen_reg_rtx (SImode);
6007 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6012 emit_insn (gen_movhi_bytes (reg, operands[1]));
6014 operands[1] = gen_lowpart (HImode, reg);
6018 /* Handle loading a large integer during reload. */
6019 else if (CONST_INT_P (operands[1])
6020 && !const_ok_for_arm (INTVAL (operands[1]))
6021 && !const_ok_for_arm (~INTVAL (operands[1])))
6023 /* Writing a constant to memory needs a scratch, which should
6024 be handled with SECONDARY_RELOADs. */
6025 gcc_assert (REG_P (operands[0]));
6027 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6028 emit_insn (gen_movsi (operands[0], operands[1]));
6032 else if (TARGET_THUMB2)
6034 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6035 if (can_create_pseudo_p ())
6037 if (!REG_P (operands[0]))
6038 operands[1] = force_reg (HImode, operands[1]);
6039 /* Zero extend a constant, and keep it in an SImode reg. */
6040 else if (CONST_INT_P (operands[1]))
6042 rtx reg = gen_reg_rtx (SImode);
6043 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6045 emit_insn (gen_movsi (reg, GEN_INT (val)));
6046 operands[1] = gen_lowpart (HImode, reg);
6050 else /* TARGET_THUMB1 */
6052 if (can_create_pseudo_p ())
6054 if (CONST_INT_P (operands[1]))
6056 rtx reg = gen_reg_rtx (SImode);
6058 emit_insn (gen_movsi (reg, operands[1]));
6059 operands[1] = gen_lowpart (HImode, reg);
6062 /* ??? We shouldn't really get invalid addresses here, but this can
6063 happen if we are passed a SP (never OK for HImode/QImode) or
6064 virtual register (also rejected as illegitimate for HImode/QImode)
6065 relative address. */
6066 /* ??? This should perhaps be fixed elsewhere, for instance, in
6067 fixup_stack_1, by checking for other kinds of invalid addresses,
6068 e.g. a bare reference to a virtual register. This may confuse the
6069 alpha though, which must handle this case differently. */
6070 if (MEM_P (operands[0])
6071 && !memory_address_p (GET_MODE (operands[0]),
6072 XEXP (operands[0], 0)))
6074 = replace_equiv_address (operands[0],
6075 copy_to_reg (XEXP (operands[0], 0)));
6077 if (MEM_P (operands[1])
6078 && !memory_address_p (GET_MODE (operands[1]),
6079 XEXP (operands[1], 0)))
6081 = replace_equiv_address (operands[1],
6082 copy_to_reg (XEXP (operands[1], 0)));
6084 if (MEM_P (operands[1]) && optimize > 0)
6086 rtx reg = gen_reg_rtx (SImode);
6088 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6089 operands[1] = gen_lowpart (HImode, reg);
6092 if (MEM_P (operands[0]))
6093 operands[1] = force_reg (HImode, operands[1]);
6095 else if (CONST_INT_P (operands[1])
6096 && !satisfies_constraint_I (operands[1]))
6098 /* Handle loading a large integer during reload. */
6100 /* Writing a constant to memory needs a scratch, which should
6101 be handled with SECONDARY_RELOADs. */
6102 gcc_assert (REG_P (operands[0]));
6104 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6105 emit_insn (gen_movsi (operands[0], operands[1]));
;; movhi_bytes: pre-ARMv4 halfword load as two zero-extended byte loads
;; combined with shift+or.  operands[4]/[5] select which byte lands in the
;; high position, swapped for big-endian.
;; NOTE(review): interior lines (prefixes 6114, 6118-6121, 6123, 6128,
;; 6131-6132, 6134, 6137-6139, 6142-6145) are elided; the mem1/mem2 uses
;; after their creation are among the missing lines.
6112 (define_expand "movhi_bytes"
6113 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6115 (zero_extend:SI (match_dup 6)))
6116 (set (match_operand:SI 0 "" "")
6117 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6122 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6124 mem1 = change_address (operands[1], QImode, addr);
6125 mem2 = change_address (operands[1], QImode,
6126 plus_constant (Pmode, addr, 1));
6127 operands[0] = gen_lowpart (SImode, operands[0]);
6129 operands[2] = gen_reg_rtx (SImode);
6130 operands[3] = gen_reg_rtx (SImode);
6133 if (BYTES_BIG_ENDIAN)
6135 operands[4] = operands[2];
6136 operands[5] = operands[3];
6140 operands[4] = operands[3];
6141 operands[5] = operands[2];
;; movhi_bigend: big-endian halfword load — load the containing word,
;; rotate/arithmetic-shift so the halfword ends up (sign-correct) in the
;; low 16 bits, then take the HImode lowpart.
;; NOTE(review): interior pattern lines (prefixes 6147, 6149-6150,
;; 6153-6155, 6159-6161) are elided in this listing.
6146 (define_expand "movhi_bigend"
6148 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6151 (ashiftrt:SI (match_dup 2) (const_int 16)))
6152 (set (match_operand:HI 0 "s_register_operand")
6156 operands[2] = gen_reg_rtx (SImode);
6157 operands[3] = gen_reg_rtx (SImode);
6158 operands[4] = gen_lowpart (HImode, operands[3]);
;; *movhi_insn_arch4: ARMv4+ HImode move — mov/mvn/movw immediates plus
;; strh/ldrh memory alternatives (movw requires v6t2 per the arch attr).
;; The type attribute distinguishes mov_imm vs mov_reg for alternative 0
;; based on whether operand 1 is a constant.
;; NOTE(review): the insn condition's opening line and the "@" template
;; opener (prefixes 6166, 6170) are elided in this listing.
6162 ;; Pattern to recognize insn generated default case above
6163 (define_insn "*movhi_insn_arch4"
6164 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6165 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6167 && arm_arch4 && !TARGET_HARD_FLOAT
6168 && (register_operand (operands[0], HImode)
6169 || register_operand (operands[1], HImode))"
6171 mov%?\\t%0, %1\\t%@ movhi
6172 mvn%?\\t%0, #%B1\\t%@ movhi
6173 movw%?\\t%0, %L1\\t%@ movhi
6174 strh%?\\t%1, %0\\t%@ movhi
6175 ldrh%?\\t%0, %1\\t%@ movhi"
6176 [(set_attr "predicable" "yes")
6177 (set_attr "pool_range" "*,*,*,*,256")
6178 (set_attr "neg_pool_range" "*,*,*,*,244")
6179 (set_attr "arch" "*,*,v6t2,*,*")
6180 (set_attr_alternative "type"
6181 [(if_then_else (match_operand 1 "const_int_operand" "")
6182 (const_string "mov_imm" )
6183 (const_string "mov_reg"))
6184 (const_string "mvn_imm")
6185 (const_string "mov_imm")
6186 (const_string "store_4")
6187 (const_string "load_4")])]
;; *movhi_bytes: register-only HImode move for ARM state (no memory
;; alternatives) — mov immediate, mov register, or mvn immediate.
;; NOTE(review): the "@" template opener (prefix 6194) is elided here.
6190 (define_insn "*movhi_bytes"
6191 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6192 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6193 "TARGET_ARM && !TARGET_HARD_FLOAT"
6195 mov%?\\t%0, %1\\t%@ movhi
6196 mov%?\\t%0, %1\\t%@ movhi
6197 mvn%?\\t%0, #%B1\\t%@ movhi"
6198 [(set_attr "predicable" "yes")
6199 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6202 ;; We use a DImode scratch because we may occasionally need an additional
6203 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6204 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6205 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6206 ;; to be correctly handled in default_secondary_reload function.
;; reload_outhi: secondary-reload helper for storing HImode to awkward
;; memory; operand 2 is the DImode scratch (see comment block above).
;; Dispatches to arm_reload_out_hi or thumb_reload_out_hi.
;; NOTE(review): the condition line and the if/else around the two calls
;; (prefixes 6211-6212, 6214, 6216-6219) are elided in this listing.
6207 (define_expand "reload_outhi"
6208 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6209 (match_operand:HI 1 "s_register_operand" "r")
6210 (match_operand:DI 2 "s_register_operand" "=&l")])]
6213 arm_reload_out_hi (operands);
6215 thumb_reload_out_hi (operands);
;; reload_inhi: secondary-reload helper for loading HImode from awkward
;; memory with a DImode scratch.  The Thumb path reuses
;; thumb_reload_out_hi (as in upstream GCC, which handles both directions).
;; NOTE(review): the condition line and the if/else structure (prefixes
;; 6224-6226, 6228, 6230-6232) are elided in this listing.
6220 (define_expand "reload_inhi"
6221 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6222 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6223 (match_operand:DI 2 "s_register_operand" "=&r")])]
6227 arm_reload_in_hi (operands);
6229 thumb_reload_out_hi (operands);
;; movqi expander: canonicalize QImode moves.  Constants are masked to an
;; unsigned byte on Thumb (more likely to fit a movs) and loaded via SImode;
;; invalid SP/virtual-register-relative addresses are rebuilt through a
;; register; loads are widened with zero_extendqisi2 when optimizing; large
;; constants during reload go through an SImode subreg of the destination.
;; NOTE(review): interior lines are elided in this listing (prefixes jump,
;; e.g. 6236-6237, 6239, 6241, 6243, 6245, 6248, 6250, 6253-6256, 6268,
;; 6274, 6277-6278, 6285-6286, 6289, 6293, 6295, 6299, 6302-6306) — brace
;; structure is incomplete as shown.
6233 (define_expand "movqi"
6234 [(set (match_operand:QI 0 "general_operand")
6235 (match_operand:QI 1 "general_operand"))]
6238 /* Everything except mem = const or mem = mem can be done easily */
6240 if (can_create_pseudo_p ())
6242 if (CONST_INT_P (operands[1]))
6244 rtx reg = gen_reg_rtx (SImode);
6246 /* For thumb we want an unsigned immediate, then we are more likely
6247 to be able to use a movs insn. */
6249 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6251 emit_insn (gen_movsi (reg, operands[1]));
6252 operands[1] = gen_lowpart (QImode, reg);
6257 /* ??? We shouldn't really get invalid addresses here, but this can
6258 happen if we are passed a SP (never OK for HImode/QImode) or
6259 virtual register (also rejected as illegitimate for HImode/QImode)
6260 relative address. */
6261 /* ??? This should perhaps be fixed elsewhere, for instance, in
6262 fixup_stack_1, by checking for other kinds of invalid addresses,
6263 e.g. a bare reference to a virtual register. This may confuse the
6264 alpha though, which must handle this case differently. */
6265 if (MEM_P (operands[0])
6266 && !memory_address_p (GET_MODE (operands[0]),
6267 XEXP (operands[0], 0)))
6269 = replace_equiv_address (operands[0],
6270 copy_to_reg (XEXP (operands[0], 0)));
6271 if (MEM_P (operands[1])
6272 && !memory_address_p (GET_MODE (operands[1]),
6273 XEXP (operands[1], 0)))
6275 = replace_equiv_address (operands[1],
6276 copy_to_reg (XEXP (operands[1], 0)));
6279 if (MEM_P (operands[1]) && optimize > 0)
6281 rtx reg = gen_reg_rtx (SImode);
6283 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6284 operands[1] = gen_lowpart (QImode, reg);
6287 if (MEM_P (operands[0]))
6288 operands[1] = force_reg (QImode, operands[1]);
6290 else if (TARGET_THUMB
6291 && CONST_INT_P (operands[1])
6292 && !satisfies_constraint_I (operands[1]))
6294 /* Handle loading a large integer during reload. */
6296 /* Writing a constant to memory needs a scratch, which should
6297 be handled with SECONDARY_RELOADs. */
6298 gcc_assert (REG_P (operands[0]));
6300 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6301 emit_insn (gen_movsi (operands[0], operands[1]));
;; *arm_movqi_insn: QImode move with 9 alternatives covering Thumb-2 short
;; encodings (t2, length 2) and generic 32-bit forms — reg moves, immediates
;; (I / Py / mvn K), and ldrb/strb variants (Uu/Uh address constraints).
;; NOTE (review): the insn condition's first line and the output template
;; (prefixes 6310, 6313-6322) are elided in this listing.
6307 (define_insn "*arm_movqi_insn"
6308 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6309 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6311 && ( register_operand (operands[0], QImode)
6312 || register_operand (operands[1], QImode))"
6323 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6324 (set_attr "predicable" "yes")
6325 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6326 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6327 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; movhf expander: half-precision float moves.  Visible behavior: force the
;; source into a register when storing to memory (32-bit path) or when the
;; destination is not a register (Thumb-1 path with pseudos available).
;; NOTE(review): condition and several structural lines (prefixes 6334-6335,
;; 6338-6339, 6342, 6344, 6346, 6349-6353) are elided in this listing.
6331 (define_expand "movhf"
6332 [(set (match_operand:HF 0 "general_operand")
6333 (match_operand:HF 1 "general_operand"))]
6336 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6337 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6340 if (MEM_P (operands[0]))
6341 operands[1] = force_reg (HFmode, operands[1]);
6343 else /* TARGET_THUMB1 */
6345 if (can_create_pseudo_p ())
6347 if (!REG_P (operands[0]))
6348 operands[1] = force_reg (HFmode, operands[1]);
;; *arm32_movhf: soft-float HFmode (__fp16) move — ldrh/strh for memory,
;; mov for register copies, and constant materialization via the target
;; bit-pattern: a single movw on Thumb-2-capable cores, otherwise
;; mov+orr of the high and low bytes (hence length 8 for alternative 3).
;; NOTE(review): interior lines of the C fragment (prefixes 6360, 6362,
;; 6370-6373, 6375, 6380, 6383, 6385-6390) are elided in this listing.
6354 (define_insn "*arm32_movhf"
6355 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6356 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6357 "TARGET_32BIT && !TARGET_HARD_FLOAT
6358 && ( s_register_operand (operands[0], HFmode)
6359 || s_register_operand (operands[1], HFmode))"
6361 switch (which_alternative)
6363 case 0: /* ARM register from memory */
6364 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6365 case 1: /* memory from ARM register */
6366 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6367 case 2: /* ARM register from ARM register */
6368 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6369 case 3: /* ARM register from constant */
6374 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6376 ops[0] = operands[0];
6377 ops[1] = GEN_INT (bits);
6378 ops[2] = GEN_INT (bits & 0xff00);
6379 ops[3] = GEN_INT (bits & 0x00ff);
6381 if (arm_arch_thumb2)
6382 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6384 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6391 [(set_attr "conds" "unconditional")
6392 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6393 (set_attr "length" "4,4,4,8")
6394 (set_attr "predicable" "yes")]
;; movsf expander: SFmode moves.  Forces mem destinations' sources into
;; registers; when the literal pool is disabled and a hard-float constant
;; can't be a VFP immediate, routes it through
;; no_literal_pool_sf_immediate (load with clobber, later split to
;; MOV/MOVT via a GPR).
;; NOTE(review): interior lines (prefixes 6400-6401, 6404-6405, 6408, 6410,
;; 6412, 6415-6417, 6425, 6428-6433) are elided in this listing.
6397 (define_expand "movsf"
6398 [(set (match_operand:SF 0 "general_operand")
6399 (match_operand:SF 1 "general_operand"))]
6402 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6403 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6406 if (MEM_P (operands[0]))
6407 operands[1] = force_reg (SFmode, operands[1]);
6409 else /* TARGET_THUMB1 */
6411 if (can_create_pseudo_p ())
6413 if (!REG_P (operands[0]))
6414 operands[1] = force_reg (SFmode, operands[1]);
6418 /* Cannot load it directly, generate a load with clobber so that it can be
6419 loaded via GPR with MOV / MOVT. */
6420 if (arm_disable_literal_pool
6421 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6422 && CONST_DOUBLE_P (operands[1])
6423 && TARGET_HARD_FLOAT
6424 && !vfp3_const_double_rtx (operands[1]))
6426 rtx clobreg = gen_reg_rtx (SFmode);
6427 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
;; Split: rewrite an SF constant move into a core register as the
;; equivalent SImode constant move (the bit pattern via gen_lowpart),
;; bailing out (FAIL, presumably — tail elided) if either lowpart can't
;; be formed.
;; NOTE(review): the (define_split header, part of the condition and the
;; FAIL tail (prefixes 6436, 6439-6440, 6443, 6447-6450) are elided here.
6434 ;; Transform a floating-point move of a constant into a core register into
6435 ;; an SImode operation.
6437 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6438 (match_operand:SF 1 "immediate_operand" ""))]
6441 && CONST_DOUBLE_P (operands[1])"
6442 [(set (match_dup 2) (match_dup 3))]
6444 operands[2] = gen_lowpart (SImode, operands[0]);
6445 operands[3] = gen_lowpart (SImode, operands[1]);
6446 if (operands[2] == 0 || operands[3] == 0)
;; *arm_movsf_soft_insn: soft-float SFmode move through core registers —
;; mov / ldr / str alternatives.  Alternative 1 refuses to emit a literal-
;; pool ldr for a non-memory constant when arm_disable_literal_pool is set
;; (handled instead by the splitter below).
;; NOTE(review): the condition's first line and parts of the C fragment
;; (prefixes 6454, 6458, 6460, 6462, 6465, 6469-6470) are elided here.
6451 (define_insn "*arm_movsf_soft_insn"
6452 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6453 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6455 && TARGET_SOFT_FLOAT
6456 && (!MEM_P (operands[0])
6457 || register_operand (operands[1], SFmode))"
6459 switch (which_alternative)
6461 case 0: return \"mov%?\\t%0, %1\";
6463 /* Cannot load it directly, split to load it via MOV / MOVT. */
6464 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6466 return \"ldr%?\\t%0, %1\\t%@ float\";
6467 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6468 default: gcc_unreachable ();
6471 [(set_attr "predicable" "yes")
6472 (set_attr "type" "mov_reg,load_4,store_4")
6473 (set_attr "arm_pool_range" "*,4096,*")
6474 (set_attr "thumb2_pool_range" "*,4094,*")
6475 (set_attr "arm_neg_pool_range" "*,4084,*")
6476 (set_attr "thumb2_neg_pool_range" "*,0,*")]
;; Splitter: with the literal pool disabled under soft-float, materialize an
;; SF constant by converting its IEEE bit pattern (real_to_target) to an
;; SImode immediate and moving that into the SImode subreg of operand 0.
;; NOTE(review): the (define_split header, buf declaration and tail
;; (prefixes 6480, 6484-6486, 6490-6493) are elided in this listing.
6479 ;; Splitter for the above.
6481 [(set (match_operand:SF 0 "s_register_operand")
6482 (match_operand:SF 1 "const_double_operand"))]
6483 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6487 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6488 rtx cst = gen_int_mode (buf, SImode);
6489 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; movdf expander: DFmode moves, mirroring movsf.  Memory destinations get
;; register sources; with the literal pool disabled, hard-float constants
;; that are neither ARM-encodable nor VFP3 immediates are routed through
;; no_literal_pool_df_immediate with a clobber register.
;; NOTE(review): interior lines (prefixes 6497-6498, 6501-6502, 6505, 6507,
;; 6509, 6512-6514, 6523, 6526-6531) are elided in this listing.
6494 (define_expand "movdf"
6495 [(set (match_operand:DF 0 "general_operand")
6496 (match_operand:DF 1 "general_operand"))]
6499 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6500 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6503 if (MEM_P (operands[0]))
6504 operands[1] = force_reg (DFmode, operands[1]);
6506 else /* TARGET_THUMB */
6508 if (can_create_pseudo_p ())
6510 if (!REG_P (operands[0]))
6511 operands[1] = force_reg (DFmode, operands[1]);
6515 /* Cannot load it directly, generate a load with clobber so that it can be
6516 loaded via GPR with MOV / MOVT. */
6517 if (arm_disable_literal_pool
6518 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6519 && CONSTANT_P (operands[1])
6520 && TARGET_HARD_FLOAT
6521 && !arm_const_double_rtx (operands[1])
6522 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6524 rtx clobreg = gen_reg_rtx (DFmode);
6525 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
;; reload_outdf: secondary-reload store of a DF value held in integer regs.
;; Handles each address form: simple reg address reused directly; POST_INC/
;; PRE_DEC becomes a plain DImode move; PRE_INC pre-adjusts the base by 8;
;; POST_DEC stores then subtracts 8; other reg+offset forms compute the
;; address into the SImode scratch (operand 2) first.
;; NOTE(review): interior lines (prefixes 6533, 6539-6541, 6543-6544, 6547,
;; 6551-6552, 6554, 6556, 6558-6559, 6562, 6565, 6567-6568, 6571-6575) are
;; elided in this listing.
6532 ;; Reloading a df mode value stored in integer regs to memory can require a
6534 ;; Another reload_out<m> pattern that requires special constraints.
6535 (define_expand "reload_outdf"
6536 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6537 (match_operand:DF 1 "s_register_operand" "r")
6538 (match_operand:SI 2 "s_register_operand" "=&r")]
6542 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6545 operands[2] = XEXP (operands[0], 0);
6546 else if (code == POST_INC || code == PRE_DEC)
6548 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6549 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6550 emit_insn (gen_movdi (operands[0], operands[1]));
6553 else if (code == PRE_INC)
6555 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6557 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6560 else if (code == POST_DEC)
6561 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6563 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6564 XEXP (XEXP (operands[0], 0), 1)));
6566 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6569 if (code == POST_DEC)
6570 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; *movdf_soft_insn: soft-float DFmode move via core register pairs.
;; Constant alternatives Da/Db/Dc take 2/3/4 insns (length attr); memory
;; alternatives are double-word load/store emitted by output_move_double.
;; Refuses the literal-pool path for constants when
;; arm_disable_literal_pool is set (split handles those).
;; NOTE(review): parts of the C fragment (prefixes 6582, 6584-6589,
;; 6592-6594, 6596-6597) are elided in this listing.
6576 (define_insn "*movdf_soft_insn"
6577 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6578 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6579 "TARGET_32BIT && TARGET_SOFT_FLOAT
6580 && ( register_operand (operands[0], DFmode)
6581 || register_operand (operands[1], DFmode))"
6583 switch (which_alternative)
6590 /* Cannot load it directly, split to load it via MOV / MOVT. */
6591 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6595 return output_move_double (operands, true, NULL);
6598 [(set_attr "length" "8,12,16,8,8")
6599 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6600 (set_attr "arm_pool_range" "*,*,*,1020,*")
6601 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6602 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6603 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6606 ;; Splitter for the above.
6608 [(set (match_operand:DF 0 "s_register_operand")
6609 (match_operand:DF 1 "const_double_operand"))]
6610 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6614 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6615 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6616 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6617 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6618 rtx cst = gen_int_mode (ival, DImode);
6619 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6625 ;; load- and store-multiple insns
6626 ;; The arm can load/store any set of registers, provided that they are in
6627 ;; ascending order, but these expanders assume a contiguous set.
;; Expander for LDM: operand 0 = first destination register, operand 1 =
;; source memory, operand 2 = register count. The guard rejects anything
;; that is not a constant count in [2, MAX_LDM_STM_OPS] over a contiguous
;; run of core registers that stays below LAST_ARM_REGNUM (i.e. never
;; touching PC).
6629 (define_expand "load_multiple"
6630 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6631 (match_operand:SI 1 "" ""))
6632 (use (match_operand:SI 2 "" ""))])]
6635 HOST_WIDE_INT offset = 0;
6637 /* Support only fixed point registers. */
6638 if (!CONST_INT_P (operands[2])
6639 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6640 || INTVAL (operands[2]) < 2
6641 || !MEM_P (operands[1])
6642 || !REG_P (operands[0])
6643 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6644 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6648 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6649 INTVAL (operands[2]),
6650 force_reg (SImode, XEXP (operands[1], 0)),
6651 FALSE, operands[1], &offset);
;; Expander for STM: mirror image of load_multiple (operand 0 is the
;; destination memory, operand 1 the first source register), with the same
;; count/contiguity validation.
6654 (define_expand "store_multiple"
6655 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6656 (match_operand:SI 1 "" ""))
6657 (use (match_operand:SI 2 "" ""))])]
6660 HOST_WIDE_INT offset = 0;
6662 /* Support only fixed point registers. */
6663 if (!CONST_INT_P (operands[2])
6664 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6665 || INTVAL (operands[2]) < 2
6666 || !REG_P (operands[1])
6667 || !MEM_P (operands[0])
6668 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6669 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6673 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6674 INTVAL (operands[2]),
6675 force_reg (SImode, XEXP (operands[0], 0)),
6676 FALSE, operands[0], &offset);
;; Block memset expander: succeeds only when arm_gen_setmem can emit an
;; inline sequence for the given (constant) length/value/alignment.
6680 (define_expand "setmemsi"
6681 [(match_operand:BLK 0 "general_operand")
6682 (match_operand:SI 1 "const_int_operand")
6683 (match_operand:SI 2 "const_int_operand")
6684 (match_operand:SI 3 "const_int_operand")]
6687 if (arm_gen_setmem (operands))
6694 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6695 ;; We could let this apply for blocks of less than this, but it clobbers so
6696 ;; many registers that there is then probably a better way.
;; Block copy expander: on 32-bit targets tries LDRD/STRD pairs first when
;; the tuning prefers them and we are not optimizing for size, then the
;; generic arm_gen_cpymemqi; Thumb-1 only handles word-aligned copies of
;; at most 48 bytes via thumb_expand_cpymemqi.
6698 (define_expand "cpymemqi"
6699 [(match_operand:BLK 0 "general_operand")
6700 (match_operand:BLK 1 "general_operand")
6701 (match_operand:SI 2 "const_int_operand")
6702 (match_operand:SI 3 "const_int_operand")]
6707 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6708 && !optimize_function_for_size_p (cfun))
6710 if (gen_cpymem_ldrd_strd (operands))
6715 if (arm_gen_cpymemqi (operands))
6719 else /* TARGET_THUMB1 */
6721 if ( INTVAL (operands[3]) != 4
6722 || INTVAL (operands[2]) > 48)
6725 thumb_expand_cpymemqi (operands);
6732 ;; Compare & branch insns
6733 ;; The range calculations are based as follows:
6734 ;; For forward branches, the address calculation returns the address of
6735 ;; the next instruction. This is 2 beyond the branch instruction.
6736 ;; For backward branches, the address calculation returns the address of
6737 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6738 ;; instruction for the shortest sequence, and 4 before the branch instruction
6739 ;; if we have to jump around an unconditional branch.
6740 ;; To the basic branch range the PC offset must be added (this is +4).
6741 ;; So for forward branches we have
6742 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6743 ;; And for backward branches we have
6744 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6746 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6747 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; Compare-and-branch expander for SImode. Validates/canonicalizes the
;; comparison via arm_validize_comparison, then emits a cbranch_cc jump;
;; on the Thumb-1 path it prefers cbranchsi4_scratch for negatable
;; immediates and otherwise forces non-cmp-able operands into a register.
6749 (define_expand "cbranchsi4"
6750 [(set (pc) (if_then_else
6751 (match_operator 0 "expandable_comparison_operator"
6752 [(match_operand:SI 1 "s_register_operand")
6753 (match_operand:SI 2 "nonmemory_operand")])
6754 (label_ref (match_operand 3 "" ""))
6760 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6762 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6766 if (thumb1_cmpneg_operand (operands[2], SImode))
6768 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6769 operands[3], operands[0]));
6772 if (!thumb1_cmp_operand (operands[2], SImode))
6773 operands[2] = force_reg (SImode, operands[2]);
;; SFmode compare-and-branch: hard-float only; delegates to cbranch_cc.
6776 (define_expand "cbranchsf4"
6777 [(set (pc) (if_then_else
6778 (match_operator 0 "expandable_comparison_operator"
6779 [(match_operand:SF 1 "s_register_operand")
6780 (match_operand:SF 2 "vfp_compare_operand")])
6781 (label_ref (match_operand 3 "" ""))
6783 "TARGET_32BIT && TARGET_HARD_FLOAT"
6784 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6785 operands[3])); DONE;"
;; DFmode compare-and-branch: additionally requires double-precision VFP
;; (!TARGET_VFP_SINGLE).
6788 (define_expand "cbranchdf4"
6789 [(set (pc) (if_then_else
6790 (match_operator 0 "expandable_comparison_operator"
6791 [(match_operand:DF 1 "s_register_operand")
6792 (match_operand:DF 2 "vfp_compare_operand")])
6793 (label_ref (match_operand 3 "" ""))
6795 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6796 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6797 operands[3])); DONE;"
;; DImode compare-and-branch: validates the comparison first, then emits
;; cbranch_cc on the (possibly rewritten) operands.
6800 (define_expand "cbranchdi4"
6801 [(set (pc) (if_then_else
6802 (match_operator 0 "expandable_comparison_operator"
6803 [(match_operand:DI 1 "s_register_operand")
6804 (match_operand:DI 2 "cmpdi_operand")])
6805 (label_ref (match_operand 3 "" ""))
6809 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6811 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6817 ;; Comparison and test insns
;; SImode compare setting the full CC register. Five alternatives: two
;; 16-bit Thumb-2 encodings (arch t2, length 2) and three 32-bit forms
;; covering register, addable-immediate (I) and negatable-immediate (L)
;; second operands.
6819 (define_insn "*arm_cmpsi_insn"
6820 [(set (reg:CC CC_REGNUM)
6821 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6822 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6830 [(set_attr "conds" "set")
6831 (set_attr "arch" "t2,t2,any,any,any")
6832 (set_attr "length" "2,2,4,4,4")
6833 (set_attr "predicable" "yes")
6834 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6835 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; Compare a register against a shifted register (reg CMP reg-shift).
;; Register-specified shift amounts are ARM-state only (arch "a").
6838 (define_insn "*cmpsi_shiftsi"
6839 [(set (reg:CC CC_REGNUM)
6840 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6841 (match_operator:SI 3 "shift_operator"
6842 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6843 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6846 [(set_attr "conds" "set")
6847 (set_attr "shift" "1")
6848 (set_attr "arch" "32,a,a")
6849 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Same comparison with the operands swapped; uses CC_SWP mode so later
;; users know the condition sense is reversed.
6851 (define_insn "*cmpsi_shiftsi_swp"
6852 [(set (reg:CC_SWP CC_REGNUM)
6853 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6854 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6855 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6856 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6859 [(set_attr "conds" "set")
6860 (set_attr "shift" "1")
6861 (set_attr "arch" "32,a,a")
6862 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Compare reg against negated shifted reg, producing only the Z flag
;; (CC_Z). The type attr picks imm vs. reg-shift based on operand 3.
6864 (define_insn "*arm_cmpsi_negshiftsi_si"
6865 [(set (reg:CC_Z CC_REGNUM)
6867 (neg:SI (match_operator:SI 1 "shift_operator"
6868 [(match_operand:SI 2 "s_register_operand" "r")
6869 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6870 (match_operand:SI 0 "s_register_operand" "r")))]
6873 [(set_attr "conds" "set")
6874 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6875 (const_string "alus_shift_imm")
6876 (const_string "alus_shift_reg")))
6877 (set_attr "predicable" "yes")]
6880 ;; DImode comparisons. The generic code generates branches that
6881 ;; if-conversion cannot reduce to a conditional compare, so we do
;; 64-bit signed compare (CC_NCV) with a scratch. After reload it splits
;; into CMP on the low words followed by SBCS on the high words; the split
;; code builds the high-part subtraction, folding a constant high word of
;; zero and negating constant high parts into a PLUS.
6884 (define_insn_and_split "*arm_cmpdi_insn"
6885 [(set (reg:CC_NCV CC_REGNUM)
6886 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6887 (match_operand:DI 1 "arm_di_operand" "rDi")))
6888 (clobber (match_scratch:SI 2 "=r"))]
6890 "#" ; "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6891 "&& reload_completed"
6892 [(set (reg:CC CC_REGNUM)
6893 (compare:CC (match_dup 0) (match_dup 1)))
6894 (parallel [(set (reg:CC CC_REGNUM)
6895 (compare:CC (match_dup 3) (match_dup 4)))
6897 (minus:SI (match_dup 5)
6898 (ltu:SI (reg:CC CC_REGNUM) (const_int 0))))])]
6900 operands[3] = gen_highpart (SImode, operands[0]);
6901 operands[0] = gen_lowpart (SImode, operands[0]);
6902 if (CONST_INT_P (operands[1]))
6904 operands[4] = gen_highpart_mode (SImode, DImode, operands[1]);
6905 if (operands[4] == const0_rtx)
6906 operands[5] = operands[3];
6908 operands[5] = gen_rtx_PLUS (SImode, operands[3],
6909 gen_int_mode (-UINTVAL (operands[4]),
6914 operands[4] = gen_highpart (SImode, operands[1]);
6915 operands[5] = gen_rtx_MINUS (SImode, operands[3], operands[4]);
6917 operands[1] = gen_lowpart (SImode, operands[1]);
6918 operands[2] = gen_lowpart (SImode, operands[2]);
6920 [(set_attr "conds" "set")
6921 (set_attr "length" "8")
6922 (set_attr "type" "multiple")]
;; 64-bit unsigned compare (CC_CZ): splits into CMP of the high words and
;; a conditionally-executed (EQ) CMP of the low words. Alternatives cover
;; short-it Thumb-2 encodings vs. ARM state.
6925 (define_insn_and_split "*arm_cmpdi_unsigned"
6926 [(set (reg:CC_CZ CC_REGNUM)
6927 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6928 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6931 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6932 "&& reload_completed"
6933 [(set (reg:CC CC_REGNUM)
6934 (compare:CC (match_dup 2) (match_dup 3)))
6935 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6936 (set (reg:CC CC_REGNUM)
6937 (compare:CC (match_dup 0) (match_dup 1))))]
6939 operands[2] = gen_highpart (SImode, operands[0]);
6940 operands[0] = gen_lowpart (SImode, operands[0]);
6941 if (CONST_INT_P (operands[1]))
6942 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6944 operands[3] = gen_highpart (SImode, operands[1]);
6945 operands[1] = gen_lowpart (SImode, operands[1]);
6947 [(set_attr "conds" "set")
6948 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6949 (set_attr "arch" "t2,t2,t2,a")
6950 (set_attr "length" "6,6,10,8")
6951 (set_attr "type" "multiple")]
;; DImode compare against zero: ORRS of the two halves into a scratch
;; sets Z iff the whole 64-bit value is zero.
6954 (define_insn "*arm_cmpdi_zero"
6955 [(set (reg:CC_Z CC_REGNUM)
6956 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6958 (clobber (match_scratch:SI 1 "=r"))]
6960 "orrs%?\\t%1, %Q0, %R0"
6961 [(set_attr "conds" "set")
6962 (set_attr "type" "logics_reg")]
6965 ; This insn allows redundant compares to be removed by cse, nothing should
6966 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6967 ; is deleted later on. The match_dup will match the mode here, so that
6968 ; mode changes of the condition codes aren't lost by this even though we don't
6969 ; specify what they are.
6971 (define_insn "*deleted_compare"
6972 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6974 "\\t%@ deleted compare"
6975 [(set_attr "conds" "set")
6976 (set_attr "length" "0")
6977 (set_attr "type" "no_insn")]
6981 ;; Conditional branch insns
;; Internal helper expander: materializes the comparison into the CC
;; register via arm_gen_compare_reg and rewrites the branch to test CC
;; against zero.
6983 (define_expand "cbranch_cc"
6985 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6986 (match_operand 2 "" "")])
6987 (label_ref (match_operand 3 "" ""))
6990 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6991 operands[1], operands[2], NULL_RTX);
6992 operands[2] = const0_rtx;"
6996 ;; Patterns to match conditional branch insns.
;; Conditional branch on a CC-register test. The arm_ccfsm_state check
;; cooperates with the ARM conditional-execution finite-state machine in
;; the output pass; the length attr shrinks to a short encoding on Thumb-2
;; when the target is within the b<cond> range (-250..256, per the range
;; notes earlier in this file).
6999 (define_insn "arm_cond_branch"
7001 (if_then_else (match_operator 1 "arm_comparison_operator"
7002 [(match_operand 2 "cc_register" "") (const_int 0)])
7003 (label_ref (match_operand 0 "" ""))
7007 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7009 arm_ccfsm_state += 2;
7012 return \"b%d1\\t%l0\";
7014 [(set_attr "conds" "use")
7015 (set_attr "type" "branch")
7016 (set (attr "length")
7018 (and (match_test "TARGET_THUMB2")
7019 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7020 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; As above but with branch taken on the inverse condition (%D1 instead
;; of %d1); the label is in the else-arm of the if_then_else.
7025 (define_insn "*arm_cond_branch_reversed"
7027 (if_then_else (match_operator 1 "arm_comparison_operator"
7028 [(match_operand 2 "cc_register" "") (const_int 0)])
7030 (label_ref (match_operand 0 "" ""))))]
7033 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7035 arm_ccfsm_state += 2;
7038 return \"b%D1\\t%l0\";
7040 [(set_attr "conds" "use")
7041 (set_attr "type" "branch")
7042 (set (attr "length")
7044 (and (match_test "TARGET_THUMB2")
7045 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7046 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Internal helper expander for cstore: materializes the comparison into
;; the CC register and rewrites the store-flag operation as CC vs. zero.
7055 (define_expand "cstore_cc"
7056 [(set (match_operand:SI 0 "s_register_operand")
7057 (match_operator:SI 1 "" [(match_operand 2 "" "")
7058 (match_operand 3 "" "")]))]
7060 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7061 operands[2], operands[3], NULL_RTX);
7062 operands[3] = const0_rtx;"
;; Store 0/1 depending on a CC condition; split after output into a
;; conditional move of 1 over 0 (the "#" template is never emitted).
7065 (define_insn_and_split "*mov_scc"
7066 [(set (match_operand:SI 0 "s_register_operand" "=r")
7067 (match_operator:SI 1 "arm_comparison_operator_mode"
7068 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7070 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7073 (if_then_else:SI (match_dup 1)
7077 [(set_attr "conds" "use")
7078 (set_attr "length" "8")
7079 (set_attr "type" "multiple")]
;; Store 0/-1 (negated flag): splits using ~0 as the true-value constant.
7082 (define_insn_and_split "*mov_negscc"
7083 [(set (match_operand:SI 0 "s_register_operand" "=r")
7084 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7085 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7087 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7090 (if_then_else:SI (match_dup 1)
7094 operands[3] = GEN_INT (~0);
7096 [(set_attr "conds" "use")
7097 (set_attr "length" "8")
7098 (set_attr "type" "multiple")]
;; Store ~1/~0 (bitwise-not of the flag): splits with constants ~1 and ~0.
7101 (define_insn_and_split "*mov_notscc"
7102 [(set (match_operand:SI 0 "s_register_operand" "=r")
7103 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7104 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7106 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7109 (if_then_else:SI (match_dup 1)
7113 operands[3] = GEN_INT (~1);
7114 operands[4] = GEN_INT (~0);
7116 [(set_attr "conds" "use")
7117 (set_attr "length" "8")
7118 (set_attr "type" "multiple")]
;; Store-flag expander for SImode. On 32-bit targets it simply forces
;; operand 3 into an add-operand and delegates to cstore_cc. The long
;; tail is the Thumb-1 path: branch-free open-coded sequences per
;; comparison code, first for comparisons against zero (EQ/NE/GE/LT-style
;; shifts and subtractions), then for general operands (SUB+EQ0/NE0,
;; sign/zero-shift pairs fed to thumb1_addsi3_addgeu, and cstoresi_ltu
;; for unsigned orderings). Unsupported codes fall through (FAIL lines
;; are among the elided source lines).
7121 (define_expand "cstoresi4"
7122 [(set (match_operand:SI 0 "s_register_operand")
7123 (match_operator:SI 1 "expandable_comparison_operator"
7124 [(match_operand:SI 2 "s_register_operand")
7125 (match_operand:SI 3 "reg_or_int_operand")]))]
7126 "TARGET_32BIT || TARGET_THUMB1"
7128 rtx op3, scratch, scratch2;
7132 if (!arm_add_operand (operands[3], SImode))
7133 operands[3] = force_reg (SImode, operands[3]);
7134 emit_insn (gen_cstore_cc (operands[0], operands[1],
7135 operands[2], operands[3]));
7139 if (operands[3] == const0_rtx)
7141 switch (GET_CODE (operands[1]))
7144 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7148 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
;; (x != 0) without a dedicated pattern: ((x | (x - 1)) >> 31) & 1.
7152 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7153 NULL_RTX, 0, OPTAB_WIDEN);
7154 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7155 NULL_RTX, 0, OPTAB_WIDEN);
7156 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7157 operands[0], 1, OPTAB_WIDEN);
;; (x >= 0) as (~x) logically shifted right by 31.
7161 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7163 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7164 NULL_RTX, 1, OPTAB_WIDEN);
;; (x <= 0)-style test via ((x >> 31) - x) >> 31 sequence.
7168 scratch = expand_binop (SImode, ashr_optab, operands[2],
7169 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7170 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7171 NULL_RTX, 0, OPTAB_WIDEN);
7172 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7176 /* LT is handled by generic code. No need for unsigned with 0. */
7183 switch (GET_CODE (operands[1]))
7186 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7187 NULL_RTX, 0, OPTAB_WIDEN);
7188 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7192 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7193 NULL_RTX, 0, OPTAB_WIDEN);
7194 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7198 op3 = force_reg (SImode, operands[3]);
7200 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7201 NULL_RTX, 1, OPTAB_WIDEN);
7202 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7203 NULL_RTX, 0, OPTAB_WIDEN);
7204 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7210 if (!thumb1_cmp_operand (op3, SImode))
7211 op3 = force_reg (SImode, op3);
7212 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7213 NULL_RTX, 0, OPTAB_WIDEN);
7214 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7215 NULL_RTX, 1, OPTAB_WIDEN);
7216 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7221 op3 = force_reg (SImode, operands[3]);
7222 scratch = force_reg (SImode, const0_rtx);
7223 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7229 if (!thumb1_cmp_operand (op3, SImode))
7230 op3 = force_reg (SImode, op3);
7231 scratch = force_reg (SImode, const0_rtx);
7232 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7238 if (!thumb1_cmp_operand (op3, SImode))
7239 op3 = force_reg (SImode, op3);
7240 scratch = gen_reg_rtx (SImode);
7241 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7245 op3 = force_reg (SImode, operands[3]);
7246 scratch = gen_reg_rtx (SImode);
7247 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7250 /* No good sequences for GT, LT. */
;; HFmode store-flag (FP16 instructions required): validates the
;; comparison, then delegates to cstore_cc.
7257 (define_expand "cstorehf4"
7258 [(set (match_operand:SI 0 "s_register_operand")
7259 (match_operator:SI 1 "expandable_comparison_operator"
7260 [(match_operand:HF 2 "s_register_operand")
7261 (match_operand:HF 3 "vfp_compare_operand")]))]
7262 "TARGET_VFP_FP16INST"
7264 if (!arm_validize_comparison (&operands[1],
7269 emit_insn (gen_cstore_cc (operands[0], operands[1],
7270 operands[2], operands[3]));
;; SFmode store-flag: hard-float only; straight delegation to cstore_cc.
7275 (define_expand "cstoresf4"
7276 [(set (match_operand:SI 0 "s_register_operand")
7277 (match_operator:SI 1 "expandable_comparison_operator"
7278 [(match_operand:SF 2 "s_register_operand")
7279 (match_operand:SF 3 "vfp_compare_operand")]))]
7280 "TARGET_32BIT && TARGET_HARD_FLOAT"
7281 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7282 operands[2], operands[3])); DONE;"
;; DFmode store-flag: additionally needs double-precision VFP.
7285 (define_expand "cstoredf4"
7286 [(set (match_operand:SI 0 "s_register_operand")
7287 (match_operator:SI 1 "expandable_comparison_operator"
7288 [(match_operand:DF 2 "s_register_operand")
7289 (match_operand:DF 3 "vfp_compare_operand")]))]
7290 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7291 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7292 operands[2], operands[3])); DONE;"
;; DImode store-flag: validate first, then delegate to cstore_cc.
7295 (define_expand "cstoredi4"
7296 [(set (match_operand:SI 0 "s_register_operand")
7297 (match_operator:SI 1 "expandable_comparison_operator"
7298 [(match_operand:DI 2 "s_register_operand")
7299 (match_operand:DI 3 "cmpdi_operand")]))]
7302 if (!arm_validize_comparison (&operands[1],
7306 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7313 ;; Conditional move insns
;; Conditional-move expanders. Each validates the comparison, generates
;; the CC-setting compare via arm_gen_compare_reg, and rewrites operand 1
;; as "<code> CC, 0" so the *cmov / *movsicc_insn patterns can match.
7315 (define_expand "movsicc"
7316 [(set (match_operand:SI 0 "s_register_operand")
7317 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7318 (match_operand:SI 2 "arm_not_operand")
7319 (match_operand:SI 3 "arm_not_operand")))]
7326 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7327 &XEXP (operands[1], 1)))
7330 code = GET_CODE (operands[1]);
7331 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7332 XEXP (operands[1], 1), NULL_RTX);
7333 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; HFmode conditional move (FP16 instructions required).
7337 (define_expand "movhfcc"
7338 [(set (match_operand:HF 0 "s_register_operand")
7339 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7340 (match_operand:HF 2 "s_register_operand")
7341 (match_operand:HF 3 "s_register_operand")))]
7342 "TARGET_VFP_FP16INST"
7345 enum rtx_code code = GET_CODE (operands[1]);
7348 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7349 &XEXP (operands[1], 1)))
7352 code = GET_CODE (operands[1]);
7353 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7354 XEXP (operands[1], 1), NULL_RTX);
7355 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SFmode conditional move (hard-float).
7359 (define_expand "movsfcc"
7360 [(set (match_operand:SF 0 "s_register_operand")
7361 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7362 (match_operand:SF 2 "s_register_operand")
7363 (match_operand:SF 3 "s_register_operand")))]
7364 "TARGET_32BIT && TARGET_HARD_FLOAT"
7367 enum rtx_code code = GET_CODE (operands[1]);
7370 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7371 &XEXP (operands[1], 1)))
7374 code = GET_CODE (operands[1]);
7375 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7376 XEXP (operands[1], 1), NULL_RTX);
7377 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; DFmode conditional move (double-precision VFP required).
7381 (define_expand "movdfcc"
7382 [(set (match_operand:DF 0 "s_register_operand")
7383 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7384 (match_operand:DF 2 "s_register_operand")
7385 (match_operand:DF 3 "s_register_operand")))]
7386 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7389 enum rtx_code code = GET_CODE (operands[1]);
7392 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7393 &XEXP (operands[1], 1)))
7395 code = GET_CODE (operands[1]);
7396 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7397 XEXP (operands[1], 1), NULL_RTX);
7398 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; VSEL-based conditional move for SF/DF on VFPv5 (ARMv8 FP). The C body
;; checks whether the condition maps to a VSEL-encodable code and emits
;; either the direct form or the inverse with swapped source operands.
7402 (define_insn "*cmov<mode>"
7403 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7404 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7405 [(match_operand 2 "cc_register" "") (const_int 0)])
7406 (match_operand:SDF 3 "s_register_operand"
7408 (match_operand:SDF 4 "s_register_operand"
7409 "<F_constraint>")))]
7410 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7413 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7420 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7425 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7431 [(set_attr "conds" "use")
7432 (set_attr "type" "fcsel")]
;; Same VSEL conditional move for HFmode (FP16 instructions).
7435 (define_insn "*cmovhf"
7436 [(set (match_operand:HF 0 "s_register_operand" "=t")
7437 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7438 [(match_operand 2 "cc_register" "") (const_int 0)])
7439 (match_operand:HF 3 "s_register_operand" "t")
7440 (match_operand:HF 4 "s_register_operand" "t")))]
7441 "TARGET_VFP_FP16INST"
7444 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7451 return \"vsel%d1.f16\\t%0, %3, %4\";
7456 return \"vsel%D1.f16\\t%0, %4, %3\";
7462 [(set_attr "conds" "use")
7463 (set_attr "type" "fcsel")]
;; SImode conditional move via predicated MOV/MVN pairs. Eight
;; alternatives combine register/immediate/inverted-immediate sources
;; with one arm tied to the destination; after reload it splits into
;; cond_exec SETs, reversing the condition for the else-arm with
;; reverse_condition[_maybe_unordered] depending on the CC mode.
7466 (define_insn_and_split "*movsicc_insn"
7467 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7469 (match_operator 3 "arm_comparison_operator"
7470 [(match_operand 4 "cc_register" "") (const_int 0)])
7471 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7472 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7483 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7484 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7485 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7486 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7487 "&& reload_completed"
7490 enum rtx_code rev_code;
7494 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7496 gen_rtx_SET (operands[0], operands[1])));
7498 rev_code = GET_CODE (operands[3]);
7499 mode = GET_MODE (operands[4]);
7500 if (mode == CCFPmode || mode == CCFPEmode)
7501 rev_code = reverse_condition_maybe_unordered (rev_code);
7503 rev_code = reverse_condition (rev_code);
7505 rev_cond = gen_rtx_fmt_ee (rev_code,
7509 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7511 gen_rtx_SET (operands[0], operands[2])));
7514 [(set_attr "length" "4,4,4,4,8,8,8,8")
7515 (set_attr "conds" "use")
7516 (set_attr_alternative "type"
7517 [(if_then_else (match_operand 2 "const_int_operand" "")
7518 (const_string "mov_imm")
7519 (const_string "mov_reg"))
7520 (const_string "mvn_imm")
7521 (if_then_else (match_operand 1 "const_int_operand" "")
7522 (const_string "mov_imm")
7523 (const_string "mov_reg"))
7524 (const_string "mvn_imm")
7525 (const_string "multiple")
7526 (const_string "multiple")
7527 (const_string "multiple")
7528 (const_string "multiple")])]
;; Soft-float SFmode conditional move: the value lives in a core register,
;; so a predicated MOV suffices; one arm is tied to the destination.
7531 (define_insn "*movsfcc_soft_insn"
7532 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7533 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7534 [(match_operand 4 "cc_register" "") (const_int 0)])
7535 (match_operand:SF 1 "s_register_operand" "0,r")
7536 (match_operand:SF 2 "s_register_operand" "r,0")))]
7537 "TARGET_ARM && TARGET_SOFT_FLOAT"
7541 [(set_attr "conds" "use")
7542 (set_attr "type" "mov_reg")]
7546 ;; Jump and linkage insns
;; Unconditional jump expander (standard name).
7548 (define_expand "jump"
7550 (label_ref (match_operand 0 "" "")))]
;; Unconditional branch insn. Cooperates with the conditional-execution
;; FSM (arm_ccfsm_state) in the output pass; the length attribute selects
;; the short Thumb-2 encoding when the target is within -2044..2048.
7555 (define_insn "*arm_jump"
7557 (label_ref (match_operand 0 "" "")))]
7561 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7563 arm_ccfsm_state += 2;
7566 return \"b%?\\t%l0\";
7569 [(set_attr "predicable" "yes")
7570 (set (attr "length")
7572 (and (match_test "TARGET_THUMB2")
7573 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7574 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7577 (set_attr "type" "branch")]
;; Call expander (standard name). Handles: NULL operand 2 in untyped
;; calls; forcing long-call targets into a register; FDPIC indirect calls
;; (loading the function descriptor); CMSE non-secure calls (separate
;; pattern); and restoring the FDPIC register (r9) after the call.
7580 (define_expand "call"
7581 [(parallel [(call (match_operand 0 "memory_operand")
7582 (match_operand 1 "general_operand"))
7583 (use (match_operand 2 "" ""))
7584 (clobber (reg:SI LR_REGNUM))])]
7589 tree addr = MEM_EXPR (operands[0]);
7591 /* In an untyped call, we can get NULL for operand 2. */
7592 if (operands[2] == NULL_RTX)
7593 operands[2] = const0_rtx;
7595 /* Decide if we should generate indirect calls by loading the
7596 32-bit address of the callee into a register before performing the
7598 callee = XEXP (operands[0], 0);
7599 if (GET_CODE (callee) == SYMBOL_REF
7600 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7602 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7604 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7605 /* Indirect call: set r9 with FDPIC value of callee. */
7606 XEXP (operands[0], 0)
7607 = arm_load_function_descriptor (XEXP (operands[0], 0));
7609 if (detect_cmse_nonsecure_call (addr))
7611 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7613 emit_call_insn (pat);
7617 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7618 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7621 /* Restore FDPIC register (r9) after call. */
7624 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7625 rtx initial_fdpic_reg
7626 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7628 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7629 initial_fdpic_reg));
;; Reloads the PIC/FDPIC register from its saved value (register or
;; memory) after a call; modelled as an unspec reading operand 0 too.
7636 (define_insn "restore_pic_register_after_call"
7637 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7638 (unspec:SI [(match_dup 0)
7639 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7640 UNSPEC_PIC_RESTORE))]
;; Plain internal call: the parallel clobbering LR is matched by the
;; call insn patterns below.
7647 (define_expand "call_internal"
7648 [(parallel [(call (match_operand 0 "memory_operand")
7649 (match_operand 1 "general_operand"))
7650 (use (match_operand 2 "" ""))
7651 (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure call: the callee address is wrapped in
;; UNSPEC_NONSECURE_MEM and forced into r4, as required by the
;; secure-gateway calling convention.
7653 (define_expand "nonsecure_call_internal"
7654 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7655 UNSPEC_NONSECURE_MEM)
7656 (match_operand 1 "general_operand"))
7657 (use (match_operand 2 "" ""))
7658 (clobber (reg:SI LR_REGNUM))])]
7663 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7664 gen_rtx_REG (SImode, R4_REGNUM),
7667 operands[0] = replace_equiv_address (operands[0], tmp);
;; Indirect call through a register on ARMv5t+ (BLX available).
7670 (define_insn "*call_reg_armv5"
7671 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7672 (match_operand 1 "" ""))
7673 (use (match_operand 2 "" ""))
7674 (clobber (reg:SI LR_REGNUM))]
7675 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7677 [(set_attr "type" "call")]
;; Indirect call through a register on pre-ARMv5t (no BLX); output_call
;; emits the MOV LR, PC sequence. Length 12 is the worst case.
7680 (define_insn "*call_reg_arm"
7681 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7682 (match_operand 1 "" ""))
7683 (use (match_operand 2 "" ""))
7684 (clobber (reg:SI LR_REGNUM))]
7685 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7687 return output_call (operands);
7689 ;; length is worst case, normally it is only two
7690 [(set_attr "length" "12")
7691 (set_attr "type" "call")]
;; Value-returning call expander: identical logic to the "call" expander
;; (long-call forcing, FDPIC descriptor loading, CMSE non-secure path,
;; FDPIC r9 restore) with the extra result operand 0.
7695 (define_expand "call_value"
7696 [(parallel [(set (match_operand 0 "" "")
7697 (call (match_operand 1 "memory_operand")
7698 (match_operand 2 "general_operand")))
7699 (use (match_operand 3 "" ""))
7700 (clobber (reg:SI LR_REGNUM))])]
7705 tree addr = MEM_EXPR (operands[1]);
7707 /* In an untyped call, we can get NULL for operand 2. */
7708 if (operands[3] == 0)
7709 operands[3] = const0_rtx;
7711 /* Decide if we should generate indirect calls by loading the
7712 32-bit address of the callee into a register before performing the
7714 callee = XEXP (operands[1], 0);
7715 if (GET_CODE (callee) == SYMBOL_REF
7716 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7718 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7720 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7721 /* Indirect call: set r9 with FDPIC value of callee. */
7722 XEXP (operands[1], 0)
7723 = arm_load_function_descriptor (XEXP (operands[1], 0));
7725 if (detect_cmse_nonsecure_call (addr))
7727 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7728 operands[2], operands[3]);
7729 emit_call_insn (pat);
7733 pat = gen_call_value_internal (operands[0], operands[1],
7734 operands[2], operands[3]);
7735 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7738 /* Restore FDPIC register (r9) after call. */
7741 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7742 rtx initial_fdpic_reg
7743 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7745 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7746 initial_fdpic_reg));
;; Internal value-returning call parallel, matched by the insns below.
7753 (define_expand "call_value_internal"
7754 [(parallel [(set (match_operand 0 "" "")
7755 (call (match_operand 1 "memory_operand")
7756 (match_operand 2 "general_operand")))
7757 (use (match_operand 3 "" ""))
7758 (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure value-returning call: callee address forced into r4,
;; same as nonsecure_call_internal.
7760 (define_expand "nonsecure_call_value_internal"
7761 [(parallel [(set (match_operand 0 "" "")
7762 (call (unspec:SI [(match_operand 1 "memory_operand")]
7763 UNSPEC_NONSECURE_MEM)
7764 (match_operand 2 "general_operand")))
7765 (use (match_operand 3 "" ""))
7766 (clobber (reg:SI LR_REGNUM))])]
7771 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7772 gen_rtx_REG (SImode, R4_REGNUM),
7775 operands[1] = replace_equiv_address (operands[1], tmp);
;; Value-returning call through a register on ARMv5T and later
;; (arm_arch5t); not used for sibling calls.
7778 (define_insn "*call_value_reg_armv5"
7779 [(set (match_operand 0 "" "")
7780 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7781 (match_operand 2 "" "")))
7782 (use (match_operand 3 "" ""))
7783 (clobber (reg:SI LR_REGNUM))]
7784 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7786 [(set_attr "type" "call")]
;; Pre-ARMv5T register call: the asm is produced by output_call(),
;; hence the larger 12-byte "length".
7789 (define_insn "*call_value_reg_arm"
7790 [(set (match_operand 0 "" "")
7791 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7792 (match_operand 2 "" "")))
7793 (use (match_operand 3 "" ""))
7794 (clobber (reg:SI LR_REGNUM))]
7795 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7797 return output_call (&operands[1]);
7799 [(set_attr "length" "12")
7800 (set_attr "type" "call")]
7803 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7804 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct (void) call to a SYMBOL_REF that is not a long call; emits
;; BL, or BLX on arm_arch5t when the callee is local and a mode change
;; is wanted (arm_change_mode_p).
7806 (define_insn "*call_symbol"
7807 [(call (mem:SI (match_operand:SI 0 "" ""))
7808 (match_operand 1 "" ""))
7809 (use (match_operand 2 "" ""))
7810 (clobber (reg:SI LR_REGNUM))]
7812 && !SIBLING_CALL_P (insn)
7813 && (GET_CODE (operands[0]) == SYMBOL_REF)
7814 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7817 rtx op = operands[0];
7819 /* Switch mode now when possible. */
7820 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7821 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7822 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7824 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7826 [(set_attr "type" "call")]
;; Value-returning counterpart of *call_symbol: direct BL/BLX to a
;; SYMBOL_REF callee (operand 1) that is not a long call.
7829 (define_insn "*call_value_symbol"
7830 [(set (match_operand 0 "" "")
7831 (call (mem:SI (match_operand:SI 1 "" ""))
7832 (match_operand:SI 2 "" "")))
7833 (use (match_operand 3 "" ""))
7834 (clobber (reg:SI LR_REGNUM))]
7836 && !SIBLING_CALL_P (insn)
7837 && (GET_CODE (operands[1]) == SYMBOL_REF)
7838 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7841 rtx op = operands[1];
7843 /* Switch mode now when possible. */
7844 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7845 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7846 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7848 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7850 [(set_attr "type" "call")]
;; RTL skeleton for a sibling (tail) call: CALL plus a USE of the
;; cookie operand; LR is not clobbered since the call does not return
;; here.
7853 (define_expand "sibcall_internal"
7854 [(parallel [(call (match_operand 0 "memory_operand")
7855 (match_operand 1 "general_operand"))
7857 (use (match_operand 2 "" ""))])])
7859 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Named "sibcall" expander: forces the target into a register unless
;; it is already a register or a (non-long-call) SYMBOL_REF, then
;; emits the pattern above via arm_emit_call_insn.
7860 (define_expand "sibcall"
7861 [(parallel [(call (match_operand 0 "memory_operand")
7862 (match_operand 1 "general_operand"))
7864 (use (match_operand 2 "" ""))])]
7870 if ((!REG_P (XEXP (operands[0], 0))
7871 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7872 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7873 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7874 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7876 if (operands[2] == NULL_RTX)
7877 operands[2] = const0_rtx;
7879 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7880 arm_emit_call_insn (pat, operands[0], true);
;; Value-returning sibling-call skeleton: SET of the result around the
;; CALL, plus the cookie USE.
7885 (define_expand "sibcall_value_internal"
7886 [(parallel [(set (match_operand 0 "" "")
7887 (call (match_operand 1 "memory_operand")
7888 (match_operand 2 "general_operand")))
7890 (use (match_operand 3 "" ""))])])
;; Named "sibcall_value" expander; mirrors "sibcall" above but the
;; callee address lives in operand 1 and the result in operand 0.
7892 (define_expand "sibcall_value"
7893 [(parallel [(set (match_operand 0 "" "")
7894 (call (match_operand 1 "memory_operand")
7895 (match_operand 2 "general_operand")))
7897 (use (match_operand 3 "" ""))])]
7903 if ((!REG_P (XEXP (operands[1], 0))
7904 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7905 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7906 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7907 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7909 if (operands[3] == NULL_RTX)
7910 operands[3] = const0_rtx;
7912 pat = gen_sibcall_value_internal (operands[0], operands[1],
7913 operands[2], operands[3]);
7914 arm_emit_call_insn (pat, operands[1], true);
;; Sibling-call output: alternative 1 (symbol) emits a plain branch
;; (B, with (PLT) when NEED_PLT_RELOC); alternative 0 (register) uses
;; BX on v4T/v5T+ and a MOV to PC otherwise.
7919 (define_insn "*sibcall_insn"
7920 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7921 (match_operand 1 "" ""))
7923 (use (match_operand 2 "" ""))]
7924 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7926 if (which_alternative == 1)
7927 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7930 if (arm_arch5t || arm_arch4t)
7931 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7933 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7936 [(set_attr "type" "call")]
;; Value-returning sibling call; same B / BX / MOV-to-PC selection as
;; *sibcall_insn, with the callee in operand 1.
7939 (define_insn "*sibcall_value_insn"
7940 [(set (match_operand 0 "" "")
7941 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7942 (match_operand 2 "" "")))
7944 (use (match_operand 3 "" ""))]
7945 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7947 if (which_alternative == 1)
7948 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7951 if (arm_arch5t || arm_arch4t)
7952 return \"bx%?\\t%1\";
7954 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7957 [(set_attr "type" "call")]
;; Named return expander, iterated over plain/simple return via the
;; <return_str>/<return_cond_*> attributes; Thumb-2 returns go through
;; thumb2_expand_return, and only normal (non-stack-aligning)
;; functions qualify.
7960 (define_expand "<return_str>return"
7962 "(TARGET_ARM || (TARGET_THUMB2
7963 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7964 && !IS_STACKALIGN (arm_current_func_type ())))
7965 <return_cond_false>"
7970 thumb2_expand_return (<return_simple_p>);
7977 ;; Often the return insn will be the same as loading from memory, so set attr
;; ARM-state return; the asm comes from output_return_instruction ().
;; NOTE(review): the arm_ccfsm_state manipulation cooperates with the
;; conditional-execution state machine in arm.c — confirm there.
7978 (define_insn "*arm_return"
7980 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7983 if (arm_ccfsm_state == 2)
7985 arm_ccfsm_state += 2;
7988 return output_return_instruction (const_true_rtx, true, false, false);
7990 [(set_attr "type" "load_4")
7991 (set_attr "length" "12")
7992 (set_attr "predicable" "yes")]
;; Conditional return taken when the comparison in operand 0 holds.
7995 (define_insn "*cond_<return_str>return"
7997 (if_then_else (match_operator 0 "arm_comparison_operator"
7998 [(match_operand 1 "cc_register" "") (const_int 0)])
8001 "TARGET_ARM <return_cond_true>"
8004 if (arm_ccfsm_state == 2)
8006 arm_ccfsm_state += 2;
8009 return output_return_instruction (operands[0], true, false,
8012 [(set_attr "conds" "use")
8013 (set_attr "length" "12")
8014 (set_attr "type" "load_4")]
;; As above but with the branch arms swapped, so the condition passed
;; to output_return_instruction is inverted (third argument true).
8017 (define_insn "*cond_<return_str>return_inverted"
8019 (if_then_else (match_operator 0 "arm_comparison_operator"
8020 [(match_operand 1 "cc_register" "") (const_int 0)])
8023 "TARGET_ARM <return_cond_true>"
8026 if (arm_ccfsm_state == 2)
8028 arm_ccfsm_state += 2;
8031 return output_return_instruction (operands[0], true, true,
8034 [(set_attr "conds" "use")
8035 (set_attr "length" "12")
8036 (set_attr "type" "load_4")]
;; Simple return (no epilogue restores needed): a single 4-byte
;; branch-type instruction.
8039 (define_insn "*arm_simple_return"
8044 if (arm_ccfsm_state == 2)
8046 arm_ccfsm_state += 2;
8049 return output_return_instruction (const_true_rtx, true, false, true);
8051 [(set_attr "type" "branch")
8052 (set_attr "length" "4")
8053 (set_attr "predicable" "yes")]
8056 ;; Generate a sequence of instructions to determine if the processor is
8057 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; Selects the mask 0x03fffffc (26-bit mode) when the UNSPEC_CHECK_ARCH
;; comparison yields EQ; operand 1 is materialized as the CC_NOOV
;; condition-code register by the preparation code.
8060 (define_expand "return_addr_mask"
8062 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8064 (set (match_operand:SI 0 "s_register_operand")
8065 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8067 (const_int 67108860)))] ; 0x03fffffc
8070 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM)
;; The architecture check itself: two TEQ instructions (8 bytes) that
;; set the condition codes differently on 26-bit vs 32-bit cores.
8073 (define_insn "*check_arch2"
8074 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8075 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8078 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8079 [(set_attr "length" "8")
8080 (set_attr "conds" "set")
8081 (set_attr "type" "multiple")]
8084 ;; Call subroutine returning any type.
;; Expands __builtin_apply-style calls: calls operand 0, then stores
;; each result register described by operand 2 into the result block
;; (operand 1). Result values starting at r0 are widened to TImode so
;; up to four core registers are captured. Disabled for FDPIC.
8086 (define_expand "untyped_call"
8087 [(parallel [(call (match_operand 0 "" "")
8089 (match_operand 1 "" "")
8090 (match_operand 2 "" "")])]
8091 "TARGET_EITHER && !TARGET_FDPIC"
8095 rtx par = gen_rtx_PARALLEL (VOIDmode,
8096 rtvec_alloc (XVECLEN (operands[2], 0)));
8097 rtx addr = gen_reg_rtx (Pmode);
8101 emit_move_insn (addr, XEXP (operands[1], 0));
8102 mem = change_address (operands[1], BLKmode, addr);
8104 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8106 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8108 /* Default code only uses r0 as a return value, but we could
8109 be using anything up to 4 registers. */
8110 if (REGNO (src) == R0_REGNUM)
8111 src = gen_rtx_REG (TImode, R0_REGNUM);
8113 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8115 size += GET_MODE_SIZE (GET_MODE (src));
8118 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8122 for (i = 0; i < XVECLEN (par, 0); i++)
8124 HOST_WIDE_INT offset = 0;
8125 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8128 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8130 mem = change_address (mem, GET_MODE (reg), NULL);
8131 if (REGNO (reg) == R0_REGNUM)
8133 /* On thumb we have to use a write-back instruction. */
8134 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8135 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8136 size = TARGET_ARM ? 16 : 0;
8140 emit_move_insn (mem, reg);
8141 size = GET_MODE_SIZE (GET_MODE (reg));
8145 /* The optimizer does not know that the call sets the function value
8146 registers we stored in the result block. We avoid problems by
8147 claiming that all hard registers are used and clobbered at this
8149 emit_insn (gen_blockage ());
;; Inverse of untyped_call: reload each result register listed in
;; operand 1 from the result block (operand 0), emit USEs so the
;; registers stay live, then perform a naked return. Disabled for
;; FDPIC, matching untyped_call.
8155 (define_expand "untyped_return"
8156 [(match_operand:BLK 0 "memory_operand")
8157 (match_operand 1 "" "")]
8158 "TARGET_EITHER && !TARGET_FDPIC"
8162 rtx addr = gen_reg_rtx (Pmode);
8166 emit_move_insn (addr, XEXP (operands[0], 0));
8167 mem = change_address (operands[0], BLKmode, addr);
8169 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8171 HOST_WIDE_INT offset = 0;
8172 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8175 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8177 mem = change_address (mem, GET_MODE (reg), NULL);
8178 if (REGNO (reg) == R0_REGNUM)
8180 /* On thumb we have to use a write-back instruction. */
8181 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8182 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8183 size = TARGET_ARM ? 16 : 0;
8187 emit_move_insn (reg, mem);
8188 size = GET_MODE_SIZE (GET_MODE (reg));
8192 /* Emit USE insns before the return. */
8193 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8194 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8196 /* Construct the return. */
8197 expand_naked_return ();
8203 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8204 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no machine code.
8206 (define_insn "blockage"
8207 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8210 [(set_attr "length" "0")
8211 (set_attr "type" "block")]
8214 ;; Since we hard code r0 here use the 'o' constraint to prevent
8215 ;; provoking undefined behaviour in the hardware with putting out
8216 ;; auto-increment operations with potentially r0 as the base register.
;; Single stack probe: a store (type store_4) to the probed slot.
8217 (define_insn "probe_stack"
8218 [(set (match_operand:SI 0 "memory_operand" "=o")
8219 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8222 [(set_attr "type" "store_4")
8223 (set_attr "predicable" "yes")]
;; Probe a range of stack slots; the loop is emitted by
;; output_probe_stack_range and clobbers the condition codes.
8226 (define_insn "probe_stack_range"
8227 [(set (match_operand:SI 0 "register_operand" "=r")
8228 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8229 (match_operand:SI 2 "register_operand" "r")]
8230 VUNSPEC_PROBE_STACK_RANGE))]
8233 return output_probe_stack_range (operands[0], operands[2]);
8235 [(set_attr "type" "multiple")
8236 (set_attr "conds" "clob")]
8239 ;; Named patterns for stack smashing protection.
;; Copies the guard value (operand 1) into the canary slot (operand 0)
;; using two scratch registers.
8240 (define_expand "stack_protect_combined_set"
8242 [(set (match_operand:SI 0 "memory_operand")
8243 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8245 (clobber (match_scratch:SI 2 ""))
8246 (clobber (match_scratch:SI 3 ""))])]
8251 ;; Use a separate insn from the above expand to be able to have the mem outside
8252 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8253 ;; try to reload the guard since we need to control how PIC access is done in
8254 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8255 ;; legitimize_pic_address ()).
;; Split puts the guard address in scratch operand 2 — via
;; legitimize_pic_address for PIC, directly, or through the constant
;; pool — then hands off to the unsplittable set insn below.
8256 (define_insn_and_split "*stack_protect_combined_set_insn"
8257 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8258 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8260 (clobber (match_scratch:SI 2 "=&l,&r"))
8261 (clobber (match_scratch:SI 3 "=&l,&r"))]
8265 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8267 (clobber (match_dup 2))])]
8272 /* Forces recomputing of GOT base now. */
8273 legitimize_pic_address (operands[1], SImode, operands[2], operands[3],
8274 true /*compute_now*/);
8278 if (address_operand (operands[1], SImode))
8279 operands[2] = operands[1];
8282 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8283 emit_move_insn (operands[2], mem);
8287 [(set_attr "arch" "t1,32")]
8290 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8291 ;; canary value does not live beyond the life of this sequence.
;; Load guard, store it to the canary slot, then immediately zero the
;; register holding the guard value (alternatives: Thumb-1 / 32-bit).
8292 (define_insn "*stack_protect_set_insn"
8293 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8294 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8296 (clobber (match_dup 1))]
8299 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8300 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8301 [(set_attr "length" "8,12")
8302 (set_attr "conds" "clob,nocond")
8303 (set_attr "type" "multiple")
8304 (set_attr "arch" "t1,32")]
;; Compares the canary slot (operand 0) against the guard (operand 1)
;; and branches to operand 2 on equality; two scratches plus CC are
;; clobbered.
8307 (define_expand "stack_protect_combined_test"
8311 (eq (match_operand:SI 0 "memory_operand")
8312 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8314 (label_ref (match_operand 2))
8316 (clobber (match_scratch:SI 3 ""))
8317 (clobber (match_scratch:SI 4 ""))
8318 (clobber (reg:CC CC_REGNUM))])]
8323 ;; Use a separate insn from the above expand to be able to have the mem outside
8324 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8325 ;; try to reload the guard since we need to control how PIC access is done in
8326 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8327 ;; legitimize_pic_address ()).
;; Split resolves the guard address into scratch operand 3 (PIC,
;; direct, or constant pool), then emits the arch-specific test insn
;; and the conditional branch (CC-based for 32-bit, cbranchsi4 for
;; Thumb-1).
8328 (define_insn_and_split "*stack_protect_combined_test_insn"
8331 (eq (match_operand:SI 0 "memory_operand" "m,m")
8332 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8334 (label_ref (match_operand 2))
8336 (clobber (match_scratch:SI 3 "=&l,&r"))
8337 (clobber (match_scratch:SI 4 "=&l,&r"))
8338 (clobber (reg:CC CC_REGNUM))]
8348 /* Forces recomputing of GOT base now. */
8349 legitimize_pic_address (operands[1], SImode, operands[3], operands[4],
8350 true /*compute_now*/);
8354 if (address_operand (operands[1], SImode))
8355 operands[3] = operands[1];
8358 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8359 emit_move_insn (operands[3], mem);
8364 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8366 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8367 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8368 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8372 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8374 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8375 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8380 [(set_attr "arch" "t1,32")]
;; The canary test itself: load guard, load canary, EORS them so the
;; Z flag reports equality without leaving the guard value live.
8383 (define_insn "arm_stack_protect_test_insn"
8384 [(set (reg:CC_Z CC_REGNUM)
8385 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8386 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8389 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8390 (clobber (match_dup 2))]
8392 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8393 [(set_attr "length" "8,12")
8394 (set_attr "conds" "set")
8395 (set_attr "type" "multiple")
8396 (set_attr "arch" "t,32")]
;; Switch-statement dispatch: biases the index by the lower bound when
;; non-zero, then picks the ARM / Thumb-1 PIC / Thumb-2 (PIC or not)
;; internal casesi pattern and emits the jump.
8399 (define_expand "casesi"
8400 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8401 (match_operand:SI 1 "const_int_operand") ; lower bound
8402 (match_operand:SI 2 "const_int_operand") ; total range
8403 (match_operand:SI 3 "" "") ; table label
8404 (match_operand:SI 4 "" "")] ; Out of range label
8405 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8408 enum insn_code code;
8409 if (operands[1] != const0_rtx)
8411 rtx reg = gen_reg_rtx (SImode);
8413 emit_insn (gen_addsi3 (reg, operands[0],
8414 gen_int_mode (-INTVAL (operands[1]),
8420 code = CODE_FOR_arm_casesi_internal;
8421 else if (TARGET_THUMB1)
8422 code = CODE_FOR_thumb1_casesi_internal_pic;
8424 code = CODE_FOR_thumb2_casesi_internal_pic;
8426 code = CODE_FOR_thumb2_casesi_internal;
8428 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8429 operands[2] = force_reg (SImode, operands[2]);
8431 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8432 operands[3], operands[4]));
8437 ;; The USE in this pattern is needed to tell flow analysis that this is
8438 ;; a CASESI insn. It has no other purpose.
;; The preparation code builds the table access as a read-only,
;; non-trapping MEM at (index * 4 + table label).
8439 (define_expand "arm_casesi_internal"
8440 [(parallel [(set (pc)
8442 (leu (match_operand:SI 0 "s_register_operand")
8443 (match_operand:SI 1 "arm_rhs_operand"))
8445 (label_ref:SI (match_operand 3 ""))))
8446 (clobber (reg:CC CC_REGNUM))
8447 (use (label_ref:SI (match_operand 2 "")))])]
8450 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8451 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8452 gen_rtx_LABEL_REF (SImode, operands[2]));
8453 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8454 MEM_READONLY_P (operands[4]) = 1;
8455 MEM_NOTRAP_P (operands[4]) = 1;
;; 12-byte dispatch: CMP against the range, then either ADDLS into PC
;; (branch-offset table) or LDRLS PC from the table, followed by the
;; out-of-range branch.
8458 (define_insn "*arm_casesi_internal"
8459 [(parallel [(set (pc)
8461 (leu (match_operand:SI 0 "s_register_operand" "r")
8462 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8463 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8464 (label_ref:SI (match_operand 2 "" ""))))
8465 (label_ref:SI (match_operand 3 "" ""))))
8466 (clobber (reg:CC CC_REGNUM))
8467 (use (label_ref:SI (match_dup 2)))])]
8471 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8472 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8474 [(set_attr "conds" "clob")
8475 (set_attr "length" "12")
8476 (set_attr "type" "multiple")]
;; Computed goto / indirect jump through a register; on Thumb-2 the
;; visible code ORs in the low bit so BX stays in Thumb state.
8479 (define_expand "indirect_jump"
8481 (match_operand:SI 0 "s_register_operand"))]
8484 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8485 address and use bx. */
8489 tmp = gen_reg_rtx (SImode);
8490 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8496 ;; NB Never uses BX.
8497 (define_insn "*arm_indirect_jump"
8499 (match_operand:SI 0 "s_register_operand" "r"))]
8501 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8502 [(set_attr "predicable" "yes")
8503 (set_attr "type" "branch")]
;; Indirect jump loading the target straight from memory into PC.
8506 (define_insn "*load_indirect_jump"
8508 (match_operand:SI 0 "memory_operand" "m"))]
8510 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8511 [(set_attr "type" "load_4")
8512 (set_attr "pool_range" "4096")
8513 (set_attr "neg_pool_range" "4084")
8514 (set_attr "predicable" "yes")]
8524 [(set (attr "length")
8525 (if_then_else (eq_attr "is_thumb" "yes")
8528 (set_attr "type" "mov_reg")]
8532 [(trap_if (const_int 1) (const_int 0))]
8536 return \".inst\\t0xe7f000f0\";
8538 return \".inst\\t0xdeff\";
8540 [(set (attr "length")
8541 (if_then_else (eq_attr "is_thumb" "yes")
8544 (set_attr "type" "trap")
8545 (set_attr "conds" "unconditional")]
8549 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; Arithmetic op combined with a power-of-two multiply, emitted as an
;; LSL-by-%b3 variant of <arith_shift_insn> (ARM / Thumb-2
;; alternatives).
8551 (define_insn "*<arith_shift_insn>_multsi"
8552 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8554 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8555 (match_operand:SI 3 "power_of_two_operand" ""))
8556 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8558 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8559 [(set_attr "predicable" "yes")
8560 (set_attr "shift" "2")
8561 (set_attr "arch" "a,t2")
8562 (set_attr "type" "alu_shift_imm")])
;; Arithmetic op with a shifted operand (immediate or register shift
;; amount); MULT is excluded because the pattern above handles it.
8564 (define_insn "*<arith_shift_insn>_shiftsi"
8565 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8567 (match_operator:SI 2 "shift_nomul_operator"
8568 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8569 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8570 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8571 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8572 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8573 [(set_attr "predicable" "yes")
8574 (set_attr "shift" "3")
8575 (set_attr "arch" "a,t2,a")
8576 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8579 [(set (match_operand:SI 0 "s_register_operand" "")
8580 (match_operator:SI 1 "shiftable_operator"
8581 [(match_operator:SI 2 "shiftable_operator"
8582 [(match_operator:SI 3 "shift_operator"
8583 [(match_operand:SI 4 "s_register_operand" "")
8584 (match_operand:SI 5 "reg_or_int_operand" "")])
8585 (match_operand:SI 6 "s_register_operand" "")])
8586 (match_operand:SI 7 "arm_rhs_operand" "")]))
8587 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8590 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8593 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Arith-with-shift that also sets the condition codes; the result is
;; kept in operand 0 (compare-and-use form).
8596 (define_insn "*arith_shiftsi_compare0"
8597 [(set (reg:CC_NOOV CC_REGNUM)
8599 (match_operator:SI 1 "shiftable_operator"
8600 [(match_operator:SI 3 "shift_operator"
8601 [(match_operand:SI 4 "s_register_operand" "r,r")
8602 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8603 (match_operand:SI 2 "s_register_operand" "r,r")])
8605 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8606 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8609 "%i1s%?\\t%0, %2, %4%S3"
8610 [(set_attr "conds" "set")
8611 (set_attr "shift" "4")
8612 (set_attr "arch" "32,a")
8613 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As above but the arithmetic result is discarded (scratch operand 0);
;; only the flags matter.
8615 (define_insn "*arith_shiftsi_compare0_scratch"
8616 [(set (reg:CC_NOOV CC_REGNUM)
8618 (match_operator:SI 1 "shiftable_operator"
8619 [(match_operator:SI 3 "shift_operator"
8620 [(match_operand:SI 4 "s_register_operand" "r,r")
8621 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8622 (match_operand:SI 2 "s_register_operand" "r,r")])
8624 (clobber (match_scratch:SI 0 "=r,r"))]
8626 "%i1s%?\\t%0, %2, %4%S3"
8627 [(set_attr "conds" "set")
8628 (set_attr "shift" "4")
8629 (set_attr "arch" "32,a")
8630 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; SUB with a shifted second operand (shift amount immediate or
;; register).
8632 (define_insn "*sub_shiftsi"
8633 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8634 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8635 (match_operator:SI 2 "shift_operator"
8636 [(match_operand:SI 3 "s_register_operand" "r,r")
8637 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8639 "sub%?\\t%0, %1, %3%S2"
8640 [(set_attr "predicable" "yes")
8641 (set_attr "predicable_short_it" "no")
8642 (set_attr "shift" "3")
8643 (set_attr "arch" "32,a")
8644 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; SUBS variant: same subtraction but also sets the condition codes,
;; keeping the result in operand 0.
8646 (define_insn "*sub_shiftsi_compare0"
8647 [(set (reg:CC_NOOV CC_REGNUM)
8649 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8650 (match_operator:SI 2 "shift_operator"
8651 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8652 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8654 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8655 (minus:SI (match_dup 1)
8656 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8658 "subs%?\\t%0, %1, %3%S2"
8659 [(set_attr "conds" "set")
8660 (set_attr "shift" "3")
8661 (set_attr "arch" "32,a,a")
8662 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; SUBS variant where the result is discarded into a scratch; only
;; the flags are used.
8664 (define_insn "*sub_shiftsi_compare0_scratch"
8665 [(set (reg:CC_NOOV CC_REGNUM)
8667 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8668 (match_operator:SI 2 "shift_operator"
8669 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8670 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8672 (clobber (match_scratch:SI 0 "=r,r,r"))]
8674 "subs%?\\t%0, %1, %3%S2"
8675 [(set_attr "conds" "set")
8676 (set_attr "shift" "3")
8677 (set_attr "arch" "32,a,a")
8678 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; AND of a condition result with a register: splits after reload into
;; two conditionally-executed moves (zero on the inverse condition,
;; "AND #1" on the condition); operands 4/5 hold the condition and its
;; reverse, built in the split code below.
8681 (define_insn_and_split "*and_scc"
8682 [(set (match_operand:SI 0 "s_register_operand" "=r")
8683 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8684 [(match_operand 2 "cc_register" "") (const_int 0)])
8685 (match_operand:SI 3 "s_register_operand" "r")))]
8687 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8688 "&& reload_completed"
8689 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8690 (cond_exec (match_dup 4) (set (match_dup 0)
8691 (and:SI (match_dup 3) (const_int 1))))]
8693 machine_mode mode = GET_MODE (operands[2]);
8694 enum rtx_code rc = GET_CODE (operands[1]);
8696 /* Note that operands[4] is the same as operands[1],
8697 but with VOIDmode as the result. */
8698 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8699 if (mode == CCFPmode || mode == CCFPEmode)
8700 rc = reverse_condition_maybe_unordered (rc);
8702 rc = reverse_condition (rc);
8703 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8705 [(set_attr "conds" "use")
8706 (set_attr "type" "multiple")
8707 (set_attr "length" "8")]
;; IOR of a condition result with a register; only splits when the
;; destination differs from operand 3 (the tied-register alternative
;; is a single ORR).
8710 (define_insn_and_split "*ior_scc"
8711 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8712 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8713 [(match_operand 2 "cc_register" "") (const_int 0)])
8714 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8719 "&& reload_completed
8720 && REGNO (operands [0]) != REGNO (operands[3])"
8721 ;; && which_alternative == 1
8722 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8723 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8724 (cond_exec (match_dup 4) (set (match_dup 0)
8725 (ior:SI (match_dup 3) (const_int 1))))]
8727 machine_mode mode = GET_MODE (operands[2]);
8728 enum rtx_code rc = GET_CODE (operands[1]);
8730 /* Note that operands[4] is the same as operands[1],
8731 but with VOIDmode as the result. */
8732 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8733 if (mode == CCFPmode || mode == CCFPEmode)
8734 rc = reverse_condition_maybe_unordered (rc);
8736 rc = reverse_condition (rc);
8737 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8739 [(set_attr "conds" "use")
8740 (set_attr "length" "4,8")
8741 (set_attr "type" "logic_imm,multiple")]
8744 ; A series of splitters for the compare_scc pattern below. Note that
8745 ; order is important.
; LT against zero: just the sign bit, via LSR #31.
8747 [(set (match_operand:SI 0 "s_register_operand" "")
8748 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8750 (clobber (reg:CC CC_REGNUM))]
8751 "TARGET_32BIT && reload_completed"
8752 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
; GE against zero: complement, then take the sign bit.
8755 [(set (match_operand:SI 0 "s_register_operand" "")
8756 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8758 (clobber (reg:CC CC_REGNUM))]
8759 "TARGET_32BIT && reload_completed"
8760 [(set (match_dup 0) (not:SI (match_dup 1)))
8761 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
; EQ against zero on ARMv5T+: CLZ then shift right by 5 — 1 only when
; the input was zero (clz == 32).
8764 [(set (match_operand:SI 0 "s_register_operand" "")
8765 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8767 (clobber (reg:CC CC_REGNUM))]
8768 "arm_arch5t && TARGET_32BIT"
8769 [(set (match_dup 0) (clz:SI (match_dup 1)))
8770 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
; EQ against zero without CLZ: RSBS/conditional-zero sequence using
; the carry from comparing 1 with the operand.
8774 [(set (match_operand:SI 0 "s_register_operand" "")
8775 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8777 (clobber (reg:CC CC_REGNUM))]
8778 "TARGET_32BIT && reload_completed"
8780 [(set (reg:CC CC_REGNUM)
8781 (compare:CC (const_int 1) (match_dup 1)))
8783 (minus:SI (const_int 1) (match_dup 1)))])
8784 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8785 (set (match_dup 0) (const_int 0)))])
; NE against a constant: subtract the constant (operand 3 is its
; negation) and force the result to 1 when the flags say not-equal.
8788 [(set (match_operand:SI 0 "s_register_operand" "")
8789 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8790 (match_operand:SI 2 "const_int_operand" "")))
8791 (clobber (reg:CC CC_REGNUM))]
8792 "TARGET_32BIT && reload_completed"
8794 [(set (reg:CC CC_REGNUM)
8795 (compare:CC (match_dup 1) (match_dup 2)))
8796 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8797 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8798 (set (match_dup 0) (const_int 1)))]
8800 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
; NE against a general arm_add_operand: SUBS then conditional set-to-1.
8804 [(set (match_operand:SI 0 "s_register_operand" "")
8805 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8806 (match_operand:SI 2 "arm_add_operand" "")))
8807 (clobber (reg:CC CC_REGNUM))]
8808 "TARGET_32BIT && reload_completed"
8810 [(set (reg:CC_NOOV CC_REGNUM)
8811 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8813 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8814 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8815 (set (match_dup 0) (const_int 1)))])
;; Generic "store condition in a register": splits after reload into a
;; compare plus two conditional moves of 0/1. The split code picks the
;; CC mode with SELECT_CC_MODE and builds the condition (operand 5)
;; and its reverse (operand 4).
8817 (define_insn_and_split "*compare_scc"
8818 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8819 (match_operator:SI 1 "arm_comparison_operator"
8820 [(match_operand:SI 2 "s_register_operand" "r,r")
8821 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8822 (clobber (reg:CC CC_REGNUM))]
8825 "&& reload_completed"
8826 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8827 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8828 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8831 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8832 operands[2], operands[3]);
8833 enum rtx_code rc = GET_CODE (operands[1]);
8835 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8837 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8838 if (mode == CCFPmode || mode == CCFPEmode)
8839 rc = reverse_condition_maybe_unordered (rc);
8841 rc = reverse_condition (rc);
8842 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8844 [(set_attr "type" "multiple")]
8847 ;; Attempt to improve the sequence generated by the compare_scc splitters
8848 ;; not to use conditional execution.
8850 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
;; Peephole: the conditional 0/1 sequence becomes CLZ + LSR #5 when
;; CLZ is available and CC dies.
8854 [(set (reg:CC CC_REGNUM)
8855 (compare:CC (match_operand:SI 1 "register_operand" "")
8857 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8858 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8859 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8860 (set (match_dup 0) (const_int 1)))]
8861 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8862 [(set (match_dup 0) (clz:SI (match_dup 1)))
8863 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8866 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
;; Pre-CLZ form: RSBS into a scratch plus an add-with-carry (GEU),
;; avoiding conditional execution.
8870 [(set (reg:CC CC_REGNUM)
8871 (compare:CC (match_operand:SI 1 "register_operand" "")
8873 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8874 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8875 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8876 (set (match_dup 0) (const_int 1)))
8877 (match_scratch:SI 2 "r")]
8878 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8880 [(set (reg:CC CC_REGNUM)
8881 (compare:CC (const_int 0) (match_dup 1)))
8882 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8884 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8885 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8888 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8889 ;; sub Rd, Reg1, reg2
;; ARMv5 speed form: SUB, CLZ, LSR #5 (skipped when optimizing Thumb-2
;; for size).
8893 [(set (reg:CC CC_REGNUM)
8894 (compare:CC (match_operand:SI 1 "register_operand" "")
8895 (match_operand:SI 2 "arm_rhs_operand" "")))
8896 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8897 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8898 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8899 (set (match_dup 0) (const_int 1)))]
8900 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8901 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8902 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8903 (set (match_dup 0) (clz:SI (match_dup 0)))
8904 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8908 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8909 ;; sub T1, Reg1, reg2
;; General form: difference into a scratch (operand 4 built in the
;; preparation code), then the RSBS/ADC trick as above.
8913 [(set (reg:CC CC_REGNUM)
8914 (compare:CC (match_operand:SI 1 "register_operand" "")
8915 (match_operand:SI 2 "arm_rhs_operand" "")))
8916 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8917 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8918 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8919 (set (match_dup 0) (const_int 1)))
8920 (match_scratch:SI 3 "r")]
8921 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8922 [(set (match_dup 3) (match_dup 4))
8924 [(set (reg:CC CC_REGNUM)
8925 (compare:CC (const_int 0) (match_dup 3)))
8926 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8928 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8929 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8931 if (CONST_INT_P (operands[2]))
8932 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8934 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move selecting between operands 1 and 2, keyed on an
;; equality test (operator 3) of a prior comparison (operator 4) whose
;; result is already in a CC register (operand 5).  Emits one or two
;; conditional MOVs, skipping the move whose source is tied ("0") to
;; the destination in the matched alternative.
8937 (define_insn "*cond_move"
8938 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8939 (if_then_else:SI (match_operator 3 "equality_operator"
8940 [(match_operator 4 "arm_comparison_operator"
8941 [(match_operand 5 "cc_register" "") (const_int 0)])
8943 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8944 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8947 if (GET_CODE (operands[3]) == NE)
8949 if (which_alternative != 1)
8950 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8951 if (which_alternative != 0)
8952 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8955 if (which_alternative != 0)
8956 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8957 if (which_alternative != 1)
8958 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8961 [(set_attr "conds" "use")
8962 (set_attr_alternative "type"
8963 [(if_then_else (match_operand 2 "const_int_operand" "")
8964 (const_string "mov_imm")
8965 (const_string "mov_reg"))
8966 (if_then_else (match_operand 1 "const_int_operand" "")
8967 (const_string "mov_imm")
8968 (const_string "mov_reg"))
8969 (const_string "multiple")])
8970 (set_attr "length" "4,4,8")]
;; Apply a shiftable operator (operand 5) to operand 1 and the 0/1
;; result of a comparison.  LT-against-zero is special-cased into a
;; single instruction using the sign bit (lsr #31); otherwise emit a
;; cmp followed by conditional instructions.  Clobbers CC.
8973 (define_insn "*cond_arith"
8974 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8975 (match_operator:SI 5 "shiftable_operator"
8976 [(match_operator:SI 4 "arm_comparison_operator"
8977 [(match_operand:SI 2 "s_register_operand" "r,r")
8978 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8979 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8980 (clobber (reg:CC CC_REGNUM))]
8983 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8984 return \"%i5\\t%0, %1, %2, lsr #31\";
8986 output_asm_insn (\"cmp\\t%2, %3\", operands);
8987 if (GET_CODE (operands[5]) == AND)
8988 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8989 else if (GET_CODE (operands[5]) == MINUS)
8990 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8991 else if (which_alternative != 0)
8992 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8993 return \"%i5%d4\\t%0, %1, #1\";
8995 [(set_attr "conds" "clob")
8996 (set_attr "length" "12")
8997 (set_attr "type" "multiple")]
;; Subtract the 0/1 result of a comparison from operand 1:
;; cmp, (optional mov to tie the destination), then a conditional
;; "sub %0, %1, #1" executed only when the comparison holds.
9000 (define_insn "*cond_sub"
9001 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9002 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9003 (match_operator:SI 4 "arm_comparison_operator"
9004 [(match_operand:SI 2 "s_register_operand" "r,r")
9005 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9006 (clobber (reg:CC CC_REGNUM))]
9009 output_asm_insn (\"cmp\\t%2, %3\", operands);
9010 if (which_alternative != 0)
9011 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9012 return \"sub%d4\\t%0, %1, #1\";
9014 [(set_attr "conds" "clob")
9015 (set_attr "length" "8,12")
9016 (set_attr "type" "multiple")]
;; Combine two comparisons into one dominant CC result (operand 6)
;; using a compare followed by a conditional compare.  The cmp1/cmp2
;; tables select cmp vs. cmn per alternative (cmn when the immediate is
;; only representable negated, constraint L); [swap] picks operand
;; order when the second condition dominates the first.  For Thumb-2 an
;; IT instruction is emitted between the two compares.
9019 (define_insn "*cmp_ite0"
9020 [(set (match_operand 6 "dominant_cc_register" "")
9023 (match_operator 4 "arm_comparison_operator"
9024 [(match_operand:SI 0 "s_register_operand"
9025 "l,l,l,r,r,r,r,r,r")
9026 (match_operand:SI 1 "arm_add_operand"
9027 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9028 (match_operator:SI 5 "arm_comparison_operator"
9029 [(match_operand:SI 2 "s_register_operand"
9030 "l,r,r,l,l,r,r,r,r")
9031 (match_operand:SI 3 "arm_add_operand"
9032 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9038 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9040 {\"cmp%d5\\t%0, %1\",
9041 \"cmp%d4\\t%2, %3\"},
9042 {\"cmn%d5\\t%0, #%n1\",
9043 \"cmp%d4\\t%2, %3\"},
9044 {\"cmp%d5\\t%0, %1\",
9045 \"cmn%d4\\t%2, #%n3\"},
9046 {\"cmn%d5\\t%0, #%n1\",
9047 \"cmn%d4\\t%2, #%n3\"}
9049 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9054 \"cmn\\t%0, #%n1\"},
9055 {\"cmn\\t%2, #%n3\",
9057 {\"cmn\\t%2, #%n3\",
9060 static const char * const ite[2] =
9065 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9066 CMP_CMP, CMN_CMP, CMP_CMP,
9067 CMN_CMP, CMP_CMN, CMN_CMN};
9069 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9071 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9072 if (TARGET_THUMB2) {
9073 output_asm_insn (ite[swap], operands);
9075 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9078 [(set_attr "conds" "set")
9079 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9080 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9081 (set_attr "type" "multiple")
9082 (set_attr_alternative "length"
9088 (if_then_else (eq_attr "is_thumb" "no")
9091 (if_then_else (eq_attr "is_thumb" "no")
9094 (if_then_else (eq_attr "is_thumb" "no")
9097 (if_then_else (eq_attr "is_thumb" "no")
;; As *cmp_ite0, but the dominance test uses the REVERSED code of the
;; first comparison (operand 4), so the conditional second compare is
;; predicated on %D5 rather than %d5 in the cmp2 table.
9102 (define_insn "*cmp_ite1"
9103 [(set (match_operand 6 "dominant_cc_register" "")
9106 (match_operator 4 "arm_comparison_operator"
9107 [(match_operand:SI 0 "s_register_operand"
9108 "l,l,l,r,r,r,r,r,r")
9109 (match_operand:SI 1 "arm_add_operand"
9110 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9111 (match_operator:SI 5 "arm_comparison_operator"
9112 [(match_operand:SI 2 "s_register_operand"
9113 "l,r,r,l,l,r,r,r,r")
9114 (match_operand:SI 3 "arm_add_operand"
9115 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9121 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9125 {\"cmn\\t%0, #%n1\",
9128 \"cmn\\t%2, #%n3\"},
9129 {\"cmn\\t%0, #%n1\",
9132 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9134 {\"cmp%d4\\t%2, %3\",
9135 \"cmp%D5\\t%0, %1\"},
9136 {\"cmp%d4\\t%2, %3\",
9137 \"cmn%D5\\t%0, #%n1\"},
9138 {\"cmn%d4\\t%2, #%n3\",
9139 \"cmp%D5\\t%0, %1\"},
9140 {\"cmn%d4\\t%2, #%n3\",
9141 \"cmn%D5\\t%0, #%n1\"}
9143 static const char * const ite[2] =
9148 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9149 CMP_CMP, CMN_CMP, CMP_CMP,
9150 CMN_CMP, CMP_CMN, CMN_CMN};
9152 comparison_dominates_p (GET_CODE (operands[5]),
9153 reverse_condition (GET_CODE (operands[4])));
9155 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9156 if (TARGET_THUMB2) {
9157 output_asm_insn (ite[swap], operands);
9159 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9162 [(set_attr "conds" "set")
9163 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9164 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9165 (set_attr_alternative "length"
9171 (if_then_else (eq_attr "is_thumb" "no")
9174 (if_then_else (eq_attr "is_thumb" "no")
9177 (if_then_else (eq_attr "is_thumb" "no")
9180 (if_then_else (eq_attr "is_thumb" "no")
9183 (set_attr "type" "multiple")]
;; AND of two comparisons computed into a dominant CC register via
;; compare + conditional compare.  Ten alternatives: an extra pair
;; (register/register, "r") relative to *cmp_ite0, hence the unsized
;; cmp_idx[] table.  Structure otherwise parallels *cmp_ite0.
9186 (define_insn "*cmp_and"
9187 [(set (match_operand 6 "dominant_cc_register" "")
9190 (match_operator 4 "arm_comparison_operator"
9191 [(match_operand:SI 0 "s_register_operand"
9192 "l,l,l,r,r,r,r,r,r,r")
9193 (match_operand:SI 1 "arm_add_operand"
9194 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9195 (match_operator:SI 5 "arm_comparison_operator"
9196 [(match_operand:SI 2 "s_register_operand"
9197 "l,r,r,l,l,r,r,r,r,r")
9198 (match_operand:SI 3 "arm_add_operand"
9199 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9204 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9206 {\"cmp%d5\\t%0, %1\",
9207 \"cmp%d4\\t%2, %3\"},
9208 {\"cmn%d5\\t%0, #%n1\",
9209 \"cmp%d4\\t%2, %3\"},
9210 {\"cmp%d5\\t%0, %1\",
9211 \"cmn%d4\\t%2, #%n3\"},
9212 {\"cmn%d5\\t%0, #%n1\",
9213 \"cmn%d4\\t%2, #%n3\"}
9215 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9220 \"cmn\\t%0, #%n1\"},
9221 {\"cmn\\t%2, #%n3\",
9223 {\"cmn\\t%2, #%n3\",
9226 static const char *const ite[2] =
9231 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9232 CMP_CMP, CMN_CMP, CMP_CMP,
9233 CMP_CMP, CMN_CMP, CMP_CMN,
9236 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9238 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9239 if (TARGET_THUMB2) {
9240 output_asm_insn (ite[swap], operands);
9242 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9245 [(set_attr "conds" "set")
9246 (set_attr "predicable" "no")
9247 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9248 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9249 (set_attr_alternative "length"
9256 (if_then_else (eq_attr "is_thumb" "no")
9259 (if_then_else (eq_attr "is_thumb" "no")
9262 (if_then_else (eq_attr "is_thumb" "no")
9265 (if_then_else (eq_attr "is_thumb" "no")
9268 (set_attr "type" "multiple")]
;; IOR of two comparisons into a dominant CC register.  Mirrors
;; *cmp_and but the second (conditional) compare in cmp2 is predicated
;; on the INVERSE of the first condition (%D4/%D5), since the second
;; test only matters when the first one failed.
9271 (define_insn "*cmp_ior"
9272 [(set (match_operand 6 "dominant_cc_register" "")
9275 (match_operator 4 "arm_comparison_operator"
9276 [(match_operand:SI 0 "s_register_operand"
9277 "l,l,l,r,r,r,r,r,r,r")
9278 (match_operand:SI 1 "arm_add_operand"
9279 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9280 (match_operator:SI 5 "arm_comparison_operator"
9281 [(match_operand:SI 2 "s_register_operand"
9282 "l,r,r,l,l,r,r,r,r,r")
9283 (match_operand:SI 3 "arm_add_operand"
9284 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9289 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9293 {\"cmn\\t%0, #%n1\",
9296 \"cmn\\t%2, #%n3\"},
9297 {\"cmn\\t%0, #%n1\",
9300 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9302 {\"cmp%D4\\t%2, %3\",
9303 \"cmp%D5\\t%0, %1\"},
9304 {\"cmp%D4\\t%2, %3\",
9305 \"cmn%D5\\t%0, #%n1\"},
9306 {\"cmn%D4\\t%2, #%n3\",
9307 \"cmp%D5\\t%0, %1\"},
9308 {\"cmn%D4\\t%2, #%n3\",
9309 \"cmn%D5\\t%0, #%n1\"}
9311 static const char *const ite[2] =
9316 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9317 CMP_CMP, CMN_CMP, CMP_CMP,
9318 CMP_CMP, CMN_CMP, CMP_CMN,
9321 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9323 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9324 if (TARGET_THUMB2) {
9325 output_asm_insn (ite[swap], operands);
9327 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9331 [(set_attr "conds" "set")
9332 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9333 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9334 (set_attr_alternative "length"
9341 (if_then_else (eq_attr "is_thumb" "no")
9344 (if_then_else (eq_attr "is_thumb" "no")
9347 (if_then_else (eq_attr "is_thumb" "no")
9350 (if_then_else (eq_attr "is_thumb" "no")
9353 (set_attr "type" "multiple")]
;; OR of two store-condition (scc) values.  After reload this splits
;; into a combined compare in the dominance CC mode (operand 7, built
;; in the split's C body) followed by a single NE-based scc of that
;; result, avoiding materialising each comparison separately.
9356 (define_insn_and_split "*ior_scc_scc"
9357 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9358 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9359 [(match_operand:SI 1 "s_register_operand" "l,r")
9360 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9361 (match_operator:SI 6 "arm_comparison_operator"
9362 [(match_operand:SI 4 "s_register_operand" "l,r")
9363 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9364 (clobber (reg:CC CC_REGNUM))]
9366 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9369 "TARGET_32BIT && reload_completed"
9373 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9374 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9376 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9378 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9381 [(set_attr "conds" "clob")
9382 (set_attr "enabled_for_short_it" "yes,no")
9383 (set_attr "length" "16")
9384 (set_attr "type" "multiple")]
9387 ; If the above pattern is followed by a CMP insn, then the compare is
9388 ; redundant, since we can rework the conditional instruction that follows.
;; Same OR-of-scc combination as *ior_scc_scc, but matched when the
;; result is itself compared: the dominant CC register (operand 0) is
;; kept live so the following conditional insn can use it directly.
9389 (define_insn_and_split "*ior_scc_scc_cmp"
9390 [(set (match_operand 0 "dominant_cc_register" "")
9391 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9392 [(match_operand:SI 1 "s_register_operand" "l,r")
9393 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9394 (match_operator:SI 6 "arm_comparison_operator"
9395 [(match_operand:SI 4 "s_register_operand" "l,r")
9396 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9398 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9399 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9400 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9403 "TARGET_32BIT && reload_completed"
9407 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9408 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9410 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9412 [(set_attr "conds" "set")
9413 (set_attr "enabled_for_short_it" "yes,no")
9414 (set_attr "length" "16")
9415 (set_attr "type" "multiple")]
;; AND of two scc values; the DOM_CC_X_AND_Y counterpart of
;; *ior_scc_scc.  Splits after reload into one dominance-mode compare
;; (operand 7) plus a single NE scc of that CC result.
9418 (define_insn_and_split "*and_scc_scc"
9419 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9420 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9421 [(match_operand:SI 1 "s_register_operand" "l,r")
9422 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9423 (match_operator:SI 6 "arm_comparison_operator"
9424 [(match_operand:SI 4 "s_register_operand" "l,r")
9425 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9426 (clobber (reg:CC CC_REGNUM))]
9428 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9431 "TARGET_32BIT && reload_completed
9432 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9437 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9438 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9440 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9442 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9445 [(set_attr "conds" "clob")
9446 (set_attr "enabled_for_short_it" "yes,no")
9447 (set_attr "length" "16")
9448 (set_attr "type" "multiple")]
9451 ; If the above pattern is followed by a CMP insn, then the compare is
9452 ; redundant, since we can rework the conditional instruction that follows.
;; AND-of-scc variant that also leaves the dominant CC register
;; (operand 0) set, analogous to *ior_scc_scc_cmp for the IOR case.
9453 (define_insn_and_split "*and_scc_scc_cmp"
9454 [(set (match_operand 0 "dominant_cc_register" "")
9455 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9456 [(match_operand:SI 1 "s_register_operand" "l,r")
9457 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9458 (match_operator:SI 6 "arm_comparison_operator"
9459 [(match_operand:SI 4 "s_register_operand" "l,r")
9460 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9462 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9463 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9464 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9467 "TARGET_32BIT && reload_completed"
9471 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9472 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9474 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9476 [(set_attr "conds" "set")
9477 (set_attr "enabled_for_short_it" "yes,no")
9478 (set_attr "length" "16")
9479 (set_attr "type" "multiple")]
9482 ;; If there is no dominance in the comparison, then we can still save an
9483 ;; instruction in the AND case, since we can know that the second compare
9484 ;; need only zero the value if false (if true, then the value is already
;; Splits into: scc of the first comparison into %0, a second compare
;; (operands 7/8, built in the C body with SELECT_CC_MODE), and a
;; conditional zeroing of %0 — three insns, length 20.  Destination is
;; earlyclobber ("&") since it is written before all inputs are read.
9486 (define_insn_and_split "*and_scc_scc_nodom"
9487 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9488 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9489 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9490 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9491 (match_operator:SI 6 "arm_comparison_operator"
9492 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9493 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9494 (clobber (reg:CC CC_REGNUM))]
9496 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9499 "TARGET_32BIT && reload_completed"
9500 [(parallel [(set (match_dup 0)
9501 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9502 (clobber (reg:CC CC_REGNUM))])
9503 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9505 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9508 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9509 operands[4], operands[5]),
9511 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9513 [(set_attr "conds" "clob")
9514 (set_attr "length" "20")
9515 (set_attr "type" "multiple")]
9519 [(set (reg:CC_NOOV CC_REGNUM)
9520 (compare:CC_NOOV (ior:SI
9521 (and:SI (match_operand:SI 0 "s_register_operand" "")
9523 (match_operator:SI 1 "arm_comparison_operator"
9524 [(match_operand:SI 2 "s_register_operand" "")
9525 (match_operand:SI 3 "arm_add_operand" "")]))
9527 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9530 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9532 (set (reg:CC_NOOV CC_REGNUM)
9533 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9538 [(set (reg:CC_NOOV CC_REGNUM)
9539 (compare:CC_NOOV (ior:SI
9540 (match_operator:SI 1 "arm_comparison_operator"
9541 [(match_operand:SI 2 "s_register_operand" "")
9542 (match_operand:SI 3 "arm_add_operand" "")])
9543 (and:SI (match_operand:SI 0 "s_register_operand" "")
9546 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9549 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9551 (set (reg:CC_NOOV CC_REGNUM)
9552 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9555 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Negated store-condition: %0 = -(comparison of %1 with %2), i.e. 0
;; or -1 (all ones).  Split after reload into one of three sequences:
;;  - LT against 0:  a single "mov %0, %1, asr #31" (sign replication);
;;  - NE:            subs/cmpsi2_addneg then a conditional mvn #0;
;;  - general case:  cmp, conditional mov #0 on the reversed condition,
;;                   then conditional mvn #0 on the condition itself.
9557 (define_insn_and_split "*negscc"
9558 [(set (match_operand:SI 0 "s_register_operand" "=r")
9559 (neg:SI (match_operator 3 "arm_comparison_operator"
9560 [(match_operand:SI 1 "s_register_operand" "r")
9561 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9562 (clobber (reg:CC CC_REGNUM))]
9565 "&& reload_completed"
9568 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9570 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9572 /* Emit mov\\t%0, %1, asr #31 */
9573 emit_insn (gen_rtx_SET (operands[0],
9574 gen_rtx_ASHIFTRT (SImode,
9579 else if (GET_CODE (operands[3]) == NE)
9581 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9582 if (CONST_INT_P (operands[2]))
9583 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9584 gen_int_mode (-INTVAL (operands[2]),
9587 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9589 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9593 gen_rtx_SET (operands[0],
9599 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9600 emit_insn (gen_rtx_SET (cc_reg,
9601 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9602 enum rtx_code rc = GET_CODE (operands[3]);
9604 rc = reverse_condition (rc);
9605 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9610 gen_rtx_SET (operands[0], const0_rtx)));
9611 rc = GET_CODE (operands[3]);
9612 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9617 gen_rtx_SET (operands[0],
9623 [(set_attr "conds" "clob")
9624 (set_attr "length" "12")
9625 (set_attr "type" "multiple")]
;; Conditional move where the condition tests (%3 + %4): split after
;; reload into an adds (CC_NOOV compare of the sum against 0) followed
;; by cond_exec moves.  The C body reverses the condition when %2 is
;; not already the destination so only one conditional move is needed
;; where possible, swapping operands 1/2 to match.
9628 (define_insn_and_split "movcond_addsi"
9629 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9631 (match_operator 5 "comparison_operator"
9632 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9633 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9635 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9636 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9637 (clobber (reg:CC CC_REGNUM))]
9640 "&& reload_completed"
9641 [(set (reg:CC_NOOV CC_REGNUM)
9643 (plus:SI (match_dup 3)
9646 (set (match_dup 0) (match_dup 1))
9647 (cond_exec (match_dup 6)
9648 (set (match_dup 0) (match_dup 2)))]
9651 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9652 operands[3], operands[4]);
9653 enum rtx_code rc = GET_CODE (operands[5]);
9654 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9655 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9656 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9657 rc = reverse_condition (rc);
9659 std::swap (operands[1], operands[2]);
9661 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9664 [(set_attr "conds" "clob")
9665 (set_attr "enabled_for_short_it" "no,yes,yes")
9666 (set_attr "type" "multiple")]
;; General conditional move: %0 = (%3 cmp %4) ? %1 : %2, clobbering CC.
;; LT/GE comparisons against zero with a register arm are special-cased
;; using and/bic/ands/bics with "asr #31"/"asr #32" sign masks to save
;; the explicit compare.  Otherwise: cmp (or cmn for negated-only
;; immediates), then up to two conditional MOVs.
9669 (define_insn "movcond"
9670 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9672 (match_operator 5 "arm_comparison_operator"
9673 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9674 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9675 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9676 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9677 (clobber (reg:CC CC_REGNUM))]
9680 if (GET_CODE (operands[5]) == LT
9681 && (operands[4] == const0_rtx))
9683 if (which_alternative != 1 && REG_P (operands[1]))
9685 if (operands[2] == const0_rtx)
9686 return \"and\\t%0, %1, %3, asr #31\";
9687 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9689 else if (which_alternative != 0 && REG_P (operands[2]))
9691 if (operands[1] == const0_rtx)
9692 return \"bic\\t%0, %2, %3, asr #31\";
9693 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9695 /* The only case that falls through to here is when both ops 1 & 2
9699 if (GET_CODE (operands[5]) == GE
9700 && (operands[4] == const0_rtx))
9702 if (which_alternative != 1 && REG_P (operands[1]))
9704 if (operands[2] == const0_rtx)
9705 return \"bic\\t%0, %1, %3, asr #31\";
9706 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9708 else if (which_alternative != 0 && REG_P (operands[2]))
9710 if (operands[1] == const0_rtx)
9711 return \"and\\t%0, %2, %3, asr #31\";
9712 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9714 /* The only case that falls through to here is when both ops 1 & 2
9717 if (CONST_INT_P (operands[4])
9718 && !const_ok_for_arm (INTVAL (operands[4])))
9719 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9721 output_asm_insn (\"cmp\\t%3, %4\", operands);
9722 if (which_alternative != 0)
9723 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9724 if (which_alternative != 1)
9725 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9728 [(set_attr "conds" "clob")
9729 (set_attr "length" "8,8,12")
9730 (set_attr "type" "multiple")]
9733 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; if-compare form: %0 = (%4 cmp %5) ? (%2 + %3) : %1, with the
;; comparison still to be emitted (CC clobbered).  Output template is
;; elided here; lengths 8/12 match one-vs-two conditional insns after
;; the compare.
9735 (define_insn "*ifcompare_plus_move"
9736 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9737 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9738 [(match_operand:SI 4 "s_register_operand" "r,r")
9739 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9741 (match_operand:SI 2 "s_register_operand" "r,r")
9742 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9743 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9744 (clobber (reg:CC CC_REGNUM))]
9747 [(set_attr "conds" "clob")
9748 (set_attr "length" "8,12")
9749 (set_attr "type" "multiple")]
;; Same selection as *ifcompare_plus_move but the condition (operand 4)
;; is already in a CC register (operand 5): a conditional add%d4 /
;; sub%d4 (cheap negative immediates via #%n3), plus a mov%D4 for the
;; else-value when %1 is not tied to the destination.
9752 (define_insn "*if_plus_move"
9753 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9755 (match_operator 4 "arm_comparison_operator"
9756 [(match_operand 5 "cc_register" "") (const_int 0)])
9758 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9759 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9760 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9764 sub%d4\\t%0, %2, #%n3
9765 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9766 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9767 [(set_attr "conds" "use")
9768 (set_attr "length" "4,4,8,8")
9769 (set_attr_alternative "type"
9770 [(if_then_else (match_operand 3 "const_int_operand" "")
9771 (const_string "alu_imm" )
9772 (const_string "alu_sreg"))
9773 (const_string "alu_imm")
9774 (const_string "multiple")
9775 (const_string "multiple")])]
;; Mirror of *ifcompare_plus_move with the arms swapped: the addition
;; is the else-value.  Comparison not yet emitted; CC clobbered.
9778 (define_insn "*ifcompare_move_plus"
9779 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9780 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9781 [(match_operand:SI 4 "s_register_operand" "r,r")
9782 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9783 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9785 (match_operand:SI 2 "s_register_operand" "r,r")
9786 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9787 (clobber (reg:CC CC_REGNUM))]
9790 [(set_attr "conds" "clob")
9791 (set_attr "length" "8,12")
9792 (set_attr "type" "multiple")]
;; Mirror of *if_plus_move with arms swapped: the add/sub runs on the
;; INVERSE condition (%D4), the plain move on %d4.  Condition comes
;; from an existing CC register.
9795 (define_insn "*if_move_plus"
9796 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9798 (match_operator 4 "arm_comparison_operator"
9799 [(match_operand 5 "cc_register" "") (const_int 0)])
9800 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9802 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9803 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9807 sub%D4\\t%0, %2, #%n3
9808 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9809 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9810 [(set_attr "conds" "use")
9811 (set_attr "length" "4,4,8,8")
9812 (set_attr_alternative "type"
9813 [(if_then_else (match_operand 3 "const_int_operand" "")
9814 (const_string "alu_imm" )
9815 (const_string "alu_sreg"))
9816 (const_string "alu_imm")
9817 (const_string "multiple")
9818 (const_string "multiple")])]
;; Select between two shiftable-operator results based on a comparison
;; that still needs emitting: compare + two conditional ALU ops
;; (length 12).  CC clobbered; output template elided in this view.
9821 (define_insn "*ifcompare_arith_arith"
9822 [(set (match_operand:SI 0 "s_register_operand" "=r")
9823 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9824 [(match_operand:SI 5 "s_register_operand" "r")
9825 (match_operand:SI 6 "arm_add_operand" "rIL")])
9826 (match_operator:SI 8 "shiftable_operator"
9827 [(match_operand:SI 1 "s_register_operand" "r")
9828 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9829 (match_operator:SI 7 "shiftable_operator"
9830 [(match_operand:SI 3 "s_register_operand" "r")
9831 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9832 (clobber (reg:CC CC_REGNUM))]
9835 [(set_attr "conds" "clob")
9836 (set_attr "length" "12")
9837 (set_attr "type" "multiple")]
;; Same selection as *ifcompare_arith_arith but predicated on a CC
;; register already holding the comparison: two conditional ALU ops,
;; one on %d5 and one on the inverse %D5.
9840 (define_insn "*if_arith_arith"
9841 [(set (match_operand:SI 0 "s_register_operand" "=r")
9842 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9843 [(match_operand 8 "cc_register" "") (const_int 0)])
9844 (match_operator:SI 6 "shiftable_operator"
9845 [(match_operand:SI 1 "s_register_operand" "r")
9846 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9847 (match_operator:SI 7 "shiftable_operator"
9848 [(match_operand:SI 3 "s_register_operand" "r")
9849 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9851 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9852 [(set_attr "conds" "use")
9853 (set_attr "length" "8")
9854 (set_attr "type" "multiple")]
;; %0 = (%2 cmp %3) ? (%4 op %5) : %1 with the compare still to emit.
;; For LT/GE against zero with everything in registers (and op having
;; 0 as identity — hence the AND exclusion) it uses an and/bic sign
;; mask + ALU op, two insns.  Otherwise cmp/cmn then conditional ops.
9857 (define_insn "*ifcompare_arith_move"
9858 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9859 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9860 [(match_operand:SI 2 "s_register_operand" "r,r")
9861 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9862 (match_operator:SI 7 "shiftable_operator"
9863 [(match_operand:SI 4 "s_register_operand" "r,r")
9864 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9865 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9866 (clobber (reg:CC CC_REGNUM))]
9869 /* If we have an operation where (op x 0) is the identity operation and
9870 the conditional operator is LT or GE and we are comparing against zero and
9871 everything is in registers then we can do this in two instructions. */
9872 if (operands[3] == const0_rtx
9873 && GET_CODE (operands[7]) != AND
9874 && REG_P (operands[5])
9875 && REG_P (operands[1])
9876 && REGNO (operands[1]) == REGNO (operands[4])
9877 && REGNO (operands[4]) != REGNO (operands[0]))
9879 if (GET_CODE (operands[6]) == LT)
9880 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9881 else if (GET_CODE (operands[6]) == GE)
9882 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9884 if (CONST_INT_P (operands[3])
9885 && !const_ok_for_arm (INTVAL (operands[3])))
9886 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9888 output_asm_insn (\"cmp\\t%2, %3\", operands);
9889 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9890 if (which_alternative != 0)
9891 return \"mov%D6\\t%0, %1\";
9894 [(set_attr "conds" "clob")
9895 (set_attr "length" "8,12")
9896 (set_attr "type" "multiple")]
;; CC-register form of *ifcompare_arith_move: conditional ALU op on
;; %d4, plus a mov%D4 for the else-value when not tied to %0.
9899 (define_insn "*if_arith_move"
9900 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9901 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9902 [(match_operand 6 "cc_register" "") (const_int 0)])
9903 (match_operator:SI 5 "shiftable_operator"
9904 [(match_operand:SI 2 "s_register_operand" "r,r")
9905 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9906 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9910 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9911 [(set_attr "conds" "use")
9912 (set_attr "length" "4,8")
9913 (set_attr_alternative "type"
9914 [(if_then_else (match_operand 3 "const_int_operand" "")
9915 (const_string "alu_shift_imm" )
9916 (const_string "alu_shift_reg"))
9917 (const_string "multiple")])]
;; Mirror of *ifcompare_arith_move with the arms swapped: the ALU
;; result is the else-value.  LT/GE-vs-zero sign-mask cases are
;; swapped accordingly (GE uses and, LT uses bic), and the conditional
;; ALU op runs on the inverse condition %D6.
9920 (define_insn "*ifcompare_move_arith"
9921 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9922 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9923 [(match_operand:SI 4 "s_register_operand" "r,r")
9924 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9925 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9926 (match_operator:SI 7 "shiftable_operator"
9927 [(match_operand:SI 2 "s_register_operand" "r,r")
9928 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9929 (clobber (reg:CC CC_REGNUM))]
9932 /* If we have an operation where (op x 0) is the identity operation and
9933 the conditional operator is LT or GE and we are comparing against zero and
9934 everything is in registers then we can do this in two instructions */
9935 if (operands[5] == const0_rtx
9936 && GET_CODE (operands[7]) != AND
9937 && REG_P (operands[3])
9938 && REG_P (operands[1])
9939 && REGNO (operands[1]) == REGNO (operands[2])
9940 && REGNO (operands[2]) != REGNO (operands[0]))
9942 if (GET_CODE (operands[6]) == GE)
9943 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9944 else if (GET_CODE (operands[6]) == LT)
9945 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9948 if (CONST_INT_P (operands[5])
9949 && !const_ok_for_arm (INTVAL (operands[5])))
9950 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9952 output_asm_insn (\"cmp\\t%4, %5\", operands);
9954 if (which_alternative != 0)
9955 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9956 return \"%I7%D6\\t%0, %2, %3\";
9958 [(set_attr "conds" "clob")
9959 (set_attr "length" "8,12")
9960 (set_attr "type" "multiple")]
;; CC-register mirror of *if_arith_move with arms swapped: the ALU op
;; runs on the inverse condition %D4, the plain move on %d4.
9963 (define_insn "*if_move_arith"
9964 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9966 (match_operator 4 "arm_comparison_operator"
9967 [(match_operand 6 "cc_register" "") (const_int 0)])
9968 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9969 (match_operator:SI 5 "shiftable_operator"
9970 [(match_operand:SI 2 "s_register_operand" "r,r")
9971 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9975 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9976 [(set_attr "conds" "use")
9977 (set_attr "length" "4,8")
9978 (set_attr_alternative "type"
9979 [(if_then_else (match_operand 3 "const_int_operand" "")
9980 (const_string "alu_shift_imm" )
9981 (const_string "alu_shift_reg"))
9982 (const_string "multiple")])]
;; %0 = (%3 cmp %4) ? %1 : ~%2 with the comparison still to emit; CC
;; clobbered.  Output template elided in this view; lengths 8/12 match
;; one-vs-two conditional insns after the compare.
9985 (define_insn "*ifcompare_move_not"
9986 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9988 (match_operator 5 "arm_comparison_operator"
9989 [(match_operand:SI 3 "s_register_operand" "r,r")
9990 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9991 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9993 (match_operand:SI 2 "s_register_operand" "r,r"))))
9994 (clobber (reg:CC CC_REGNUM))]
9997 [(set_attr "conds" "clob")
9998 (set_attr "length" "8,12")
9999 (set_attr "type" "multiple")]
;; Conditional NOT with the comparison already in a CC register
;; (operand 3): mvn%D4 of %2 when the condition fails, preceded by a
;; mov%d4/mvn%d4 of %1 when %1 is not tied to the destination.
;; NOTE: the insn previously carried two "type" set_attrs — a scalar
;; "mvn_reg" and the per-alternative "mvn_reg,multiple,multiple".
;; Duplicate attribute definitions on one insn are invalid; keep only
;; the per-alternative form, matching the sibling *if_not_move.
10002 (define_insn "*if_move_not"
10003 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10005 (match_operator 4 "arm_comparison_operator"
10006 [(match_operand 3 "cc_register" "") (const_int 0)])
10007 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10008 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10012 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10013 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10014 [(set_attr "conds" "use")
10016 (set_attr "length" "4,8,8")
10017 (set_attr "type" "mvn_reg,multiple,multiple")]
;; Mirror of *ifcompare_move_not with the arms swapped: ~%2 is the
;; then-value.  Comparison not yet emitted; CC clobbered.
10020 (define_insn "*ifcompare_not_move"
10021 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10023 (match_operator 5 "arm_comparison_operator"
10024 [(match_operand:SI 3 "s_register_operand" "r,r")
10025 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10027 (match_operand:SI 2 "s_register_operand" "r,r"))
10028 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10029 (clobber (reg:CC CC_REGNUM))]
10032 [(set_attr "conds" "clob")
10033 (set_attr "length" "8,12")
10034 (set_attr "type" "multiple")]
;; Mirror of *if_move_not with the arms swapped: when the CC condition
;; (operand 3, tested by operator 4) holds, %0 = ~%2 (mvn%d4); otherwise %0
;; receives operand 1 (alt 0: already in place, alt 1: mov, alt 2: mvn of
;; the inverted constant #%B1).  conds "use": reads flags set by an earlier
;; compare.  NOTE(review): the if_then_else wrapper, insn condition and
;; first output alternative are missing from this extract — comments
;; describe only the visible text.
10037 (define_insn "*if_not_move"
10038 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10040 (match_operator 4 "arm_comparison_operator"
10041 [(match_operand 3 "cc_register" "") (const_int 0)])
10042 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10043 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10047 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10048 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10049 [(set_attr "conds" "use")
10050 (set_attr "type" "mvn_reg,multiple,multiple")
10051 (set_attr "length" "4,8,8")]
10054 (define_insn "*ifcompare_shift_move"
10055 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10057 (match_operator 6 "arm_comparison_operator"
10058 [(match_operand:SI 4 "s_register_operand" "r,r")
10059 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10060 (match_operator:SI 7 "shift_operator"
10061 [(match_operand:SI 2 "s_register_operand" "r,r")
10062 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10063 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10064 (clobber (reg:CC CC_REGNUM))]
10067 [(set_attr "conds" "clob")
10068 (set_attr "length" "8,12")
10069 (set_attr "type" "multiple")]
10072 (define_insn "*if_shift_move"
10073 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10075 (match_operator 5 "arm_comparison_operator"
10076 [(match_operand 6 "cc_register" "") (const_int 0)])
10077 (match_operator:SI 4 "shift_operator"
10078 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10079 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10080 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10084 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10085 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10086 [(set_attr "conds" "use")
10087 (set_attr "shift" "2")
10088 (set_attr "length" "4,8,8")
10089 (set_attr_alternative "type"
10090 [(if_then_else (match_operand 3 "const_int_operand" "")
10091 (const_string "mov_shift" )
10092 (const_string "mov_shift_reg"))
10093 (const_string "multiple")
10094 (const_string "multiple")])]
10097 (define_insn "*ifcompare_move_shift"
10098 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10100 (match_operator 6 "arm_comparison_operator"
10101 [(match_operand:SI 4 "s_register_operand" "r,r")
10102 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10103 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10104 (match_operator:SI 7 "shift_operator"
10105 [(match_operand:SI 2 "s_register_operand" "r,r")
10106 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10107 (clobber (reg:CC CC_REGNUM))]
10110 [(set_attr "conds" "clob")
10111 (set_attr "length" "8,12")
10112 (set_attr "type" "multiple")]
10115 (define_insn "*if_move_shift"
10116 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10118 (match_operator 5 "arm_comparison_operator"
10119 [(match_operand 6 "cc_register" "") (const_int 0)])
10120 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10121 (match_operator:SI 4 "shift_operator"
10122 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10123 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10127 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10128 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10129 [(set_attr "conds" "use")
10130 (set_attr "shift" "2")
10131 (set_attr "length" "4,8,8")
10132 (set_attr_alternative "type"
10133 [(if_then_else (match_operand 3 "const_int_operand" "")
10134 (const_string "mov_shift" )
10135 (const_string "mov_shift_reg"))
10136 (const_string "multiple")
10137 (const_string "multiple")])]
10140 (define_insn "*ifcompare_shift_shift"
10141 [(set (match_operand:SI 0 "s_register_operand" "=r")
10143 (match_operator 7 "arm_comparison_operator"
10144 [(match_operand:SI 5 "s_register_operand" "r")
10145 (match_operand:SI 6 "arm_add_operand" "rIL")])
10146 (match_operator:SI 8 "shift_operator"
10147 [(match_operand:SI 1 "s_register_operand" "r")
10148 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10149 (match_operator:SI 9 "shift_operator"
10150 [(match_operand:SI 3 "s_register_operand" "r")
10151 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10152 (clobber (reg:CC CC_REGNUM))]
10155 [(set_attr "conds" "clob")
10156 (set_attr "length" "12")
10157 (set_attr "type" "multiple")]
;; Conditional select between two shifted registers: emits a predicated
;; mov pair — "mov%d5 %0, %1%S6" when the CC condition (operand 8, tested
;; by operator 5) holds, else "mov%D5 %0, %3%S7".  Length 8 = two 4-byte
;; insns.  The "type" attribute is mov_shift only when both shift amounts
;; (operands 2 and 4) are immediates, otherwise mov_shift_reg.
;; NOTE(review): the if_then_else wrapper and insn condition are missing
;; from this extract.
10160 (define_insn "*if_shift_shift"
10161 [(set (match_operand:SI 0 "s_register_operand" "=r")
10163 (match_operator 5 "arm_comparison_operator"
10164 [(match_operand 8 "cc_register" "") (const_int 0)])
10165 (match_operator:SI 6 "shift_operator"
10166 [(match_operand:SI 1 "s_register_operand" "r")
10167 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10168 (match_operator:SI 7 "shift_operator"
10169 [(match_operand:SI 3 "s_register_operand" "r")
10170 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10172 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10173 [(set_attr "conds" "use")
10174 (set_attr "shift" "1")
10175 (set_attr "length" "8")
10176 (set (attr "type") (if_then_else
10177 (and (match_operand 2 "const_int_operand" "")
10178 (match_operand 4 "const_int_operand" ""))
10179 (const_string "mov_shift")
10180 (const_string "mov_shift_reg")))]
10183 (define_insn "*ifcompare_not_arith"
10184 [(set (match_operand:SI 0 "s_register_operand" "=r")
10186 (match_operator 6 "arm_comparison_operator"
10187 [(match_operand:SI 4 "s_register_operand" "r")
10188 (match_operand:SI 5 "arm_add_operand" "rIL")])
10189 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10190 (match_operator:SI 7 "shiftable_operator"
10191 [(match_operand:SI 2 "s_register_operand" "r")
10192 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10193 (clobber (reg:CC CC_REGNUM))]
10196 [(set_attr "conds" "clob")
10197 (set_attr "length" "12")
10198 (set_attr "type" "multiple")]
;; Conditional choice between ~%1 and a shiftable ALU result: emits
;; "mvn%d5 %0, %1" for the true arm and the inverse-predicated ALU insn
;; "%I6%D5 %0, %2, %3" for the false arm (two insns, length 8).  conds
;; "use": consumes flags from an earlier compare via the CC register
;; (operand 4).  NOTE(review): the if_then_else wrapper and insn condition
;; are missing from this extract.
10201 (define_insn "*if_not_arith"
10202 [(set (match_operand:SI 0 "s_register_operand" "=r")
10204 (match_operator 5 "arm_comparison_operator"
10205 [(match_operand 4 "cc_register" "") (const_int 0)])
10206 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10207 (match_operator:SI 6 "shiftable_operator"
10208 [(match_operand:SI 2 "s_register_operand" "r")
10209 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10211 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10212 [(set_attr "conds" "use")
10213 (set_attr "type" "mvn_reg")
10214 (set_attr "length" "8")]
10217 (define_insn "*ifcompare_arith_not"
10218 [(set (match_operand:SI 0 "s_register_operand" "=r")
10220 (match_operator 6 "arm_comparison_operator"
10221 [(match_operand:SI 4 "s_register_operand" "r")
10222 (match_operand:SI 5 "arm_add_operand" "rIL")])
10223 (match_operator:SI 7 "shiftable_operator"
10224 [(match_operand:SI 2 "s_register_operand" "r")
10225 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10226 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10227 (clobber (reg:CC CC_REGNUM))]
10230 [(set_attr "conds" "clob")
10231 (set_attr "length" "12")
10232 (set_attr "type" "multiple")]
;; Mirror of *if_not_arith with the arms swapped: "mvn%D5 %0, %1" for the
;; false arm, then the predicated ALU insn "%I6%d5 %0, %2, %3" for the
;; true arm (two insns, length 8).  conds "use": consumes flags from the
;; CC register (operand 4).  NOTE(review): the if_then_else wrapper and
;; insn condition are missing from this extract.
10235 (define_insn "*if_arith_not"
10236 [(set (match_operand:SI 0 "s_register_operand" "=r")
10238 (match_operator 5 "arm_comparison_operator"
10239 [(match_operand 4 "cc_register" "") (const_int 0)])
10240 (match_operator:SI 6 "shiftable_operator"
10241 [(match_operand:SI 2 "s_register_operand" "r")
10242 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10243 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10245 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10246 [(set_attr "conds" "use")
10247 (set_attr "type" "multiple")
10248 (set_attr "length" "8")]
10251 (define_insn "*ifcompare_neg_move"
10252 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10254 (match_operator 5 "arm_comparison_operator"
10255 [(match_operand:SI 3 "s_register_operand" "r,r")
10256 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10257 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10258 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10259 (clobber (reg:CC CC_REGNUM))]
10262 [(set_attr "conds" "clob")
10263 (set_attr "length" "8,12")
10264 (set_attr "type" "multiple")]
;; Conditional negate: when the CC condition (operand 3, tested by
;; operator 4) holds, %0 = -%2; otherwise %0 keeps operand 1 (tied "0,0").
;; After reload this splits into a single cond_exec'd neg (length 4).
;; Alternative 0 ("l" regs, arch t2, enabled_for_short_it yes) targets the
;; Thumb-2 short-IT form; alternative 1 is the generic 32-bit form.
;; NOTE(review): the if_then_else wrapper, insn condition and output
;; template are missing from this extract.
10267 (define_insn_and_split "*if_neg_move"
10268 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10270 (match_operator 4 "arm_comparison_operator"
10271 [(match_operand 3 "cc_register" "") (const_int 0)])
10272 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10273 (match_operand:SI 1 "s_register_operand" "0,0")))]
10276 "&& reload_completed"
10277 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10278 (set (match_dup 0) (neg:SI (match_dup 2))))]
10280 [(set_attr "conds" "use")
10281 (set_attr "length" "4")
10282 (set_attr "arch" "t2,32")
10283 (set_attr "enabled_for_short_it" "yes,no")
10284 (set_attr "type" "logic_shift_imm")]
10287 (define_insn "*ifcompare_move_neg"
10288 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10290 (match_operator 5 "arm_comparison_operator"
10291 [(match_operand:SI 3 "s_register_operand" "r,r")
10292 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10293 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10294 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10295 (clobber (reg:CC CC_REGNUM))]
10298 [(set_attr "conds" "clob")
10299 (set_attr "length" "8,12")
10300 (set_attr "type" "multiple")]
10303 (define_insn_and_split "*if_move_neg"
10304 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10306 (match_operator 4 "arm_comparison_operator"
10307 [(match_operand 3 "cc_register" "") (const_int 0)])
10308 (match_operand:SI 1 "s_register_operand" "0,0")
10309 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10312 "&& reload_completed"
10313 [(cond_exec (match_dup 5)
10314 (set (match_dup 0) (neg:SI (match_dup 2))))]
10316 machine_mode mode = GET_MODE (operands[3]);
10317 rtx_code rc = GET_CODE (operands[4]);
10319 if (mode == CCFPmode || mode == CCFPEmode)
10320 rc = reverse_condition_maybe_unordered (rc);
10322 rc = reverse_condition (rc);
10324 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10326 [(set_attr "conds" "use")
10327 (set_attr "length" "4")
10328 (set_attr "arch" "t2,32")
10329 (set_attr "enabled_for_short_it" "yes,no")
10330 (set_attr "type" "logic_shift_imm")]
10333 (define_insn "*arith_adjacentmem"
10334 [(set (match_operand:SI 0 "s_register_operand" "=r")
10335 (match_operator:SI 1 "shiftable_operator"
10336 [(match_operand:SI 2 "memory_operand" "m")
10337 (match_operand:SI 3 "memory_operand" "m")]))
10338 (clobber (match_scratch:SI 4 "=r"))]
10339 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10345 HOST_WIDE_INT val1 = 0, val2 = 0;
10347 if (REGNO (operands[0]) > REGNO (operands[4]))
10349 ldm[1] = operands[4];
10350 ldm[2] = operands[0];
10354 ldm[1] = operands[0];
10355 ldm[2] = operands[4];
10358 base_reg = XEXP (operands[2], 0);
10360 if (!REG_P (base_reg))
10362 val1 = INTVAL (XEXP (base_reg, 1));
10363 base_reg = XEXP (base_reg, 0);
10366 if (!REG_P (XEXP (operands[3], 0)))
10367 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10369 arith[0] = operands[0];
10370 arith[3] = operands[1];
10384 if (val1 !=0 && val2 != 0)
10388 if (val1 == 4 || val2 == 4)
10389 /* Other val must be 8, since we know they are adjacent and neither
10391 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10392 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10394 ldm[0] = ops[0] = operands[4];
10396 ops[2] = GEN_INT (val1);
10397 output_add_immediate (ops);
10399 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10401 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10405 /* Offset is out of range for a single add, so use two ldr. */
10408 ops[2] = GEN_INT (val1);
10409 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10411 ops[2] = GEN_INT (val2);
10412 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10415 else if (val1 != 0)
10418 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10420 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10425 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10427 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10429 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10432 [(set_attr "length" "12")
10433 (set_attr "predicable" "yes")
10434 (set_attr "type" "load_4")]
10437 ; This pattern is never tried by combine, so do it as a peephole
10440 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10441 (match_operand:SI 1 "arm_general_register_operand" ""))
10442 (set (reg:CC CC_REGNUM)
10443 (compare:CC (match_dup 1) (const_int 0)))]
10445 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10446 (set (match_dup 0) (match_dup 1))])]
10451 [(set (match_operand:SI 0 "s_register_operand" "")
10452 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10454 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10455 [(match_operand:SI 3 "s_register_operand" "")
10456 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10457 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10459 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10460 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10465 ;; This split can be used because CC_Z mode implies that the following
10466 ;; branch will be an equality, or an unsigned inequality, so the sign
10467 ;; extension is not needed.
10470 [(set (reg:CC_Z CC_REGNUM)
10472 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10474 (match_operand 1 "const_int_operand" "")))
10475 (clobber (match_scratch:SI 2 ""))]
10477 && ((UINTVAL (operands[1]))
10478 == ((UINTVAL (operands[1])) >> 24) << 24)"
10479 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10480 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10482 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10485 ;; ??? Check the patterns above for Thumb-2 usefulness
10487 (define_expand "prologue"
10488 [(clobber (const_int 0))]
10491 arm_expand_prologue ();
10493 thumb1_expand_prologue ();
10498 (define_expand "epilogue"
10499 [(clobber (const_int 0))]
10502 if (crtl->calls_eh_return)
10503 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10506 thumb1_expand_epilogue ();
10507 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10508 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10510 else if (HAVE_return)
10512 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10513 no need for explicit testing again. */
10514 emit_jump_insn (gen_return ());
10516 else if (TARGET_32BIT)
10518 arm_expand_epilogue (true);
10524 ;; Note - although unspec_volatile's USE all hard registers,
10525 ;; USEs are ignored after reload has completed. Thus we need
10526 ;; to add an unspec of the link register to ensure that flow
10527 ;; does not think that it is unused by the sibcall branch that
10528 ;; will replace the standard function epilogue.
10529 (define_expand "sibcall_epilogue"
10530 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10531 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10534 arm_expand_epilogue (false);
10539 (define_expand "eh_epilogue"
10540 [(use (match_operand:SI 0 "register_operand"))
10541 (use (match_operand:SI 1 "register_operand"))
10542 (use (match_operand:SI 2 "register_operand"))]
10546 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10547 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10549 rtx ra = gen_rtx_REG (Pmode, 2);
10551 emit_move_insn (ra, operands[2]);
10554 /* This is a hack -- we may have crystalized the function type too
10556 cfun->machine->func_type = 0;
10560 ;; This split is only used during output to reduce the number of patterns
10561 ;; that need assembler instructions adding to them. We allowed the setting
10562 ;; of the conditions to be implicit during rtl generation so that
10563 ;; the conditional compare patterns would work. However this conflicts to
10564 ;; some extent with the conditional data operations, so we have to split them
10567 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10568 ;; conditional execution sufficient?
10571 [(set (match_operand:SI 0 "s_register_operand" "")
10572 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10573 [(match_operand 2 "" "") (match_operand 3 "" "")])
10575 (match_operand 4 "" "")))
10576 (clobber (reg:CC CC_REGNUM))]
10577 "TARGET_ARM && reload_completed"
10578 [(set (match_dup 5) (match_dup 6))
10579 (cond_exec (match_dup 7)
10580 (set (match_dup 0) (match_dup 4)))]
10583 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10584 operands[2], operands[3]);
10585 enum rtx_code rc = GET_CODE (operands[1]);
10587 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10588 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10589 if (mode == CCFPmode || mode == CCFPEmode)
10590 rc = reverse_condition_maybe_unordered (rc);
10592 rc = reverse_condition (rc);
10594 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10599 [(set (match_operand:SI 0 "s_register_operand" "")
10600 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10601 [(match_operand 2 "" "") (match_operand 3 "" "")])
10602 (match_operand 4 "" "")
10604 (clobber (reg:CC CC_REGNUM))]
10605 "TARGET_ARM && reload_completed"
10606 [(set (match_dup 5) (match_dup 6))
10607 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10608 (set (match_dup 0) (match_dup 4)))]
10611 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10612 operands[2], operands[3]);
10614 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10615 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10620 [(set (match_operand:SI 0 "s_register_operand" "")
10621 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10622 [(match_operand 2 "" "") (match_operand 3 "" "")])
10623 (match_operand 4 "" "")
10624 (match_operand 5 "" "")))
10625 (clobber (reg:CC CC_REGNUM))]
10626 "TARGET_ARM && reload_completed"
10627 [(set (match_dup 6) (match_dup 7))
10628 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10629 (set (match_dup 0) (match_dup 4)))
10630 (cond_exec (match_dup 8)
10631 (set (match_dup 0) (match_dup 5)))]
10634 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10635 operands[2], operands[3]);
10636 enum rtx_code rc = GET_CODE (operands[1]);
10638 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10639 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10640 if (mode == CCFPmode || mode == CCFPEmode)
10641 rc = reverse_condition_maybe_unordered (rc);
10643 rc = reverse_condition (rc);
10645 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10650 [(set (match_operand:SI 0 "s_register_operand" "")
10651 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10652 [(match_operand:SI 2 "s_register_operand" "")
10653 (match_operand:SI 3 "arm_add_operand" "")])
10654 (match_operand:SI 4 "arm_rhs_operand" "")
10656 (match_operand:SI 5 "s_register_operand" ""))))
10657 (clobber (reg:CC CC_REGNUM))]
10658 "TARGET_ARM && reload_completed"
10659 [(set (match_dup 6) (match_dup 7))
10660 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10661 (set (match_dup 0) (match_dup 4)))
10662 (cond_exec (match_dup 8)
10663 (set (match_dup 0) (not:SI (match_dup 5))))]
10666 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10667 operands[2], operands[3]);
10668 enum rtx_code rc = GET_CODE (operands[1]);
10670 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10671 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10672 if (mode == CCFPmode || mode == CCFPEmode)
10673 rc = reverse_condition_maybe_unordered (rc);
10675 rc = reverse_condition (rc);
10677 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional move-or-complement using an existing CC result: when the
;; condition (operand 3, tested by operator 4) is false, %0 = ~%2; the
;; visible second alternative first does "mov%d4 %0, %1" then
;; "mvn%D4 %0, %2" (lengths 4 and 8 respectively).  conds "use".
;; NOTE(review): the if_then_else wrapper, insn condition and the first
;; output alternative are missing from this extract.
10681 (define_insn "*cond_move_not"
10682 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10683 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10684 [(match_operand 3 "cc_register" "") (const_int 0)])
10685 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10687 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10691 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10692 [(set_attr "conds" "use")
10693 (set_attr "type" "mvn_reg,multiple")
10694 (set_attr "length" "4,8")]
10697 ;; The next two patterns occur when an AND operation is followed by a
10698 ;; scc insn sequence
10700 (define_insn "*sign_extract_onebit"
10701 [(set (match_operand:SI 0 "s_register_operand" "=r")
10702 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10704 (match_operand:SI 2 "const_int_operand" "n")))
10705 (clobber (reg:CC CC_REGNUM))]
10708 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10709 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10710 return \"mvnne\\t%0, #0\";
10712 [(set_attr "conds" "clob")
10713 (set_attr "length" "8")
10714 (set_attr "type" "multiple")]
10717 (define_insn "*not_signextract_onebit"
10718 [(set (match_operand:SI 0 "s_register_operand" "=r")
10720 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10722 (match_operand:SI 2 "const_int_operand" "n"))))
10723 (clobber (reg:CC CC_REGNUM))]
10726 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10727 output_asm_insn (\"tst\\t%1, %2\", operands);
10728 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10729 return \"movne\\t%0, #0\";
10731 [(set_attr "conds" "clob")
10732 (set_attr "length" "12")
10733 (set_attr "type" "multiple")]
10735 ;; ??? The above patterns need auditing for Thumb-2
10737 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10738 ;; expressions. For simplicity, the first register is also in the unspec
10740 ;; To avoid the usage of GNU extension, the length attribute is computed
10741 ;; in a C function arm_attr_length_push_multi.
10742 (define_insn "*push_multi"
10743 [(match_parallel 2 "multi_register_push"
10744 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10745 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10746 UNSPEC_PUSH_MULT))])]
10750 int num_saves = XVECLEN (operands[2], 0);
10752 /* For the StrongARM at least it is faster to
10753 use STR to store only a single register.
10754 In Thumb mode always use push, and the assembler will pick
10755 something appropriate. */
10756 if (num_saves == 1 && TARGET_ARM)
10757 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10764 strcpy (pattern, \"push%?\\t{%1\");
10766 strcpy (pattern, \"push\\t{%1\");
10768 for (i = 1; i < num_saves; i++)
10770 strcat (pattern, \", %|\");
10772 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10775 strcat (pattern, \"}\");
10776 output_asm_insn (pattern, operands);
10781 [(set_attr "type" "store_16")
10782 (set (attr "length")
10783 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10786 (define_insn "stack_tie"
10787 [(set (mem:BLK (scratch))
10788 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10789 (match_operand:SI 1 "s_register_operand" "rk")]
10793 [(set_attr "length" "0")
10794 (set_attr "type" "block")]
10797 ;; Pop (as used in epilogue RTL)
10799 (define_insn "*load_multiple_with_writeback"
10800 [(match_parallel 0 "load_multiple_operation"
10801 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10802 (plus:SI (match_dup 1)
10803 (match_operand:SI 2 "const_int_I_operand" "I")))
10804 (set (match_operand:SI 3 "s_register_operand" "=rk")
10805 (mem:SI (match_dup 1)))
10807 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10810 arm_output_multireg_pop (operands, /*return_pc=*/false,
10811 /*cond=*/const_true_rtx,
10817 [(set_attr "type" "load_16")
10818 (set_attr "predicable" "yes")
10819 (set (attr "length")
10820 (symbol_ref "arm_attr_length_pop_multi (operands,
10821 /*return_pc=*/false,
10822 /*write_back_p=*/true)"))]
10825 ;; Pop with return (as used in epilogue RTL)
10827 ;; This instruction is generated when the registers are popped at the end of
10828 ;; epilogue. Here, instead of popping the value into LR and then generating
10829 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
10831 (define_insn "*pop_multiple_with_writeback_and_return"
10832 [(match_parallel 0 "pop_multiple_return"
10834 (set (match_operand:SI 1 "s_register_operand" "+rk")
10835 (plus:SI (match_dup 1)
10836 (match_operand:SI 2 "const_int_I_operand" "I")))
10837 (set (match_operand:SI 3 "s_register_operand" "=rk")
10838 (mem:SI (match_dup 1)))
10840 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10843 arm_output_multireg_pop (operands, /*return_pc=*/true,
10844 /*cond=*/const_true_rtx,
10850 [(set_attr "type" "load_16")
10851 (set_attr "predicable" "yes")
10852 (set (attr "length")
10853 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10854 /*write_back_p=*/true)"))]
10857 (define_insn "*pop_multiple_with_return"
10858 [(match_parallel 0 "pop_multiple_return"
10860 (set (match_operand:SI 2 "s_register_operand" "=rk")
10861 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10863 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10866 arm_output_multireg_pop (operands, /*return_pc=*/true,
10867 /*cond=*/const_true_rtx,
10873 [(set_attr "type" "load_16")
10874 (set_attr "predicable" "yes")
10875 (set (attr "length")
10876 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10877 /*write_back_p=*/false)"))]
10880 ;; Load into PC and return
;; Single-register pop straight into the PC: "ldr pc, [%0], #4" loads the
;; return address from the stack slot addressed by %0 with post-increment,
;; returning in one insn.  Only valid during/after reload
;; (reload_in_progress || reload_completed).
;; NOTE(review): one interior line of the pattern (presumably the (return)
;; element, per the epilogue comments above) is missing from this extract.
10881 (define_insn "*ldr_with_return"
10883 (set (reg:SI PC_REGNUM)
10884 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10885 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10886 "ldr%?\t%|pc, [%0], #4"
10887 [(set_attr "type" "load_4")
10888 (set_attr "predicable" "yes")]
10890 ;; Pop for floating point registers (as used in epilogue RTL)
10891 (define_insn "*vfp_pop_multiple_with_writeback"
10892 [(match_parallel 0 "pop_multiple_fp"
10893 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10894 (plus:SI (match_dup 1)
10895 (match_operand:SI 2 "const_int_I_operand" "I")))
10896 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10897 (mem:DF (match_dup 1)))])]
10898 "TARGET_32BIT && TARGET_HARD_FLOAT"
10901 int num_regs = XVECLEN (operands[0], 0);
10904 strcpy (pattern, \"vldm\\t\");
10905 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10906 strcat (pattern, \"!, {\");
10907 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10908 strcat (pattern, \"%P0\");
10909 if ((num_regs - 1) > 1)
10911 strcat (pattern, \"-%P1\");
10912 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10915 strcat (pattern, \"}\");
10916 output_asm_insn (pattern, op_list);
10920 [(set_attr "type" "load_16")
10921 (set_attr "conds" "unconditional")
10922 (set_attr "predicable" "no")]
10925 ;; Special patterns for dealing with the constant pool
10927 (define_insn "align_4"
10928 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10931 assemble_align (32);
10934 [(set_attr "type" "no_insn")]
10937 (define_insn "align_8"
10938 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10941 assemble_align (64);
10944 [(set_attr "type" "no_insn")]
10947 (define_insn "consttable_end"
10948 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10951 making_const_table = FALSE;
10954 [(set_attr "type" "no_insn")]
10957 (define_insn "consttable_1"
10958 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10961 making_const_table = TRUE;
10962 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10963 assemble_zeros (3);
10966 [(set_attr "length" "4")
10967 (set_attr "type" "no_insn")]
10970 (define_insn "consttable_2"
10971 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10975 rtx x = operands[0];
10976 making_const_table = TRUE;
10977 switch (GET_MODE_CLASS (GET_MODE (x)))
10980 arm_emit_fp16_const (x);
10983 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10984 assemble_zeros (2);
10989 [(set_attr "length" "4")
10990 (set_attr "type" "no_insn")]
10993 (define_insn "consttable_4"
10994 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10998 rtx x = operands[0];
10999 making_const_table = TRUE;
11000 scalar_float_mode float_mode;
11001 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11002 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11005 /* XXX: Sometimes gcc does something really dumb and ends up with
11006 a HIGH in a constant pool entry, usually because it's trying to
11007 load into a VFP register. We know this will always be used in
11008 combination with a LO_SUM which ignores the high bits, so just
11009 strip off the HIGH. */
11010 if (GET_CODE (x) == HIGH)
11012 assemble_integer (x, 4, BITS_PER_WORD, 1);
11013 mark_symbol_refs_as_used (x);
11017 [(set_attr "length" "4")
11018 (set_attr "type" "no_insn")]
11021 (define_insn "consttable_8"
11022 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11026 making_const_table = TRUE;
11027 scalar_float_mode float_mode;
11028 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11029 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11030 float_mode, BITS_PER_WORD);
11032 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11035 [(set_attr "length" "8")
11036 (set_attr "type" "no_insn")]
11039 (define_insn "consttable_16"
11040 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11044 making_const_table = TRUE;
11045 scalar_float_mode float_mode;
11046 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11047 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11048 float_mode, BITS_PER_WORD);
11050 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11053 [(set_attr "length" "16")
11054 (set_attr "type" "no_insn")]
11057 ;; V5 Instructions,
;; Count-leading-zeros, gated on 32-bit mode and arm_arch5t (per the
;; "V5 Instructions" section header above).  Predicable; scheduled with
;; insn type "clz".  NOTE(review): the output template line is missing
;; from this extract.
11059 (define_insn "clzsi2"
11060 [(set (match_operand:SI 0 "s_register_operand" "=r")
11061 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11062 "TARGET_32BIT && arm_arch5t"
11064 [(set_attr "predicable" "yes")
11065 (set_attr "type" "clz")])
;; Bit-reversal of a register, modelled as UNSPEC_RBIT (no RTL code for
;; bit-reverse exists); requires arm_arch_thumb2.  Shares the "clz"
;; scheduling type.  Used by the ctzsi2 split below.
;; NOTE(review): the output template line is missing from this extract.
11067 (define_insn "rbitsi2"
11068 [(set (match_operand:SI 0 "s_register_operand" "=r")
11069 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11070 "TARGET_32BIT && arm_arch_thumb2"
11072 [(set_attr "predicable" "yes")
11073 (set_attr "type" "clz")])
11075 ;; Keep this as a CTZ expression until after reload and then split
11076 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11077 ;; to fold with any other expression.
;; Count-trailing-zeros: kept as a CTZ rtx until after reload (see the
;; comment above), then split into rbitsi2 followed by clzsi2 on the same
;; destination register, since RBIT is an unspec and will not recombine.
;; NOTE(review): the pattern's closing lines are missing from this
;; extract.
11079 (define_insn_and_split "ctzsi2"
11080 [(set (match_operand:SI 0 "s_register_operand" "=r")
11081 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11082 "TARGET_32BIT && arm_arch_thumb2"
11084 "&& reload_completed"
11087 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11088 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11092 ;; V5E instructions.
;; Standard-named prefetch pattern (V5E section): operand 0 is the
;; address; operands 1 and 2 are the rw/locality hints from the generic
;; prefetch interface.  Gated on arm_arch5te.  Scheduled as a 4-byte load.
;; NOTE(review): the output template line is missing from this extract.
11094 (define_insn "prefetch"
11095 [(prefetch (match_operand:SI 0 "address_operand" "p")
11096 (match_operand:SI 1 "" "")
11097 (match_operand:SI 2 "" ""))]
11098 "TARGET_32BIT && arm_arch5te"
11100 [(set_attr "type" "load_4")]
11103 ;; General predication pattern
11106 [(match_operator 0 "arm_comparison_operator"
11107 [(match_operand 1 "cc_register" "")
11110 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11112 [(set_attr "predicated" "yes")]
;; Zero-length pseudo-insn that marks a register as used (via
;; UNSPEC_REGISTER_USE) so dataflow does not consider it dead — emitted,
;; e.g., by the eh_return expander above for r2.  Emits no code
;; (length 0, type no_insn).
11115 (define_insn "force_register_use"
11116 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11119 [(set_attr "length" "0")
11120 (set_attr "type" "no_insn")]
11124 ;; Patterns for exception handling
11126 (define_expand "eh_return"
11127 [(use (match_operand 0 "general_operand"))]
11132 emit_insn (gen_arm_eh_return (operands[0]));
11134 emit_insn (gen_thumb_eh_return (operands[0]));
11139 ;; We can't expand this before we know where the link register is stored.
11140 (define_insn_and_split "arm_eh_return"
;; Store the EH stack adjustment into the saved return-address slot.
;; Cannot be expanded earlier because the location of the saved link
;; register is only known after the frame is laid out, hence the
;; post-reload split calling arm_set_return_address with a scratch.
11141 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11143 (clobber (match_scratch:SI 1 "=&r"))]
11146 "&& reload_completed"
11150 arm_set_return_address (operands[0], operands[1]);
11158 (define_insn "load_tp_hard"
;; Hardware TLS: read the thread pointer from CP15 c13, c0, 3
;; (the user read-only thread ID register) via MRC.
11159 [(set (match_operand:SI 0 "register_operand" "=r")
11160 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11162 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11163 [(set_attr "predicable" "yes")
11164 (set_attr "type" "mrs")]
11167 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11168 (define_insn "load_tp_soft"
;; Software TLS: call the __aeabi_read_tp helper, which returns the
;; thread pointer in r0.  The helper preserves r1-r3 (see comment
;; above), so only LR, IP and the condition codes are clobbered.
11169 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11170 (clobber (reg:SI LR_REGNUM))
11171 (clobber (reg:SI IP_REGNUM))
11172 (clobber (reg:CC CC_REGNUM))]
11174 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11175 [(set_attr "conds" "clob")
11176 (set_attr "type" "branch")]
11179 ;; tls descriptor call
11180 (define_insn "tlscall"
;; TLS descriptor call: r0 carries the descriptor in and the result
;; out; operand 1 numbers the local PIC label emitted before the call.
11181 [(set (reg:SI R0_REGNUM)
11182 (unspec:SI [(reg:SI R0_REGNUM)
11183 (match_operand:SI 0 "" "X")
11184 (match_operand 1 "" "")] UNSPEC_TLS))
11185 (clobber (reg:SI R1_REGNUM))
11186 (clobber (reg:SI LR_REGNUM))
11187 (clobber (reg:SI CC_REGNUM))]
;; Emit the LPIC<n> label, then the relocatable call.
11190 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11191 INTVAL (operands[1]));
11192 return "bl\\t%c0(tlscall)";
11194 [(set_attr "conds" "clob")
11195 (set_attr "length" "4")
11196 (set_attr "type" "branch")]
11199 ;; For thread pointer builtin
11200 (define_expand "get_thread_pointersi"
;; __builtin_thread_pointer: delegate to arm_load_tp, which chooses
;; between the hard (MRC) and soft (helper call) sequences.
11201 [(match_operand:SI 0 "s_register_operand")]
11205 arm_load_tp (operands[0]);
11211 ;; We only care about the lower 16 bits of the constant
11212 ;; being inserted into the upper 16 bits of the register.
11213 (define_insn "*arm_movtas_ze"
;; Insert a 16-bit constant into the upper half of a register (MOVT
;; style); only the low 16 bits of the constant matter, per the
;; comment above.  Alternatives cover 32-bit and ARMv8-M baseline.
11214 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11217 (match_operand:SI 1 "const_int_operand" ""))]
11222 [(set_attr "arch" "32,v8mb")
11223 (set_attr "predicable" "yes")
11224 (set_attr "length" "4")
11225 (set_attr "type" "alu_sreg")]
11228 (define_insn "*arm_rev"
;; 32-bit byte swap.  Alternatives: Thumb-1, Thumb-2 and 32-bit ARM
;; encodings (2, 2 and 4 bytes respectively); only the latter two are
;; predicable.  Presumably emits REV — template not visible here.
11229 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11230 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11236 [(set_attr "arch" "t1,t2,32")
11237 (set_attr "length" "2,2,4")
11238 (set_attr "predicable" "no,yes,yes")
11239 (set_attr "type" "rev")]
11242 (define_expand "arm_legacy_rev"
;; Byte-swap for ARM targets without the REV instruction, built from
;; XOR/rotate/shift/AND steps using temporaries 2 and 3 (some
;; intermediate pattern lines are missing from this chunk).
11243 [(set (match_operand:SI 2 "s_register_operand")
11244 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11248 (lshiftrt:SI (match_dup 2)
11250 (set (match_operand:SI 3 "s_register_operand")
11251 (rotatert:SI (match_dup 1)
;; -65281 == 0xFFFF00FF: masks out the byte that must not move.
11254 (and:SI (match_dup 2)
11255 (const_int -65281)))
11256 (set (match_operand:SI 0 "s_register_operand")
11257 (xor:SI (match_dup 3)
11263 ;; Reuse temporaries to keep register pressure down.
11264 (define_expand "thumb_legacy_rev"
;; Byte-swap for Thumb targets without REV.  Uses shift/rotate/IOR
;; steps with temporaries 2-5, reusing them to limit register
;; pressure (see comment above).  Several pattern lines are missing
;; from this chunk.
11265 [(set (match_operand:SI 2 "s_register_operand")
11266 (ashift:SI (match_operand:SI 1 "s_register_operand")
11268 (set (match_operand:SI 3 "s_register_operand")
11269 (lshiftrt:SI (match_dup 1)
11272 (ior:SI (match_dup 3)
11274 (set (match_operand:SI 4 "s_register_operand")
11276 (set (match_operand:SI 5 "s_register_operand")
11277 (rotatert:SI (match_dup 1)
11280 (ashift:SI (match_dup 5)
11283 (lshiftrt:SI (match_dup 5)
11286 (ior:SI (match_dup 5)
11289 (rotatert:SI (match_dup 5)
11291 (set (match_operand:SI 0 "s_register_operand")
11292 (ior:SI (match_dup 5)
11298 ;; ARM-specific expansion of signed mod by power of 2
11299 ;; using conditional negate.
11300 ;; For r0 % n where n is a power of 2 produce:
11302 ;; and r0, r0, #(n - 1)
11303 ;; and r1, r1, #(n - 1)
11304 ;; rsbpl r0, r1, #0
11306 (define_expand "modsi3"
;; Signed modulo, expanded only for power-of-2 divisors (see the
;; conditional-negate sequence sketched in the comment above); any
;; other divisor falls through to the generic path.
11307 [(match_operand:SI 0 "register_operand")
11308 (match_operand:SI 1 "register_operand")
11309 (match_operand:SI 2 "const_int_operand")]
11312 HOST_WIDE_INT val = INTVAL (operands[2]);
;; Bail out unless val is a power of 2 greater than 1.
11315 || exact_log2 (val) <= 0)
;; Power-of-2 modulo is a mask by (val - 1) plus a sign fix-up.
11318 rtx mask = GEN_INT (val - 1);
11320 /* In the special case of x0 % 2 we can do the even shorter:
11323 rsblt r0, r0, #0. */
;; Special case: compare operand 1 against zero directly, mask,
;; then conditionally negate the masked value when negative.
11327 rtx cc_reg = arm_gen_compare_reg (LT,
11328 operands[1], const0_rtx, NULL_RTX);
11329 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11330 rtx masked = gen_reg_rtx (SImode);
11332 emit_insn (gen_andsi3 (masked, operands[1], mask));
11333 emit_move_insn (operands[0],
11334 gen_rtx_IF_THEN_ELSE (SImode, cond,
11335 gen_rtx_NEG (SImode,
;; General case: compute 0 - op1 with flags set, mask both the
;; original and the negation, and select by sign.
11341 rtx neg_op = gen_reg_rtx (SImode);
11342 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11345 /* Extract the condition register and mode. */
11346 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11347 rtx cc_reg = SET_DEST (cmp);
11348 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11350 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11352 rtx masked_neg = gen_reg_rtx (SImode);
11353 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11355 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11356 during expand does not always work. Do an IF_THEN_ELSE instead. */
11357 emit_move_insn (operands[0],
11358 gen_rtx_IF_THEN_ELSE (SImode, cond,
11359 gen_rtx_NEG (SImode, masked_neg),
11367 (define_expand "bswapsi2"
;; 32-bit byte swap.  With ARMv6+ the REV insn pattern matches
;; directly; otherwise fall back to the multi-insn legacy sequences
;; (thumb_legacy_rev / arm_legacy_rev), which are only worth it when
;; not optimizing for size — hence the (arm_arch6 || !optimize_size)
;; condition.
11368 [(set (match_operand:SI 0 "s_register_operand")
11369 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11370 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11374 rtx op2 = gen_reg_rtx (SImode);
11375 rtx op3 = gen_reg_rtx (SImode);
;; Thumb path needs two extra temporaries (op4, op5).
11379 rtx op4 = gen_reg_rtx (SImode);
11380 rtx op5 = gen_reg_rtx (SImode);
11382 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11383 op2, op3, op4, op5));
11387 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11396 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11397 ;; and unsigned variants, respectively. For rev16, expose
11398 ;; byte-swapping in the lower 16 bits only.
11399 (define_insn "*arm_revsh"
;; Byte-swap a halfword and sign-extend to SImode (REVSH-style,
;; per the comment above; template not visible in this chunk).
11400 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11401 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11407 [(set_attr "arch" "t1,t2,32")
11408 (set_attr "length" "2,2,4")
11409 (set_attr "type" "rev")]
11412 (define_insn "*arm_rev16"
;; Byte-swap within a halfword (REV16-style, per the comment above;
;; template not visible in this chunk).
11413 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11414 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11420 [(set_attr "arch" "t1,t2,32")
11421 (set_attr "length" "2,2,4")
11422 (set_attr "type" "rev")]
11425 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11426 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11427 ;; each valid permutation.
11429 (define_insn "arm_rev16si2"
;; REV16 on a full SImode value expressed as (ashift & mask3) |
;; (lshiftrt & mask2).  A twin pattern below handles the other
;; IOR operand order, since there is no canonical form (see the
;; comment above).  Masks are validated by the aarch_rev16_* helpers.
11430 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11431 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11433 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11434 (and:SI (lshiftrt:SI (match_dup 1)
11436 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11438 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11439 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11441 [(set_attr "arch" "t1,t2,32")
11442 (set_attr "length" "2,2,4")
11443 (set_attr "type" "rev")]
11446 (define_insn "arm_rev16si2_alt"
;; Same as arm_rev16si2 but with the lshiftrt term first in the IOR,
;; covering the second of the two possible operand orders.
11447 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11448 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11450 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11451 (and:SI (ashift:SI (match_dup 1)
11453 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11455 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11456 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11458 [(set_attr "arch" "t1,t2,32")
11459 (set_attr "length" "2,2,4")
11460 (set_attr "type" "rev")]
11463 (define_expand "bswaphi2"
;; 16-bit byte swap; matched by the *arm_rev16 insn above
;; (enabling condition not visible in this chunk).
11464 [(set (match_operand:HI 0 "s_register_operand")
11465 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11470 ;; Patterns for LDRD/STRD in Thumb2 mode
11472 (define_insn "*thumb2_ldrd"
;; Thumb-2 LDRD: load two consecutive words at base+offset and
;; base+offset+4 into a register pair.  Formed only after reload,
;; when operands_ok_ldrd_strd can validate the register pairing.
11473 [(set (match_operand:SI 0 "s_register_operand" "=r")
11474 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11475 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11476 (set (match_operand:SI 3 "s_register_operand" "=r")
11477 (mem:SI (plus:SI (match_dup 1)
11478 (match_operand:SI 4 "const_int_operand" ""))))]
11479 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
;; The two offsets must be exactly one word apart.
11480 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11481 && (operands_ok_ldrd_strd (operands[0], operands[3],
11482 operands[1], INTVAL (operands[2]),
11484 "ldrd%?\t%0, %3, [%1, %2]"
11485 [(set_attr "type" "load_8")
11486 (set_attr "predicable" "yes")])
11488 (define_insn "*thumb2_ldrd_base"
;; LDRD with zero offset: words at [base] and [base, #4].
11489 [(set (match_operand:SI 0 "s_register_operand" "=r")
11490 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11491 (set (match_operand:SI 2 "s_register_operand" "=r")
11492 (mem:SI (plus:SI (match_dup 1)
11494 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11495 && (operands_ok_ldrd_strd (operands[0], operands[2],
11496 operands[1], 0, false, true))"
11497 "ldrd%?\t%0, %2, [%1]"
11498 [(set_attr "type" "load_8")
11499 (set_attr "predicable" "yes")])
11501 (define_insn "*thumb2_ldrd_base_neg"
;; LDRD for the offset pair (-4, 0): words at [base, #-4] and [base].
11502 [(set (match_operand:SI 0 "s_register_operand" "=r")
11503 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11505 (set (match_operand:SI 2 "s_register_operand" "=r")
11506 (mem:SI (match_dup 1)))]
11507 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11508 && (operands_ok_ldrd_strd (operands[0], operands[2],
11509 operands[1], -4, false, true))"
11510 "ldrd%?\t%0, %2, [%1, #-4]"
11511 [(set_attr "type" "load_8")
11512 (set_attr "predicable" "yes")])
11514 (define_insn "*thumb2_strd"
;; Thumb-2 STRD: store a register pair to two consecutive words,
;; mirror image of *thumb2_ldrd above.
11515 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11516 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11517 (match_operand:SI 2 "s_register_operand" "r"))
11518 (set (mem:SI (plus:SI (match_dup 0)
11519 (match_operand:SI 3 "const_int_operand" "")))
11520 (match_operand:SI 4 "s_register_operand" "r"))]
11521 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
;; The two offsets must be exactly one word apart.
11522 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11523 && (operands_ok_ldrd_strd (operands[2], operands[4],
11524 operands[0], INTVAL (operands[1]),
11526 "strd%?\t%2, %4, [%0, %1]"
11527 [(set_attr "type" "store_8")
11528 (set_attr "predicable" "yes")])
11530 (define_insn "*thumb2_strd_base"
;; STRD with zero offset: stores to [base] and [base, #4].
11531 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11532 (match_operand:SI 1 "s_register_operand" "r"))
11533 (set (mem:SI (plus:SI (match_dup 0)
11535 (match_operand:SI 2 "s_register_operand" "r"))]
11536 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11537 && (operands_ok_ldrd_strd (operands[1], operands[2],
11538 operands[0], 0, false, false))"
11539 "strd%?\t%1, %2, [%0]"
11540 [(set_attr "type" "store_8")
11541 (set_attr "predicable" "yes")])
11543 (define_insn "*thumb2_strd_base_neg"
;; STRD for the offset pair (-4, 0): stores to [base, #-4] and [base].
11544 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11546 (match_operand:SI 1 "s_register_operand" "r"))
11547 (set (mem:SI (match_dup 0))
11548 (match_operand:SI 2 "s_register_operand" "r"))]
11549 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11550 && (operands_ok_ldrd_strd (operands[1], operands[2],
11551 operands[0], -4, false, false))"
11552 "strd%?\t%1, %2, [%0, #-4]"
11553 [(set_attr "type" "store_8")
11554 (set_attr "predicable" "yes")])
11556 ;; ARMv8 CRC32 instructions.
11557 (define_insn "arm_<crc_variant>"
;; ARMv8 CRC32/CRC32C instructions, one pattern per variant via the
;; <crc_variant>/<crc_mode> iterators.  Unconditional: these insns
;; must not be predicated.
11558 [(set (match_operand:SI 0 "s_register_operand" "=r")
11559 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11560 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11563 "<crc_variant>\\t%0, %1, %2"
11564 [(set_attr "type" "crc")
11565 (set_attr "conds" "unconditional")]
11568 ;; Load the load/store double peephole optimizations.
11569 (include "ldrdstrd.md")
11571 ;; Load the load/store multiple patterns
11572 (include "ldmstm.md")
11574 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11575 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11576 ;; The operands are validated through the load_multiple_operation
11577 ;; match_parallel predicate rather than through constraints so enable it only
11579 (define_insn "*load_multiple"
;; Load-multiple for lists larger than the 4-register patterns in
;; ldmstm.md handle (used by the APCS_FRAME epilogue, see comment
;; above).  Validated by the load_multiple_operation predicate
;; rather than constraints, so restricted to after reload.
11580 [(match_parallel 0 "load_multiple_operation"
11581 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11582 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11584 "TARGET_32BIT && reload_completed"
11587 arm_output_multireg_pop (operands, /*return_pc=*/false,
11588 /*cond=*/const_true_rtx,
11594 [(set_attr "predicable" "yes")]
11597 (define_expand "copysignsf3"
;; Soft-float copysign for SFmode: copy the magnitude operand (2),
;; then overwrite its sign bit (bit 31) with that of operand 1 using
;; a Thumb-2 bit-field insert on the SImode view of the value.
11598 [(match_operand:SF 0 "register_operand")
11599 (match_operand:SF 1 "register_operand")
11600 (match_operand:SF 2 "register_operand")]
11601 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11603 emit_move_insn (operands[0], operands[2]);
11604 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11605 GEN_INT (31), GEN_INT (0),
11606 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11611 (define_expand "copysigndf3"
;; Soft-float copysign for DFmode.  The sign lives in bit 31 of the
;; high word, so: extract operand 2's sign by shifting its high word
;; right 31, insert that bit at position 31 of operand 1's high word,
;; then assemble the result from operand 1's low word and the patched
;; high word.
11612 [(match_operand:DF 0 "register_operand")
11613 (match_operand:DF 1 "register_operand")
11614 (match_operand:DF 2 "register_operand")]
11615 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11617 rtx op0_low = gen_lowpart (SImode, operands[0]);
11618 rtx op0_high = gen_highpart (SImode, operands[0]);
11619 rtx op1_low = gen_lowpart (SImode, operands[1]);
11620 rtx op1_high = gen_highpart (SImode, operands[1]);
11621 rtx op2_high = gen_highpart (SImode, operands[2]);
11623 rtx scratch1 = gen_reg_rtx (SImode);
11624 rtx scratch2 = gen_reg_rtx (SImode);
11625 emit_move_insn (scratch1, op2_high);
11626 emit_move_insn (scratch2, op1_high);
;; scratch1 = sign bit of operand 2 (logical shift right by 31).
11628 emit_insn(gen_rtx_SET(scratch1,
11629 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
;; Insert that single bit at position 31 of scratch2.
11630 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11631 emit_move_insn (op0_low, op1_low);
11632 emit_move_insn (op0_high, scratch2);
11638 ;; movmisalign patterns for HImode and SImode.
11639 (define_expand "movmisalign<mode>"
;; Misaligned move for HImode and SImode.  Must not fail during
;; expansion (see comment below): a mem := non-register source is
;; forced into a register first.  HImode loads go through an SImode
;; zero-extending unaligned load and are then narrowed back.
11640 [(match_operand:HSI 0 "general_operand")
11641 (match_operand:HSI 1 "general_operand")]
11644 /* This pattern is not permitted to fail during expansion: if both arguments
11645 are non-registers (e.g. memory := constant), force operand 1 into a
11647 rtx (* gen_unaligned_load)(rtx, rtx);
11648 rtx tmp_dest = operands[0];
11649 if (!s_register_operand (operands[0], <MODE>mode)
11650 && !s_register_operand (operands[1], <MODE>mode))
11651 operands[1] = force_reg (<MODE>mode, operands[1]);
;; Pick the load generator for the mode; HImode needs an SImode temp.
11653 if (<MODE>mode == HImode)
11655 gen_unaligned_load = gen_unaligned_loadhiu;
11656 tmp_dest = gen_reg_rtx (SImode);
11659 gen_unaligned_load = gen_unaligned_loadsi;
;; Memory source -> unaligned load; otherwise unaligned store.
11661 if (MEM_P (operands[1]))
11663 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11664 if (<MODE>mode == HImode)
11665 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11668 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11673 (define_insn "arm_<cdp>"
;; Coprocessor data-processing builtin (CDP/CDP2 via the CDPI
;; iterator).  All six operands are immediates, range-checked by
;; arm_const_bounds before the template is emitted.
11674 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11675 (match_operand:SI 1 "immediate_operand" "n")
11676 (match_operand:SI 2 "immediate_operand" "n")
11677 (match_operand:SI 3 "immediate_operand" "n")
11678 (match_operand:SI 4 "immediate_operand" "n")
11679 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11680 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
;; Bounds: coprocessor number < 16, CRd/CRn/CRm < 32, opcodes < 8/16.
11682 arm_const_bounds (operands[0], 0, 16);
11683 arm_const_bounds (operands[1], 0, 16);
11684 arm_const_bounds (operands[2], 0, (1 << 5));
11685 arm_const_bounds (operands[3], 0, (1 << 5));
11686 arm_const_bounds (operands[4], 0, (1 << 5));
11687 arm_const_bounds (operands[5], 0, 8);
11688 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11690 [(set_attr "length" "4")
11691 (set_attr "type" "coproc")])
11693 (define_insn "*ldc"
;; Coprocessor load (LDC/LDC2/LDCL/LDC2L via the LDCI iterator) from
;; a memory operand; matched from the arm_<ldc> expander below.
11694 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11695 (match_operand:SI 1 "immediate_operand" "n")
11696 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11697 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11699 arm_const_bounds (operands[0], 0, 16);
11700 arm_const_bounds (operands[1], 0, (1 << 5));
11701 return "<ldc>\\tp%c0, CR%c1, %2";
11703 [(set_attr "length" "4")
11704 (set_attr "type" "coproc")])
11706 (define_insn "*stc"
;; Coprocessor store (STC family via the STCI iterator) to a memory
;; operand; matched from the arm_<stc> expander below.
11707 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11708 (match_operand:SI 1 "immediate_operand" "n")
11709 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11710 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11712 arm_const_bounds (operands[0], 0, 16);
11713 arm_const_bounds (operands[1], 0, (1 << 5));
11714 return "<stc>\\tp%c0, CR%c1, %2";
11716 [(set_attr "length" "4")
11717 (set_attr "type" "coproc")])
11719 (define_expand "arm_<ldc>"
;; Expander for the LDC builtins: wraps the register address in a
;; mem so the *ldc insn above can match it.
11720 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11721 (match_operand:SI 1 "immediate_operand")
11722 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11723 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11725 (define_expand "arm_<stc>"
;; Expander for the STC builtins, mirror of arm_<ldc> above.
11726 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11727 (match_operand:SI 1 "immediate_operand")
11728 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11729 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11731 (define_insn "arm_<mcr>"
;; Move from core register to coprocessor (MCR/MCR2 via the MCRI
;; iterator).  The (use) of operand 2 keeps the source register live.
11732 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11733 (match_operand:SI 1 "immediate_operand" "n")
11734 (match_operand:SI 2 "s_register_operand" "r")
11735 (match_operand:SI 3 "immediate_operand" "n")
11736 (match_operand:SI 4 "immediate_operand" "n")
11737 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11738 (use (match_dup 2))]
11739 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11741 arm_const_bounds (operands[0], 0, 16);
11742 arm_const_bounds (operands[1], 0, 8);
11743 arm_const_bounds (operands[3], 0, (1 << 5));
11744 arm_const_bounds (operands[4], 0, (1 << 5));
11745 arm_const_bounds (operands[5], 0, 8);
11746 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11748 [(set_attr "length" "4")
11749 (set_attr "type" "coproc")])
11751 (define_insn "arm_<mrc>"
;; Move from coprocessor to core register (MRC/MRC2 via the MRCI
;; iterator); operand 0 receives the coprocessor value.
11752 [(set (match_operand:SI 0 "s_register_operand" "=r")
11753 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11754 (match_operand:SI 2 "immediate_operand" "n")
11755 (match_operand:SI 3 "immediate_operand" "n")
11756 (match_operand:SI 4 "immediate_operand" "n")
11757 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11758 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11760 arm_const_bounds (operands[1], 0, 16);
11761 arm_const_bounds (operands[2], 0, 8);
11762 arm_const_bounds (operands[3], 0, (1 << 5));
11763 arm_const_bounds (operands[4], 0, (1 << 5));
11764 arm_const_bounds (operands[5], 0, 8);
11765 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11767 [(set_attr "length" "4")
11768 (set_attr "type" "coproc")])
11770 (define_insn "arm_<mcrr>"
;; Move a 64-bit register pair to a coprocessor (MCRR/MCRR2 via the
;; MCRRI iterator); %Q2/%R2 print the low/high words of operand 2.
11771 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11772 (match_operand:SI 1 "immediate_operand" "n")
11773 (match_operand:DI 2 "s_register_operand" "r")
11774 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11775 (use (match_dup 2))]
11776 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11778 arm_const_bounds (operands[0], 0, 16);
11779 arm_const_bounds (operands[1], 0, 8);
11780 arm_const_bounds (operands[3], 0, (1 << 5));
11781 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11783 [(set_attr "length" "4")
11784 (set_attr "type" "coproc")])
11786 (define_insn "arm_<mrrc>"
;; Move from a coprocessor into a 64-bit register pair (MRRC/MRRC2
;; via the MRRCI iterator), mirror of arm_<mcrr> above.
11787 [(set (match_operand:DI 0 "s_register_operand" "=r")
11788 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11789 (match_operand:SI 2 "immediate_operand" "n")
11790 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11791 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11793 arm_const_bounds (operands[1], 0, 16);
11794 arm_const_bounds (operands[2], 0, 8);
11795 arm_const_bounds (operands[3], 0, (1 << 5));
11796 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11798 [(set_attr "length" "4")
11799 (set_attr "type" "coproc")])
11801 (define_expand "speculation_barrier"
;; Speculation barrier.  On ARMv7/ARMv8 the hard-barrier insn below
;; matches; on older architectures fall back to a libgcc helper that
;; performs the equivalent for the running CPU (see comment below).
11802 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11805 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11806 have a usable barrier (and probably don't need one in practice).
11807 But to be safe if such code is run on later architectures, call a
11808 helper function in libgcc that will do the thing for the active
11810 if (!(arm_arch7 || arm_arch8))
11812 arm_emit_speculation_barrier_function ();
11818 ;; Generate a hard speculation barrier when we have not enabled speculation
11820 (define_insn "*speculation_barrier_insn"
;; Hard speculation barrier for ARMv7/ARMv8; 8 bytes long (template
;; not visible in this chunk — presumably a DSB/ISB pair).
11821 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11822 "arm_arch7 || arm_arch8"
11824 [(set_attr "type" "block")
11825 (set_attr "length" "8")]
11828 ;; Vector bits common to IWMMXT and Neon
11829 (include "vec-common.md")
11830 ;; Load the Intel Wireless Multimedia Extension patterns
11831 (include "iwmmxt.md")
11832 ;; Load the VFP co-processor patterns
11834 ;; Thumb-1 patterns
11835 (include "thumb1.md")
11836 ;; Thumb-2 patterns
11837 (include "thumb2.md")
11839 (include "neon.md")
11841 (include "crypto.md")
11842 ;; Synchronization Primitives
11843 (include "sync.md")
11844 ;; Fixed-point patterns
11845 (include "arm-fixed.md")