1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
; The (const ...) wrapper marks the value as depending only on global
; target state (TARGET_THUMB), not on the individual insn.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
; USE_LITERAL_POOL is 'yes' for FP single/double load alternatives whose
; source operand is a constant, i.e. the value must be materialized from
; the literal pool rather than loaded from a register-addressed location.
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; POOL_RANGE selects between the per-ISA variants above: the Thumb-2
; range when compiling Thumb code, the ARM range otherwise.
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
; NEG_POOL_RANGE likewise selects the Thumb-2 or ARM backward range
; depending on whether the insn is compiled as Thumb code.
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
; Default attributes applied to inline asm statements: assume the flags
; are clobbered, and use a short pool_range so any constant pool entry
; the asm references stays reachable.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
; The explicit list below is the set of one-cycle ("single") types; any
; type not listed defaults to "multi".
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
;; Defaults to "no"; individual patterns override it per-insn.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
491 (define_expand "addv<mode>4"
492 [(match_operand:SIDI 0 "register_operand")
493 (match_operand:SIDI 1 "register_operand")
494 (match_operand:SIDI 2 "register_operand")
495 (match_operand 3 "")]
498 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
499 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
504 (define_expand "uaddv<mode>4"
505 [(match_operand:SIDI 0 "register_operand")
506 (match_operand:SIDI 1 "register_operand")
507 (match_operand:SIDI 2 "register_operand")
508 (match_operand 3 "")]
511 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
512 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
517 (define_expand "addsi3"
518 [(set (match_operand:SI 0 "s_register_operand")
519 (plus:SI (match_operand:SI 1 "s_register_operand")
520 (match_operand:SI 2 "reg_or_int_operand")))]
523 if (TARGET_32BIT && CONST_INT_P (operands[2]))
525 arm_split_constant (PLUS, SImode, NULL_RTX,
526 INTVAL (operands[2]), operands[0], operands[1],
527 optimize && can_create_pseudo_p ());
533 ; If there is a scratch available, this will be faster than synthesizing the
536 [(match_scratch:SI 3 "r")
537 (set (match_operand:SI 0 "arm_general_register_operand" "")
538 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
539 (match_operand:SI 2 "const_int_operand" "")))]
541 !(const_ok_for_arm (INTVAL (operands[2]))
542 || const_ok_for_arm (-INTVAL (operands[2])))
543 && const_ok_for_arm (~INTVAL (operands[2]))"
544 [(set (match_dup 3) (match_dup 2))
545 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
549 ;; The r/r/k alternative is required when reloading the address
550 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
551 ;; put the duplicated register first, and not try the commutative version.
552 (define_insn_and_split "*arm_addsi3"
553 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
554 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
555 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
571 subw%?\\t%0, %1, #%n2
572 subw%?\\t%0, %1, #%n2
575 && CONST_INT_P (operands[2])
576 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
577 && (reload_completed || !arm_eliminable_register (operands[1]))"
578 [(clobber (const_int 0))]
580 arm_split_constant (PLUS, SImode, curr_insn,
581 INTVAL (operands[2]), operands[0],
585 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
586 (set_attr "predicable" "yes")
587 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
588 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
589 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
590 (const_string "alu_imm")
591 (const_string "alu_sreg")))
595 (define_insn "adddi3_compareV"
596 [(set (reg:CC_V CC_REGNUM)
599 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
600 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
601 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
602 (set (match_operand:DI 0 "s_register_operand" "=&r")
603 (plus:DI (match_dup 1) (match_dup 2)))]
605 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
606 [(set_attr "conds" "set")
607 (set_attr "length" "8")
608 (set_attr "type" "multiple")]
611 (define_insn "addsi3_compareV"
612 [(set (reg:CC_V CC_REGNUM)
615 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
616 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
617 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
618 (set (match_operand:SI 0 "register_operand" "=r")
619 (plus:SI (match_dup 1) (match_dup 2)))]
621 "adds%?\\t%0, %1, %2"
622 [(set_attr "conds" "set")
623 (set_attr "type" "alus_sreg")]
626 (define_insn "adddi3_compareC"
627 [(set (reg:CC_C CC_REGNUM)
630 (match_operand:DI 1 "register_operand" "r")
631 (match_operand:DI 2 "register_operand" "r"))
633 (set (match_operand:DI 0 "register_operand" "=&r")
634 (plus:DI (match_dup 1) (match_dup 2)))]
636 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
637 [(set_attr "conds" "set")
638 (set_attr "length" "8")
639 (set_attr "type" "multiple")]
642 (define_insn "addsi3_compareC"
643 [(set (reg:CC_C CC_REGNUM)
644 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
645 (match_operand:SI 2 "register_operand" "r"))
647 (set (match_operand:SI 0 "register_operand" "=r")
648 (plus:SI (match_dup 1) (match_dup 2)))]
650 "adds%?\\t%0, %1, %2"
651 [(set_attr "conds" "set")
652 (set_attr "type" "alus_sreg")]
655 (define_insn "addsi3_compare0"
656 [(set (reg:CC_NOOV CC_REGNUM)
658 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
659 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
661 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
662 (plus:SI (match_dup 1) (match_dup 2)))]
666 subs%?\\t%0, %1, #%n2
668 [(set_attr "conds" "set")
669 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
672 (define_insn "*addsi3_compare0_scratch"
673 [(set (reg:CC_NOOV CC_REGNUM)
675 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
676 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
683 [(set_attr "conds" "set")
684 (set_attr "predicable" "yes")
685 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
688 (define_insn "*compare_negsi_si"
689 [(set (reg:CC_Z CC_REGNUM)
691 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
692 (match_operand:SI 1 "s_register_operand" "l,r")))]
695 [(set_attr "conds" "set")
696 (set_attr "predicable" "yes")
697 (set_attr "arch" "t2,*")
698 (set_attr "length" "2,4")
699 (set_attr "predicable_short_it" "yes,no")
700 (set_attr "type" "alus_sreg")]
703 ;; This is the canonicalization of subsi3_compare when the
704 ;; addend is a constant.
705 (define_insn "cmpsi2_addneg"
706 [(set (reg:CC CC_REGNUM)
708 (match_operand:SI 1 "s_register_operand" "r,r")
709 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
711 (plus:SI (match_dup 1)
712 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
714 && (INTVAL (operands[2])
715 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
717 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
718 in different condition codes (like cmn rather than like cmp), so that
719 alternative comes first. Both alternatives can match for any 0x??000000
720 where except for 0 and INT_MIN it doesn't matter what we choose, and also
721 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
723 if (which_alternative == 0 && operands[3] != const1_rtx)
724 return "subs%?\\t%0, %1, #%n3";
726 return "adds%?\\t%0, %1, %3";
728 [(set_attr "conds" "set")
729 (set_attr "type" "alus_sreg")]
732 ;; Convert the sequence
734 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
738 ;; bcs dest ((unsigned)rn >= 1)
739 ;; similarly for the beq variant using bcc.
740 ;; This is a common looping idiom (while (n--))
742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
743 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
745 (set (match_operand 2 "cc_register" "")
746 (compare (match_dup 0) (const_int -1)))
748 (if_then_else (match_operator 3 "equality_operator"
749 [(match_dup 2) (const_int 0)])
750 (match_operand 4 "" "")
751 (match_operand 5 "" "")))]
752 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
756 (match_dup 1) (const_int 1)))
757 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
759 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
762 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
763 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
766 operands[2], const0_rtx);"
769 ;; The next four insns work because they compare the result with one of
770 ;; the operands, and we know that the use of the condition code is
771 ;; either GEU or LTU, so we can use the carry flag from the addition
772 ;; instead of doing the compare a second time.
773 (define_insn "*addsi3_compare_op1"
774 [(set (reg:CC_C CC_REGNUM)
776 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
777 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
779 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
780 (plus:SI (match_dup 1) (match_dup 2)))]
785 subs%?\\t%0, %1, #%n2
786 subs%?\\t%0, %0, #%n2
788 subs%?\\t%0, %1, #%n2
790 [(set_attr "conds" "set")
791 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
792 (set_attr "length" "2,2,2,2,4,4,4")
794 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
797 (define_insn "*addsi3_compare_op2"
798 [(set (reg:CC_C CC_REGNUM)
800 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
801 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
803 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
804 (plus:SI (match_dup 1) (match_dup 2)))]
809 subs%?\\t%0, %1, #%n2
810 subs%?\\t%0, %0, #%n2
812 subs%?\\t%0, %1, #%n2
814 [(set_attr "conds" "set")
815 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
816 (set_attr "length" "2,2,2,2,4,4,4")
818 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
821 (define_insn "*compare_addsi2_op0"
822 [(set (reg:CC_C CC_REGNUM)
824 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
825 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
834 [(set_attr "conds" "set")
835 (set_attr "predicable" "yes")
836 (set_attr "arch" "t2,t2,*,*,*")
837 (set_attr "predicable_short_it" "yes,yes,no,no,no")
838 (set_attr "length" "2,2,4,4,4")
839 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
842 (define_insn "*compare_addsi2_op1"
843 [(set (reg:CC_C CC_REGNUM)
845 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
846 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
855 [(set_attr "conds" "set")
856 (set_attr "predicable" "yes")
857 (set_attr "arch" "t2,t2,*,*,*")
858 (set_attr "predicable_short_it" "yes,yes,no,no,no")
859 (set_attr "length" "2,2,4,4,4")
860 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
863 (define_insn "addsi3_carryin"
864 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
865 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
866 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
867 (match_operand:SI 3 "arm_carry_operation" "")))]
872 sbc%?\\t%0, %1, #%B2"
873 [(set_attr "conds" "use")
874 (set_attr "predicable" "yes")
875 (set_attr "arch" "t2,*,*")
876 (set_attr "length" "4")
877 (set_attr "predicable_short_it" "yes,no,no")
878 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
881 ;; Canonicalization of the above when the immediate is zero.
882 (define_insn "add0si3_carryin"
883 [(set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
885 (match_operand:SI 1 "arm_not_operand" "r")))]
888 [(set_attr "conds" "use")
889 (set_attr "predicable" "yes")
890 (set_attr "length" "4")
891 (set_attr "type" "adc_imm")]
894 (define_insn "*addsi3_carryin_alt2"
895 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
896 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
897 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
898 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
903 sbc%?\\t%0, %1, #%B2"
904 [(set_attr "conds" "use")
905 (set_attr "predicable" "yes")
906 (set_attr "arch" "t2,*,*")
907 (set_attr "length" "4")
908 (set_attr "predicable_short_it" "yes,no,no")
909 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
912 (define_insn "*addsi3_carryin_shift"
913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
915 (match_operator:SI 2 "shift_operator"
916 [(match_operand:SI 3 "s_register_operand" "r,r")
917 (match_operand:SI 4 "shift_amount_operand" "M,r")])
918 (match_operand:SI 5 "arm_carry_operation" ""))
919 (match_operand:SI 1 "s_register_operand" "r,r")))]
921 "adc%?\\t%0, %1, %3%S2"
922 [(set_attr "conds" "use")
923 (set_attr "arch" "32,a")
924 (set_attr "shift" "3")
925 (set_attr "predicable" "yes")
926 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
927 (const_string "alu_shift_imm")
928 (const_string "alu_shift_reg")))]
931 (define_insn "*addsi3_carryin_clobercc"
932 [(set (match_operand:SI 0 "s_register_operand" "=r")
933 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
934 (match_operand:SI 2 "arm_rhs_operand" "rI"))
935 (match_operand:SI 3 "arm_carry_operation" "")))
936 (clobber (reg:CC CC_REGNUM))]
938 "adcs%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "type" "adcs_reg")]
943 (define_expand "subv<mode>4"
944 [(match_operand:SIDI 0 "register_operand")
945 (match_operand:SIDI 1 "register_operand")
946 (match_operand:SIDI 2 "register_operand")
947 (match_operand 3 "")]
950 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
951 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
956 (define_expand "usubv<mode>4"
957 [(match_operand:SIDI 0 "register_operand")
958 (match_operand:SIDI 1 "register_operand")
959 (match_operand:SIDI 2 "register_operand")
960 (match_operand 3 "")]
963 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
964 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
969 (define_insn "subdi3_compare1"
970 [(set (reg:CC CC_REGNUM)
972 (match_operand:DI 1 "s_register_operand" "r")
973 (match_operand:DI 2 "s_register_operand" "r")))
974 (set (match_operand:DI 0 "s_register_operand" "=&r")
975 (minus:DI (match_dup 1) (match_dup 2)))]
977 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
978 [(set_attr "conds" "set")
979 (set_attr "length" "8")
980 (set_attr "type" "multiple")]
983 (define_insn "subsi3_compare1"
984 [(set (reg:CC CC_REGNUM)
986 (match_operand:SI 1 "register_operand" "r")
987 (match_operand:SI 2 "register_operand" "r")))
988 (set (match_operand:SI 0 "register_operand" "=r")
989 (minus:SI (match_dup 1) (match_dup 2)))]
991 "subs%?\\t%0, %1, %2"
992 [(set_attr "conds" "set")
993 (set_attr "type" "alus_sreg")]
996 (define_insn "subsi3_carryin"
997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
998 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
999 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1000 (match_operand:SI 3 "arm_borrow_operation" "")))]
1005 sbc%?\\t%0, %2, %2, lsl #1"
1006 [(set_attr "conds" "use")
1007 (set_attr "arch" "*,a,t2")
1008 (set_attr "predicable" "yes")
1009 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
;; Compare-with-carry patterns that produce a carry-out in CC_REGNUM.
;; The comparison is between the (sign/zero-)extended operand 1 and the
;; sum of the incoming borrow (operand 3) plus the extended operand 2.
;; The SI result itself is discarded into a scratch (operand 0).
1012 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1013 [(set (reg:<CC_EXTEND> CC_REGNUM)
1014 (compare:<CC_EXTEND>
1015 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1016 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1017 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1018 (clobber (match_scratch:SI 0 "=l,r"))]
1021 [(set_attr "conds" "set")
1022 (set_attr "arch" "t2,*")
1023 (set_attr "length" "2,4")
1024 (set_attr "type" "adc_reg")]
1027 ;; Similar to the above, but handling a constant which has a different
1028 ;; canonicalization.
;; Here operand 2 is an ADC-style immediate ("I" directly encodable, or
;; "K" encodable when inverted — the visible template uses #%B2, the
;; bitwise complement of the immediate, for the second alternative).
1029 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1030 [(set (reg:<CC_EXTEND> CC_REGNUM)
1031 (compare:<CC_EXTEND>
1032 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1033 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1034 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1035 (clobber (match_scratch:SI 0 "=l,r"))]
1039 adcs\\t%0, %1, #%B2"
1040 [(set_attr "conds" "set")
1041 (set_attr "type" "adc_imm")]
1044 ;; Further canonicalization when the constant is zero.
;; Compare the extended operand 1 against the borrow alone.
1045 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1046 [(set (reg:<CC_EXTEND> CC_REGNUM)
1047 (compare:<CC_EXTEND>
1048 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1049 (match_operand:DI 2 "arm_borrow_operation" "")))
1050 (clobber (match_scratch:SI 0 "=l,r"))]
1053 [(set_attr "conds" "set")
1054 (set_attr "type" "adc_imm")]
;; Subtract-with-borrow where the subtrahend is the negation of an
;; encodable immediate (constraint "L"); the template uses #%n2, i.e.
;; the negated value of operand 2.
;; NOTE(review): the operator line between operands 0 and 1 is elided
;; in this excerpt — confirm against the full file.
1057 (define_insn "*subsi3_carryin_const"
1058 [(set (match_operand:SI 0 "s_register_operand" "=r")
1060 (match_operand:SI 1 "s_register_operand" "r")
1061 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1062 (match_operand:SI 3 "arm_borrow_operation" "")))]
1064 "sbc\\t%0, %1, #%n2"
1065 [(set_attr "conds" "use")
1066 (set_attr "type" "adc_imm")]
;; Degenerate form of the above when the constant is zero:
;; operand 0 = operand 1 - borrow.
1069 (define_insn "*subsi3_carryin_const0"
1070 [(set (match_operand:SI 0 "s_register_operand" "=r")
1071 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1072 (match_operand:SI 2 "arm_borrow_operation" "")))]
1075 [(set_attr "conds" "use")
1076 (set_attr "type" "adc_imm")]
;; Subtract-with-borrow where the subtrahend is a shifted register:
;; operand 0 = operand 1 - (op3 shifted by op4) - borrow (operand 5).
;; %S2 prints the shift operator/amount.  The "type" attribute depends
;; on whether the shift amount (operand 4) is a constant.
1079 (define_insn "*subsi3_carryin_shift"
1080 [(set (match_operand:SI 0 "s_register_operand" "=r")
1082 (match_operand:SI 1 "s_register_operand" "r")
1083 (match_operator:SI 2 "shift_operator"
1084 [(match_operand:SI 3 "s_register_operand" "r")
1085 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1086 (match_operand:SI 5 "arm_borrow_operation" "")))]
1088 "sbc%?\\t%0, %1, %3%S2"
1089 [(set_attr "conds" "use")
1090 (set_attr "predicable" "yes")
1091 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1092 (const_string "alu_shift_imm")
1093 (const_string "alu_shift_reg")))]
;; As above but with the borrow subtracted before the shifted operand
;; in the RTL (an alternative canonical ordering); same assembly.
1096 (define_insn "*subsi3_carryin_shift_alt"
1097 [(set (match_operand:SI 0 "s_register_operand" "=r")
1099 (match_operand:SI 1 "s_register_operand" "r")
1100 (match_operand:SI 5 "arm_borrow_operation" ""))
1101 (match_operator:SI 2 "shift_operator"
1102 [(match_operand:SI 3 "s_register_operand" "r")
1103 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1105 "sbc%?\\t%0, %1, %3%S2"
1106 [(set_attr "conds" "use")
1107 (set_attr "predicable" "yes")
1108 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1109 (const_string "alu_shift_imm")
1110 (const_string "alu_shift_reg")))]
;; Reverse subtract with carry: (shifted op3) - operand 1 - borrow,
;; using RSC.
1113 (define_insn "*rsbsi3_carryin_shift"
1114 [(set (match_operand:SI 0 "s_register_operand" "=r")
1116 (match_operator:SI 2 "shift_operator"
1117 [(match_operand:SI 3 "s_register_operand" "r")
1118 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1119 (match_operand:SI 1 "s_register_operand" "r"))
1120 (match_operand:SI 5 "arm_borrow_operation" "")))]
1122 "rsc%?\\t%0, %1, %3%S2"
1123 [(set_attr "conds" "use")
1124 (set_attr "predicable" "yes")
1125 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1126 (const_string "alu_shift_imm")
1127 (const_string "alu_shift_reg")))]
;; Alternative RTL ordering of the reverse-subtract-with-carry above;
;; same assembly output.
1130 (define_insn "*rsbsi3_carryin_shift_alt"
1131 [(set (match_operand:SI 0 "s_register_operand" "=r")
1133 (match_operator:SI 2 "shift_operator"
1134 [(match_operand:SI 3 "s_register_operand" "r")
1135 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1136 (match_operand:SI 5 "arm_borrow_operation" ""))
1137 (match_operand:SI 1 "s_register_operand" "r")))]
1139 "rsc%?\\t%0, %1, %3%S2"
1140 [(set_attr "conds" "use")
1141 (set_attr "predicable" "yes")
1142 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1143 (const_string "alu_shift_imm")
1144 (const_string "alu_shift_reg")))]
1147 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
;; The split first loads the adjusted constant ~(X-1) into the scratch
;; (operand 3), then emits a single NOT-of-shift to produce the result.
1149 [(set (match_operand:SI 0 "s_register_operand" "")
1150 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1151 (match_operand:SI 2 "s_register_operand" ""))
1153 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1155 [(set (match_dup 3) (match_dup 1))
1156 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
;; Rewrite the constant as ~(X - 1) for the replacement sequence.
1158 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; Single-precision FP addition; requires hardware floating point.
1161 (define_expand "addsf3"
1162 [(set (match_operand:SF 0 "s_register_operand")
1163 (plus:SF (match_operand:SF 1 "s_register_operand")
1164 (match_operand:SF 2 "s_register_operand")))]
1165 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision FP addition; additionally excluded on
;; single-precision-only VFP implementations.
1169 (define_expand "adddf3"
1170 [(set (match_operand:DF 0 "s_register_operand")
1171 (plus:DF (match_operand:DF 1 "s_register_operand")
1172 (match_operand:DF 2 "s_register_operand")))]
1173 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; 64-bit subtraction, expanded as a low-word subtract that sets the
;; carry/borrow flag followed by a high-word subtract-with-borrow.
;; Special-cases a zero or constant low word of operand 1 so that a
;; negate or reverse-subtract-from-immediate form can be used instead.
1177 (define_expand "subdi3"
1179 [(set (match_operand:DI 0 "s_register_operand")
1180 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1181 (match_operand:DI 2 "s_register_operand")))
1182 (clobber (reg:CC CC_REGNUM))])]
/* Ensure operand 1 is in a register before decomposing.  */
1187 if (!REG_P (operands[1]))
1188 operands[1] = force_reg (DImode, operands[1]);
1192 rtx lo_result, hi_result, lo_dest, hi_dest;
1193 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1196 /* Since operands[1] may be an integer, pass it second, so that
1197 any necessary simplifications will be done on the decomposed
1199 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1201 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1202 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
/* The low half must be a valid RHS operand.  */
1204 if (!arm_rhs_operand (lo_op1, SImode))
1205 lo_op1 = force_reg (SImode, lo_op1);
/* Thumb-2 additionally requires a core register for the high half.  */
1207 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1208 || !arm_rhs_operand (hi_op1, SImode))
1209 hi_op1 = force_reg (SImode, hi_op1);
/* Low word == 0: use a flag-setting negate (CC_RSB mode).  */
1212 if (lo_op1 == const0_rtx)
1214 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1215 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
/* Constant low word: reverse-subtract from the immediate, comparing
   against its bitwise complement (see rsb_imm_compare).  */
1217 else if (CONST_INT_P (lo_op1))
1219 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1220 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1221 GEN_INT (~UINTVAL (lo_op1))));
/* General case: plain flag-setting subtract.  */
1225 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1226 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
/* Borrow condition for the high-word subtraction.  */
1229 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1231 if (hi_op1 == const0_rtx)
1232 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1234 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
/* Copy back if the destinations had to be replaced.  */
1236 if (lo_result != lo_dest)
1237 emit_move_insn (lo_result, lo_dest);
1239 if (hi_result != hi_dest)
1240 emit_move_insn (hi_result, hi_dest);
;; 32-bit subtraction.  A constant minuend is either forced into a
;; register (when early splitting is not wanted, or on Thumb-1) or
;; split immediately via arm_split_constant.
1247 (define_expand "subsi3"
1248 [(set (match_operand:SI 0 "s_register_operand")
1249 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1250 (match_operand:SI 2 "s_register_operand")))]
1253 if (CONST_INT_P (operands[1]))
1257 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1258 operands[1] = force_reg (SImode, operands[1]);
/* Split the constant subtraction into encodable pieces now.  */
1261 arm_split_constant (MINUS, SImode, NULL_RTX,
1262 INTVAL (operands[1]), operands[0],
1264 optimize && can_create_pseudo_p ());
1268 else /* TARGET_THUMB1 */
1269 operands[1] = force_reg (SImode, operands[1]);
1274 ; ??? Check Thumb-2 split length
;; Core SI subtract insn with Thumb-1/Thumb-2/ARM alternatives; the
;; final "?n" alternative accepts an arbitrary constant minuend and is
;; split after reload via arm_split_constant when the constant is not
;; directly encodable.
1275 (define_insn_and_split "*arm_subsi3_insn"
1276 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1277 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1278 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1290 "&& (CONST_INT_P (operands[1])
1291 && !const_ok_for_arm (INTVAL (operands[1])))"
1292 [(clobber (const_int 0))]
1294 arm_split_constant (MINUS, SImode, curr_insn,
1295 INTVAL (operands[1]), operands[0], operands[2], 0);
1298 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1299 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1300 (set_attr "predicable" "yes")
1301 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1302 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
;; Peephole: when subtracting a register from a constant that is not
;; encodable but whose complement is, load the constant into a scratch
;; first (presumably via MVN — confirm) and then subtract.
1306 [(match_scratch:SI 3 "r")
1307 (set (match_operand:SI 0 "arm_general_register_operand" "")
1308 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1309 (match_operand:SI 2 "arm_general_register_operand" "")))]
1311 && !const_ok_for_arm (INTVAL (operands[1]))
1312 && const_ok_for_arm (~INTVAL (operands[1]))"
1313 [(set (match_dup 3) (match_dup 1))
1314 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Subtract setting CC_NOOV (overflow not usable); the last alternative
;; has the immediate as minuend and uses RSBS instead.
1318 (define_insn "subsi3_compare0"
1319 [(set (reg:CC_NOOV CC_REGNUM)
1321 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1322 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1324 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1325 (minus:SI (match_dup 1) (match_dup 2)))]
1330 rsbs%?\\t%0, %2, %1"
1331 [(set_attr "conds" "set")
1332 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;; As above but setting the full CC mode from the comparison of
;; operand 1 against operand 2.
1335 (define_insn "subsi3_compare"
1336 [(set (reg:CC CC_REGNUM)
1337 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1338 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1339 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1340 (minus:SI (match_dup 1) (match_dup 2)))]
1345 rsbs%?\\t%0, %2, %1"
1346 [(set_attr "conds" "set")
1347 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1350 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1351 ;; rather than (0 cmp reg). This gives the same results for unsigned
1352 ;; and equality compares which is what we mostly need here.
;; Reverse-subtract from an immediate (operand 1) while setting CC_RSB;
;; the insn condition requires operand 3 == ~operand 1.
1353 (define_insn "rsb_imm_compare"
1354 [(set (reg:CC_RSB CC_REGNUM)
1355 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1356 (match_operand 3 "const_int_operand" "")))
1357 (set (match_operand:SI 0 "s_register_operand" "=r")
1358 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1360 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1362 [(set_attr "conds" "set")
1363 (set_attr "type" "alus_imm")]
1366 ;; Similarly, but the result is unused.
;; #%B1 prints the bitwise complement of the "K" immediate.
1367 (define_insn "rsb_imm_compare_scratch"
1368 [(set (reg:CC_RSB CC_REGNUM)
1369 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1370 (match_operand 1 "arm_not_immediate_operand" "K")))
1371 (clobber (match_scratch:SI 0 "=r"))]
1373 "rsbs\\t%0, %2, #%B1"
1374 [(set_attr "conds" "set")
1375 (set_attr "type" "alus_imm")]
1378 ;; Compare the sum of a value plus a carry against a constant. Uses
1379 ;; RSC, so the result is swapped. Only available on Arm
1380 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
1381 [(set (reg:CC_SWP CC_REGNUM)
1383 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
1384 (match_operand:DI 3 "arm_borrow_operation" ""))
1385 (match_operand 1 "arm_immediate_operand" "I")))
1386 (clobber (match_scratch:SI 0 "=r"))]
1389 [(set_attr "conds" "set")
1390 (set_attr "type" "alus_imm")]
;; Single-precision FP subtraction; requires hardware floating point.
1393 (define_expand "subsf3"
1394 [(set (match_operand:SF 0 "s_register_operand")
1395 (minus:SF (match_operand:SF 1 "s_register_operand")
1396 (match_operand:SF 2 "s_register_operand")))]
1397 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision FP subtraction; excluded on single-precision-only
;; VFP implementations.
1401 (define_expand "subdf3"
1402 [(set (match_operand:DF 0 "s_register_operand")
1403 (minus:DF (match_operand:DF 1 "s_register_operand")
1404 (match_operand:DF 2 "s_register_operand")))]
1405 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1410 ;; Multiplication insns
;; HImode multiply: widen to SI via mulhisi3 (SMULBB) and truncate.
1412 (define_expand "mulhi3"
1413 [(set (match_operand:HI 0 "s_register_operand")
1414 (mult:HI (match_operand:HI 1 "s_register_operand")
1415 (match_operand:HI 2 "s_register_operand")))]
1416 "TARGET_DSP_MULTIPLY"
1419 rtx result = gen_reg_rtx (SImode);
1420 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1421 emit_move_insn (operands[0], gen_lowpart (HImode, result));
;; SImode multiply expander; note the operands appear swapped (2, 1)
;; to match the insn pattern below.
1426 (define_expand "mulsi3"
1427 [(set (match_operand:SI 0 "s_register_operand")
1428 (mult:SI (match_operand:SI 2 "s_register_operand")
1429 (match_operand:SI 1 "s_register_operand")))]
1434 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
;; MUL insn; the nov6 alternatives carry earlyclobber/tied constraints
;; because pre-v6 MUL cannot have Rd == Rm.
1436 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1437 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1438 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1440 "mul%?\\t%0, %2, %1"
1441 [(set_attr "type" "mul")
1442 (set_attr "predicable" "yes")
1443 (set_attr "arch" "t2,v6,nov6,nov6")
1444 (set_attr "length" "4")
1445 (set_attr "predicable_short_it" "yes,no,*,*")]
1448 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
1449 ;; reusing the same register.
;; Multiply-accumulate: operand 0 = op3 * op2 + op1.
1452 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1454 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1455 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1456 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1458 "mla%?\\t%0, %3, %2, %1"
1459 [(set_attr "type" "mla")
1460 (set_attr "predicable" "yes")
1461 (set_attr "arch" "v6,nov6,nov6,nov6")]
;; Multiply-subtract: operand 0 = op1 - op3 * op2 (Thumb-2-era MLS).
1465 [(set (match_operand:SI 0 "s_register_operand" "=r")
1467 (match_operand:SI 1 "s_register_operand" "r")
1468 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1469 (match_operand:SI 2 "s_register_operand" "r"))))]
1470 "TARGET_32BIT && arm_arch_thumb2"
1471 "mls%?\\t%0, %3, %2, %1"
1472 [(set_attr "type" "mla")
1473 (set_attr "predicable" "yes")]
;; Flag-setting multiply (MULS), pre-v6: needs earlyclobber on the
;; destination because Rd must not equal Rm.
1476 (define_insn "*mulsi3_compare0"
1477 [(set (reg:CC_NOOV CC_REGNUM)
1478 (compare:CC_NOOV (mult:SI
1479 (match_operand:SI 2 "s_register_operand" "r,r")
1480 (match_operand:SI 1 "s_register_operand" "%0,r"))
1482 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1483 (mult:SI (match_dup 2) (match_dup 1)))]
1484 "TARGET_ARM && !arm_arch6"
1485 "muls%?\\t%0, %2, %1"
1486 [(set_attr "conds" "set")
1487 (set_attr "type" "muls")]
;; v6 variant: no operand restriction, but only used at -Os (MULS is
;; restricted otherwise per this insn's optimize_size condition).
1490 (define_insn "*mulsi3_compare0_v6"
1491 [(set (reg:CC_NOOV CC_REGNUM)
1492 (compare:CC_NOOV (mult:SI
1493 (match_operand:SI 2 "s_register_operand" "r")
1494 (match_operand:SI 1 "s_register_operand" "r"))
1496 (set (match_operand:SI 0 "s_register_operand" "=r")
1497 (mult:SI (match_dup 2) (match_dup 1)))]
1498 "TARGET_ARM && arm_arch6 && optimize_size"
1499 "muls%?\\t%0, %2, %1"
1500 [(set_attr "conds" "set")
1501 (set_attr "type" "muls")]
;; Flag-setting multiply with the product itself discarded (scratch
;; destination), pre-v6.
1504 (define_insn "*mulsi_compare0_scratch"
1505 [(set (reg:CC_NOOV CC_REGNUM)
1506 (compare:CC_NOOV (mult:SI
1507 (match_operand:SI 2 "s_register_operand" "r,r")
1508 (match_operand:SI 1 "s_register_operand" "%0,r"))
1510 (clobber (match_scratch:SI 0 "=&r,&r"))]
1511 "TARGET_ARM && !arm_arch6"
1512 "muls%?\\t%0, %2, %1"
1513 [(set_attr "conds" "set")
1514 (set_attr "type" "muls")]
;; v6 scratch variant, -Os only.
1517 (define_insn "*mulsi_compare0_scratch_v6"
1518 [(set (reg:CC_NOOV CC_REGNUM)
1519 (compare:CC_NOOV (mult:SI
1520 (match_operand:SI 2 "s_register_operand" "r")
1521 (match_operand:SI 1 "s_register_operand" "r"))
1523 (clobber (match_scratch:SI 0 "=r"))]
1524 "TARGET_ARM && arm_arch6 && optimize_size"
1525 "muls%?\\t%0, %2, %1"
1526 [(set_attr "conds" "set")
1527 (set_attr "type" "muls")]
;; Flag-setting multiply-accumulate (MLAS): op0 = op2 * op1 + op3 with
;; CC_NOOV set from the result.  NOTE(review): the TARGET condition
;; visible here is "TARGET_ARM && arm_arch6", which is inconsistent
;; with the "_v6" twin below and the !arm_arch6 scratch variant —
;; confirm against the full file (lines are elided in this excerpt).
1530 (define_insn "*mulsi3addsi_compare0"
1531 [(set (reg:CC_NOOV CC_REGNUM)
1534 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1535 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1536 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1538 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1539 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1541 "TARGET_ARM && arm_arch6"
1542 "mlas%?\\t%0, %2, %1, %3"
1543 [(set_attr "conds" "set")
1544 (set_attr "type" "mlas")]
;; v6 variant, -Os only; no earlyclobber needed.
1547 (define_insn "*mulsi3addsi_compare0_v6"
1548 [(set (reg:CC_NOOV CC_REGNUM)
1551 (match_operand:SI 2 "s_register_operand" "r")
1552 (match_operand:SI 1 "s_register_operand" "r"))
1553 (match_operand:SI 3 "s_register_operand" "r"))
1555 (set (match_operand:SI 0 "s_register_operand" "=r")
1556 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1558 "TARGET_ARM && arm_arch6 && optimize_size"
1559 "mlas%?\\t%0, %2, %1, %3"
1560 [(set_attr "conds" "set")
1561 (set_attr "type" "mlas")]
;; Pre-v6 MLAS with the sum discarded into a scratch.
1564 (define_insn "*mulsi3addsi_compare0_scratch"
1565 [(set (reg:CC_NOOV CC_REGNUM)
1568 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1569 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1570 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1572 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1573 "TARGET_ARM && !arm_arch6"
1574 "mlas%?\\t%0, %2, %1, %3"
1575 [(set_attr "conds" "set")
1576 (set_attr "type" "mlas")]
;; v6 scratch variant, -Os only.
1579 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1580 [(set (reg:CC_NOOV CC_REGNUM)
1583 (match_operand:SI 2 "s_register_operand" "r")
1584 (match_operand:SI 1 "s_register_operand" "r"))
1585 (match_operand:SI 3 "s_register_operand" "r"))
1587 (clobber (match_scratch:SI 0 "=r"))]
1588 "TARGET_ARM && arm_arch6 && optimize_size"
1589 "mlas%?\\t%0, %2, %1, %3"
1590 [(set_attr "conds" "set")
1591 (set_attr "type" "mlas")]
1594 ;; 32x32->64 widening multiply.
1595 ;; The only difference between the v3-5 and v6+ versions is the requirement
1596 ;; that the output does not overlap with either input.
;; Expander: split the DI destination into low/high SI parts and emit
;; the UMULL/SMULL insn below.
1598 (define_expand "<Us>mulsidi3"
1599 [(set (match_operand:DI 0 "s_register_operand")
1601 (SE:DI (match_operand:SI 1 "s_register_operand"))
1602 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1605 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1606 gen_highpart (SImode, operands[0]),
1607 operands[1], operands[2]));
;; UMULL/SMULL: operand 0 receives the low word, operand 1 the high
;; word; the second (nov6) alternative is earlyclobbered.
1612 (define_insn "<US>mull"
1613 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1615 (match_operand:SI 2 "s_register_operand" "%r,r")
1616 (match_operand:SI 3 "s_register_operand" "r,r")))
1617 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1620 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1623 "<US>mull%?\\t%0, %1, %2, %3"
1624 [(set_attr "type" "umull")
1625 (set_attr "predicable" "yes")
1626 (set_attr "arch" "v6,nov6")]
;; Widening multiply-accumulate expander: routes to UMLAL/SMLAL with
;; the DI accumulator (operand 3) split into SI halves.
1629 (define_expand "<Us>maddsidi4"
1630 [(set (match_operand:DI 0 "s_register_operand")
1633 (SE:DI (match_operand:SI 1 "s_register_operand"))
1634 (SE:DI (match_operand:SI 2 "s_register_operand")))
1635 (match_operand:DI 3 "s_register_operand")))]
1638 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1639 gen_lowpart (SImode, operands[3]),
1640 gen_highpart (SImode, operands[0]),
1641 gen_highpart (SImode, operands[3]),
1642 operands[1], operands[2]));
;; UMLAL/SMLAL: accumulate the 64-bit product of operands 4 and 5 into
;; the register pair; accumulator inputs are tied ("0"/"2").
1647 (define_insn "<US>mlal"
1648 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1651 (match_operand:SI 4 "s_register_operand" "%r,r")
1652 (match_operand:SI 5 "s_register_operand" "r,r"))
1653 (match_operand:SI 1 "s_register_operand" "0,0")))
1654 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1659 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1660 (zero_extend:DI (match_dup 1)))
1662 (match_operand:SI 3 "s_register_operand" "2,2")))]
1664 "<US>mlal%?\\t%0, %2, %4, %5"
1665 [(set_attr "type" "umlal")
1666 (set_attr "predicable" "yes")
1667 (set_attr "arch" "v6,nov6")]
;; High-part-only multiply: the low word goes to a scratch (operand 3).
1670 (define_expand "<US>mulsi3_highpart"
1672 [(set (match_operand:SI 0 "s_register_operand")
1676 (SE:DI (match_operand:SI 1 "s_register_operand"))
1677 (SE:DI (match_operand:SI 2 "s_register_operand")))
1679 (clobber (match_scratch:SI 3 ""))])]
;; UMULL/SMULL emitting only the high half: low word discarded via the
;; scratch (%3 is the low destination in the template).
1684 (define_insn "*<US>mull_high"
1685 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1689 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1690 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1692 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1694 "<US>mull%?\\t%3, %0, %2, %1"
1695 [(set_attr "type" "umull")
1696 (set_attr "predicable" "yes")
1697 (set_attr "arch" "v6,nov6,nov6")]
;; Signed 16x16->32 multiply of the bottom halves (SMULBB).
1700 (define_insn "mulhisi3"
1701 [(set (match_operand:SI 0 "s_register_operand" "=r")
1702 (mult:SI (sign_extend:SI
1703 (match_operand:HI 1 "s_register_operand" "%r"))
1705 (match_operand:HI 2 "s_register_operand" "r"))))]
1706 "TARGET_DSP_MULTIPLY"
1707 "smulbb%?\\t%0, %1, %2"
1708 [(set_attr "type" "smulxy")
1709 (set_attr "predicable" "yes")]
;; Top half of operand 1 (via arithmetic shift right — shift amount
;; line elided here, presumably 16) times bottom half of operand 2.
1712 (define_insn "*mulhisi3tb"
1713 [(set (match_operand:SI 0 "s_register_operand" "=r")
1714 (mult:SI (ashiftrt:SI
1715 (match_operand:SI 1 "s_register_operand" "r")
1718 (match_operand:HI 2 "s_register_operand" "r"))))]
1719 "TARGET_DSP_MULTIPLY"
1720 "smultb%?\\t%0, %1, %2"
1721 [(set_attr "type" "smulxy")
1722 (set_attr "predicable" "yes")]
;; Bottom half of operand 1 times top half of operand 2 (SMULBT).
1725 (define_insn "*mulhisi3bt"
1726 [(set (match_operand:SI 0 "s_register_operand" "=r")
1727 (mult:SI (sign_extend:SI
1728 (match_operand:HI 1 "s_register_operand" "r"))
1730 (match_operand:SI 2 "s_register_operand" "r")
1732 "TARGET_DSP_MULTIPLY"
1733 "smulbt%?\\t%0, %1, %2"
1734 [(set_attr "type" "smulxy")
1735 (set_attr "predicable" "yes")]
;; Top halves of both operands (SMULTT).
1738 (define_insn "*mulhisi3tt"
1739 [(set (match_operand:SI 0 "s_register_operand" "=r")
1740 (mult:SI (ashiftrt:SI
1741 (match_operand:SI 1 "s_register_operand" "r")
1744 (match_operand:SI 2 "s_register_operand" "r")
1746 "TARGET_DSP_MULTIPLY"
1747 "smultt%?\\t%0, %1, %2"
1748 [(set_attr "type" "smulxy")
1749 (set_attr "predicable" "yes")]
;; 16x16+32 multiply-accumulate of the bottom halves (SMLABB).
1752 (define_insn "maddhisi4"
1753 [(set (match_operand:SI 0 "s_register_operand" "=r")
1754 (plus:SI (mult:SI (sign_extend:SI
1755 (match_operand:HI 1 "s_register_operand" "r"))
1757 (match_operand:HI 2 "s_register_operand" "r")))
1758 (match_operand:SI 3 "s_register_operand" "r")))]
1759 "TARGET_DSP_MULTIPLY"
1760 "smlabb%?\\t%0, %1, %2, %3"
1761 [(set_attr "type" "smlaxy")
1762 (set_attr "predicable" "yes")]
1765 ;; Note: there is no maddhisi4ibt because this one is canonical form
;; Top half of operand 1 times bottom half of operand 2, plus
;; accumulator (SMLATB).
1766 (define_insn "*maddhisi4tb"
1767 [(set (match_operand:SI 0 "s_register_operand" "=r")
1768 (plus:SI (mult:SI (ashiftrt:SI
1769 (match_operand:SI 1 "s_register_operand" "r")
1772 (match_operand:HI 2 "s_register_operand" "r")))
1773 (match_operand:SI 3 "s_register_operand" "r")))]
1774 "TARGET_DSP_MULTIPLY"
1775 "smlatb%?\\t%0, %1, %2, %3"
1776 [(set_attr "type" "smlaxy")
1777 (set_attr "predicable" "yes")]
;; Top halves of both operands, plus accumulator (SMLATT).
1780 (define_insn "*maddhisi4tt"
1781 [(set (match_operand:SI 0 "s_register_operand" "=r")
1782 (plus:SI (mult:SI (ashiftrt:SI
1783 (match_operand:SI 1 "s_register_operand" "r")
1786 (match_operand:SI 2 "s_register_operand" "r")
1788 (match_operand:SI 3 "s_register_operand" "r")))]
1789 "TARGET_DSP_MULTIPLY"
1790 "smlatt%?\\t%0, %1, %2, %3"
1791 [(set_attr "type" "smlaxy")
1792 (set_attr "predicable" "yes")]
;; 16x16+64 multiply-accumulate (SMLALBB); the DI accumulator is tied
;; to the destination register pair ("0").
1795 (define_insn "maddhidi4"
1796 [(set (match_operand:DI 0 "s_register_operand" "=r")
1798 (mult:DI (sign_extend:DI
1799 (match_operand:HI 1 "s_register_operand" "r"))
1801 (match_operand:HI 2 "s_register_operand" "r")))
1802 (match_operand:DI 3 "s_register_operand" "0")))]
1803 "TARGET_DSP_MULTIPLY"
1804 "smlalbb%?\\t%Q0, %R0, %1, %2"
1805 [(set_attr "type" "smlalxy")
1806 (set_attr "predicable" "yes")])
1808 ;; Note: there is no maddhidi4ibt because this one is canonical form
;; SMLALTB: top half of operand 1, bottom half of operand 2.
1809 (define_insn "*maddhidi4tb"
1810 [(set (match_operand:DI 0 "s_register_operand" "=r")
1812 (mult:DI (sign_extend:DI
1814 (match_operand:SI 1 "s_register_operand" "r")
1817 (match_operand:HI 2 "s_register_operand" "r")))
1818 (match_operand:DI 3 "s_register_operand" "0")))]
1819 "TARGET_DSP_MULTIPLY"
1820 "smlaltb%?\\t%Q0, %R0, %1, %2"
1821 [(set_attr "type" "smlalxy")
1822 (set_attr "predicable" "yes")])
;; SMLALTT: top halves of both operands.
1824 (define_insn "*maddhidi4tt"
1825 [(set (match_operand:DI 0 "s_register_operand" "=r")
1827 (mult:DI (sign_extend:DI
1829 (match_operand:SI 1 "s_register_operand" "r")
1833 (match_operand:SI 2 "s_register_operand" "r")
1835 (match_operand:DI 3 "s_register_operand" "0")))]
1836 "TARGET_DSP_MULTIPLY"
1837 "smlaltt%?\\t%Q0, %R0, %1, %2"
1838 [(set_attr "type" "smlalxy")
1839 (set_attr "predicable" "yes")])
;; Single-precision FP multiply; requires hardware floating point.
1841 (define_expand "mulsf3"
1842 [(set (match_operand:SF 0 "s_register_operand")
1843 (mult:SF (match_operand:SF 1 "s_register_operand")
1844 (match_operand:SF 2 "s_register_operand")))]
1845 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision FP multiply; excluded on single-precision-only VFP.
1849 (define_expand "muldf3"
1850 [(set (match_operand:DF 0 "s_register_operand")
1851 (mult:DF (match_operand:DF 1 "s_register_operand")
1852 (match_operand:DF 2 "s_register_operand")))]
1853 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; Single-precision FP divide.
1859 (define_expand "divsf3"
1860 [(set (match_operand:SF 0 "s_register_operand")
1861 (div:SF (match_operand:SF 1 "s_register_operand")
1862 (match_operand:SF 2 "s_register_operand")))]
1863 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision FP divide; requires double-precision VFP.
1866 (define_expand "divdf3"
1867 [(set (match_operand:DF 0 "s_register_operand")
1868 (div:DF (match_operand:DF 1 "s_register_operand")
1869 (match_operand:DF 2 "s_register_operand")))]
1870 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1874 ; Expand logical operations. The mid-end expander does not split off memory
1875 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1876 ; So an explicit expander is needed to generate better code.
;; DImode AND/IOR/XOR: perform the operation on the SI low and high
;; halves separately, letting simplify_gen_binary fold constants.
1878 (define_expand "<LOGICAL:optab>di3"
1879 [(set (match_operand:DI 0 "s_register_operand")
1880 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1881 (match_operand:DI 2 "arm_<optab>di_operand")))]
1884 rtx low = simplify_gen_binary (<CODE>, SImode,
1885 gen_lowpart (SImode, operands[1]),
1886 gen_lowpart (SImode, operands[2]));
1887 rtx high = simplify_gen_binary (<CODE>, SImode,
1888 gen_highpart (SImode, operands[1]),
1889 gen_highpart_mode (SImode, DImode,
1892 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1893 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
;; DImode NOT: same half-by-half strategy as the logical expander.
1898 (define_expand "one_cmpldi2"
1899 [(set (match_operand:DI 0 "s_register_operand")
1900 (not:DI (match_operand:DI 1 "s_register_operand")))]
1903 rtx low = simplify_gen_unary (NOT, SImode,
1904 gen_lowpart (SImode, operands[1]),
1906 rtx high = simplify_gen_unary (NOT, SImode,
1907 gen_highpart_mode (SImode, DImode,
1911 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1912 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1917 ;; Split DImode and, ior, xor operations. Simply perform the logical
1918 ;; operation on the upper and lower halves of the registers.
1919 ;; This is needed for atomic operations in arm_split_atomic_op.
1920 ;; Avoid splitting IWMMXT instructions.
1922 [(set (match_operand:DI 0 "s_register_operand" "")
1923 (match_operator:DI 6 "logical_binary_operator"
1924 [(match_operand:DI 1 "s_register_operand" "")
1925 (match_operand:DI 2 "s_register_operand" "")]))]
1926 "TARGET_32BIT && reload_completed
1927 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1928 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1929 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
/* Rewrite operands 0..2 as their low halves; 3..5 are the highs.  */
1932 operands[3] = gen_highpart (SImode, operands[0]);
1933 operands[0] = gen_lowpart (SImode, operands[0]);
1934 operands[4] = gen_highpart (SImode, operands[1]);
1935 operands[1] = gen_lowpart (SImode, operands[1]);
1936 operands[5] = gen_highpart (SImode, operands[2]);
1937 operands[2] = gen_lowpart (SImode, operands[2]);
1941 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1942 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1944 [(set (match_operand:DI 0 "s_register_operand")
1945 (not:DI (match_operand:DI 1 "s_register_operand")))]
1947 [(set (match_dup 0) (not:SI (match_dup 1)))
1948 (set (match_dup 2) (not:SI (match_dup 3)))]
/* Same low/high rewriting as the logical split above.  */
1951 operands[2] = gen_highpart (SImode, operands[0]);
1952 operands[0] = gen_lowpart (SImode, operands[0]);
1953 operands[3] = gen_highpart (SImode, operands[1]);
1954 operands[1] = gen_lowpart (SImode, operands[1]);
;; SImode AND expander with several constant special cases:
;; AND 255 becomes a QI zero-extend (v6+); other constants are either
;; forced to a register or split via arm_split_constant.  Thumb-1 gets
;; dedicated handling: BIC for inverted small constants, extzv or a
;; shift pair for low-bit masks.
1958 (define_expand "andsi3"
1959 [(set (match_operand:SI 0 "s_register_operand")
1960 (and:SI (match_operand:SI 1 "s_register_operand")
1961 (match_operand:SI 2 "reg_or_int_operand")))]
1966 if (CONST_INT_P (operands[2]))
/* AND with 0xff is just a byte zero-extend on v6+.  */
1968 if (INTVAL (operands[2]) == 255 && arm_arch6)
1970 operands[1] = convert_to_mode (QImode, operands[1], 1);
1971 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1975 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1976 operands[2] = force_reg (SImode, operands[2]);
1979 arm_split_constant (AND, SImode, NULL_RTX,
1980 INTVAL (operands[2]), operands[0],
1982 optimize && can_create_pseudo_p ());
1988 else /* TARGET_THUMB1 */
1990 if (!CONST_INT_P (operands[2]))
1992 rtx tmp = force_reg (SImode, operands[2]);
1993 if (rtx_equal_p (operands[0], operands[1]))
1997 operands[2] = operands[1];
/* ~const fits in a byte: use BIC with the inverted constant.  */
2005 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2007 operands[2] = force_reg (SImode,
2008 GEN_INT (~INTVAL (operands[2])));
2010 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
/* Look for masks of the form (1 << i) - 1 or its complement and use
   a bitfield extract or a shift-left/shift-right pair.  */
2015 for (i = 9; i <= 31; i++)
2017 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2019 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2023 else if ((HOST_WIDE_INT_1 << i) - 1
2024 == ~INTVAL (operands[2]))
2026 rtx shift = GEN_INT (i);
2027 rtx reg = gen_reg_rtx (SImode);
2029 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2030 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2036 operands[2] = force_reg (SImode, operands[2]);
2042 ; ??? Check split length for Thumb-2
;; Core SI AND insn: immediate "I" (AND), inverted immediate "K" (BIC),
;; register, and a catch-all "?n" constant alternative split after
;; reload via arm_split_constant when neither form is encodable.
2043 (define_insn_and_split "*arm_andsi3_insn"
2044 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2045 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2046 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2051 bic%?\\t%0, %1, #%B2
2055 && CONST_INT_P (operands[2])
2056 && !(const_ok_for_arm (INTVAL (operands[2]))
2057 || const_ok_for_arm (~INTVAL (operands[2])))"
2058 [(clobber (const_int 0))]
2060 arm_split_constant (AND, SImode, curr_insn,
2061 INTVAL (operands[2]), operands[0], operands[1], 0);
2064 [(set_attr "length" "4,4,4,4,16")
2065 (set_attr "predicable" "yes")
2066 (set_attr "predicable_short_it" "no,yes,no,no,no")
2067 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
;; Flag-setting AND (ANDS/BICS) that also stores the result.
2070 (define_insn "*andsi3_compare0"
2071 [(set (reg:CC_NOOV CC_REGNUM)
2073 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2074 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2076 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2077 (and:SI (match_dup 1) (match_dup 2)))]
2081 bics%?\\t%0, %1, #%B2
2082 ands%?\\t%0, %1, %2"
2083 [(set_attr "conds" "set")
2084 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Flag-setting AND with the result discarded; only the BICS
;; alternative actually needs a register (scratch operand 2).
2087 (define_insn "*andsi3_compare0_scratch"
2088 [(set (reg:CC_NOOV CC_REGNUM)
2090 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2091 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2093 (clobber (match_scratch:SI 2 "=X,r,X"))]
2097 bics%?\\t%2, %0, #%B1
2099 [(set_attr "conds" "set")
2100 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Test a bitfield (zero_extract of operand 0, width operand 1 at
;; position operand 2) against zero by computing the equivalent mask
;; and emitting TST.  The condition bounds the field so the mask is an
;; encodable immediate.
2103 (define_insn "*zeroextractsi_compare0_scratch"
2104 [(set (reg:CC_NOOV CC_REGNUM)
2105 (compare:CC_NOOV (zero_extract:SI
2106 (match_operand:SI 0 "s_register_operand" "r")
2107 (match_operand 1 "const_int_operand" "n")
2108 (match_operand 2 "const_int_operand" "n"))
2111 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2112 && INTVAL (operands[1]) > 0
2113 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2114 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2116 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2117 << INTVAL (operands[2]));
2118 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2121 [(set_attr "conds" "set")
2122 (set_attr "predicable" "yes")
2123 (set_attr "type" "logics_imm")]
;; (bitfield != 0) as an SI value: split into a flag-setting AND with
;; the field mask followed by a conditional that forces the result to 1
;; when the field was non-zero.
2126 (define_insn_and_split "*ne_zeroextractsi"
2127 [(set (match_operand:SI 0 "s_register_operand" "=r")
2128 (ne:SI (zero_extract:SI
2129 (match_operand:SI 1 "s_register_operand" "r")
2130 (match_operand:SI 2 "const_int_operand" "n")
2131 (match_operand:SI 3 "const_int_operand" "n"))
2133 (clobber (reg:CC CC_REGNUM))]
2135 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2136 && INTVAL (operands[2]) > 0
2137 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2138 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2141 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2142 && INTVAL (operands[2]) > 0
2143 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2144 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2145 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2146 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2148 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2150 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2151 (match_dup 0) (const_int 1)))]
2153 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2154 << INTVAL (operands[3]));
2156 [(set_attr "conds" "clob")
2157 (set (attr "length")
2158 (if_then_else (eq_attr "is_thumb" "yes")
2161 (set_attr "type" "multiple")]
;; Variant for a field that reaches the top of the word: shift the
;; field up with ASHIFT (amount rewritten to 32 - width) and test.
2164 (define_insn_and_split "*ne_zeroextractsi_shifted"
2165 [(set (match_operand:SI 0 "s_register_operand" "=r")
2166 (ne:SI (zero_extract:SI
2167 (match_operand:SI 1 "s_register_operand" "r")
2168 (match_operand:SI 2 "const_int_operand" "n")
2171 (clobber (reg:CC CC_REGNUM))]
2175 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2176 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2178 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2180 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2181 (match_dup 0) (const_int 1)))]
2183 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2185 [(set_attr "conds" "clob")
2186 (set_attr "length" "8")
2187 (set_attr "type" "multiple")]
2190 (define_insn_and_split "*ite_ne_zeroextractsi"
2191 [(set (match_operand:SI 0 "s_register_operand" "=r")
2192 (if_then_else:SI (ne (zero_extract:SI
2193 (match_operand:SI 1 "s_register_operand" "r")
2194 (match_operand:SI 2 "const_int_operand" "n")
2195 (match_operand:SI 3 "const_int_operand" "n"))
2197 (match_operand:SI 4 "arm_not_operand" "rIK")
2199 (clobber (reg:CC CC_REGNUM))]
2201 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2202 && INTVAL (operands[2]) > 0
2203 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2204 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2205 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2208 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2209 && INTVAL (operands[2]) > 0
2210 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2211 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2212 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2213 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2214 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2216 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2218 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2219 (match_dup 0) (match_dup 4)))]
2221 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2222 << INTVAL (operands[3]));
2224 [(set_attr "conds" "clob")
2225 (set_attr "length" "8")
2226 (set_attr "type" "multiple")]
2229 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2230 [(set (match_operand:SI 0 "s_register_operand" "=r")
2231 (if_then_else:SI (ne (zero_extract:SI
2232 (match_operand:SI 1 "s_register_operand" "r")
2233 (match_operand:SI 2 "const_int_operand" "n")
2236 (match_operand:SI 3 "arm_not_operand" "rIK")
2238 (clobber (reg:CC CC_REGNUM))]
2239 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2241 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2242 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2243 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2245 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2247 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2248 (match_dup 0) (match_dup 3)))]
2250 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2252 [(set_attr "conds" "clob")
2253 (set_attr "length" "8")
2254 (set_attr "type" "multiple")]
2257 ;; ??? Thumb-2 has bitfield insert/extract instructions; use them here.
;; Splitter: a zero_extract feeding a shiftable operator is rewritten
;; as an ashift into scratch operand 6 followed by a logical right
;; shift; the C body recomputes the shift amounts from width/position.
2259 [(set (match_operand:SI 0 "s_register_operand" "")
2260 (match_operator:SI 1 "shiftable_operator"
2261 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2262 (match_operand:SI 3 "const_int_operand" "")
2263 (match_operand:SI 4 "const_int_operand" ""))
2264 (match_operand:SI 5 "s_register_operand" "")]))
2265 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2267 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2270 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2273 HOST_WIDE_INT temp = INTVAL (operands[3]);
2275 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2276 operands[4] = GEN_INT (32 - temp);
;; sign_extract counterpart of the splitter above: the second shift is
;; arithmetic (ashiftrt) to sign-extend the extracted field.
2281 [(set (match_operand:SI 0 "s_register_operand" "")
2282 (match_operator:SI 1 "shiftable_operator"
2283 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2284 (match_operand:SI 3 "const_int_operand" "")
2285 (match_operand:SI 4 "const_int_operand" ""))
2286 (match_operand:SI 5 "s_register_operand" "")]))
2287 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2289 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2292 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2295 HOST_WIDE_INT temp = INTVAL (operands[3]);
2297 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2298 operands[4] = GEN_INT (32 - temp);
2302 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2303 ;;; represented by the bitfield, then this will produce incorrect results.
2304 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2305 ;;; which have a real bit-field insert instruction, the truncation happens
2306 ;;; in the bit-field insert instruction itself. Since arm does not have a
2307 ;;; bit-field insert instruction, we would have to emit code here to truncate
2308 ;;; the value before we insert. This loses some of the advantage of having
2309 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bitfield-insert expander.  Strategies visible in the body below:
;; Thumb-2 unaligned 16/32-bit stores for byte-aligned fields in memory;
;; insv_zero / single orr / insv_t2 (bfi) for register destinations on
;; Thumb-2; for constant values, a reduced and/orr mask pair; for
;; low-order or high-order fields whose mask is not a valid immediate,
;; shift/rotate tricks; otherwise a generic mask-and-merge sequence
;; through a subtarget register, copied out at the end.
2311 (define_expand "insv"
2312 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2313 (match_operand 1 "general_operand")
2314 (match_operand 2 "general_operand"))
2315 (match_operand 3 "reg_or_int_operand"))]
2316 "TARGET_ARM || arm_arch_thumb2"
2319 int start_bit = INTVAL (operands[2]);
2320 int width = INTVAL (operands[1]);
2321 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2322 rtx target, subtarget;
2324 if (arm_arch_thumb2)
2326 if (unaligned_access && MEM_P (operands[0])
2327 && s_register_operand (operands[3], GET_MODE (operands[3]))
2328 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2332 if (BYTES_BIG_ENDIAN)
2333 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2338 base_addr = adjust_address (operands[0], SImode,
2339 start_bit / BITS_PER_UNIT);
2340 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2344 rtx tmp = gen_reg_rtx (HImode);
2346 base_addr = adjust_address (operands[0], HImode,
2347 start_bit / BITS_PER_UNIT);
2348 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2349 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2353 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2355 bool use_bfi = TRUE;
2357 if (CONST_INT_P (operands[3]))
2359 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2363 emit_insn (gen_insv_zero (operands[0], operands[1],
2368 /* See if the set can be done with a single orr instruction.  */
2369 if (val == mask && const_ok_for_arm (val << start_bit))
2375 if (!REG_P (operands[3]))
2376 operands[3] = force_reg (SImode, operands[3]);
2378 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2387 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2390 target = copy_rtx (operands[0]);
2391 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2392 subreg as the final target.  */
2393 if (GET_CODE (target) == SUBREG)
2395 subtarget = gen_reg_rtx (SImode);
2396 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2397 < GET_MODE_SIZE (SImode))
2398 target = SUBREG_REG (target);
2403 if (CONST_INT_P (operands[3]))
2405 /* Since we are inserting a known constant, we may be able to
2406 reduce the number of bits that we have to clear so that
2407 the mask becomes simple.  */
2408 /* ??? This code does not check to see if the new mask is actually
2409 simpler.  It may not be.  */
2410 rtx op1 = gen_reg_rtx (SImode);
2411 /* ??? Truncate operand3 to fit in the bitfield.  See comment before
2412 start of this pattern.  */
2413 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2414 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2416 emit_insn (gen_andsi3 (op1, operands[0],
2417 gen_int_mode (~mask2, SImode)));
2418 emit_insn (gen_iorsi3 (subtarget, op1,
2419 gen_int_mode (op3_value << start_bit, SImode)));
2421 else if (start_bit == 0
2422 && !(const_ok_for_arm (mask)
2423 || const_ok_for_arm (~mask)))
2425 /* A Trick, since we are setting the bottom bits in the word,
2426 we can shift operand[3] up, operand[0] down, OR them together
2427 and rotate the result back again.  This takes 3 insns, and
2428 the third might be mergeable into another op.  */
2429 /* The shift up copes with the possibility that operand[3] is
2430 wider than the bitfield.  */
2431 rtx op0 = gen_reg_rtx (SImode);
2432 rtx op1 = gen_reg_rtx (SImode);
2434 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2435 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2436 emit_insn (gen_iorsi3 (op1, op1, op0));
2437 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2439 else if ((width + start_bit == 32)
2440 && !(const_ok_for_arm (mask)
2441 || const_ok_for_arm (~mask)))
2443 /* Similar trick, but slightly less efficient.  */
2445 rtx op0 = gen_reg_rtx (SImode);
2446 rtx op1 = gen_reg_rtx (SImode);
2448 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2449 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2450 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2451 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2455 rtx op0 = gen_int_mode (mask, SImode);
2456 rtx op1 = gen_reg_rtx (SImode);
2457 rtx op2 = gen_reg_rtx (SImode);
2459 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2461 rtx tmp = gen_reg_rtx (SImode);
2463 emit_insn (gen_movsi (tmp, op0));
2467 /* Mask out any bits in operand[3] that are not needed.  */
2468 emit_insn (gen_andsi3 (op1, operands[3], op0));
2470 if (CONST_INT_P (op0)
2471 && (const_ok_for_arm (mask << start_bit)
2472 || const_ok_for_arm (~(mask << start_bit))))
2474 op0 = gen_int_mode (~(mask << start_bit), SImode);
2475 emit_insn (gen_andsi3 (op2, operands[0], op0));
2479 if (CONST_INT_P (op0))
2481 rtx tmp = gen_reg_rtx (SImode);
2483 emit_insn (gen_movsi (tmp, op0));
2488 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2490 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2494 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2496 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2499 if (subtarget != target)
2501 /* If TARGET is still a SUBREG, then it must be wider than a word,
2502 so we must be careful only to set the subword we were asked to.  */
2503 if (GET_CODE (target) == SUBREG)
2504 emit_move_insn (target, subtarget)
2506 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; insv_zero writes zero into a bitfield in place (type "bfm";
;; output template not visible here -- presumably "bfc").  insv_t2 is
;; the Thumb-2 register bitfield insert, emitting "bfi".
2513 (define_insn "insv_zero"
2514 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2515 (match_operand:SI 1 "const_int_M_operand" "M")
2516 (match_operand:SI 2 "const_int_M_operand" "M"))
2520 [(set_attr "length" "4")
2521 (set_attr "predicable" "yes")
2522 (set_attr "type" "bfm")]
2525 (define_insn "insv_t2"
2526 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2527 (match_operand:SI 1 "const_int_M_operand" "M")
2528 (match_operand:SI 2 "const_int_M_operand" "M"))
2529 (match_operand:SI 3 "s_register_operand" "r"))]
2531 "bfi%?\t%0, %3, %2, %1"
2532 [(set_attr "length" "4")
2533 (set_attr "predicable" "yes")
2534 (set_attr "type" "bfm")]
;; (and x (not y)) -> "bic", with a plain register operand and, below,
;; with the inverted operand produced by a shift ("%S4" prints the
;; shift of operator 4).
2537 (define_insn "andsi_notsi_si"
2538 [(set (match_operand:SI 0 "s_register_operand" "=r")
2539 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2540 (match_operand:SI 1 "s_register_operand" "r")))]
2542 "bic%?\\t%0, %1, %2"
2543 [(set_attr "predicable" "yes")
2544 (set_attr "type" "logic_reg")]
2547 (define_insn "andsi_not_shiftsi_si"
2548 [(set (match_operand:SI 0 "s_register_operand" "=r")
2549 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2550 [(match_operand:SI 2 "s_register_operand" "r")
2551 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2552 (match_operand:SI 1 "s_register_operand" "r")))]
2554 "bic%?\\t%0, %1, %2%S4"
2555 [(set_attr "predicable" "yes")
2556 (set_attr "shift" "2")
2557 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2558 (const_string "logic_shift_imm")
2559 (const_string "logic_shift_reg")))]
2562 ;; Shifted BICS pattern used to set up the CC status register without
2563 ;; reusing the BICS output.  The pattern restricts the Thumb-2 shift
2564 ;; operand to a constant, since Thumb-2 BICS cannot shift by register.
2565 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2566 [(set (reg:CC_NOOV CC_REGNUM)
2568 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2569 [(match_operand:SI 1 "s_register_operand" "r")
2570 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2571 (match_operand:SI 3 "s_register_operand" "r"))
2573 (clobber (match_scratch:SI 4 "=r"))]
2574 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2575 "bics%?\\t%4, %3, %1%S0"
2576 [(set_attr "predicable" "yes")
2577 (set_attr "conds" "set")
2578 (set_attr "shift" "1")
2579 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2580 (const_string "logic_shift_imm")
2581 (const_string "logic_shift_reg")))]
2584 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2585 ;; getting reused later.
2586 (define_insn "andsi_not_shiftsi_si_scc"
2587 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2589 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2590 [(match_operand:SI 1 "s_register_operand" "r")
2591 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2592 (match_operand:SI 3 "s_register_operand" "r"))
2594 (set (match_operand:SI 4 "s_register_operand" "=r")
2595 (and:SI (not:SI (match_op_dup 0
2599 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2600 "bics%?\\t%4, %3, %1%S0"
2601 [(set_attr "predicable" "yes")
2602 (set_attr "conds" "set")
2603 (set_attr "shift" "1")
2604 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2605 (const_string "logic_shift_imm")
2606 (const_string "logic_shift_reg")))]
;; Flag-setting forms of (and x (not y)): one also storing the result
;; in operand 0, and one with only a scratch (compare-only).  Output
;; templates are not visible in this view -- presumably "bics".
2609 (define_insn "*andsi_notsi_si_compare0"
2610 [(set (reg:CC_NOOV CC_REGNUM)
2612 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2613 (match_operand:SI 1 "s_register_operand" "r"))
2615 (set (match_operand:SI 0 "s_register_operand" "=r")
2616 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2619 [(set_attr "conds" "set")
2620 (set_attr "type" "logics_shift_reg")]
2623 (define_insn "*andsi_notsi_si_compare0_scratch"
2624 [(set (reg:CC_NOOV CC_REGNUM)
2626 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2627 (match_operand:SI 1 "s_register_operand" "r"))
2629 (clobber (match_scratch:SI 0 "=r"))]
2632 [(set_attr "conds" "set")
2633 (set_attr "type" "logics_shift_reg")]
;; iorsi3 expander: constants that are not valid immediates are either
;; forced into a register (DONT_EARLY_SPLIT_CONSTANT) or broken up via
;; arm_split_constant; Thumb-1 always goes through a register.
2636 (define_expand "iorsi3"
2637 [(set (match_operand:SI 0 "s_register_operand")
2638 (ior:SI (match_operand:SI 1 "s_register_operand")
2639 (match_operand:SI 2 "reg_or_int_operand")))]
2642 if (CONST_INT_P (operands[2]))
2646 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2647 operands[2] = force_reg (SImode, operands[2]);
2650 arm_split_constant (IOR, SImode, NULL_RTX,
2651 INTVAL (operands[2]), operands[0],
2653 optimize && can_create_pseudo_p ());
2657 else /* TARGET_THUMB1 */
2659 rtx tmp = force_reg (SImode, operands[2]);
2660 if (rtx_equal_p (operands[0], operands[1]))
2664 operands[2] = operands[1];
;; IOR insn; the "orn" alternative handles inverted immediates (#%B2),
;; and the split breaks any remaining awkward constant apart with
;; arm_split_constant (the 16-byte alternative).
2672 (define_insn_and_split "*iorsi3_insn"
2673 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2674 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2675 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2680 orn%?\\t%0, %1, #%B2
2684 && CONST_INT_P (operands[2])
2685 && !(const_ok_for_arm (INTVAL (operands[2]))
2686 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2687 [(clobber (const_int 0))]
2689 arm_split_constant (IOR, SImode, curr_insn,
2690 INTVAL (operands[2]), operands[0], operands[1], 0);
2693 [(set_attr "length" "4,4,4,4,16")
2694 (set_attr "arch" "32,t2,t2,32,32")
2695 (set_attr "predicable" "yes")
2696 (set_attr "predicable_short_it" "no,yes,no,no,no")
2697 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole-style split: when the constant itself is not a valid
;; immediate but its complement is, materialise it in a scratch and do
;; a register IOR.
2701 [(match_scratch:SI 3 "r")
2702 (set (match_operand:SI 0 "arm_general_register_operand" "")
2703 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2704 (match_operand:SI 2 "const_int_operand" "")))]
2706 && !const_ok_for_arm (INTVAL (operands[2]))
2707 && const_ok_for_arm (~INTVAL (operands[2]))"
2708 [(set (match_dup 3) (match_dup 2))
2709 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; Flag-setting "orrs", keeping the result ...
2713 (define_insn "*iorsi3_compare0"
2714 [(set (reg:CC_NOOV CC_REGNUM)
2716 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2717 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2719 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2720 (ior:SI (match_dup 1) (match_dup 2)))]
2722 "orrs%?\\t%0, %1, %2"
2723 [(set_attr "conds" "set")
2724 (set_attr "arch" "*,t2,*")
2725 (set_attr "length" "4,2,4")
2726 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; ... and compare-only with a scratch result register.
2729 (define_insn "*iorsi3_compare0_scratch"
2730 [(set (reg:CC_NOOV CC_REGNUM)
2732 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2733 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2735 (clobber (match_scratch:SI 0 "=r,l,r"))]
2737 "orrs%?\\t%0, %1, %2"
2738 [(set_attr "conds" "set")
2739 (set_attr "arch" "*,t2,*")
2740 (set_attr "length" "4,2,4")
2741 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; xorsi3 patterns, structured like the iorsi3 group above: expander
;; with constant splitting, insn_and_split ("eor"; awkward constants go
;; through arm_split_constant), plus flag-setting "eors" variants.
2744 (define_expand "xorsi3"
2745 [(set (match_operand:SI 0 "s_register_operand")
2746 (xor:SI (match_operand:SI 1 "s_register_operand")
2747 (match_operand:SI 2 "reg_or_int_operand")))]
2749 "if (CONST_INT_P (operands[2]))
2753 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2754 operands[2] = force_reg (SImode, operands[2]);
2757 arm_split_constant (XOR, SImode, NULL_RTX,
2758 INTVAL (operands[2]), operands[0],
2760 optimize && can_create_pseudo_p ());
2764 else /* TARGET_THUMB1 */
2766 rtx tmp = force_reg (SImode, operands[2]);
2767 if (rtx_equal_p (operands[0], operands[1]))
2771 operands[2] = operands[1];
2778 (define_insn_and_split "*arm_xorsi3"
2779 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2780 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2781 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2789 && CONST_INT_P (operands[2])
2790 && !const_ok_for_arm (INTVAL (operands[2]))"
2791 [(clobber (const_int 0))]
2793 arm_split_constant (XOR, SImode, curr_insn,
2794 INTVAL (operands[2]), operands[0], operands[1], 0);
2797 [(set_attr "length" "4,4,4,16")
2798 (set_attr "predicable" "yes")
2799 (set_attr "predicable_short_it" "no,yes,no,no")
2800 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2803 (define_insn "*xorsi3_compare0"
2804 [(set (reg:CC_NOOV CC_REGNUM)
2805 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2806 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2808 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2809 (xor:SI (match_dup 1) (match_dup 2)))]
2811 "eors%?\\t%0, %1, %2"
2812 [(set_attr "conds" "set")
2813 (set_attr "type" "logics_imm,logics_reg")]
;; Compare-only XOR ("teq" on ARM -- output template not visible here).
2816 (define_insn "*xorsi3_compare0_scratch"
2817 [(set (reg:CC_NOOV CC_REGNUM)
2818 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2819 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2823 [(set_attr "conds" "set")
2824 (set_attr "type" "logics_imm,logics_reg")]
2827 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2828 ; (NOT D) we can sometimes merge the final NOT into one of the following
2832 [(set (match_operand:SI 0 "s_register_operand" "")
2833 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2834 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2835 (match_operand:SI 3 "arm_rhs_operand" "")))
2836 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2838 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2839 (not:SI (match_dup 3))))
2840 (set (match_dup 0) (not:SI (match_dup 4)))]
;; AND of an IOR with a NOT: emitted as orr followed by bic after
;; reload.  The C body folds the NOT into operand 3 when it is a
;; constant, to avoid generating (not (const_int)).
2844 (define_insn_and_split "*andsi_iorsi3_notsi"
2845 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2846 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2847 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2848 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2850 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2851 "&& reload_completed"
2852 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2853 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2855 /* If operands[3] is a constant make sure to fold the NOT into it
2856 to avoid creating a NOT of a CONST_INT.  */
2857 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2858 if (CONST_INT_P (not_rtx))
2860 operands[4] = operands[0];
2861 operands[5] = not_rtx;
2865 operands[5] = operands[0];
2866 operands[4] = not_rtx;
2869 [(set_attr "length" "8")
2870 (set_attr "ce_count" "2")
2871 (set_attr "predicable" "yes")
2872 (set_attr "type" "multiple")]
2875 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2876 ; insns are available?
;; Four splitters combining a zero/sign extract with a logical operator
;; applied to a shifted operand; each rewrites the extract as an
;; explicit shift pair through scratch operand 8, requiring the extract
;; width to equal (32 - shift count) (see each condition).
2878 [(set (match_operand:SI 0 "s_register_operand" "")
2879 (match_operator:SI 1 "logical_binary_operator"
2880 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2881 (match_operand:SI 3 "const_int_operand" "")
2882 (match_operand:SI 4 "const_int_operand" ""))
2883 (match_operator:SI 9 "logical_binary_operator"
2884 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2885 (match_operand:SI 6 "const_int_operand" ""))
2886 (match_operand:SI 7 "s_register_operand" "")])]))
2887 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2889 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2890 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2893 [(ashift:SI (match_dup 2) (match_dup 4))
2897 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2900 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Same as above with the operator's operands in the opposite order.
2904 [(set (match_operand:SI 0 "s_register_operand" "")
2905 (match_operator:SI 1 "logical_binary_operator"
2906 [(match_operator:SI 9 "logical_binary_operator"
2907 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2908 (match_operand:SI 6 "const_int_operand" ""))
2909 (match_operand:SI 7 "s_register_operand" "")])
2910 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2911 (match_operand:SI 3 "const_int_operand" "")
2912 (match_operand:SI 4 "const_int_operand" ""))]))
2913 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2915 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2916 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2919 [(ashift:SI (match_dup 2) (match_dup 4))
2923 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2926 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; sign_extract counterpart (arithmetic right shifts).
2930 [(set (match_operand:SI 0 "s_register_operand" "")
2931 (match_operator:SI 1 "logical_binary_operator"
2932 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2933 (match_operand:SI 3 "const_int_operand" "")
2934 (match_operand:SI 4 "const_int_operand" ""))
2935 (match_operator:SI 9 "logical_binary_operator"
2936 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2937 (match_operand:SI 6 "const_int_operand" ""))
2938 (match_operand:SI 7 "s_register_operand" "")])]))
2939 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2941 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2942 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2945 [(ashift:SI (match_dup 2) (match_dup 4))
2949 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2952 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; sign_extract counterpart with operands in the opposite order.
2956 [(set (match_operand:SI 0 "s_register_operand" "")
2957 (match_operator:SI 1 "logical_binary_operator"
2958 [(match_operator:SI 9 "logical_binary_operator"
2959 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2960 (match_operand:SI 6 "const_int_operand" ""))
2961 (match_operand:SI 7 "s_register_operand" "")])
2962 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2963 (match_operand:SI 3 "const_int_operand" "")
2964 (match_operand:SI 4 "const_int_operand" ""))]))
2965 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2967 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2968 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2971 [(ashift:SI (match_dup 2) (match_dup 4))
2975 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2978 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2982 ;; Minimum and maximum insns
;; smax expander: max(x, 0) and max(x, -1) have flag-free single-insn
;; forms (below), so those cases are emitted without the CC clobber.
2984 (define_expand "smaxsi3"
2986 (set (match_operand:SI 0 "s_register_operand")
2987 (smax:SI (match_operand:SI 1 "s_register_operand")
2988 (match_operand:SI 2 "arm_rhs_operand")))
2989 (clobber (reg:CC CC_REGNUM))])]
2992 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2994 /* No need for a clobber of the condition code register here.  */
2995 emit_insn (gen_rtx_SET (operands[0],
2996 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear all bits when x is negative (x asr #31 is all-ones).
3002 (define_insn "*smax_0"
3003 [(set (match_operand:SI 0 "s_register_operand" "=r")
3004 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3007 "bic%?\\t%0, %1, %1, asr #31"
3008 [(set_attr "predicable" "yes")
3009 (set_attr "type" "logic_shift_reg")]
;; max(x, -1): set all bits when x is negative.
3012 (define_insn "*smax_m1"
3013 [(set (match_operand:SI 0 "s_register_operand" "=r")
3014 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3017 "orr%?\\t%0, %1, %1, asr #31"
3018 [(set_attr "predicable" "yes")
3019 (set_attr "type" "logic_shift_reg")]
;; General smax: cmp followed by conditional moves (see the commented
;; asm and the GE-keyed split below).
3022 (define_insn_and_split "*arm_smax_insn"
3023 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3024 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3025 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3026 (clobber (reg:CC CC_REGNUM))]
3029 ; cmp\\t%1, %2\;movlt\\t%0, %2
3030 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3032 [(set (reg:CC CC_REGNUM)
3033 (compare:CC (match_dup 1) (match_dup 2)))
3035 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3039 [(set_attr "conds" "clob")
3040 (set_attr "length" "8,12")
3041 (set_attr "type" "multiple")]
;; smin, mirroring the smax group: min(x, 0) has a flag-free one-insn
;; form; otherwise cmp plus conditional moves keyed on LT.
3044 (define_expand "sminsi3"
3046 (set (match_operand:SI 0 "s_register_operand")
3047 (smin:SI (match_operand:SI 1 "s_register_operand")
3048 (match_operand:SI 2 "arm_rhs_operand")))
3049 (clobber (reg:CC CC_REGNUM))])]
3052 if (operands[2] == const0_rtx)
3054 /* No need for a clobber of the condition code register here.  */
3055 emit_insn (gen_rtx_SET (operands[0],
3056 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): keep x only when it is negative (x asr #31 is all-ones).
3062 (define_insn "*smin_0"
3063 [(set (match_operand:SI 0 "s_register_operand" "=r")
3064 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3067 "and%?\\t%0, %1, %1, asr #31"
3068 [(set_attr "predicable" "yes")
3069 (set_attr "type" "logic_shift_reg")]
3072 (define_insn_and_split "*arm_smin_insn"
3073 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3074 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3075 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3076 (clobber (reg:CC CC_REGNUM))]
3079 ; cmp\\t%1, %2\;movge\\t%0, %2
3080 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3082 [(set (reg:CC CC_REGNUM)
3083 (compare:CC (match_dup 1) (match_dup 2)))
3085 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3089 [(set_attr "conds" "clob")
3090 (set_attr "length" "8,12")
3091 (set_attr "type" "multiple,multiple")]
;; Unsigned max: cmp plus movcc/movcs sequences, split on GEU.
3094 (define_expand "umaxsi3"
3096 (set (match_operand:SI 0 "s_register_operand")
3097 (umax:SI (match_operand:SI 1 "s_register_operand")
3098 (match_operand:SI 2 "arm_rhs_operand")))
3099 (clobber (reg:CC CC_REGNUM))])]
3104 (define_insn_and_split "*arm_umaxsi3"
3105 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3106 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3107 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3108 (clobber (reg:CC CC_REGNUM))]
3111 ; cmp\\t%1, %2\;movcc\\t%0, %2
3112 ; cmp\\t%1, %2\;movcs\\t%0, %1
3113 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3115 [(set (reg:CC CC_REGNUM)
3116 (compare:CC (match_dup 1) (match_dup 2)))
3118 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3122 [(set_attr "conds" "clob")
3123 (set_attr "length" "8,8,12")
;; NOTE(review): "store_4" on a cmp/mov sequence looks like a
;; copy-paste from *store_minmaxsi ("multiple" would be expected, as in
;; the smax/smin patterns above) -- confirm against upstream.
3124 (set_attr "type" "store_4")]
;; Unsigned min: cmp plus movcs/movcc sequences, split on LTU.
3127 (define_expand "uminsi3"
3129 (set (match_operand:SI 0 "s_register_operand")
3130 (umin:SI (match_operand:SI 1 "s_register_operand")
3131 (match_operand:SI 2 "arm_rhs_operand")))
3132 (clobber (reg:CC CC_REGNUM))])]
3137 (define_insn_and_split "*arm_uminsi3"
3138 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3139 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3140 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3141 (clobber (reg:CC CC_REGNUM))]
3144 ; cmp\\t%1, %2\;movcs\\t%0, %2
3145 ; cmp\\t%1, %2\;movcc\\t%0, %1
3146 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3148 [(set (reg:CC CC_REGNUM)
3149 (compare:CC (match_dup 1) (match_dup 2)))
3151 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3155 [(set_attr "conds" "clob")
3156 (set_attr "length" "8,8,12")
;; NOTE(review): "store_4" here is suspicious for a cmp/mov sequence;
;; see the matching note on *arm_umaxsi3 -- confirm against upstream.
3157 (set_attr "type" "store_4")]
;; Store the min/max of two registers directly to memory: cmp followed
;; by a conditional str pair (with an "ite" block for Thumb-2).
;; Enabled only when optimising for size and not under restricted IT.
3160 (define_insn "*store_minmaxsi"
3161 [(set (match_operand:SI 0 "memory_operand" "=m")
3162 (match_operator:SI 3 "minmax_operator"
3163 [(match_operand:SI 1 "s_register_operand" "r")
3164 (match_operand:SI 2 "s_register_operand" "r")]))
3165 (clobber (reg:CC CC_REGNUM))]
3166 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3168 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3169 operands[1], operands[2]);
3170 output_asm_insn (\"cmp\\t%1, %2\", operands);
3172 output_asm_insn (\"ite\t%d3\", operands);
3173 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3174 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3177 [(set_attr "conds" "clob")
3178 (set (attr "length")
3179 (if_then_else (eq_attr "is_thumb" "yes")
3182 (set_attr "type" "store_4")]
3185 ; Reject the frame pointer in operand[1], since reloading this after
3186 ; it has been eliminated can cause carnage.
;; Apply a shiftable operator to a min/max result using conditional
;; execution: cmp, then the operation predicated each way.  The C body
;; special-cases alternative 0 with operand 3 == 0 for PLUS/IOR/XOR
;; (the continuation of that branch is not visible in this view).
3187 (define_insn "*minmax_arithsi"
3188 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3189 (match_operator:SI 4 "shiftable_operator"
3190 [(match_operator:SI 5 "minmax_operator"
3191 [(match_operand:SI 2 "s_register_operand" "r,r")
3192 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3193 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3194 (clobber (reg:CC CC_REGNUM))]
3195 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3198 enum rtx_code code = GET_CODE (operands[4]);
3201 if (which_alternative != 0 || operands[3] != const0_rtx
3202 || (code != PLUS && code != IOR && code != XOR))
3207 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3208 operands[2], operands[3]);
3209 output_asm_insn (\"cmp\\t%2, %3\", operands);
3213 output_asm_insn (\"ite\\t%d5\", operands);
3215 output_asm_insn (\"it\\t%d5\", operands);
3217 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3219 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3222 [(set_attr "conds" "clob")
3223 (set (attr "length")
3224 (if_then_else (eq_attr "is_thumb" "yes")
3227 (set_attr "type" "multiple")]
3230 ; Reject the frame pointer in operand[1], since reloading this after
3231 ; it has been eliminated can cause carnage.
;; Non-canonical (minus x (minmax ...)) form; split after reload into a
;; compare plus two cond_exec arms.  The C body builds the condition
;; and its reverse (using reverse_condition_maybe_unordered for FP
;; compare modes) and folds a constant operand 3 with plus_constant.
3232 (define_insn_and_split "*minmax_arithsi_non_canon"
3233 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3235 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3236 (match_operator:SI 4 "minmax_operator"
3237 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3238 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3239 (clobber (reg:CC CC_REGNUM))]
3240 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3241 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3243 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3244 [(set (reg:CC CC_REGNUM)
3245 (compare:CC (match_dup 2) (match_dup 3)))
3247 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3249 (minus:SI (match_dup 1)
3251 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3255 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3256 operands[2], operands[3]);
3257 enum rtx_code rc = minmax_code (operands[4]);
3258 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3259 operands[2], operands[3]);
3261 if (mode == CCFPmode || mode == CCFPEmode)
3262 rc = reverse_condition_maybe_unordered (rc);
3264 rc = reverse_condition (rc);
3265 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3266 if (CONST_INT_P (operands[3]))
3267 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3269 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3271 [(set_attr "conds" "clob")
3272 (set (attr "length")
3273 (if_then_else (eq_attr "is_thumb" "yes")
3276 (set_attr "type" "multiple")]
3279 (define_code_iterator SAT [smin smax])
3280 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3281 (define_code_attr SATlo [(smin "1") (smax "2")])
3282 (define_code_attr SAThi [(smin "2") (smax "1")])
3284 (define_insn "*satsi_<SAT:code>"
3285 [(set (match_operand:SI 0 "s_register_operand" "=r")
3286 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3287 (match_operand:SI 1 "const_int_operand" "i"))
3288 (match_operand:SI 2 "const_int_operand" "i")))]
3289 "TARGET_32BIT && arm_arch6
3290 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3294 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3295 &mask, &signed_sat))
3298 operands[1] = GEN_INT (mask);
3300 return "ssat%?\t%0, %1, %3";
3302 return "usat%?\t%0, %1, %3";
3304 [(set_attr "predicable" "yes")
3305 (set_attr "type" "alus_imm")]
3308 (define_insn "*satsi_<SAT:code>_shift"
3309 [(set (match_operand:SI 0 "s_register_operand" "=r")
3310 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3311 [(match_operand:SI 4 "s_register_operand" "r")
3312 (match_operand:SI 5 "const_int_operand" "i")])
3313 (match_operand:SI 1 "const_int_operand" "i"))
3314 (match_operand:SI 2 "const_int_operand" "i")))]
3315 "TARGET_32BIT && arm_arch6
3316 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3320 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3321 &mask, &signed_sat))
3324 operands[1] = GEN_INT (mask);
3326 return "ssat%?\t%0, %1, %4%S3";
3328 return "usat%?\t%0, %1, %4%S3";
3330 [(set_attr "predicable" "yes")
3331 (set_attr "shift" "3")
3332 (set_attr "type" "logic_shift_reg")])
3334 ;; Shift and rotation insns
3336 (define_expand "ashldi3"
3337 [(set (match_operand:DI 0 "s_register_operand")
3338 (ashift:DI (match_operand:DI 1 "s_register_operand")
3339 (match_operand:SI 2 "reg_or_int_operand")))]
3342 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3343 operands[2], gen_reg_rtx (SImode),
3344 gen_reg_rtx (SImode));
3348 (define_expand "ashlsi3"
3349 [(set (match_operand:SI 0 "s_register_operand")
3350 (ashift:SI (match_operand:SI 1 "s_register_operand")
3351 (match_operand:SI 2 "arm_rhs_operand")))]
3354 if (CONST_INT_P (operands[2])
3355 && (UINTVAL (operands[2])) > 31)
3357 emit_insn (gen_movsi (operands[0], const0_rtx));
3363 (define_expand "ashrdi3"
3364 [(set (match_operand:DI 0 "s_register_operand")
3365 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3366 (match_operand:SI 2 "reg_or_int_operand")))]
3369 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3370 operands[2], gen_reg_rtx (SImode),
3371 gen_reg_rtx (SImode));
3375 (define_expand "ashrsi3"
3376 [(set (match_operand:SI 0 "s_register_operand")
3377 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3378 (match_operand:SI 2 "arm_rhs_operand")))]
3381 if (CONST_INT_P (operands[2])
3382 && UINTVAL (operands[2]) > 31)
3383 operands[2] = GEN_INT (31);
3387 (define_expand "lshrdi3"
3388 [(set (match_operand:DI 0 "s_register_operand")
3389 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3390 (match_operand:SI 2 "reg_or_int_operand")))]
3393 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3394 operands[2], gen_reg_rtx (SImode),
3395 gen_reg_rtx (SImode));
3399 (define_expand "lshrsi3"
3400 [(set (match_operand:SI 0 "s_register_operand")
3401 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3402 (match_operand:SI 2 "arm_rhs_operand")))]
3405 if (CONST_INT_P (operands[2])
3406 && (UINTVAL (operands[2])) > 31)
3408 emit_insn (gen_movsi (operands[0], const0_rtx));
3414 (define_expand "rotlsi3"
3415 [(set (match_operand:SI 0 "s_register_operand")
3416 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3417 (match_operand:SI 2 "reg_or_int_operand")))]
3420 if (CONST_INT_P (operands[2]))
3421 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3424 rtx reg = gen_reg_rtx (SImode);
3425 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3431 (define_expand "rotrsi3"
3432 [(set (match_operand:SI 0 "s_register_operand")
3433 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3434 (match_operand:SI 2 "arm_rhs_operand")))]
3439 if (CONST_INT_P (operands[2])
3440 && UINTVAL (operands[2]) > 31)
3441 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3443 else /* TARGET_THUMB1 */
3445 if (CONST_INT_P (operands [2]))
3446 operands [2] = force_reg (SImode, operands[2]);
3451 (define_insn "*arm_shiftsi3"
3452 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3453 (match_operator:SI 3 "shift_operator"
3454 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3455 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3457 "* return arm_output_shift(operands, 0);"
3458 [(set_attr "predicable" "yes")
3459 (set_attr "arch" "t2,t2,*,*")
3460 (set_attr "predicable_short_it" "yes,yes,no,no")
3461 (set_attr "length" "4")
3462 (set_attr "shift" "1")
3463 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3466 (define_insn "*shiftsi3_compare0"
3467 [(set (reg:CC_NOOV CC_REGNUM)
3468 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3469 [(match_operand:SI 1 "s_register_operand" "r,r")
3470 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3472 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3473 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3475 "* return arm_output_shift(operands, 1);"
3476 [(set_attr "conds" "set")
3477 (set_attr "shift" "1")
3478 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3481 (define_insn "*shiftsi3_compare0_scratch"
3482 [(set (reg:CC_NOOV CC_REGNUM)
3483 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3484 [(match_operand:SI 1 "s_register_operand" "r,r")
3485 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3487 (clobber (match_scratch:SI 0 "=r,r"))]
3489 "* return arm_output_shift(operands, 1);"
3490 [(set_attr "conds" "set")
3491 (set_attr "shift" "1")
3492 (set_attr "type" "shift_imm,shift_reg")]
3495 (define_insn "*not_shiftsi"
3496 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3497 (not:SI (match_operator:SI 3 "shift_operator"
3498 [(match_operand:SI 1 "s_register_operand" "r,r")
3499 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3502 [(set_attr "predicable" "yes")
3503 (set_attr "shift" "1")
3504 (set_attr "arch" "32,a")
3505 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3507 (define_insn "*not_shiftsi_compare0"
3508 [(set (reg:CC_NOOV CC_REGNUM)
3510 (not:SI (match_operator:SI 3 "shift_operator"
3511 [(match_operand:SI 1 "s_register_operand" "r,r")
3512 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3514 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3515 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3517 "mvns%?\\t%0, %1%S3"
3518 [(set_attr "conds" "set")
3519 (set_attr "shift" "1")
3520 (set_attr "arch" "32,a")
3521 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3523 (define_insn "*not_shiftsi_compare0_scratch"
3524 [(set (reg:CC_NOOV CC_REGNUM)
3526 (not:SI (match_operator:SI 3 "shift_operator"
3527 [(match_operand:SI 1 "s_register_operand" "r,r")
3528 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3530 (clobber (match_scratch:SI 0 "=r,r"))]
3532 "mvns%?\\t%0, %1%S3"
3533 [(set_attr "conds" "set")
3534 (set_attr "shift" "1")
3535 (set_attr "arch" "32,a")
3536 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3538 ;; We don't really have extzv, but defining this using shifts helps
3539 ;; to reduce register pressure later on.
3541 (define_expand "extzv"
3542 [(set (match_operand 0 "s_register_operand")
3543 (zero_extract (match_operand 1 "nonimmediate_operand")
3544 (match_operand 2 "const_int_operand")
3545 (match_operand 3 "const_int_operand")))]
3546 "TARGET_THUMB1 || arm_arch_thumb2"
3549 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3550 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3552 if (arm_arch_thumb2)
3554 HOST_WIDE_INT width = INTVAL (operands[2]);
3555 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3557 if (unaligned_access && MEM_P (operands[1])
3558 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3562 if (BYTES_BIG_ENDIAN)
3563 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3568 base_addr = adjust_address (operands[1], SImode,
3569 bitpos / BITS_PER_UNIT);
3570 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3574 rtx dest = operands[0];
3575 rtx tmp = gen_reg_rtx (SImode);
3577 /* We may get a paradoxical subreg here. Strip it off. */
3578 if (GET_CODE (dest) == SUBREG
3579 && GET_MODE (dest) == SImode
3580 && GET_MODE (SUBREG_REG (dest)) == HImode)
3581 dest = SUBREG_REG (dest);
3583 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3586 base_addr = adjust_address (operands[1], HImode,
3587 bitpos / BITS_PER_UNIT);
3588 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3589 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3593 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3595 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3603 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3606 operands[3] = GEN_INT (rshift);
3610 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3614 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3615 operands[3], gen_reg_rtx (SImode)));
3620 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3622 (define_expand "extzv_t1"
3623 [(set (match_operand:SI 4 "s_register_operand")
3624 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3625 (match_operand:SI 2 "const_int_operand")))
3626 (set (match_operand:SI 0 "s_register_operand")
3627 (lshiftrt:SI (match_dup 4)
3628 (match_operand:SI 3 "const_int_operand")))]
3632 (define_expand "extv"
3633 [(set (match_operand 0 "s_register_operand")
3634 (sign_extract (match_operand 1 "nonimmediate_operand")
3635 (match_operand 2 "const_int_operand")
3636 (match_operand 3 "const_int_operand")))]
3639 HOST_WIDE_INT width = INTVAL (operands[2]);
3640 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3642 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3643 && (bitpos % BITS_PER_UNIT) == 0)
3647 if (BYTES_BIG_ENDIAN)
3648 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3652 base_addr = adjust_address (operands[1], SImode,
3653 bitpos / BITS_PER_UNIT);
3654 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3658 rtx dest = operands[0];
3659 rtx tmp = gen_reg_rtx (SImode);
3661 /* We may get a paradoxical subreg here. Strip it off. */
3662 if (GET_CODE (dest) == SUBREG
3663 && GET_MODE (dest) == SImode
3664 && GET_MODE (SUBREG_REG (dest)) == HImode)
3665 dest = SUBREG_REG (dest);
3667 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3670 base_addr = adjust_address (operands[1], HImode,
3671 bitpos / BITS_PER_UNIT);
3672 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3673 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3678 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3680 else if (GET_MODE (operands[0]) == SImode
3681 && GET_MODE (operands[1]) == SImode)
3683 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3691 ; Helper to expand register forms of extv with the proper modes.
3693 (define_expand "extv_regsi"
3694 [(set (match_operand:SI 0 "s_register_operand")
3695 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3696 (match_operand 2 "const_int_operand")
3697 (match_operand 3 "const_int_operand")))]
3702 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3704 (define_insn "unaligned_loaddi"
3705 [(set (match_operand:DI 0 "s_register_operand" "=r")
3706 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3707 UNSPEC_UNALIGNED_LOAD))]
3708 "TARGET_32BIT && TARGET_LDRD"
3710 return output_move_double (operands, true, NULL);
3712 [(set_attr "length" "8")
3713 (set_attr "type" "load_8")])
3715 (define_insn "unaligned_loadsi"
3716 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3717 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3718 UNSPEC_UNALIGNED_LOAD))]
3721 ldr\t%0, %1\t@ unaligned
3722 ldr%?\t%0, %1\t@ unaligned
3723 ldr%?\t%0, %1\t@ unaligned"
3724 [(set_attr "arch" "t1,t2,32")
3725 (set_attr "length" "2,2,4")
3726 (set_attr "predicable" "no,yes,yes")
3727 (set_attr "predicable_short_it" "no,yes,no")
3728 (set_attr "type" "load_4")])
3730 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3731 ;; address (there's no immediate format). That's tricky to support
3732 ;; here and we don't really need this pattern for that case, so only
3733 ;; enable for 32-bit ISAs.
3734 (define_insn "unaligned_loadhis"
3735 [(set (match_operand:SI 0 "s_register_operand" "=r")
3737 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3738 UNSPEC_UNALIGNED_LOAD)))]
3739 "unaligned_access && TARGET_32BIT"
3740 "ldrsh%?\t%0, %1\t@ unaligned"
3741 [(set_attr "predicable" "yes")
3742 (set_attr "type" "load_byte")])
3744 (define_insn "unaligned_loadhiu"
3745 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3747 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3748 UNSPEC_UNALIGNED_LOAD)))]
3751 ldrh\t%0, %1\t@ unaligned
3752 ldrh%?\t%0, %1\t@ unaligned
3753 ldrh%?\t%0, %1\t@ unaligned"
3754 [(set_attr "arch" "t1,t2,32")
3755 (set_attr "length" "2,2,4")
3756 (set_attr "predicable" "no,yes,yes")
3757 (set_attr "predicable_short_it" "no,yes,no")
3758 (set_attr "type" "load_byte")])
3760 (define_insn "unaligned_storedi"
3761 [(set (match_operand:DI 0 "memory_operand" "=m")
3762 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3763 UNSPEC_UNALIGNED_STORE))]
3764 "TARGET_32BIT && TARGET_LDRD"
3766 return output_move_double (operands, true, NULL);
3768 [(set_attr "length" "8")
3769 (set_attr "type" "store_8")])
3771 (define_insn "unaligned_storesi"
3772 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3773 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3774 UNSPEC_UNALIGNED_STORE))]
3777 str\t%1, %0\t@ unaligned
3778 str%?\t%1, %0\t@ unaligned
3779 str%?\t%1, %0\t@ unaligned"
3780 [(set_attr "arch" "t1,t2,32")
3781 (set_attr "length" "2,2,4")
3782 (set_attr "predicable" "no,yes,yes")
3783 (set_attr "predicable_short_it" "no,yes,no")
3784 (set_attr "type" "store_4")])
3786 (define_insn "unaligned_storehi"
3787 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3788 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3789 UNSPEC_UNALIGNED_STORE))]
3792 strh\t%1, %0\t@ unaligned
3793 strh%?\t%1, %0\t@ unaligned
3794 strh%?\t%1, %0\t@ unaligned"
3795 [(set_attr "arch" "t1,t2,32")
3796 (set_attr "length" "2,2,4")
3797 (set_attr "predicable" "no,yes,yes")
3798 (set_attr "predicable_short_it" "no,yes,no")
3799 (set_attr "type" "store_4")])
3802 (define_insn "*extv_reg"
3803 [(set (match_operand:SI 0 "s_register_operand" "=r")
3804 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3805 (match_operand:SI 2 "const_int_operand" "n")
3806 (match_operand:SI 3 "const_int_operand" "n")))]
3808 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3809 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3810 "sbfx%?\t%0, %1, %3, %2"
3811 [(set_attr "length" "4")
3812 (set_attr "predicable" "yes")
3813 (set_attr "type" "bfm")]
3816 (define_insn "extzv_t2"
3817 [(set (match_operand:SI 0 "s_register_operand" "=r")
3818 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3819 (match_operand:SI 2 "const_int_operand" "n")
3820 (match_operand:SI 3 "const_int_operand" "n")))]
3822 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3823 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3824 "ubfx%?\t%0, %1, %3, %2"
3825 [(set_attr "length" "4")
3826 (set_attr "predicable" "yes")
3827 (set_attr "type" "bfm")]
3831 ;; Division instructions
3832 (define_insn "divsi3"
3833 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3834 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3835 (match_operand:SI 2 "s_register_operand" "r,r")))]
3840 [(set_attr "arch" "32,v8mb")
3841 (set_attr "predicable" "yes")
3842 (set_attr "type" "sdiv")]
3845 (define_insn "udivsi3"
3846 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3847 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3848 (match_operand:SI 2 "s_register_operand" "r,r")))]
3853 [(set_attr "arch" "32,v8mb")
3854 (set_attr "predicable" "yes")
3855 (set_attr "type" "udiv")]
3859 ;; Unary arithmetic insns
3861 (define_expand "negvsi3"
3862 [(match_operand:SI 0 "register_operand")
3863 (match_operand:SI 1 "register_operand")
3864 (match_operand 2 "")]
3867 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3868 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3873 (define_expand "negvdi3"
3874 [(match_operand:DI 0 "s_register_operand")
3875 (match_operand:DI 1 "s_register_operand")
3876 (match_operand 2 "")]
3879 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3880 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3886 (define_insn "negdi2_compare"
3887 [(set (reg:CC CC_REGNUM)
3890 (match_operand:DI 1 "register_operand" "r,r")))
3891 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3892 (minus:DI (const_int 0) (match_dup 1)))]
3895 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3896 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3897 [(set_attr "conds" "set")
3898 (set_attr "arch" "a,t2")
3899 (set_attr "length" "8")
3900 (set_attr "type" "multiple")]
3903 (define_expand "negsi2"
3904 [(set (match_operand:SI 0 "s_register_operand")
3905 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3910 (define_insn "*arm_negsi2"
3911 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3912 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3914 "rsb%?\\t%0, %1, #0"
3915 [(set_attr "predicable" "yes")
3916 (set_attr "predicable_short_it" "yes,no")
3917 (set_attr "arch" "t2,*")
3918 (set_attr "length" "4")
3919 (set_attr "type" "alu_imm")]
3922 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3923 ;; rather than (0 cmp reg). This gives the same results for unsigned
3924 ;; and equality compares which is what we mostly need here.
3925 (define_insn "negsi2_0compare"
3926 [(set (reg:CC_RSB CC_REGNUM)
3927 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3929 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3930 (neg:SI (match_dup 1)))]
3935 [(set_attr "conds" "set")
3936 (set_attr "arch" "t2,*")
3937 (set_attr "length" "2,*")
3938 (set_attr "type" "alus_imm")]
3941 (define_insn "negsi2_carryin"
3942 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3943 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3944 (match_operand:SI 2 "arm_borrow_operation" "")))]
3948 sbc\\t%0, %1, %1, lsl #1"
3949 [(set_attr "conds" "use")
3950 (set_attr "arch" "a,t2")
3951 (set_attr "type" "adc_imm,adc_reg")]
3954 (define_expand "negsf2"
3955 [(set (match_operand:SF 0 "s_register_operand")
3956 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3957 "TARGET_32BIT && TARGET_HARD_FLOAT"
3961 (define_expand "negdf2"
3962 [(set (match_operand:DF 0 "s_register_operand")
3963 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3964 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3967 ;; abssi2 doesn't really clobber the condition codes if a different register
3968 ;; is being set. To keep things simple, assume during rtl manipulations that
3969 ;; it does, but tell the final scan operator the truth. Similarly for
3972 (define_expand "abssi2"
3974 [(set (match_operand:SI 0 "s_register_operand")
3975 (abs:SI (match_operand:SI 1 "s_register_operand")))
3976 (clobber (match_dup 2))])]
3980 operands[2] = gen_rtx_SCRATCH (SImode);
3982 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3985 (define_insn_and_split "*arm_abssi2"
3986 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3987 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3988 (clobber (reg:CC CC_REGNUM))]
3991 "&& reload_completed"
3994 /* if (which_alternative == 0) */
3995 if (REGNO(operands[0]) == REGNO(operands[1]))
3997 /* Emit the pattern:
3998 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3999 [(set (reg:CC CC_REGNUM)
4000 (compare:CC (match_dup 0) (const_int 0)))
4001 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4002 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4004 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4005 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4006 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4007 (gen_rtx_LT (SImode,
4008 gen_rtx_REG (CCmode, CC_REGNUM),
4010 (gen_rtx_SET (operands[0],
4011 (gen_rtx_MINUS (SImode,
4018 /* Emit the pattern:
4019 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4021 (xor:SI (match_dup 1)
4022 (ashiftrt:SI (match_dup 1) (const_int 31))))
4024 (minus:SI (match_dup 0)
4025 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4027 emit_insn (gen_rtx_SET (operands[0],
4028 gen_rtx_XOR (SImode,
4029 gen_rtx_ASHIFTRT (SImode,
4033 emit_insn (gen_rtx_SET (operands[0],
4034 gen_rtx_MINUS (SImode,
4036 gen_rtx_ASHIFTRT (SImode,
4042 [(set_attr "conds" "clob,*")
4043 (set_attr "shift" "1")
4044 (set_attr "predicable" "no, yes")
4045 (set_attr "length" "8")
4046 (set_attr "type" "multiple")]
4049 (define_insn_and_split "*arm_neg_abssi2"
4050 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4051 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4052 (clobber (reg:CC CC_REGNUM))]
4055 "&& reload_completed"
4058 /* if (which_alternative == 0) */
4059 if (REGNO (operands[0]) == REGNO (operands[1]))
4061 /* Emit the pattern:
4062 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4064 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4065 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4066 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4068 gen_rtx_REG (CCmode, CC_REGNUM),
4070 gen_rtx_SET (operands[0],
4071 (gen_rtx_MINUS (SImode,
4077 /* Emit the pattern:
4078 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4080 emit_insn (gen_rtx_SET (operands[0],
4081 gen_rtx_XOR (SImode,
4082 gen_rtx_ASHIFTRT (SImode,
4086 emit_insn (gen_rtx_SET (operands[0],
4087 gen_rtx_MINUS (SImode,
4088 gen_rtx_ASHIFTRT (SImode,
4095 [(set_attr "conds" "clob,*")
4096 (set_attr "shift" "1")
4097 (set_attr "predicable" "no, yes")
4098 (set_attr "length" "8")
4099 (set_attr "type" "multiple")]
4102 (define_expand "abssf2"
4103 [(set (match_operand:SF 0 "s_register_operand")
4104 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4105 "TARGET_32BIT && TARGET_HARD_FLOAT"
4108 (define_expand "absdf2"
4109 [(set (match_operand:DF 0 "s_register_operand")
4110 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4111 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4114 (define_expand "sqrtsf2"
4115 [(set (match_operand:SF 0 "s_register_operand")
4116 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4117 "TARGET_32BIT && TARGET_HARD_FLOAT"
4120 (define_expand "sqrtdf2"
4121 [(set (match_operand:DF 0 "s_register_operand")
4122 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4123 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4126 (define_expand "one_cmplsi2"
4127 [(set (match_operand:SI 0 "s_register_operand")
4128 (not:SI (match_operand:SI 1 "s_register_operand")))]
4133 (define_insn "*arm_one_cmplsi2"
4134 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4135 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4138 [(set_attr "predicable" "yes")
4139 (set_attr "predicable_short_it" "yes,no")
4140 (set_attr "arch" "t2,*")
4141 (set_attr "length" "4")
4142 (set_attr "type" "mvn_reg")]
4145 (define_insn "*notsi_compare0"
4146 [(set (reg:CC_NOOV CC_REGNUM)
4147 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4149 (set (match_operand:SI 0 "s_register_operand" "=r")
4150 (not:SI (match_dup 1)))]
4153 [(set_attr "conds" "set")
4154 (set_attr "type" "mvn_reg")]
4157 (define_insn "*notsi_compare0_scratch"
4158 [(set (reg:CC_NOOV CC_REGNUM)
4159 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4161 (clobber (match_scratch:SI 0 "=r"))]
4164 [(set_attr "conds" "set")
4165 (set_attr "type" "mvn_reg")]
4168 ;; Fixed <--> Floating conversion insns
4170 (define_expand "floatsihf2"
4171 [(set (match_operand:HF 0 "general_operand")
4172 (float:HF (match_operand:SI 1 "general_operand")))]
4176 rtx op1 = gen_reg_rtx (SFmode);
4177 expand_float (op1, operands[1], 0);
4178 op1 = convert_to_mode (HFmode, op1, 0);
4179 emit_move_insn (operands[0], op1);
4184 (define_expand "floatdihf2"
4185 [(set (match_operand:HF 0 "general_operand")
4186 (float:HF (match_operand:DI 1 "general_operand")))]
4190 rtx op1 = gen_reg_rtx (SFmode);
4191 expand_float (op1, operands[1], 0);
4192 op1 = convert_to_mode (HFmode, op1, 0);
4193 emit_move_insn (operands[0], op1);
4198 (define_expand "floatsisf2"
4199 [(set (match_operand:SF 0 "s_register_operand")
4200 (float:SF (match_operand:SI 1 "s_register_operand")))]
4201 "TARGET_32BIT && TARGET_HARD_FLOAT"
4205 (define_expand "floatsidf2"
4206 [(set (match_operand:DF 0 "s_register_operand")
4207 (float:DF (match_operand:SI 1 "s_register_operand")))]
4208 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4212 (define_expand "fix_trunchfsi2"
4213 [(set (match_operand:SI 0 "general_operand")
4214 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4218 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4219 expand_fix (operands[0], op1, 0);
4224 (define_expand "fix_trunchfdi2"
4225 [(set (match_operand:DI 0 "general_operand")
4226 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4230 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4231 expand_fix (operands[0], op1, 0);
4236 (define_expand "fix_truncsfsi2"
4237 [(set (match_operand:SI 0 "s_register_operand")
4238 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4239 "TARGET_32BIT && TARGET_HARD_FLOAT"
4243 (define_expand "fix_truncdfsi2"
4244 [(set (match_operand:SI 0 "s_register_operand")
4245 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4246 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4252 (define_expand "truncdfsf2"
4253 [(set (match_operand:SF 0 "s_register_operand")
4255 (match_operand:DF 1 "s_register_operand")))]
4256 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4260 ;; DFmode to HFmode conversions on targets without a single-step hardware
4261 ;; instruction for it would have to go through SFmode. This is dangerous
4262 ;; as it introduces double rounding.
4264 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4265 ;; a single-step instruction.
4267 (define_expand "truncdfhf2"
4268 [(set (match_operand:HF 0 "s_register_operand")
4270 (match_operand:DF 1 "s_register_operand")))]
4271 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4272 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4274 /* We don't have a direct instruction for this, so we must be in
4275 an unsafe math mode, and going via SFmode. */
4277 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4280 op1 = convert_to_mode (SFmode, operands[1], 0);
4281 op1 = convert_to_mode (HFmode, op1, 0);
4282 emit_move_insn (operands[0], op1);
4285 /* Otherwise, we will pick this up as a single instruction with
4286 no intermediary rounding. */
4290 ;; Zero and sign extension instructions.
4292 (define_expand "zero_extend<mode>di2"
4293 [(set (match_operand:DI 0 "s_register_operand" "")
4294 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4295 "TARGET_32BIT <qhs_zextenddi_cond>"
4297 rtx res_lo, res_hi, op0_lo, op0_hi;
4298 res_lo = gen_lowpart (SImode, operands[0]);
4299 res_hi = gen_highpart (SImode, operands[0]);
4300 if (can_create_pseudo_p ())
4302 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4303 op0_hi = gen_reg_rtx (SImode);
4307 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4310 if (<MODE>mode != SImode)
4311 emit_insn (gen_rtx_SET (op0_lo,
4312 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4313 emit_insn (gen_movsi (op0_hi, const0_rtx));
4314 if (res_lo != op0_lo)
4315 emit_move_insn (res_lo, op0_lo);
4316 if (res_hi != op0_hi)
4317 emit_move_insn (res_hi, op0_hi);
4322 (define_expand "extend<mode>di2"
4323 [(set (match_operand:DI 0 "s_register_operand" "")
4324 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4325 "TARGET_32BIT <qhs_sextenddi_cond>"
4327 rtx res_lo, res_hi, op0_lo, op0_hi;
4328 res_lo = gen_lowpart (SImode, operands[0]);
4329 res_hi = gen_highpart (SImode, operands[0]);
4330 if (can_create_pseudo_p ())
4332 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4333 op0_hi = gen_reg_rtx (SImode);
4337 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4340 if (<MODE>mode != SImode)
4341 emit_insn (gen_rtx_SET (op0_lo,
4342 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4343 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4344 if (res_lo != op0_lo)
4345 emit_move_insn (res_lo, op0_lo);
4346 if (res_hi != op0_hi)
4347 emit_move_insn (res_hi, op0_hi);
4352 ;; Splits for all extensions to DImode
4354 [(set (match_operand:DI 0 "s_register_operand" "")
4355 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4357 [(set (match_dup 0) (match_dup 1))]
4359 rtx lo_part = gen_lowpart (SImode, operands[0]);
4360 machine_mode src_mode = GET_MODE (operands[1]);
4362 if (src_mode == SImode)
4363 emit_move_insn (lo_part, operands[1]);
4365 emit_insn (gen_rtx_SET (lo_part,
4366 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4367 operands[0] = gen_highpart (SImode, operands[0]);
4368 operands[1] = const0_rtx;
4372 [(set (match_operand:DI 0 "s_register_operand" "")
4373 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4375 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4377 rtx lo_part = gen_lowpart (SImode, operands[0]);
4378 machine_mode src_mode = GET_MODE (operands[1]);
4380 if (src_mode == SImode)
4381 emit_move_insn (lo_part, operands[1]);
4383 emit_insn (gen_rtx_SET (lo_part,
4384 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4385 operands[1] = lo_part;
4386 operands[0] = gen_highpart (SImode, operands[0]);
4389 (define_expand "zero_extendhisi2"
4390 [(set (match_operand:SI 0 "s_register_operand")
4391 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4394 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4396 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4399 if (!arm_arch6 && !MEM_P (operands[1]))
4401 rtx t = gen_lowpart (SImode, operands[1]);
4402 rtx tmp = gen_reg_rtx (SImode);
4403 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4404 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4410 [(set (match_operand:SI 0 "s_register_operand" "")
4411 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4412 "!TARGET_THUMB2 && !arm_arch6"
4413 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4414 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4416 operands[2] = gen_lowpart (SImode, operands[1]);
4419 (define_insn "*arm_zero_extendhisi2"
4420 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4421 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4422 "TARGET_ARM && arm_arch4 && !arm_arch6"
4426 [(set_attr "type" "alu_shift_reg,load_byte")
4427 (set_attr "predicable" "yes")]
4430 (define_insn "*arm_zero_extendhisi2_v6"
4431 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4432 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4433 "TARGET_ARM && arm_arch6"
4437 [(set_attr "predicable" "yes")
4438 (set_attr "type" "extend,load_byte")]
4441 (define_insn "*arm_zero_extendhisi2addsi"
4442 [(set (match_operand:SI 0 "s_register_operand" "=r")
4443 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4444 (match_operand:SI 2 "s_register_operand" "r")))]
4446 "uxtah%?\\t%0, %2, %1"
4447 [(set_attr "type" "alu_shift_reg")
4448 (set_attr "predicable" "yes")]
4451 (define_expand "zero_extendqisi2"
4452 [(set (match_operand:SI 0 "s_register_operand")
4453 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4456 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4458 emit_insn (gen_andsi3 (operands[0],
4459 gen_lowpart (SImode, operands[1]),
4463 if (!arm_arch6 && !MEM_P (operands[1]))
4465 rtx t = gen_lowpart (SImode, operands[1]);
4466 rtx tmp = gen_reg_rtx (SImode);
4467 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4468 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4474 [(set (match_operand:SI 0 "s_register_operand" "")
4475 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4477 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4478 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4480 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4483 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4488 (define_insn "*arm_zero_extendqisi2"
4489 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4490 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4491 "TARGET_ARM && !arm_arch6"
4494 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4495 [(set_attr "length" "8,4")
4496 (set_attr "type" "alu_shift_reg,load_byte")
4497 (set_attr "predicable" "yes")]
4500 (define_insn "*arm_zero_extendqisi2_v6"
4501 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4502 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4503 "TARGET_ARM && arm_arch6"
4506 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4507 [(set_attr "type" "extend,load_byte")
4508 (set_attr "predicable" "yes")]
4511 (define_insn "*arm_zero_extendqisi2addsi"
4512 [(set (match_operand:SI 0 "s_register_operand" "=r")
4513 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4514 (match_operand:SI 2 "s_register_operand" "r")))]
4516 "uxtab%?\\t%0, %2, %1"
4517 [(set_attr "predicable" "yes")
4518 (set_attr "type" "alu_shift_reg")]
4522 [(set (match_operand:SI 0 "s_register_operand" "")
4523 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4524 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4525 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4526 [(set (match_dup 2) (match_dup 1))
4527 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4532 [(set (match_operand:SI 0 "s_register_operand" "")
4533 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4534 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4535 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4536 [(set (match_dup 2) (match_dup 1))
4537 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4543 [(set (match_operand:SI 0 "s_register_operand" "")
4544 (IOR_XOR:SI (and:SI (ashift:SI
4545 (match_operand:SI 1 "s_register_operand" "")
4546 (match_operand:SI 2 "const_int_operand" ""))
4547 (match_operand:SI 3 "const_int_operand" ""))
4549 (match_operator 5 "subreg_lowpart_operator"
4550 [(match_operand:SI 4 "s_register_operand" "")]))))]
4552 && (UINTVAL (operands[3])
4553 == (GET_MODE_MASK (GET_MODE (operands[5]))
4554 & (GET_MODE_MASK (GET_MODE (operands[5]))
4555 << (INTVAL (operands[2])))))"
4556 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4558 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4559 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4562 (define_insn "*compareqi_eq0"
4563 [(set (reg:CC_Z CC_REGNUM)
4564 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4568 [(set_attr "conds" "set")
4569 (set_attr "predicable" "yes")
4570 (set_attr "type" "logic_imm")]
4573 (define_expand "extendhisi2"
4574 [(set (match_operand:SI 0 "s_register_operand")
4575 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4580 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4583 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4585 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4589 if (!arm_arch6 && !MEM_P (operands[1]))
4591 rtx t = gen_lowpart (SImode, operands[1]);
4592 rtx tmp = gen_reg_rtx (SImode);
4593 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4594 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4601 [(set (match_operand:SI 0 "register_operand" "")
4602 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4603 (clobber (match_scratch:SI 2 ""))])]
4605 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4606 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4608 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4611 ;; This pattern will only be used when ldsh is not available
4612 (define_expand "extendhisi2_mem"
4613 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4615 (zero_extend:SI (match_dup 7)))
4616 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4617 (set (match_operand:SI 0 "" "")
4618 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4623 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4625 mem1 = change_address (operands[1], QImode, addr);
4626 mem2 = change_address (operands[1], QImode,
4627 plus_constant (Pmode, addr, 1));
4628 operands[0] = gen_lowpart (SImode, operands[0]);
4630 operands[2] = gen_reg_rtx (SImode);
4631 operands[3] = gen_reg_rtx (SImode);
4632 operands[6] = gen_reg_rtx (SImode);
4635 if (BYTES_BIG_ENDIAN)
4637 operands[4] = operands[2];
4638 operands[5] = operands[3];
4642 operands[4] = operands[3];
4643 operands[5] = operands[2];
4649 [(set (match_operand:SI 0 "register_operand" "")
4650 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4652 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4653 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4655 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4658 (define_insn "*arm_extendhisi2"
4659 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4660 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4661 "TARGET_ARM && arm_arch4 && !arm_arch6"
4665 [(set_attr "length" "8,4")
4666 (set_attr "type" "alu_shift_reg,load_byte")
4667 (set_attr "predicable" "yes")]
4670 ;; ??? Check Thumb-2 pool range
4671 (define_insn "*arm_extendhisi2_v6"
4672 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4673 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4674 "TARGET_32BIT && arm_arch6"
4678 [(set_attr "type" "extend,load_byte")
4679 (set_attr "predicable" "yes")]
4682 (define_insn "*arm_extendhisi2addsi"
4683 [(set (match_operand:SI 0 "s_register_operand" "=r")
4684 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4685 (match_operand:SI 2 "s_register_operand" "r")))]
4687 "sxtah%?\\t%0, %2, %1"
4688 [(set_attr "type" "alu_shift_reg")]
4691 (define_expand "extendqihi2"
4693 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4695 (set (match_operand:HI 0 "s_register_operand")
4696 (ashiftrt:SI (match_dup 2)
4701 if (arm_arch4 && MEM_P (operands[1]))
4703 emit_insn (gen_rtx_SET (operands[0],
4704 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4707 if (!s_register_operand (operands[1], QImode))
4708 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4709 operands[0] = gen_lowpart (SImode, operands[0]);
4710 operands[1] = gen_lowpart (SImode, operands[1]);
4711 operands[2] = gen_reg_rtx (SImode);
4715 (define_insn "*arm_extendqihi_insn"
4716 [(set (match_operand:HI 0 "s_register_operand" "=r")
4717 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4718 "TARGET_ARM && arm_arch4"
4720 [(set_attr "type" "load_byte")
4721 (set_attr "predicable" "yes")]
4724 (define_expand "extendqisi2"
4725 [(set (match_operand:SI 0 "s_register_operand")
4726 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4729 if (!arm_arch4 && MEM_P (operands[1]))
4730 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4732 if (!arm_arch6 && !MEM_P (operands[1]))
4734 rtx t = gen_lowpart (SImode, operands[1]);
4735 rtx tmp = gen_reg_rtx (SImode);
4736 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4737 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4743 [(set (match_operand:SI 0 "register_operand" "")
4744 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4746 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4747 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4749 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4752 (define_insn "*arm_extendqisi"
4753 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4754 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4755 "TARGET_ARM && arm_arch4 && !arm_arch6"
4759 [(set_attr "length" "8,4")
4760 (set_attr "type" "alu_shift_reg,load_byte")
4761 (set_attr "predicable" "yes")]
4764 (define_insn "*arm_extendqisi_v6"
4765 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4767 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4768 "TARGET_ARM && arm_arch6"
4772 [(set_attr "type" "extend,load_byte")
4773 (set_attr "predicable" "yes")]
4776 (define_insn "*arm_extendqisi2addsi"
4777 [(set (match_operand:SI 0 "s_register_operand" "=r")
4778 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4779 (match_operand:SI 2 "s_register_operand" "r")))]
4781 "sxtab%?\\t%0, %2, %1"
4782 [(set_attr "type" "alu_shift_reg")
4783 (set_attr "predicable" "yes")]
4786 (define_insn "arm_<sup>xtb16"
4787 [(set (match_operand:SI 0 "s_register_operand" "=r")
4789 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4791 "<sup>xtb16%?\\t%0, %1"
4792 [(set_attr "predicable" "yes")
4793 (set_attr "type" "alu_dsp_reg")])
4795 (define_insn "arm_<simd32_op>"
4796 [(set (match_operand:SI 0 "s_register_operand" "=r")
4798 [(match_operand:SI 1 "s_register_operand" "r")
4799 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4801 "<simd32_op>%?\\t%0, %1, %2"
4802 [(set_attr "predicable" "yes")
4803 (set_attr "type" "alu_dsp_reg")])
4805 (define_insn "arm_usada8"
4806 [(set (match_operand:SI 0 "s_register_operand" "=r")
4808 [(match_operand:SI 1 "s_register_operand" "r")
4809 (match_operand:SI 2 "s_register_operand" "r")
4810 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4812 "usada8%?\\t%0, %1, %2, %3"
4813 [(set_attr "predicable" "yes")
4814 (set_attr "type" "alu_dsp_reg")])
4816 (define_insn "arm_<simd32_op>"
4817 [(set (match_operand:DI 0 "s_register_operand" "=r")
4819 [(match_operand:SI 1 "s_register_operand" "r")
4820 (match_operand:SI 2 "s_register_operand" "r")
4821 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4823 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4824 [(set_attr "predicable" "yes")
4825 (set_attr "type" "smlald")])
4827 (define_expand "extendsfdf2"
4828 [(set (match_operand:DF 0 "s_register_operand")
4829 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4830 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4834 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4835 ;; must go through SFmode.
4837 ;; This is always safe for an extend.
4839 (define_expand "extendhfdf2"
4840 [(set (match_operand:DF 0 "s_register_operand")
4841 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4844 /* We don't have a direct instruction for this, so go via SFmode. */
4845 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4848 op1 = convert_to_mode (SFmode, operands[1], 0);
4849 op1 = convert_to_mode (DFmode, op1, 0);
4850 emit_insn (gen_movdf (operands[0], op1));
4853 /* Otherwise, we're done producing RTL and will pick up the correct
4854 pattern to do this with one rounding-step in a single instruction. */
4858 ;; Move insns (including loads and stores)
4860 ;; XXX Just some ideas about movti.
4861 ;; I don't think these are a good idea on the arm, there just aren't enough
4863 ;;(define_expand "loadti"
4864 ;; [(set (match_operand:TI 0 "s_register_operand")
4865 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4868 ;;(define_expand "storeti"
4869 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4870 ;; (match_operand:TI 1 "s_register_operand"))]
4873 ;;(define_expand "movti"
4874 ;; [(set (match_operand:TI 0 "general_operand")
4875 ;; (match_operand:TI 1 "general_operand"))]
4881 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4882 ;; operands[1] = copy_to_reg (operands[1]);
4883 ;; if (MEM_P (operands[0]))
4884 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4885 ;; else if (MEM_P (operands[1]))
4886 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4890 ;; emit_insn (insn);
4894 ;; Recognize garbage generated above.
4897 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4898 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4902 ;; register mem = (which_alternative < 3);
4903 ;; register const char *template;
4905 ;; operands[mem] = XEXP (operands[mem], 0);
4906 ;; switch (which_alternative)
4908 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4909 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4910 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4911 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4912 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4913 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4915 ;; output_asm_insn (template, operands);
4919 (define_expand "movdi"
4920 [(set (match_operand:DI 0 "general_operand")
4921 (match_operand:DI 1 "general_operand"))]
4924 gcc_checking_assert (aligned_operand (operands[0], DImode));
4925 gcc_checking_assert (aligned_operand (operands[1], DImode));
4926 if (can_create_pseudo_p ())
4928 if (!REG_P (operands[0]))
4929 operands[1] = force_reg (DImode, operands[1]);
4931 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4932 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4934 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4935 when expanding function calls. */
4936 gcc_assert (can_create_pseudo_p ());
4937 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4939 /* Perform load into legal reg pair first, then move. */
4940 rtx reg = gen_reg_rtx (DImode);
4941 emit_insn (gen_movdi (reg, operands[1]));
4944 emit_move_insn (gen_lowpart (SImode, operands[0]),
4945 gen_lowpart (SImode, operands[1]));
4946 emit_move_insn (gen_highpart (SImode, operands[0]),
4947 gen_highpart (SImode, operands[1]));
4950 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4951 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4953 /* Avoid STRD's from an odd-numbered register pair in ARM state
4954 when expanding function prologue. */
4955 gcc_assert (can_create_pseudo_p ());
4956 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4957 ? gen_reg_rtx (DImode)
4959 emit_move_insn (gen_lowpart (SImode, split_dest),
4960 gen_lowpart (SImode, operands[1]));
4961 emit_move_insn (gen_highpart (SImode, split_dest),
4962 gen_highpart (SImode, operands[1]));
4963 if (split_dest != operands[0])
4964 emit_insn (gen_movdi (operands[0], split_dest));
4970 (define_insn "*arm_movdi"
4971 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4972 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4974 && !(TARGET_HARD_FLOAT)
4976 && ( register_operand (operands[0], DImode)
4977 || register_operand (operands[1], DImode))"
4979 switch (which_alternative)
4986 /* Cannot load it directly, split to load it via MOV / MOVT. */
4987 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4991 return output_move_double (operands, true, NULL);
4994 [(set_attr "length" "8,12,16,8,8")
4995 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4996 (set_attr "arm_pool_range" "*,*,*,1020,*")
4997 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4998 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4999 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5003 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5004 (match_operand:ANY64 1 "immediate_operand" ""))]
5007 && (arm_disable_literal_pool
5008 || (arm_const_double_inline_cost (operands[1])
5009 <= arm_max_const_double_inline_cost ()))"
5012 arm_split_constant (SET, SImode, curr_insn,
5013 INTVAL (gen_lowpart (SImode, operands[1])),
5014 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5015 arm_split_constant (SET, SImode, curr_insn,
5016 INTVAL (gen_highpart_mode (SImode,
5017 GET_MODE (operands[0]),
5019 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5024 ; If optimizing for size, or if we have load delay slots, then
5025 ; we want to split the constant into two separate operations.
5026 ; In both cases this may split a trivial part into a single data op
5027 ; leaving a single complex constant to load. We can also get longer
5028 ; offsets in a LDR which means we get better chances of sharing the pool
5029 ; entries. Finally, we can normally do a better job of scheduling
5030 ; LDR instructions than we can with LDM.
5031 ; This pattern will only match if the one above did not.
5033 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5034 (match_operand:ANY64 1 "const_double_operand" ""))]
5035 "TARGET_ARM && reload_completed
5036 && arm_const_double_by_parts (operands[1])"
5037 [(set (match_dup 0) (match_dup 1))
5038 (set (match_dup 2) (match_dup 3))]
5040 operands[2] = gen_highpart (SImode, operands[0]);
5041 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5043 operands[0] = gen_lowpart (SImode, operands[0]);
5044 operands[1] = gen_lowpart (SImode, operands[1]);
5049 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5050 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5051 "TARGET_EITHER && reload_completed"
5052 [(set (match_dup 0) (match_dup 1))
5053 (set (match_dup 2) (match_dup 3))]
5055 operands[2] = gen_highpart (SImode, operands[0]);
5056 operands[3] = gen_highpart (SImode, operands[1]);
5057 operands[0] = gen_lowpart (SImode, operands[0]);
5058 operands[1] = gen_lowpart (SImode, operands[1]);
5060 /* Handle a partial overlap. */
5061 if (rtx_equal_p (operands[0], operands[3]))
5063 rtx tmp0 = operands[0];
5064 rtx tmp1 = operands[1];
5066 operands[0] = operands[2];
5067 operands[1] = operands[3];
5074 ;; We can't actually do base+index doubleword loads if the index and
5075 ;; destination overlap.  Split here so that we at least have a chance to
5078 [(set (match_operand:DI 0 "s_register_operand" "")
5079 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5080 (match_operand:SI 2 "s_register_operand" ""))))]
5082 && reg_overlap_mentioned_p (operands[0], operands[1])
5083 && reg_overlap_mentioned_p (operands[0], operands[2])"
5085 (plus:SI (match_dup 1)
5088 (mem:DI (match_dup 4)))]
5090 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5094 (define_expand "movsi"
5095 [(set (match_operand:SI 0 "general_operand")
5096 (match_operand:SI 1 "general_operand"))]
5100 rtx base, offset, tmp;
5102 gcc_checking_assert (aligned_operand (operands[0], SImode));
5103 gcc_checking_assert (aligned_operand (operands[1], SImode));
5104 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5106 /* Everything except mem = const or mem = mem can be done easily. */
5107 if (MEM_P (operands[0]))
5108 operands[1] = force_reg (SImode, operands[1]);
5109 if (arm_general_register_operand (operands[0], SImode)
5110 && CONST_INT_P (operands[1])
5111 && !(const_ok_for_arm (INTVAL (operands[1]))
5112 || const_ok_for_arm (~INTVAL (operands[1]))))
5114 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5116 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5121 arm_split_constant (SET, SImode, NULL_RTX,
5122 INTVAL (operands[1]), operands[0], NULL_RTX,
5123 optimize && can_create_pseudo_p ());
5128 else /* Target doesn't have MOVT... */
5130 if (can_create_pseudo_p ())
5132 if (!REG_P (operands[0]))
5133 operands[1] = force_reg (SImode, operands[1]);
5137 split_const (operands[1], &base, &offset);
5138 if (INTVAL (offset) != 0
5139 && targetm.cannot_force_const_mem (SImode, operands[1]))
5141 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5142 emit_move_insn (tmp, base);
5143 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5147 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5149 /* Recognize the case where operand[1] is a reference to thread-local
5150 data and load its address to a register. Offsets have been split off
5152 if (arm_tls_referenced_p (operands[1]))
5153 operands[1] = legitimize_tls_address (operands[1], tmp);
5155 && (CONSTANT_P (operands[1])
5156 || symbol_mentioned_p (operands[1])
5157 || label_mentioned_p (operands[1])))
5159 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5164 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5165 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5166 ;; so this does not matter.
5167 (define_insn "*arm_movt"
5168 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5169 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5170 (match_operand:SI 2 "general_operand" "i,i")))]
5171 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5173 movt%?\t%0, #:upper16:%c2
5174 movt\t%0, #:upper16:%c2"
5175 [(set_attr "arch" "32,v8mb")
5176 (set_attr "predicable" "yes")
5177 (set_attr "length" "4")
5178 (set_attr "type" "alu_sreg")]
5181 (define_insn "*arm_movsi_insn"
5182 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5183 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5184 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5185 && ( register_operand (operands[0], SImode)
5186 || register_operand (operands[1], SImode))"
5194 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5195 (set_attr "predicable" "yes")
5196 (set_attr "arch" "*,*,*,v6t2,*,*")
5197 (set_attr "pool_range" "*,*,*,*,4096,*")
5198 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5202 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5203 (match_operand:SI 1 "const_int_operand" ""))]
5204 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5205 && (!(const_ok_for_arm (INTVAL (operands[1]))
5206 || const_ok_for_arm (~INTVAL (operands[1]))))"
5207 [(clobber (const_int 0))]
5209 arm_split_constant (SET, SImode, NULL_RTX,
5210 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5215 ;; A normal way to do (symbol + offset) requires three instructions at least
5216 ;; (depends on how big the offset is) as below:
5217 ;; movw r0, #:lower16:g
5218 ;;	movt	r0, #:upper16:g
5221 ;; A better way would be:
5222 ;; movw r0, #:lower16:g+4
5223 ;;	movt	r0, #:upper16:g+4
5225 ;; The limitation of this approach is that the offset must fit in a 16-bit
5226 ;; signed value, because the current assembler only supports REL-type
5227 ;; relocations for this case.  If the more powerful RELA type becomes
5228 ;; supported in the future, we should update this pattern to use it.
5230 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5231 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5232 (match_operand:SI 2 "const_int_operand" ""))))]
5235 && arm_disable_literal_pool
5237 && GET_CODE (operands[1]) == SYMBOL_REF"
5238 [(clobber (const_int 0))]
5240 int offset = INTVAL (operands[2]);
5242 if (offset < -0x8000 || offset > 0x7fff)
5244 arm_emit_movpair (operands[0], operands[1]);
5245 emit_insn (gen_rtx_SET (operands[0],
5246 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5250 rtx op = gen_rtx_CONST (SImode,
5251 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5252 arm_emit_movpair (operands[0], op);
5257 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5258 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5259 ;; and lo_sum would be merged back into memory load at cprop. However,
5260 ;; if the default is to prefer movt/movw rather than a load from the constant
5261 ;; pool, the performance is better.
5263 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5264 (match_operand:SI 1 "general_operand" ""))]
5265 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5266 && !target_word_relocations
5267 && !arm_tls_referenced_p (operands[1])"
5268 [(clobber (const_int 0))]
5270 arm_emit_movpair (operands[0], operands[1]);
5274 ;; When generating pic, we need to load the symbol offset into a register.
5275 ;; So that the optimizer does not confuse this with a normal symbol load
5276 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5277 ;; since that is the only type of relocation we can use.
5279 ;; Wrap calculation of the whole PIC address in a single pattern for the
5280 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5281 ;; a PIC address involves two loads from memory, so we want to CSE it
5282 ;; as often as possible.
5283 ;; This pattern will be split into one of the pic_load_addr_* patterns
5284 ;; and a move after GCSE optimizations.
5286 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5287 (define_expand "calculate_pic_address"
5288 [(set (match_operand:SI 0 "register_operand")
5289 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5290 (unspec:SI [(match_operand:SI 2 "" "")]
5295 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5297 [(set (match_operand:SI 0 "register_operand" "")
5298 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5299 (unspec:SI [(match_operand:SI 2 "" "")]
5302 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5303 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5304 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5307 ;; operand1 is the memory address to go into
5308 ;; pic_load_addr_32bit.
5309 ;; operand2 is the PIC label to be emitted
5310 ;; from pic_add_dot_plus_eight.
5311 ;; We do this to allow hoisting of the entire insn.
5312 (define_insn_and_split "pic_load_addr_unified"
5313 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5314 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5315 (match_operand:SI 2 "" "")]
5316 UNSPEC_PIC_UNIFIED))]
5319 "&& reload_completed"
5320 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5321 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5322 (match_dup 2)] UNSPEC_PIC_BASE))]
5323 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5324 [(set_attr "type" "load_4,load_4,load_4")
5325 (set_attr "pool_range" "4096,4094,1022")
5326 (set_attr "neg_pool_range" "4084,0,0")
5327 (set_attr "arch" "a,t2,t1")
5328 (set_attr "length" "8,6,4")]
5331 ;; The rather odd constraints on the following are to force reload to leave
5332 ;; the insn alone, and to force the minipool generation pass to then move
5333 ;; the GOT symbol to memory.
5335 (define_insn "pic_load_addr_32bit"
5336 [(set (match_operand:SI 0 "s_register_operand" "=r")
5337 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5338 "TARGET_32BIT && flag_pic"
5340 [(set_attr "type" "load_4")
5341 (set (attr "pool_range")
5342 (if_then_else (eq_attr "is_thumb" "no")
5345 (set (attr "neg_pool_range")
5346 (if_then_else (eq_attr "is_thumb" "no")
5351 (define_insn "pic_load_addr_thumb1"
5352 [(set (match_operand:SI 0 "s_register_operand" "=l")
5353 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5354 "TARGET_THUMB1 && flag_pic"
5356 [(set_attr "type" "load_4")
5357 (set (attr "pool_range") (const_int 1018))]
5360 (define_insn "pic_add_dot_plus_four"
5361 [(set (match_operand:SI 0 "register_operand" "=r")
5362 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5364 (match_operand 2 "" "")]
5368 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5369 INTVAL (operands[2]));
5370 return \"add\\t%0, %|pc\";
5372 [(set_attr "length" "2")
5373 (set_attr "type" "alu_sreg")]
5376 (define_insn "pic_add_dot_plus_eight"
5377 [(set (match_operand:SI 0 "register_operand" "=r")
5378 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5380 (match_operand 2 "" "")]
5384 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5385 INTVAL (operands[2]));
5386 return \"add%?\\t%0, %|pc, %1\";
5388 [(set_attr "predicable" "yes")
5389 (set_attr "type" "alu_sreg")]
5392 (define_insn "tls_load_dot_plus_eight"
5393 [(set (match_operand:SI 0 "register_operand" "=r")
5394 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5396 (match_operand 2 "" "")]
5400 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5401 INTVAL (operands[2]));
5402 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5404 [(set_attr "predicable" "yes")
5405 (set_attr "type" "load_4")]
5408 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5409 ;; followed by a load. These sequences can be crunched down to
5410 ;; tls_load_dot_plus_eight by a peephole.
5413 [(set (match_operand:SI 0 "register_operand" "")
5414 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5416 (match_operand 1 "" "")]
5418 (set (match_operand:SI 2 "arm_general_register_operand" "")
5419 (mem:SI (match_dup 0)))]
5420 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5422 (mem:SI (unspec:SI [(match_dup 3)
5429 (define_insn "pic_offset_arm"
5430 [(set (match_operand:SI 0 "register_operand" "=r")
5431 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5432 (unspec:SI [(match_operand:SI 2 "" "X")]
5433 UNSPEC_PIC_OFFSET))))]
5434 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5435 "ldr%?\\t%0, [%1,%2]"
5436 [(set_attr "type" "load_4")]
5439 (define_expand "builtin_setjmp_receiver"
5440 [(label_ref (match_operand 0 "" ""))]
5444 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5446 if (arm_pic_register != INVALID_REGNUM)
5447 arm_load_pic_register (1UL << 3, NULL_RTX);
5451 ;; If copying one reg to another we can set the condition codes according to
5452 ;; its value. Such a move is common after a return from subroutine and the
5453 ;; result is being tested against zero.
5455 (define_insn "*movsi_compare0"
5456 [(set (reg:CC CC_REGNUM)
5457 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5459 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5464 subs%?\\t%0, %1, #0"
5465 [(set_attr "conds" "set")
5466 (set_attr "type" "alus_imm,alus_imm")]
5469 ;; Subroutine to store a half word from a register into memory.
5470 ;; Operand 0 is the source register (HImode)
5471 ;; Operand 1 is the destination address in a register (SImode)
5473 ;; In both this routine and the next, we must be careful not to spill
5474 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5475 ;; can generate unrecognizable rtl.
;; Store an HImode register to memory as two QImode stores (low byte
;; first).  Non-offsettable reg+const addresses are forced into a
;; register so adjust_address cannot create unrecognizable RTL.
5477 (define_expand "storehi"
5478   [;; store the low byte
5479    (set (match_operand 1 "" "") (match_dup 3))
5480    ;; extract the high byte
5482 	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5483    ;; store the high byte
5484    (set (match_dup 4) (match_dup 5))]
5488     rtx op1 = operands[1];
5489     rtx addr = XEXP (op1, 0);
5490     enum rtx_code code = GET_CODE (addr);
5492     if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5494       op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5496     operands[4] = adjust_address (op1, QImode, 1);
5497     operands[1] = adjust_address (operands[1], QImode, 0);
5498     operands[3] = gen_lowpart (QImode, operands[0]);
5499     operands[0] = gen_lowpart (SImode, operands[0]);
5500     operands[2] = gen_reg_rtx (SImode);
5501     operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of storehi: byte order of the two stores is
;; swapped relative to the little-endian pattern above.
5505 (define_expand "storehi_bigend"
5506   [(set (match_dup 4) (match_dup 3))
5508 	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5509    (set (match_operand 1 "" "") (match_dup 5))]
5513     rtx op1 = operands[1];
5514     rtx addr = XEXP (op1, 0);
5515     enum rtx_code code = GET_CODE (addr);
5517     if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5519       op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5521     operands[4] = adjust_address (op1, QImode, 1);
5522     operands[1] = adjust_address (operands[1], QImode, 0);
5523     operands[3] = gen_lowpart (QImode, operands[0]);
5524     operands[0] = gen_lowpart (SImode, operands[0]);
5525     operands[2] = gen_reg_rtx (SImode);
5526     operands[5] = gen_lowpart (QImode, operands[2]);
5530 ;; Subroutine to store a half word integer constant into memory.
;; Splits the 16-bit constant into its two bytes; if both bytes are
;; equal, a single register is reused for both QImode stores.
5531 (define_expand "storeinthi"
5532   [(set (match_operand 0 "" "")
5533 	(match_operand 1 "" ""))
5534    (set (match_dup 3) (match_dup 2))]
5538     HOST_WIDE_INT value = INTVAL (operands[1]);
5539     rtx addr = XEXP (operands[0], 0);
5540     rtx op0 = operands[0];
5541     enum rtx_code code = GET_CODE (addr);
5543     if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5545       op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5547     operands[1] = gen_reg_rtx (SImode);
5548     if (BYTES_BIG_ENDIAN)
5550 	emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5551 	if ((value & 255) == ((value >> 8) & 255))
5552 	  operands[2] = operands[1];
5555 	    operands[2] = gen_reg_rtx (SImode);
5556 	    emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5561 	emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5562 	if ((value & 255) == ((value >> 8) & 255))
5563 	  operands[2] = operands[1];
5566 	    operands[2] = gen_reg_rtx (SImode);
5567 	    emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5571     operands[3] = adjust_address (op0, QImode, 1);
5572     operands[0] = adjust_address (operands[0], QImode, 0);
5573     operands[2] = gen_lowpart (QImode, operands[2]);
5574     operands[1] = gen_lowpart (QImode, operands[1]);
;; Single-instruction HImode store (strh), available from ARMv4 on;
;; the source is forced into a register if it is not one already.
5578 (define_expand "storehi_single_op"
5579   [(set (match_operand:HI 0 "memory_operand")
5580 	(match_operand:HI 1 "general_operand"))]
5581   "TARGET_32BIT && arm_arch4"
5583     if (!s_register_operand (operands[1], HImode))
5584       operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; General HImode move expander.  Dispatches on target variant:
;; TARGET_ARM uses strh/byte-store helpers (pre-ARMv4 has no ldrh/strh,
;; so aligned loads are widened to SImode or split via movhi_bytes);
;; TARGET_THUMB2 handles everything but mem=mem / mem=const directly;
;; TARGET_THUMB1 forces awkward operands through SImode pseudos.
;; During reload (no pseudos available) large constants are moved via
;; an SImode subreg of the destination register.
5588 (define_expand "movhi"
5589   [(set (match_operand:HI 0 "general_operand")
5590 	(match_operand:HI 1 "general_operand"))]
5593   gcc_checking_assert (aligned_operand (operands[0], HImode));
5594   gcc_checking_assert (aligned_operand (operands[1], HImode));
5597       if (can_create_pseudo_p ())
5599 	  if (MEM_P (operands[0]))
5603 		  emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5606 	      if (CONST_INT_P (operands[1]))
5607 		emit_insn (gen_storeinthi (operands[0], operands[1]));
5610 		  if (MEM_P (operands[1]))
5611 		    operands[1] = force_reg (HImode, operands[1]);
5612 		  if (BYTES_BIG_ENDIAN)
5613 		    emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5615 		    emit_insn (gen_storehi (operands[1], operands[0]));
5619 	  /* Sign extend a constant, and keep it in an SImode reg.  */
5620 	  else if (CONST_INT_P (operands[1]))
5622 	      rtx reg = gen_reg_rtx (SImode);
5623 	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5625 	      /* If the constant is already valid, leave it alone.  */
5626 	      if (!const_ok_for_arm (val))
5628 		  /* If setting all the top bits will make the constant
5629 		     loadable in a single instruction, then set them.
5630 		     Otherwise, sign extend the number.  */
5632 		  if (const_ok_for_arm (~(val | ~0xffff)))
5634 		  else if (val & 0x8000)
5638 	      emit_insn (gen_movsi (reg, GEN_INT (val)));
5639 	      operands[1] = gen_lowpart (HImode, reg);
5641 	  else if (arm_arch4 && optimize && can_create_pseudo_p ()
5642 		   && MEM_P (operands[1]))
5644 	      rtx reg = gen_reg_rtx (SImode);
5646 	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5647 	      operands[1] = gen_lowpart (HImode, reg);
5649 	  else if (!arm_arch4)
5651 	      if (MEM_P (operands[1]))
5654 		  rtx offset = const0_rtx;
5655 		  rtx reg = gen_reg_rtx (SImode);
5657 		  if ((REG_P (base = XEXP (operands[1], 0))
5658 		       || (GET_CODE (base) == PLUS
5659 			   && (CONST_INT_P (offset = XEXP (base, 1)))
5660                            && ((INTVAL(offset) & 1) != 1)
5661 			   && REG_P (base = XEXP (base, 0))))
5662 		      && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5666 		      new_rtx = widen_memory_access (operands[1], SImode,
5667 						     ((INTVAL (offset) & ~3)
5668 						      - INTVAL (offset)));
5669 		      emit_insn (gen_movsi (reg, new_rtx));
5670 		      if (((INTVAL (offset) & 2) != 0)
5671 			  ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5673 			  rtx reg2 = gen_reg_rtx (SImode);
5675 			  emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5680 		    emit_insn (gen_movhi_bytes (reg, operands[1]));
5682 		  operands[1] = gen_lowpart (HImode, reg);
5686       /* Handle loading a large integer during reload.  */
5687       else if (CONST_INT_P (operands[1])
5688 	       && !const_ok_for_arm (INTVAL (operands[1]))
5689 	       && !const_ok_for_arm (~INTVAL (operands[1])))
5691 	  /* Writing a constant to memory needs a scratch, which should
5692 	     be handled with SECONDARY_RELOADs.  */
5693 	  gcc_assert (REG_P (operands[0]));
5695 	  operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5696 	  emit_insn (gen_movsi (operands[0], operands[1]));
5700   else if (TARGET_THUMB2)
5702       /* Thumb-2 can do everything except mem=mem and mem=const easily.  */
5703       if (can_create_pseudo_p ())
5705 	  if (!REG_P (operands[0]))
5706 	    operands[1] = force_reg (HImode, operands[1]);
5707           /* Zero extend a constant, and keep it in an SImode reg.  */
5708           else if (CONST_INT_P (operands[1]))
5710 	      rtx reg = gen_reg_rtx (SImode);
5711 	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5713 	      emit_insn (gen_movsi (reg, GEN_INT (val)));
5714 	      operands[1] = gen_lowpart (HImode, reg);
5718   else /* TARGET_THUMB1 */
5720       if (can_create_pseudo_p ())
5722           if (CONST_INT_P (operands[1]))
5724 	      rtx reg = gen_reg_rtx (SImode);
5726 	      emit_insn (gen_movsi (reg, operands[1]));
5727 	      operands[1] = gen_lowpart (HImode, reg);
5730           /* ??? We shouldn't really get invalid addresses here, but this can
5731 	     happen if we are passed a SP (never OK for HImode/QImode) or
5732 	     virtual register (also rejected as illegitimate for HImode/QImode)
5733 	     relative address.  */
5734           /* ??? This should perhaps be fixed elsewhere, for instance, in
5735 	     fixup_stack_1, by checking for other kinds of invalid addresses,
5736 	     e.g. a bare reference to a virtual register.  This may confuse the
5737 	     alpha though, which must handle this case differently.  */
5738           if (MEM_P (operands[0])
5739 	      && !memory_address_p (GET_MODE (operands[0]),
5740 				    XEXP (operands[0], 0)))
5742 	      = replace_equiv_address (operands[0],
5743 				       copy_to_reg (XEXP (operands[0], 0)));
5745           if (MEM_P (operands[1])
5746 	      && !memory_address_p (GET_MODE (operands[1]),
5747 				    XEXP (operands[1], 0)))
5749 	      = replace_equiv_address (operands[1],
5750 				       copy_to_reg (XEXP (operands[1], 0)));
5752           if (MEM_P (operands[1]) && optimize > 0)
5754 	      rtx reg = gen_reg_rtx (SImode);
5756 	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5757 	      operands[1] = gen_lowpart (HImode, reg);
5760           if (MEM_P (operands[0]))
5761 	    operands[1] = force_reg (HImode, operands[1]);
5763       else if (CONST_INT_P (operands[1])
5764 	        && !satisfies_constraint_I (operands[1]))
5766 	  /* Handle loading a large integer during reload.  */
5768 	  /* Writing a constant to memory needs a scratch, which should
5769 	     be handled with SECONDARY_RELOADs.  */
5770 	  gcc_assert (REG_P (operands[0]));
5772 	  operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5773 	  emit_insn (gen_movsi (operands[0], operands[1]));
;; Load an HImode value as two zero-extended QImode loads combined with
;; a shift/or; byte order of the two loads depends on BYTES_BIG_ENDIAN.
5780 (define_expand "movhi_bytes"
5781   [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5783 	(zero_extend:SI (match_dup 6)))
5784    (set (match_operand:SI 0 "" "")
5785 	 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5790     rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5792     mem1 = change_address (operands[1], QImode, addr);
5793     mem2 = change_address (operands[1], QImode,
5794 			   plus_constant (Pmode, addr, 1));
5795     operands[0] = gen_lowpart (SImode, operands[0]);
5797     operands[2] = gen_reg_rtx (SImode);
5798     operands[3] = gen_reg_rtx (SImode);
5801     if (BYTES_BIG_ENDIAN)
5803 	operands[4] = operands[2];
5804 	operands[5] = operands[3];
5808 	operands[4] = operands[3];
5809 	operands[5] = operands[2];
;; Big-endian HImode load built from an SImode rotate + arithmetic
;; shift right by 16, leaving the halfword in the low bits.
5814 (define_expand "movhi_bigend"
5816 	(rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5819 	(ashiftrt:SI (match_dup 2) (const_int 16)))
5820    (set (match_operand:HI 0 "s_register_operand")
5824   operands[2] = gen_reg_rtx (SImode);
5825   operands[3] = gen_reg_rtx (SImode);
5826   operands[4] = gen_lowpart (HImode, operands[3]);
5830 ;; Pattern to recognize insn generated default case above
;; HImode move for ARMv4+ soft-float: mov/mvn for constants, movw
;; (v6t2 only, per the arch attribute) for arbitrary 16-bit immediates,
;; strh/ldrh for memory.  Requires at least one register operand.
5831 (define_insn "*movhi_insn_arch4"
5832   [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5833 	(match_operand:HI 1 "general_operand"      "rIk,K,n,r,mi"))]
5835    && arm_arch4 && !TARGET_HARD_FLOAT
5836    && (register_operand (operands[0], HImode)
5837        || register_operand (operands[1], HImode))"
5839    mov%?\\t%0, %1\\t%@ movhi
5840    mvn%?\\t%0, #%B1\\t%@ movhi
5841    movw%?\\t%0, %L1\\t%@ movhi
5842    strh%?\\t%1, %0\\t%@ movhi
5843    ldrh%?\\t%0, %1\\t%@ movhi"
5844   [(set_attr "predicable" "yes")
5845    (set_attr "pool_range" "*,*,*,*,256")
5846    (set_attr "neg_pool_range" "*,*,*,*,244")
5847    (set_attr "arch" "*,*,v6t2,*,*")
5848    (set_attr_alternative "type"
5849                          [(if_then_else (match_operand 1 "const_int_operand" "")
5850                                         (const_string "mov_imm" )
5851                                         (const_string "mov_reg"))
5852                           (const_string "mvn_imm")
5853                           (const_string "mov_imm")
5854                           (const_string "store_4")
5855                           (const_string "load_4")])]
;; Register/immediate-only HImode moves for ARM soft-float targets
;; (no memory alternatives here).
5858 (define_insn "*movhi_bytes"
5859   [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5860 	(match_operand:HI 1 "arm_rhs_operand"  "I,rk,K"))]
5861   "TARGET_ARM && !TARGET_HARD_FLOAT"
5863    mov%?\\t%0, %1\\t%@ movhi
5864    mov%?\\t%0, %1\\t%@ movhi
5865    mvn%?\\t%0, #%B1\\t%@ movhi"
5866   [(set_attr "predicable" "yes")
5867    (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5870 ;; We use a DImode scratch because we may occasionally need an additional
5871 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5872 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5873 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5874 ;; to be correctly handled in default_secondary_reload function.
;; Secondary-reload expander for storing HImode to a reloaded memory
;; operand; operand 2 is a DImode scratch (see comment above).
5875 (define_expand "reload_outhi"
5876   [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5877 	      (match_operand:HI 1 "s_register_operand"        "r")
5878 	      (match_operand:DI 2 "s_register_operand"	"=&l")])]
5881     arm_reload_out_hi (operands);
5883     thumb_reload_out_hi (operands);
;; Secondary-reload expander for loading HImode from reloaded memory.
;; NOTE(review): the Thumb path calls thumb_reload_out_hi here too —
;; presumably intentional (shared helper), but worth confirming.
5888 (define_expand "reload_inhi"
5889   [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5890 	      (match_operand:HI 1 "arm_reload_memory_operand" "o")
5891 	      (match_operand:DI 2 "s_register_operand" "=&r")])]
5895     arm_reload_in_hi (operands);
5897     thumb_reload_out_hi (operands);
;; General QImode move expander.  Constants go through an SImode pseudo
;; (masked to 8 bits on Thumb to favour movs); invalid addresses are
;; copied to a register; during reload, large constants use an SImode
;; subreg of the destination.
5901 (define_expand "movqi"
5902   [(set (match_operand:QI 0 "general_operand")
5903 	(match_operand:QI 1 "general_operand"))]
5906   /* Everything except mem = const or mem = mem can be done easily */
5908   if (can_create_pseudo_p ())
5910       if (CONST_INT_P (operands[1]))
5912 	  rtx reg = gen_reg_rtx (SImode);
5914 	  /* For thumb we want an unsigned immediate, then we are more likely
5915 	     to be able to use a movs insn.  */
5917 	    operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5919 	  emit_insn (gen_movsi (reg, operands[1]));
5920 	  operands[1] = gen_lowpart (QImode, reg);
5925 	  /* ??? We shouldn't really get invalid addresses here, but this can
5926 	     happen if we are passed a SP (never OK for HImode/QImode) or
5927 	     virtual register (also rejected as illegitimate for HImode/QImode)
5928 	     relative address.  */
5929 	  /* ??? This should perhaps be fixed elsewhere, for instance, in
5930 	     fixup_stack_1, by checking for other kinds of invalid addresses,
5931 	     e.g. a bare reference to a virtual register.  This may confuse the
5932 	     alpha though, which must handle this case differently.  */
5933 	  if (MEM_P (operands[0])
5934 	      && !memory_address_p (GET_MODE (operands[0]),
5935 				    XEXP (operands[0], 0)))
5937 	      = replace_equiv_address (operands[0],
5938 				       copy_to_reg (XEXP (operands[0], 0)));
5939 	  if (MEM_P (operands[1])
5940 	      && !memory_address_p (GET_MODE (operands[1]),
5941 				    XEXP (operands[1], 0)))
5943 	      = replace_equiv_address (operands[1],
5944 				       copy_to_reg (XEXP (operands[1], 0)));
5947       if (MEM_P (operands[1]) && optimize > 0)
5949 	  rtx reg = gen_reg_rtx (SImode);
5951 	  emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5952 	  operands[1] = gen_lowpart (QImode, reg);
5955       if (MEM_P (operands[0]))
5956 	operands[1] = force_reg (QImode, operands[1]);
5958   else if (TARGET_THUMB
5959 	   && CONST_INT_P (operands[1])
5960 	   && !satisfies_constraint_I (operands[1]))
5962       /* Handle loading a large integer during reload.  */
5964       /* Writing a constant to memory needs a scratch, which should
5965 	 be handled with SECONDARY_RELOADs.  */
5966       gcc_assert (REG_P (operands[0]));
5968       operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5969       emit_insn (gen_movsi (operands[0], operands[1]));
;; QImode move insn with Thumb-2 16-bit alternatives (length 2) mixed
;; with 32-bit ARM/Thumb-2 forms; requires a register on one side.
5975 (define_insn "*arm_movqi_insn"
5976   [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5977 	(match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5979    && (   register_operand (operands[0], QImode)
5980        || register_operand (operands[1], QImode))"
5991   [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5992    (set_attr "predicable" "yes")
5993    (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5994    (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5995    (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; HFmode (__fp16) move expander: memory destinations get a register
;; source; Thumb-1 additionally forces non-register destinations.
5999 (define_expand "movhf"
6000   [(set (match_operand:HF 0 "general_operand")
6001 	(match_operand:HF 1 "general_operand"))]
6004   gcc_checking_assert (aligned_operand (operands[0], HFmode));
6005   gcc_checking_assert (aligned_operand (operands[1], HFmode));
6008       if (MEM_P (operands[0]))
6009 	operands[1] = force_reg (HFmode, operands[1]);
6011   else /* TARGET_THUMB1 */
6013       if (can_create_pseudo_p ())
6015 	  if (!REG_P (operands[0]))
6016 	    operands[1] = force_reg (HFmode, operands[1]);
;; Soft-float HFmode move: ldrh/strh for memory, mov for registers,
;; and constants materialized as a raw 16-bit pattern — movw when
;; Thumb-2 movw is available, otherwise mov+orr of the two bytes.
6022 (define_insn "*arm32_movhf"
6023   [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6024 	(match_operand:HF 1 "general_operand"	   " m,r,r,F"))]
6025   "TARGET_32BIT && !TARGET_HARD_FLOAT
6026    && (	  s_register_operand (operands[0], HFmode)
6027        || s_register_operand (operands[1], HFmode))"
6029   switch (which_alternative)
6031     case 0:	/* ARM register from memory */
6032       return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6033     case 1:	/* memory from ARM register */
6034       return \"strh%?\\t%1, %0\\t%@ __fp16\";
6035     case 2:	/* ARM register from ARM register */
6036       return \"mov%?\\t%0, %1\\t%@ __fp16\";
6037     case 3:	/* ARM register from constant */
6042 	bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6044 	ops[0] = operands[0];
6045 	ops[1] = GEN_INT (bits);
6046 	ops[2] = GEN_INT (bits & 0xff00);
6047 	ops[3] = GEN_INT (bits & 0x00ff);
6049 	if (arm_arch_thumb2)
6050 	  output_asm_insn (\"movw%?\\t%0, %1\", ops);
6052 	    output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6059   [(set_attr "conds" "unconditional")
6060    (set_attr "type" "load_4,store_4,mov_reg,multiple")
6061    (set_attr "length" "4,4,4,8")
6062    (set_attr "predicable" "yes")]
;; SFmode move expander.  When literal pools are disabled, constants
;; that VFP cannot encode are loaded through a clobbered scratch with
;; MOV/MOVT (no_literal_pool_sf_immediate).
6065 (define_expand "movsf"
6066   [(set (match_operand:SF 0 "general_operand")
6067 	(match_operand:SF 1 "general_operand"))]
6070   gcc_checking_assert (aligned_operand (operands[0], SFmode));
6071   gcc_checking_assert (aligned_operand (operands[1], SFmode));
6074       if (MEM_P (operands[0]))
6075 	operands[1] = force_reg (SFmode, operands[1]);
6077   else /* TARGET_THUMB1 */
6079       if (can_create_pseudo_p ())
6081 	  if (!REG_P (operands[0]))
6082 	    operands[1] = force_reg (SFmode, operands[1]);
6086   /* Cannot load it directly, generate a load with clobber so that it can be
6087      loaded via GPR with MOV / MOVT.  */
6088   if (arm_disable_literal_pool
6089       && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6090       && CONST_DOUBLE_P (operands[1])
6091       && TARGET_HARD_FLOAT
6092       && !vfp3_const_double_rtx (operands[1]))
6094       rtx clobreg = gen_reg_rtx (SFmode);
6095       emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6102 ;; Transform a floating-point move of a constant into a core register into
6103 ;; an SImode operation.
6105   [(set (match_operand:SF 0 "arm_general_register_operand" "")
6106 	(match_operand:SF 1 "immediate_operand" ""))]
6109    && CONST_DOUBLE_P (operands[1])"
6110   [(set (match_dup 2) (match_dup 3))]
6112   operands[2] = gen_lowpart (SImode, operands[0]);
6113   operands[3] = gen_lowpart (SImode, operands[1]);
6114   if (operands[2] == 0 || operands[3] == 0)
;; SFmode move for soft-float: mov, ldr (with literal-pool ranges in
;; the attrs), str.  With literal pools disabled, constant loads fall
;; through to the splitter below instead of a pool reference.
6119 (define_insn "*arm_movsf_soft_insn"
6120   [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6121 	(match_operand:SF 1 "general_operand"  "r,mE,r"))]
6123    && TARGET_SOFT_FLOAT
6124    && (!MEM_P (operands[0])
6125        || register_operand (operands[1], SFmode))"
6127   switch (which_alternative)
6129     case 0: return \"mov%?\\t%0, %1\";
6131       /* Cannot load it directly, split to load it via MOV / MOVT.  */
6132       if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6134       return \"ldr%?\\t%0, %1\\t%@ float\";
6135     case 2: return \"str%?\\t%1, %0\\t%@ float\";
6136     default: gcc_unreachable ();
6139   [(set_attr "predicable" "yes")
6140    (set_attr "type" "mov_reg,load_4,store_4")
6141    (set_attr "arm_pool_range" "*,4096,*")
6142    (set_attr "thumb2_pool_range" "*,4094,*")
6143    (set_attr "arm_neg_pool_range" "*,4084,*")
6144    (set_attr "thumb2_neg_pool_range" "*,0,*")]
;; Splitter for the above.
;; Converts the SF constant to its 32-bit target image and moves it
;; through an SImode subreg, avoiding any literal-pool entry.
6149   [(set (match_operand:SF 0 "s_register_operand")
6150 	(match_operand:SF 1 "const_double_operand"))]
6151   "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6155   real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6156   rtx cst = gen_int_mode (buf, SImode);
6157   emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; DFmode move expander; mirrors movsf, with the additional check that
;; VFPv3 double-constant encoding requires TARGET_VFP_DOUBLE.
6162 (define_expand "movdf"
6163   [(set (match_operand:DF 0 "general_operand")
6164 	(match_operand:DF 1 "general_operand"))]
6167   gcc_checking_assert (aligned_operand (operands[0], DFmode));
6168   gcc_checking_assert (aligned_operand (operands[1], DFmode));
6171       if (MEM_P (operands[0]))
6172 	operands[1] = force_reg (DFmode, operands[1]);
6174   else /* TARGET_THUMB */
6176       if (can_create_pseudo_p ())
6178 	  if (!REG_P (operands[0]))
6179 	    operands[1] = force_reg (DFmode, operands[1]);
6183   /* Cannot load it directly, generate a load with clobber so that it can be
6184      loaded via GPR with MOV / MOVT.  */
6185   if (arm_disable_literal_pool
6186       && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6187       && CONSTANT_P (operands[1])
6188       && TARGET_HARD_FLOAT
6189       && !arm_const_double_rtx (operands[1])
6190       && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6192       rtx clobreg = gen_reg_rtx (DFmode);
6193       emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6197 }
6200 ;; Reloading a df mode value stored in integer regs to memory can require a
6202 ;; Another reload_out<m> pattern that requires special constraints.
;; Secondary reload for storing a DFmode value held in core registers.
;; Handles auto-modify addresses specially: POST_INC/PRE_DEC go via a
;; DImode move; PRE_INC/POST_DEC adjust the base by 8 around the store.
6203 (define_expand "reload_outdf"
6204   [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6205    (match_operand:DF 1 "s_register_operand" "r")
6206    (match_operand:SI 2 "s_register_operand" "=&r")]
6210   enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6213     operands[2] = XEXP (operands[0], 0);
6214   else if (code == POST_INC || code == PRE_DEC)
6216       operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6217       operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6218       emit_insn (gen_movdi (operands[0], operands[1]));
6221   else if (code == PRE_INC)
6223       rtx reg = XEXP (XEXP (operands[0], 0), 0);
6225       emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6228   else if (code == POST_DEC)
6229     operands[2] = XEXP (XEXP (operands[0], 0), 0);
6231     emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6232 			   XEXP (XEXP (operands[0], 0), 1)));
6234   emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6237   if (code == POST_DEC)
6238     emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; DFmode move for soft-float, emitted via output_move_double; the
;; Da/Db/Dc constraints select constant classes of different lengths
;; (8/12/16 bytes, per the length attribute).
6244 (define_insn "*movdf_soft_insn"
6245   [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6246 	(match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6247   "TARGET_32BIT && TARGET_SOFT_FLOAT
6248    && (   register_operand (operands[0], DFmode)
6249        || register_operand (operands[1], DFmode))"
6251   switch (which_alternative)
6258       /* Cannot load it directly, split to load it via MOV / MOVT.  */
6259       if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6263       return output_move_double (operands, true, NULL);
6266   [(set_attr "length" "8,12,16,8,8")
6267    (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6268    (set_attr "arm_pool_range" "*,*,*,1020,*")
6269    (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6270    (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6271    (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Splitter for the above.
;; Builds the 64-bit image of the DF constant (word order chosen by
;; BYTES_BIG_ENDIAN) and moves it through a DImode subreg.
6276   [(set (match_operand:DF 0 "s_register_operand")
6277 	(match_operand:DF 1 "const_double_operand"))]
6278   "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6282   int order = BYTES_BIG_ENDIAN ? 1 : 0;
6283   real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6284   unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6285   ival |= (zext_hwi (buf[1 - order], 32) << 32);
6286   rtx cst = gen_int_mode (ival, DImode);
6287   emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6293 ;; load- and store-multiple insns
6294 ;; The arm can load/store any set of registers, provided that they are in
6295 ;; ascending order, but these expanders assume a contiguous set.
;; ldm expander: validates a contiguous run of 2..MAX_LDM_STM_OPS core
;; registers that stays below LAST_ARM_REGNUM, then delegates to
;; arm_gen_load_multiple.
6297 (define_expand "load_multiple"
6298   [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6299                           (match_operand:SI 1 "" ""))
6300                      (use (match_operand:SI 2 "" ""))])]
6303   HOST_WIDE_INT offset = 0;
6305   /* Support only fixed point registers.  */
6306   if (!CONST_INT_P (operands[2])
6307       || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6308       || INTVAL (operands[2]) < 2
6309       || !MEM_P (operands[1])
6310       || !REG_P (operands[0])
6311       || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6312       || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6316     = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6317 			     INTVAL (operands[2]),
6318 			     force_reg (SImode, XEXP (operands[1], 0)),
6319 			     FALSE, operands[1], &offset);
;; stm expander: mirror image of load_multiple (register source,
;; memory destination), delegating to arm_gen_store_multiple.
6322 (define_expand "store_multiple"
6323   [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6324                           (match_operand:SI 1 "" ""))
6325                      (use (match_operand:SI 2 "" ""))])]
6328   HOST_WIDE_INT offset = 0;
6330   /* Support only fixed point registers.  */
6331   if (!CONST_INT_P (operands[2])
6332       || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6333       || INTVAL (operands[2]) < 2
6334       || !REG_P (operands[1])
6335       || !MEM_P (operands[0])
6336       || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6337       || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6341     = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6342 			      INTVAL (operands[2]),
6343 			      force_reg (SImode, XEXP (operands[0], 0)),
6344 			      FALSE, operands[0], &offset);
;; memset expander: succeeds only when arm_gen_setmem can emit an
;; inline sequence; otherwise falls back to the library call.
6348 (define_expand "setmemsi"
6349   [(match_operand:BLK 0 "general_operand")
6350    (match_operand:SI 1 "const_int_operand")
6351    (match_operand:SI 2 "const_int_operand")
6352    (match_operand:SI 3 "const_int_operand")]
6355   if (arm_gen_setmem (operands))
;; Block-copy (memcpy) expander: prefer ldrd/strd sequences when the
;; tuning asks for them and we are not optimizing for size; Thumb-1
;; only handles word-aligned copies up to 48 bytes inline.
6366 (define_expand "cpymemqi"
6367   [(match_operand:BLK 0 "general_operand")
6368    (match_operand:BLK 1 "general_operand")
6369    (match_operand:SI 2 "const_int_operand")
6370    (match_operand:SI 3 "const_int_operand")]
6375       if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6376           && !optimize_function_for_size_p (cfun))
6378           if (gen_cpymem_ldrd_strd (operands))
6383       if (arm_gen_cpymemqi (operands))
6387   else /* TARGET_THUMB1 */
6389       if (   INTVAL (operands[3]) != 4
6390           || INTVAL (operands[2]) > 48)
6393       thumb_expand_cpymemqi (operands);
6400 ;; Compare & branch insns
6401 ;; The range calculations are based as follows:
6402 ;; For forward branches, the address calculation returns the address of
6403 ;; the next instruction. This is 2 beyond the branch instruction.
6404 ;; For backward branches, the address calculation returns the address of
6405 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6406 ;; instruction for the shortest sequence, and 4 before the branch instruction
6407 ;; if we have to jump around an unconditional branch.
6408 ;; To the basic branch range the PC offset must be added (this is +4).
6409 ;; So for forward branches we have
6410 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6411 ;; And for backward branches we have
6412 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6414 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6415 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode compare-and-branch.  On Thumb-1, negatable constants use
;; cbranchsi4_scratch and other constants are forced into registers;
;; 32-bit targets go through cbranch_cc.
6417 (define_expand "cbranchsi4"
6418   [(set (pc) (if_then_else
6419 	      (match_operator 0 "expandable_comparison_operator"
6420 	       [(match_operand:SI 1 "s_register_operand")
6421 	        (match_operand:SI 2 "nonmemory_operand")])
6422 	      (label_ref (match_operand 3 "" ""))
6428       if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6430       emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6434   if (thumb1_cmpneg_operand (operands[2], SImode))
6436       emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6437 					      operands[3], operands[0]));
6440   if (!thumb1_cmp_operand (operands[2], SImode))
6441     operands[2] = force_reg (SImode, operands[2]);
;; SFmode compare-and-branch (hard-float only); direct cbranch_cc.
6444 (define_expand "cbranchsf4"
6445   [(set (pc) (if_then_else
6446 	      (match_operator 0 "expandable_comparison_operator"
6447 	       [(match_operand:SF 1 "s_register_operand")
6448 	        (match_operand:SF 2 "vfp_compare_operand")])
6449 	      (label_ref (match_operand 3 "" ""))
6451   "TARGET_32BIT && TARGET_HARD_FLOAT"
6452   "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6453 				   operands[3])); DONE;"
;; DFmode compare-and-branch; requires double-precision VFP.
6456 (define_expand "cbranchdf4"
6457   [(set (pc) (if_then_else
6458 	      (match_operator 0 "expandable_comparison_operator"
6459 	       [(match_operand:DF 1 "s_register_operand")
6460 	        (match_operand:DF 2 "vfp_compare_operand")])
6461 	      (label_ref (match_operand 3 "" ""))
6463   "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6464   "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6465 				   operands[3])); DONE;"
;; DImode compare-and-branch; comparison is validized first.
6468 (define_expand "cbranchdi4"
6469   [(set (pc) (if_then_else
6470 	      (match_operator 0 "expandable_comparison_operator"
6471 	       [(match_operand:DI 1 "s_register_operand")
6472 	        (match_operand:DI 2 "reg_or_int_operand")])
6473 	      (label_ref (match_operand 3 "" ""))
6477   if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6479   emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6485 ;; Comparison and test insns
;; SImode compare: 16-bit Thumb-2 forms first (length 2), then 32-bit
;; register / positive-immediate / negatable-immediate alternatives.
6487 (define_insn "*arm_cmpsi_insn"
6488   [(set (reg:CC CC_REGNUM)
6489 	(compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6490 		    (match_operand:SI 1 "arm_add_operand"    "Py,r,r,I,L")))]
6498   [(set_attr "conds" "set")
6499    (set_attr "arch" "t2,t2,any,any,any")
6500    (set_attr "length" "2,2,4,4,4")
6501    (set_attr "predicable" "yes")
6502    (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6503    (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; Compare a register with a shifted register (register operand first).
6506 (define_insn "*cmpsi_shiftsi"
6507   [(set (reg:CC CC_REGNUM)
6508 	(compare:CC (match_operand:SI   0 "s_register_operand" "r,r,r")
6509 		    (match_operator:SI  3 "shift_operator"
6510 		     [(match_operand:SI 1 "s_register_operand" "r,r,r")
6511 		      (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6514   [(set_attr "conds" "set")
6515    (set_attr "shift" "1")
6516    (set_attr "arch" "32,a,a")
6517    (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; As above but with the operands swapped, hence the CC_SWP mode.
6519 (define_insn "*cmpsi_shiftsi_swp"
6520   [(set (reg:CC_SWP CC_REGNUM)
6521 	(compare:CC_SWP (match_operator:SI 3 "shift_operator"
6522 			 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6523 			  (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6524 			(match_operand:SI 0 "s_register_operand" "r,r,r")))]
6527   [(set_attr "conds" "set")
6528    (set_attr "shift" "1")
6529    (set_attr "arch" "32,a,a")
6530    (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Zero-test of reg compared with a negated shifted register; the type
;; attribute depends on whether the shift amount is a constant.
6532 (define_insn "*arm_cmpsi_negshiftsi_si"
6533   [(set (reg:CC_Z CC_REGNUM)
6535 	 (neg:SI (match_operator:SI 1 "shift_operator"
6536 		    [(match_operand:SI 2 "s_register_operand" "r")
6537 		     (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6538 	 (match_operand:SI 0 "s_register_operand" "r")))]
6541   [(set_attr "conds" "set")
6542    (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6543 				    (const_string "alus_shift_imm")
6544 				    (const_string "alus_shift_reg")))
6545    (set_attr "predicable" "yes")]
6548 ;; DImode comparisons. The generic code generates branches that
6549 ;; if-conversion cannot reduce to a conditional compare, so we do
;; Signed DImode compare: cmp on the low words, then sbcs into a
;; scratch to fold the high words into N/C/V (CC_NCV mode).
6552 (define_insn "*arm_cmpdi_insn"
6553   [(set (reg:CC_NCV CC_REGNUM)
6554 	(compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6555 			(match_operand:DI 1 "arm_di_operand"	   "rDi")))
6556    (clobber (match_scratch:SI 2 "=r"))]
6558   "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6559   [(set_attr "conds" "set")
6560    (set_attr "length" "8")
6561    (set_attr "type" "multiple")]
;; Unsigned DImode compare, split after reload into a compare of the
;; high words and a conditionally-executed compare of the low words.
6564 (define_insn_and_split "*arm_cmpdi_unsigned"
6565   [(set (reg:CC_CZ CC_REGNUM)
6566 	(compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6567 		       (match_operand:DI 1 "arm_di_operand"	 "Py,r,Di,rDi")))]
6570   "#"   ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6571   "&& reload_completed"
6572   [(set (reg:CC CC_REGNUM)
6573 	(compare:CC (match_dup 2) (match_dup 3)))
6574    (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6575 	      (set (reg:CC CC_REGNUM)
6576 		   (compare:CC (match_dup 0) (match_dup 1))))]
6578     operands[2] = gen_highpart (SImode, operands[0]);
6579     operands[0] = gen_lowpart (SImode, operands[0]);
6580     if (CONST_INT_P (operands[1]))
6581       operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6583       operands[3] = gen_highpart (SImode, operands[1]);
6584     operands[1] = gen_lowpart (SImode, operands[1]);
6586   [(set_attr "conds" "set")
6587    (set_attr "enabled_for_short_it" "yes,yes,no,*")
6588    (set_attr "arch" "t2,t2,t2,a")
6589    (set_attr "length" "6,6,10,8")
6590    (set_attr "type" "multiple")]
6593 ; This insn allows redundant compares to be removed by cse, nothing should
6594 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6595 ; is deleted later on. The match_dup will match the mode here, so that
6596 ; mode changes of the condition codes aren't lost by this even though we don't
6597 ; specify what they are.
;; Zero-length no-op compare kept so CSE can remove redundant compares
;; (see comment above); emits only an assembly comment.
6599 (define_insn "*deleted_compare"
6600   [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6602   "\\t%@ deleted compare"
6603   [(set_attr "conds" "set")
6604    (set_attr "length" "0")
6605    (set_attr "type" "no_insn")]
;; Generic conditional-branch helper: materializes the comparison into
;; the CC register via arm_gen_compare_reg, then branches on it.
6611 (define_expand "cbranch_cc"
6613 	(if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6614 					    (match_operand 2 "" "")])
6615 		      (label_ref (match_operand 3 "" ""))
6618   "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6619 				      operands[1], operands[2], NULL_RTX);
6620    operands[2] = const0_rtx;"
6624 ;; Patterns to match conditional branch insns.
;; Conditional branch on the condition in a CC register; cooperates
;; with the ccfsm state machine for conditional-execution conversion.
;; The length attribute models the short-branch range on Thumb-2.
6627 (define_insn "arm_cond_branch"
6629 	(if_then_else (match_operator 1 "arm_comparison_operator"
6630 		       [(match_operand 2 "cc_register" "") (const_int 0)])
6631 		      (label_ref (match_operand 0 "" ""))
6635   if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6637       arm_ccfsm_state += 2;
6640   return \"b%d1\\t%l0\";
6642   [(set_attr "conds" "use")
6643    (set_attr "type" "branch")
6644    (set (attr "length")
6646 	   (and (match_test "TARGET_THUMB2")
6647 		(and (ge (minus (match_dup 0) (pc)) (const_int -250))
6648 		     (le (minus (match_dup 0) (pc)) (const_int 256))))
;; As arm_cond_branch but with the branch sense inverted (%D1),
;; matching the if_then_else with arms swapped.
6653 (define_insn "*arm_cond_branch_reversed"
6655 	(if_then_else (match_operator 1 "arm_comparison_operator"
6656 		       [(match_operand 2 "cc_register" "") (const_int 0)])
6658 		      (label_ref (match_operand 0 "" ""))))]
6661   if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6663       arm_ccfsm_state += 2;
6666   return \"b%D1\\t%l0\";
6668   [(set_attr "conds" "use")
6669    (set_attr "type" "branch")
6670    (set (attr "length")
6672 	   (and (match_test "TARGET_THUMB2")
6673 		(and (ge (minus (match_dup 0) (pc)) (const_int -250))
6674 		     (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Expander: store the result of a comparison into a GP register.
;; As with cbranch_cc, the compare is emitted via arm_gen_compare_reg
;; and the operator is rewritten to test the CC register against 0.
6683 (define_expand "cstore_cc"
6684 [(set (match_operand:SI 0 "s_register_operand")
6685 (match_operator:SI 1 "" [(match_operand 2 "" "")
6686 (match_operand 3 "" "")]))]
6688 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6689 operands[2], operands[3], NULL_RTX);
6690 operands[3] = const0_rtx;"
6693 (define_insn_and_split "*mov_scc"
6694 [(set (match_operand:SI 0 "s_register_operand" "=r")
6695 (match_operator:SI 1 "arm_comparison_operator_mode"
6696 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6698 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6701 (if_then_else:SI (match_dup 1)
6705 [(set_attr "conds" "use")
6706 (set_attr "length" "8")
6707 (set_attr "type" "multiple")]
;; Negate the result of a borrow operation; single 4-byte insn that
;; uses (does not set) the condition codes, scheduled as adc_reg.
;; NOTE(review): listing omits original lines 6713-6714 (condition and
;; output template) -- verify against upstream arm.md.
6710 (define_insn "*negscc_borrow"
6711 [(set (match_operand:SI 0 "s_register_operand" "=r")
6712 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6715 [(set_attr "conds" "use")
6716 (set_attr "length" "4")
6717 (set_attr "type" "adc_reg")]
6720 (define_insn_and_split "*mov_negscc"
6721 [(set (match_operand:SI 0 "s_register_operand" "=r")
6722 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6723 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6724 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6725 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6728 (if_then_else:SI (match_dup 1)
6732 operands[3] = GEN_INT (~0);
6734 [(set_attr "conds" "use")
6735 (set_attr "length" "8")
6736 (set_attr "type" "multiple")]
6739 (define_insn_and_split "*mov_notscc"
6740 [(set (match_operand:SI 0 "s_register_operand" "=r")
6741 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6742 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6744 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6747 (if_then_else:SI (match_dup 1)
6751 operands[3] = GEN_INT (~1);
6752 operands[4] = GEN_INT (~0);
6754 [(set_attr "conds" "use")
6755 (set_attr "length" "8")
6756 (set_attr "type" "multiple")]
6759 (define_expand "cstoresi4"
6760 [(set (match_operand:SI 0 "s_register_operand")
6761 (match_operator:SI 1 "expandable_comparison_operator"
6762 [(match_operand:SI 2 "s_register_operand")
6763 (match_operand:SI 3 "reg_or_int_operand")]))]
6764 "TARGET_32BIT || TARGET_THUMB1"
6766 rtx op3, scratch, scratch2;
6770 if (!arm_add_operand (operands[3], SImode))
6771 operands[3] = force_reg (SImode, operands[3]);
6772 emit_insn (gen_cstore_cc (operands[0], operands[1],
6773 operands[2], operands[3]));
6777 if (operands[3] == const0_rtx)
6779 switch (GET_CODE (operands[1]))
6782 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6786 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6790 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6791 NULL_RTX, 0, OPTAB_WIDEN);
6792 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6793 NULL_RTX, 0, OPTAB_WIDEN);
6794 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6795 operands[0], 1, OPTAB_WIDEN);
6799 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6801 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6802 NULL_RTX, 1, OPTAB_WIDEN);
6806 scratch = expand_binop (SImode, ashr_optab, operands[2],
6807 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6808 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6809 NULL_RTX, 0, OPTAB_WIDEN);
6810 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6814 /* LT is handled by generic code. No need for unsigned with 0. */
6821 switch (GET_CODE (operands[1]))
6824 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6825 NULL_RTX, 0, OPTAB_WIDEN);
6826 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6830 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6831 NULL_RTX, 0, OPTAB_WIDEN);
6832 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6836 op3 = force_reg (SImode, operands[3]);
6838 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6839 NULL_RTX, 1, OPTAB_WIDEN);
6840 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6841 NULL_RTX, 0, OPTAB_WIDEN);
6842 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6848 if (!thumb1_cmp_operand (op3, SImode))
6849 op3 = force_reg (SImode, op3);
6850 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6851 NULL_RTX, 0, OPTAB_WIDEN);
6852 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6853 NULL_RTX, 1, OPTAB_WIDEN);
6854 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6859 op3 = force_reg (SImode, operands[3]);
6860 scratch = force_reg (SImode, const0_rtx);
6861 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6867 if (!thumb1_cmp_operand (op3, SImode))
6868 op3 = force_reg (SImode, op3);
6869 scratch = force_reg (SImode, const0_rtx);
6870 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6876 if (!thumb1_cmp_operand (op3, SImode))
6877 op3 = force_reg (SImode, op3);
6878 scratch = gen_reg_rtx (SImode);
6879 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6883 op3 = force_reg (SImode, operands[3]);
6884 scratch = gen_reg_rtx (SImode);
6885 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6888 /* No good sequences for GT, LT. */
6895 (define_expand "cstorehf4"
6896 [(set (match_operand:SI 0 "s_register_operand")
6897 (match_operator:SI 1 "expandable_comparison_operator"
6898 [(match_operand:HF 2 "s_register_operand")
6899 (match_operand:HF 3 "vfp_compare_operand")]))]
6900 "TARGET_VFP_FP16INST"
6902 if (!arm_validize_comparison (&operands[1],
6907 emit_insn (gen_cstore_cc (operands[0], operands[1],
6908 operands[2], operands[3]));
;; Store the result of an SFmode comparison into an SImode register.
;; Requires hard float; simply defers to cstore_cc above.
6913 (define_expand "cstoresf4"
6914 [(set (match_operand:SI 0 "s_register_operand")
6915 (match_operator:SI 1 "expandable_comparison_operator"
6916 [(match_operand:SF 2 "s_register_operand")
6917 (match_operand:SF 3 "vfp_compare_operand")]))]
6918 "TARGET_32BIT && TARGET_HARD_FLOAT"
6919 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6920 operands[2], operands[3])); DONE;"
;; Store the result of a DFmode comparison into an SImode register.
;; Like cstoresf4 but additionally requires double-precision VFP
;; (!TARGET_VFP_SINGLE); defers to cstore_cc.
6923 (define_expand "cstoredf4"
6924 [(set (match_operand:SI 0 "s_register_operand")
6925 (match_operator:SI 1 "expandable_comparison_operator"
6926 [(match_operand:DF 2 "s_register_operand")
6927 (match_operand:DF 3 "vfp_compare_operand")]))]
6928 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6929 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6930 operands[2], operands[3])); DONE;"
;; Store the result of a DImode comparison into an SImode register.
;; Validates the comparison first (arm_validize_comparison), then
;; defers to cstore_cc.
;; NOTE(review): listing omits interior lines (e.g. 6938-6939,
;; 6941-6943, and the FAIL/DONE tail) -- verify against upstream arm.md.
6933 (define_expand "cstoredi4"
6934 [(set (match_operand:SI 0 "s_register_operand")
6935 (match_operator:SI 1 "expandable_comparison_operator"
6936 [(match_operand:DI 2 "s_register_operand")
6937 (match_operand:DI 3 "reg_or_int_operand")]))]
6940 if (!arm_validize_comparison (&operands[1],
6944 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6951 ;; Conditional move insns
6953 (define_expand "movsicc"
6954 [(set (match_operand:SI 0 "s_register_operand")
6955 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6956 (match_operand:SI 2 "arm_not_operand")
6957 (match_operand:SI 3 "arm_not_operand")))]
6964 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6965 &XEXP (operands[1], 1)))
6968 code = GET_CODE (operands[1]);
6969 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6970 XEXP (operands[1], 1), NULL_RTX);
6971 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6975 (define_expand "movhfcc"
6976 [(set (match_operand:HF 0 "s_register_operand")
6977 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6978 (match_operand:HF 2 "s_register_operand")
6979 (match_operand:HF 3 "s_register_operand")))]
6980 "TARGET_VFP_FP16INST"
6983 enum rtx_code code = GET_CODE (operands[1]);
6986 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6987 &XEXP (operands[1], 1)))
6990 code = GET_CODE (operands[1]);
6991 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6992 XEXP (operands[1], 1), NULL_RTX);
6993 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6997 (define_expand "movsfcc"
6998 [(set (match_operand:SF 0 "s_register_operand")
6999 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7000 (match_operand:SF 2 "s_register_operand")
7001 (match_operand:SF 3 "s_register_operand")))]
7002 "TARGET_32BIT && TARGET_HARD_FLOAT"
7005 enum rtx_code code = GET_CODE (operands[1]);
7008 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7009 &XEXP (operands[1], 1)))
7012 code = GET_CODE (operands[1]);
7013 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7014 XEXP (operands[1], 1), NULL_RTX);
7015 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7019 (define_expand "movdfcc"
7020 [(set (match_operand:DF 0 "s_register_operand")
7021 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7022 (match_operand:DF 2 "s_register_operand")
7023 (match_operand:DF 3 "s_register_operand")))]
7024 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7027 enum rtx_code code = GET_CODE (operands[1]);
7030 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7031 &XEXP (operands[1], 1)))
7033 code = GET_CODE (operands[1]);
7034 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7035 XEXP (operands[1], 1), NULL_RTX);
7036 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7040 (define_insn "*cmov<mode>"
7041 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7042 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7043 [(match_operand 2 "cc_register" "") (const_int 0)])
7044 (match_operand:SDF 3 "s_register_operand"
7046 (match_operand:SDF 4 "s_register_operand"
7047 "<F_constraint>")))]
7048 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7051 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7058 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7063 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7069 [(set_attr "conds" "use")
7070 (set_attr "type" "fcsel")]
7073 (define_insn "*cmovhf"
7074 [(set (match_operand:HF 0 "s_register_operand" "=t")
7075 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7076 [(match_operand 2 "cc_register" "") (const_int 0)])
7077 (match_operand:HF 3 "s_register_operand" "t")
7078 (match_operand:HF 4 "s_register_operand" "t")))]
7079 "TARGET_VFP_FP16INST"
7082 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7089 return \"vsel%d1.f16\\t%0, %3, %4\";
7094 return \"vsel%D1.f16\\t%0, %4, %3\";
7100 [(set_attr "conds" "use")
7101 (set_attr "type" "fcsel")]
7104 (define_insn_and_split "*movsicc_insn"
7105 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7107 (match_operator 3 "arm_comparison_operator"
7108 [(match_operand 4 "cc_register" "") (const_int 0)])
7109 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7110 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7121 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7122 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7123 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7124 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7125 "&& reload_completed"
7128 enum rtx_code rev_code;
7132 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7134 gen_rtx_SET (operands[0], operands[1])));
7136 rev_code = GET_CODE (operands[3]);
7137 mode = GET_MODE (operands[4]);
7138 if (mode == CCFPmode || mode == CCFPEmode)
7139 rev_code = reverse_condition_maybe_unordered (rev_code);
7141 rev_code = reverse_condition (rev_code);
7143 rev_cond = gen_rtx_fmt_ee (rev_code,
7147 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7149 gen_rtx_SET (operands[0], operands[2])));
7152 [(set_attr "length" "4,4,4,4,8,8,8,8")
7153 (set_attr "conds" "use")
7154 (set_attr_alternative "type"
7155 [(if_then_else (match_operand 2 "const_int_operand" "")
7156 (const_string "mov_imm")
7157 (const_string "mov_reg"))
7158 (const_string "mvn_imm")
7159 (if_then_else (match_operand 1 "const_int_operand" "")
7160 (const_string "mov_imm")
7161 (const_string "mov_reg"))
7162 (const_string "mvn_imm")
7163 (const_string "multiple")
7164 (const_string "multiple")
7165 (const_string "multiple")
7166 (const_string "multiple")])]
7169 (define_insn "*movsfcc_soft_insn"
7170 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7171 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7172 [(match_operand 4 "cc_register" "") (const_int 0)])
7173 (match_operand:SF 1 "s_register_operand" "0,r")
7174 (match_operand:SF 2 "s_register_operand" "r,0")))]
7175 "TARGET_ARM && TARGET_SOFT_FLOAT"
7179 [(set_attr "conds" "use")
7180 (set_attr "type" "mov_reg")]
7184 ;; Jump and linkage insns
7186 (define_expand "jump"
7188 (label_ref (match_operand 0 "" "")))]
7193 (define_insn "*arm_jump"
7195 (label_ref (match_operand 0 "" "")))]
7199 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7201 arm_ccfsm_state += 2;
7204 return \"b%?\\t%l0\";
7207 [(set_attr "predicable" "yes")
7208 (set (attr "length")
7210 (and (match_test "TARGET_THUMB2")
7211 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7212 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7215 (set_attr "type" "branch")]
7218 (define_expand "call"
7219 [(parallel [(call (match_operand 0 "memory_operand")
7220 (match_operand 1 "general_operand"))
7221 (use (match_operand 2 "" ""))
7222 (clobber (reg:SI LR_REGNUM))])]
7227 tree addr = MEM_EXPR (operands[0]);
7229 /* In an untyped call, we can get NULL for operand 2. */
7230 if (operands[2] == NULL_RTX)
7231 operands[2] = const0_rtx;
7233 /* Decide if we should generate indirect calls by loading the
7234 32-bit address of the callee into a register before performing the
7236 callee = XEXP (operands[0], 0);
7237 if (GET_CODE (callee) == SYMBOL_REF
7238 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7240 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7242 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7243 /* Indirect call: set r9 with FDPIC value of callee. */
7244 XEXP (operands[0], 0)
7245 = arm_load_function_descriptor (XEXP (operands[0], 0));
7247 if (detect_cmse_nonsecure_call (addr))
7249 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7251 emit_call_insn (pat);
7255 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7256 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7259 /* Restore FDPIC register (r9) after call. */
7262 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7263 rtx initial_fdpic_reg
7264 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7266 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7267 initial_fdpic_reg));
7274 (define_insn "restore_pic_register_after_call"
7275 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7276 (unspec:SI [(match_dup 0)
7277 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7278 UNSPEC_PIC_RESTORE))]
;; Plain call expander used by the "call" pattern above once the
;; address/FDPIC/CMSE handling is done: a parallel of the call itself,
;; a use of operand 2, and a clobber of the link register.
7285 (define_expand "call_internal"
7286 [(parallel [(call (match_operand 0 "memory_operand")
7287 (match_operand 1 "general_operand"))
7288 (use (match_operand 2 "" ""))
7289 (clobber (reg:SI LR_REGNUM))])])
7291 (define_expand "nonsecure_call_internal"
7292 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7293 UNSPEC_NONSECURE_MEM)
7294 (match_operand 1 "general_operand"))
7295 (use (match_operand 2 "" ""))
7296 (clobber (reg:SI LR_REGNUM))])]
7301 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7302 gen_rtx_REG (SImode, R4_REGNUM),
7305 operands[0] = replace_equiv_address (operands[0], tmp);
7308 (define_insn "*call_reg_armv5"
7309 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7310 (match_operand 1 "" ""))
7311 (use (match_operand 2 "" ""))
7312 (clobber (reg:SI LR_REGNUM))]
7313 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7315 [(set_attr "type" "call")]
7318 (define_insn "*call_reg_arm"
7319 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7320 (match_operand 1 "" ""))
7321 (use (match_operand 2 "" ""))
7322 (clobber (reg:SI LR_REGNUM))]
7323 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7325 return output_call (operands);
7327 ;; length is worst case, normally it is only two
7328 [(set_attr "length" "12")
7329 (set_attr "type" "call")]
7333 (define_expand "call_value"
7334 [(parallel [(set (match_operand 0 "" "")
7335 (call (match_operand 1 "memory_operand")
7336 (match_operand 2 "general_operand")))
7337 (use (match_operand 3 "" ""))
7338 (clobber (reg:SI LR_REGNUM))])]
7343 tree addr = MEM_EXPR (operands[1]);
7345 /* In an untyped call, we can get NULL for operand 2. */
7346 if (operands[3] == 0)
7347 operands[3] = const0_rtx;
7349 /* Decide if we should generate indirect calls by loading the
7350 32-bit address of the callee into a register before performing the
7352 callee = XEXP (operands[1], 0);
7353 if (GET_CODE (callee) == SYMBOL_REF
7354 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7356 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7358 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7359 /* Indirect call: set r9 with FDPIC value of callee. */
7360 XEXP (operands[1], 0)
7361 = arm_load_function_descriptor (XEXP (operands[1], 0));
7363 if (detect_cmse_nonsecure_call (addr))
7365 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7366 operands[2], operands[3]);
7367 emit_call_insn (pat);
7371 pat = gen_call_value_internal (operands[0], operands[1],
7372 operands[2], operands[3]);
7373 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7376 /* Restore FDPIC register (r9) after call. */
7379 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7380 rtx initial_fdpic_reg
7381 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7383 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7384 initial_fdpic_reg));
;; Value-returning counterpart of call_internal, used by "call_value":
;; the call's result is captured in operand 0, with a use of operand 3
;; and a clobber of the link register.
7391 (define_expand "call_value_internal"
7392 [(parallel [(set (match_operand 0 "" "")
7393 (call (match_operand 1 "memory_operand")
7394 (match_operand 2 "general_operand")))
7395 (use (match_operand 3 "" ""))
7396 (clobber (reg:SI LR_REGNUM))])])
7398 (define_expand "nonsecure_call_value_internal"
7399 [(parallel [(set (match_operand 0 "" "")
7400 (call (unspec:SI [(match_operand 1 "memory_operand")]
7401 UNSPEC_NONSECURE_MEM)
7402 (match_operand 2 "general_operand")))
7403 (use (match_operand 3 "" ""))
7404 (clobber (reg:SI LR_REGNUM))])]
7409 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7410 gen_rtx_REG (SImode, R4_REGNUM),
7413 operands[1] = replace_equiv_address (operands[1], tmp);
7416 (define_insn "*call_value_reg_armv5"
7417 [(set (match_operand 0 "" "")
7418 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7419 (match_operand 2 "" "")))
7420 (use (match_operand 3 "" ""))
7421 (clobber (reg:SI LR_REGNUM))]
7422 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7424 [(set_attr "type" "call")]
7427 (define_insn "*call_value_reg_arm"
7428 [(set (match_operand 0 "" "")
7429 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7430 (match_operand 2 "" "")))
7431 (use (match_operand 3 "" ""))
7432 (clobber (reg:SI LR_REGNUM))]
7433 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7435 return output_call (&operands[1]);
7437 [(set_attr "length" "12")
7438 (set_attr "type" "call")]
7441 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7442 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7444 (define_insn "*call_symbol"
7445 [(call (mem:SI (match_operand:SI 0 "" ""))
7446 (match_operand 1 "" ""))
7447 (use (match_operand 2 "" ""))
7448 (clobber (reg:SI LR_REGNUM))]
7450 && !SIBLING_CALL_P (insn)
7451 && (GET_CODE (operands[0]) == SYMBOL_REF)
7452 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7455 rtx op = operands[0];
7457 /* Switch mode now when possible. */
7458 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7459 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7460 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7462 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7464 [(set_attr "type" "call")]
7467 (define_insn "*call_value_symbol"
7468 [(set (match_operand 0 "" "")
7469 (call (mem:SI (match_operand:SI 1 "" ""))
7470 (match_operand:SI 2 "" "")))
7471 (use (match_operand 3 "" ""))
7472 (clobber (reg:SI LR_REGNUM))]
7474 && !SIBLING_CALL_P (insn)
7475 && (GET_CODE (operands[1]) == SYMBOL_REF)
7476 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7479 rtx op = operands[1];
7481 /* Switch mode now when possible. */
7482 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7483 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7484 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7486 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7488 [(set_attr "type" "call")]
7491 (define_expand "sibcall_internal"
7492 [(parallel [(call (match_operand 0 "memory_operand")
7493 (match_operand 1 "general_operand"))
7495 (use (match_operand 2 "" ""))])])
7497 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7498 (define_expand "sibcall"
7499 [(parallel [(call (match_operand 0 "memory_operand")
7500 (match_operand 1 "general_operand"))
7502 (use (match_operand 2 "" ""))])]
7508 if ((!REG_P (XEXP (operands[0], 0))
7509 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7510 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7511 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7512 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7514 if (operands[2] == NULL_RTX)
7515 operands[2] = const0_rtx;
7517 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7518 arm_emit_call_insn (pat, operands[0], true);
7523 (define_expand "sibcall_value_internal"
7524 [(parallel [(set (match_operand 0 "" "")
7525 (call (match_operand 1 "memory_operand")
7526 (match_operand 2 "general_operand")))
7528 (use (match_operand 3 "" ""))])])
7530 (define_expand "sibcall_value"
7531 [(parallel [(set (match_operand 0 "" "")
7532 (call (match_operand 1 "memory_operand")
7533 (match_operand 2 "general_operand")))
7535 (use (match_operand 3 "" ""))])]
7541 if ((!REG_P (XEXP (operands[1], 0))
7542 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7543 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7544 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7545 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7547 if (operands[3] == NULL_RTX)
7548 operands[3] = const0_rtx;
7550 pat = gen_sibcall_value_internal (operands[0], operands[1],
7551 operands[2], operands[3]);
7552 arm_emit_call_insn (pat, operands[1], true);
;; Sibling (tail) call through a register or symbol. Alternative 1
;; (US: symbol) emits a direct branch, with (PLT) when NEED_PLT_RELOC;
;; otherwise an indirect register call: bx on arch4t/5t+, else a raw
;; mov to pc.
;; NOTE(review): listing omits interior lines (e.g. 7560, 7563,
;; 7566-7567, 7570, 7572-7573) -- verify against upstream arm.md.
7557 (define_insn "*sibcall_insn"
7558 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7559 (match_operand 1 "" ""))
7561 (use (match_operand 2 "" ""))]
7562 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7564 if (which_alternative == 1)
7565 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7568 if (arm_arch5t || arm_arch4t)
7569 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7571 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7574 [(set_attr "type" "call")]
7577 (define_insn "*sibcall_value_insn"
7578 [(set (match_operand 0 "" "")
7579 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7580 (match_operand 2 "" "")))
7582 (use (match_operand 3 "" ""))]
7583 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7585 if (which_alternative == 1)
7586 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7589 if (arm_arch5t || arm_arch4t)
7590 return \"bx%?\\t%1\";
7592 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7595 [(set_attr "type" "call")]
7598 (define_expand "<return_str>return"
7600 "(TARGET_ARM || (TARGET_THUMB2
7601 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7602 && !IS_STACKALIGN (arm_current_func_type ())))
7603 <return_cond_false>"
7608 thumb2_expand_return (<return_simple_p>);
7615 ;; Often the return insn will be the same as loading from memory, so set attr
7616 (define_insn "*arm_return"
7618 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7621 if (arm_ccfsm_state == 2)
7623 arm_ccfsm_state += 2;
7626 return output_return_instruction (const_true_rtx, true, false, false);
7628 [(set_attr "type" "load_4")
7629 (set_attr "length" "12")
7630 (set_attr "predicable" "yes")]
7633 (define_insn "*cond_<return_str>return"
7635 (if_then_else (match_operator 0 "arm_comparison_operator"
7636 [(match_operand 1 "cc_register" "") (const_int 0)])
7639 "TARGET_ARM <return_cond_true>"
7642 if (arm_ccfsm_state == 2)
7644 arm_ccfsm_state += 2;
7647 return output_return_instruction (operands[0], true, false,
7650 [(set_attr "conds" "use")
7651 (set_attr "length" "12")
7652 (set_attr "type" "load_4")]
7655 (define_insn "*cond_<return_str>return_inverted"
7657 (if_then_else (match_operator 0 "arm_comparison_operator"
7658 [(match_operand 1 "cc_register" "") (const_int 0)])
7661 "TARGET_ARM <return_cond_true>"
7664 if (arm_ccfsm_state == 2)
7666 arm_ccfsm_state += 2;
7669 return output_return_instruction (operands[0], true, true,
7672 [(set_attr "conds" "use")
7673 (set_attr "length" "12")
7674 (set_attr "type" "load_4")]
;; Simple (single-instruction, 4-byte, predicable) function return,
;; emitted via output_return_instruction with simple=true. The
;; arm_ccfsm_state check cooperates with the conditional-execution
;; state machine, as in the other return patterns above.
;; NOTE(review): listing omits interior lines (e.g. 7678-7681,
;; 7683, 7685-7686, 7688) -- verify against upstream arm.md.
7677 (define_insn "*arm_simple_return"
7682 if (arm_ccfsm_state == 2)
7684 arm_ccfsm_state += 2;
7687 return output_return_instruction (const_true_rtx, true, false, true);
7689 [(set_attr "type" "branch")
7690 (set_attr "length" "4")
7691 (set_attr "predicable" "yes")]
7694 ;; Generate a sequence of instructions to determine if the processor is
7695 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7698 (define_expand "return_addr_mask"
7700 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7702 (set (match_operand:SI 0 "s_register_operand")
7703 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7705 (const_int 67108860)))] ; 0x03fffffc
7708 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7711 (define_insn "*check_arch2"
7712 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7713 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7716 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7717 [(set_attr "length" "8")
7718 (set_attr "conds" "set")
7719 (set_attr "type" "multiple")]
7722 ;; Call subroutine returning any type.
7724 (define_expand "untyped_call"
7725 [(parallel [(call (match_operand 0 "" "")
7727 (match_operand 1 "" "")
7728 (match_operand 2 "" "")])]
7729 "TARGET_EITHER && !TARGET_FDPIC"
7733 rtx par = gen_rtx_PARALLEL (VOIDmode,
7734 rtvec_alloc (XVECLEN (operands[2], 0)));
7735 rtx addr = gen_reg_rtx (Pmode);
7739 emit_move_insn (addr, XEXP (operands[1], 0));
7740 mem = change_address (operands[1], BLKmode, addr);
7742 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7744 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7746 /* Default code only uses r0 as a return value, but we could
7747 be using anything up to 4 registers. */
7748 if (REGNO (src) == R0_REGNUM)
7749 src = gen_rtx_REG (TImode, R0_REGNUM);
7751 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7753 size += GET_MODE_SIZE (GET_MODE (src));
7756 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7760 for (i = 0; i < XVECLEN (par, 0); i++)
7762 HOST_WIDE_INT offset = 0;
7763 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7766 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7768 mem = change_address (mem, GET_MODE (reg), NULL);
7769 if (REGNO (reg) == R0_REGNUM)
7771 /* On thumb we have to use a write-back instruction. */
7772 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7773 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7774 size = TARGET_ARM ? 16 : 0;
7778 emit_move_insn (mem, reg);
7779 size = GET_MODE_SIZE (GET_MODE (reg));
7783 /* The optimizer does not know that the call sets the function value
7784 registers we stored in the result block. We avoid problems by
7785 claiming that all hard registers are used and clobbered at this
7787 emit_insn (gen_blockage ());
7793 (define_expand "untyped_return"
7794 [(match_operand:BLK 0 "memory_operand")
7795 (match_operand 1 "" "")]
7796 "TARGET_EITHER && !TARGET_FDPIC"
7800 rtx addr = gen_reg_rtx (Pmode);
7804 emit_move_insn (addr, XEXP (operands[0], 0));
7805 mem = change_address (operands[0], BLKmode, addr);
7807 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7809 HOST_WIDE_INT offset = 0;
7810 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7813 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7815 mem = change_address (mem, GET_MODE (reg), NULL);
7816 if (REGNO (reg) == R0_REGNUM)
7818 /* On thumb we have to use a write-back instruction. */
7819 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7820 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7821 size = TARGET_ARM ? 16 : 0;
7825 emit_move_insn (reg, mem);
7826 size = GET_MODE_SIZE (GET_MODE (reg));
7830 /* Emit USE insns before the return. */
7831 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7832 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7834 /* Construct the return. */
7835 expand_naked_return ();
7841 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7842 ;; all of memory. This blocks insns from being moved across this point.
;; Scheduling barrier (see comment above): an unspec_volatile that
;; emits nothing (length 0) but blocks insn motion across this point.
;; NOTE(review): listing omits original lines 7846-7847 (condition and
;; empty template) -- verify against upstream arm.md.
7844 (define_insn "blockage"
7845 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7848 [(set_attr "length" "0")
7849 (set_attr "type" "block")]
7852 ;; Since we hard code r0 here use the 'o' constraint to prevent
7853 ;; provoking undefined behaviour in the hardware with putting out
7854 ;; auto-increment operations with potentially r0 as the base register.
;; Single stack-probe store; the "o" (offsettable memory) constraint is
;; deliberate -- see the comment above about avoiding auto-increment
;; addressing with r0 as base.
;; NOTE(review): listing omits original lines 7858-7859 (condition and
;; output template) -- verify against upstream arm.md.
7855 (define_insn "probe_stack"
7856 [(set (match_operand:SI 0 "memory_operand" "=o")
7857 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7860 [(set_attr "type" "store_4")
7861 (set_attr "predicable" "yes")]
;; Probe a range of stack pages: operand 1 ties to operand 0 ("0"
;; constraint) as the running probe address, operand 2 is the range
;; end; the loop is emitted by output_probe_stack_range and clobbers
;; the condition codes.
;; NOTE(review): listing omits interior lines (e.g. 7869-7870, 7872) --
;; verify against upstream arm.md.
7864 (define_insn "probe_stack_range"
7865 [(set (match_operand:SI 0 "register_operand" "=r")
7866 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7867 (match_operand:SI 2 "register_operand" "r")]
7868 VUNSPEC_PROBE_STACK_RANGE))]
7871 return output_probe_stack_range (operands[0], operands[2]);
7873 [(set_attr "type" "multiple")
7874 (set_attr "conds" "clob")]
7877 ;; Named patterns for stack smashing protection.
;; Expander: copy the canary from the guard location (operand 1) into
;; the protected stack slot (operand 0), with two scratch registers.
7878 (define_expand "stack_protect_combined_set"
7880 [(set (match_operand:SI 0 "memory_operand")
7881 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7883 (clobber (match_scratch:SI 2 ""))
7884 (clobber (match_scratch:SI 3 ""))])]
7889 ;; Use a separate insn from the above expand to be able to have the mem outside
7890 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7891 ;; try to reload the guard since we need to control how PIC access is done in
7892 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7893 ;; legitimize_pic_address ()).
7894 (define_insn_and_split "*stack_protect_combined_set_insn"
7895 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7896 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7898 (clobber (match_scratch:SI 2 "=&l,&r"))
7899 (clobber (match_scratch:SI 3 "=&l,&r"))]
;; Split into the simple *stack_protect_set_insn below once the guard
;; address has been materialized in scratch register 2.
7903 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7905 (clobber (match_dup 2))])]
;; PIC case: for FDPIC the PIC base is the fixed FDPIC_REGNUM register,
;; otherwise scratch 3 is used as the PIC base.
7913 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7915 pic_reg = operands[3];
7917 /* Forces recomputing of GOT base now. */
7918 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7919 true /*compute_now*/);
;; Non-PIC case: the guard address is either usable directly or loaded
;; from the constant pool into scratch register 2.
7923 if (address_operand (operands[1], SImode))
7924 operands[2] = operands[1];
7927 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7928 emit_move_insn (operands[2], mem);
7932 [(set_attr "arch" "t1,32")]
7935 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7936 ;; canary value does not live beyond the life of this sequence.
;; Load the canary, store it to the protected slot, then immediately
;; zero the register so the value cannot leak (movs on Thumb-1, mov on
;; 32-bit; hence the differing lengths and conds below).
7937 (define_insn "*stack_protect_set_insn"
7938 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7939 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7941 (clobber (match_dup 1))]
7944 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7945 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7946 [(set_attr "length" "8,12")
7947 (set_attr "conds" "clob,nocond")
7948 (set_attr "type" "multiple")
7949 (set_attr "arch" "t1,32")]
;; Expander: compare the stored canary (operand 0) against the guard
;; (operand 1) and branch to label operand 2 on equality; two scratch
;; registers and CC are clobbered.
7952 (define_expand "stack_protect_combined_test"
7956 (eq (match_operand:SI 0 "memory_operand")
7957 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7959 (label_ref (match_operand 2))
7961 (clobber (match_scratch:SI 3 ""))
7962 (clobber (match_scratch:SI 4 ""))
7963 (clobber (reg:CC CC_REGNUM))])]
7968 ;; Use a separate insn from the above expand to be able to have the mem outside
7969 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7970 ;; try to reload the guard since we need to control how PIC access is done in
7971 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7972 ;; legitimize_pic_address ()).
7973 (define_insn_and_split "*stack_protect_combined_test_insn"
7976 (eq (match_operand:SI 0 "memory_operand" "m,m")
7977 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7979 (label_ref (match_operand 2))
7981 (clobber (match_scratch:SI 3 "=&l,&r"))
7982 (clobber (match_scratch:SI 4 "=&l,&r"))
7983 (clobber (reg:CC CC_REGNUM))]
;; PIC handling mirrors *stack_protect_combined_set_insn: FDPIC uses
;; the fixed FDPIC_REGNUM, otherwise scratch 4 is the PIC base.
7996 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7998 pic_reg = operands[4];
8000 /* Forces recomputing of GOT base now. */
8001 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8002 true /*compute_now*/);
8006 if (address_operand (operands[1], SImode))
8007 operands[3] = operands[1];
8010 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8011 emit_move_insn (operands[3], mem);
;; 32-bit path: emit the flag-setting test insn, then a conditional
;; branch on EQ via the CC_Z register.
8016 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8018 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8019 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8020 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
;; Thumb-1 path: the test insn leaves its result in scratch 4; branch
;; with cbranchsi4 on it being zero.
8024 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8026 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8027 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8032 [(set_attr "arch" "t1,32")]
;; Load the canary and the guard, then XOR them with flag-setting eors;
;; CC_Z holds the equality result.  Clobbers scratch 0 and the guard
;; address register (operand 2).
8035 (define_insn "arm_stack_protect_test_insn"
8036 [(set (reg:CC_Z CC_REGNUM)
8037 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8038 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8041 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8042 (clobber (match_dup 2))]
8044 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8045 [(set_attr "length" "8,12")
8046 (set_attr "conds" "set")
8047 (set_attr "type" "multiple")
8048 (set_attr "arch" "t,32")]
;; Jump-table dispatch: normalize the index against the lower bound,
;; pick the ARM/Thumb-1/Thumb-2 variant of the internal casesi pattern,
;; and emit it.
8051 (define_expand "casesi"
8052 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8053 (match_operand:SI 1 "const_int_operand") ; lower bound
8054 (match_operand:SI 2 "const_int_operand") ; total range
8055 (match_operand:SI 3 "" "") ; table label
8056 (match_operand:SI 4 "" "")] ; Out of range label
8057 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8060 enum insn_code code;
;; Subtract the lower bound so the table index is zero-based.
8061 if (operands[1] != const0_rtx)
8063 rtx reg = gen_reg_rtx (SImode);
8065 emit_insn (gen_addsi3 (reg, operands[0],
8066 gen_int_mode (-INTVAL (operands[1]),
;; Select the concrete pattern for the current instruction set.
8072 code = CODE_FOR_arm_casesi_internal;
8073 else if (TARGET_THUMB1)
8074 code = CODE_FOR_thumb1_casesi_internal_pic;
8076 code = CODE_FOR_thumb2_casesi_internal_pic;
8078 code = CODE_FOR_thumb2_casesi_internal;
;; The range operand may need forcing into a register if the chosen
;; insn's predicate rejects it as-is.
8080 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8081 operands[2] = force_reg (SImode, operands[2]);
8083 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8084 operands[3], operands[4]));
8089 ;; The USE in this pattern is needed to tell flow analysis that this is
8090 ;; a CASESI insn. It has no other purpose.
;; Expander: build the table-load MEM (index * 4 + table label) in
;; operand 4 and mark it read-only and non-trapping.
8091 (define_expand "arm_casesi_internal"
8092 [(parallel [(set (pc)
8094 (leu (match_operand:SI 0 "s_register_operand")
8095 (match_operand:SI 1 "arm_rhs_operand"))
8097 (label_ref:SI (match_operand 3 ""))))
8098 (clobber (reg:CC CC_REGNUM))
8099 (use (label_ref:SI (match_operand 2 "")))])]
8102 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8103 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8104 gen_rtx_LABEL_REF (SImode, operands[2]));
8105 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8106 MEM_READONLY_P (operands[4]) = 1;
8107 MEM_NOTRAP_P (operands[4]) = 1;
;; Matching insn: cmp against the range, then either addls into pc or
;; ldrls from the dispatch table, falling through to a branch to the
;; out-of-range label.  (The condition selecting between the two return
;; statements is missing from this extract — confirm upstream.)
8110 (define_insn "*arm_casesi_internal"
8111 [(parallel [(set (pc)
8113 (leu (match_operand:SI 0 "s_register_operand" "r")
8114 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8115 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8116 (label_ref:SI (match_operand 2 "" ""))))
8117 (label_ref:SI (match_operand 3 "" ""))))
8118 (clobber (reg:CC CC_REGNUM))
8119 (use (label_ref:SI (match_dup 2)))])]
8123 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8124 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8126 [(set_attr "conds" "clob")
8127 (set_attr "length" "12")
8128 (set_attr "type" "multiple")]
;; Register-indirect and memory-indirect jumps.
8131 (define_expand "indirect_jump"
8133 (match_operand:SI 0 "s_register_operand"))]
8136 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8137 address and use bx. */
8141 tmp = gen_reg_rtx (SImode);
8142 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8148 ;; NB Never uses BX.
;; Plain ARM-mode register-indirect jump via mov into pc; predicable.
8149 (define_insn "*arm_indirect_jump"
8151 (match_operand:SI 0 "s_register_operand" "r"))]
8153 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8154 [(set_attr "predicable" "yes")
8155 (set_attr "type" "branch")]
;; Jump via a value loaded straight from memory into pc.
8158 (define_insn "*load_indirect_jump"
8160 (match_operand:SI 0 "memory_operand" "m"))]
8162 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8163 [(set_attr "type" "load_4")
8164 (set_attr "pool_range" "4096")
8165 (set_attr "neg_pool_range" "4084")
8166 (set_attr "predicable" "yes")]
;; NOTE(review): the next three lines are the tail of an insn whose
;; header is absent from this extract; confirm against upstream arm.md.
8176 [(set (attr "length")
8177 (if_then_else (eq_attr "is_thumb" "yes")
8180 (set_attr "type" "mov_reg")]
;; Unconditional trap: emit a permanently-undefined instruction
;; encoding (.inst 0xe7f000f0 for ARM, 0xdeff for Thumb).
8184 [(trap_if (const_int 1) (const_int 0))]
8188 return \".inst\\t0xe7f000f0\";
8190 return \".inst\\t0xdeff\";
8192 [(set (attr "length")
8193 (if_then_else (eq_attr "is_thumb" "yes")
8196 (set_attr "type" "trap")
8197 (set_attr "conds" "unconditional")]
8201 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; op0 = op1 <op> (op2 * 2^n), emitted as the LSL-shifted-operand form
;; of the base instruction (%b3 prints log2 of the power-of-two).
8203 (define_insn "*<arith_shift_insn>_multsi"
8204 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8206 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8207 (match_operand:SI 3 "power_of_two_operand" ""))
8208 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8210 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8211 [(set_attr "predicable" "yes")
8212 (set_attr "shift" "2")
8213 (set_attr "arch" "a,t2")
8214 (set_attr "type" "alu_shift_imm")])
;; Same idea for any non-MULT shift operator; the third alternative
;; permits a register shift amount (ARM mode only, per "arch" "a").
8216 (define_insn "*<arith_shift_insn>_shiftsi"
8217 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8219 (match_operator:SI 2 "shift_nomul_operator"
8220 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8221 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8222 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8223 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8224 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8225 [(set_attr "predicable" "yes")
8226 (set_attr "shift" "3")
8227 (set_attr "arch" "a,t2,a")
8228 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
;; Split a doubly-nested shiftable-op expression into two insns, using
;; the spare register (operand 8) for the intermediate result.
;; NOTE(review): the (define_split header line is absent from this
;; extract.
8231 [(set (match_operand:SI 0 "s_register_operand" "")
8232 (match_operator:SI 1 "shiftable_operator"
8233 [(match_operator:SI 2 "shiftable_operator"
8234 [(match_operator:SI 3 "shift_operator"
8235 [(match_operand:SI 4 "s_register_operand" "")
8236 (match_operand:SI 5 "reg_or_int_operand" "")])
8237 (match_operand:SI 6 "s_register_operand" "")])
8238 (match_operand:SI 7 "arm_rhs_operand" "")]))
8239 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8242 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8245 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variant: compare the shifted-arithmetic result against
;; zero (CC_NOOV) and also keep the result in operand 0.
8248 (define_insn "*arith_shiftsi_compare0"
8249 [(set (reg:CC_NOOV CC_REGNUM)
8251 (match_operator:SI 1 "shiftable_operator"
8252 [(match_operator:SI 3 "shift_operator"
8253 [(match_operand:SI 4 "s_register_operand" "r,r")
8254 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8255 (match_operand:SI 2 "s_register_operand" "r,r")])
8257 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8258 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8261 "%i1s%?\\t%0, %2, %4%S3"
8262 [(set_attr "conds" "set")
8263 (set_attr "shift" "4")
8264 (set_attr "arch" "32,a")
8265 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As above but the arithmetic result itself is discarded (scratch 0).
8267 (define_insn "*arith_shiftsi_compare0_scratch"
8268 [(set (reg:CC_NOOV CC_REGNUM)
8270 (match_operator:SI 1 "shiftable_operator"
8271 [(match_operator:SI 3 "shift_operator"
8272 [(match_operand:SI 4 "s_register_operand" "r,r")
8273 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8274 (match_operand:SI 2 "s_register_operand" "r,r")])
8276 (clobber (match_scratch:SI 0 "=r,r"))]
8278 "%i1s%?\\t%0, %2, %4%S3"
8279 [(set_attr "conds" "set")
8280 (set_attr "shift" "4")
8281 (set_attr "arch" "32,a")
8282 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; op0 = op1 - (op3 shifted by op4), shifted-operand form of SUB.
8284 (define_insn "*sub_shiftsi"
8285 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8286 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8287 (match_operator:SI 2 "shift_operator"
8288 [(match_operand:SI 3 "s_register_operand" "r,r")
8289 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8291 "sub%?\\t%0, %1, %3%S2"
8292 [(set_attr "predicable" "yes")
8293 (set_attr "predicable_short_it" "no")
8294 (set_attr "shift" "3")
8295 (set_attr "arch" "32,a")
8296 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting subs with shifted operand; result kept in operand 0.
8298 (define_insn "*sub_shiftsi_compare0"
8299 [(set (reg:CC_NOOV CC_REGNUM)
8301 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8302 (match_operator:SI 2 "shift_operator"
8303 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8304 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8306 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8307 (minus:SI (match_dup 1)
8308 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8310 "subs%?\\t%0, %1, %3%S2"
8311 [(set_attr "conds" "set")
8312 (set_attr "shift" "3")
8313 (set_attr "arch" "32,a,a")
8314 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Same, but only the flags are wanted (result goes to a scratch).
8316 (define_insn "*sub_shiftsi_compare0_scratch"
8317 [(set (reg:CC_NOOV CC_REGNUM)
8319 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8320 (match_operator:SI 2 "shift_operator"
8321 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8322 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8324 (clobber (match_scratch:SI 0 "=r,r,r"))]
8326 "subs%?\\t%0, %1, %3%S2"
8327 [(set_attr "conds" "set")
8328 (set_attr "shift" "3")
8329 (set_attr "arch" "32,a,a")
8330 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; op0 = (cond ? 1 : 0) & op3: split after reload into two
;; conditionally-executed sets.  Operands 4/5 — the comparison and its
;; reverse, both against the CC register — are built in the preparation
;; statements below.
8333 (define_insn_and_split "*and_scc"
8334 [(set (match_operand:SI 0 "s_register_operand" "=r")
8335 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8336 [(match_operand 2 "cc_register" "") (const_int 0)])
8337 (match_operand:SI 3 "s_register_operand" "r")))]
8339 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8340 "&& reload_completed"
8341 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8342 (cond_exec (match_dup 4) (set (match_dup 0)
8343 (and:SI (match_dup 3) (const_int 1))))]
8345 machine_mode mode = GET_MODE (operands[2]);
8346 enum rtx_code rc = GET_CODE (operands[1]);
8348 /* Note that operands[4] is the same as operands[1],
8349 but with VOIDmode as the result. */
8350 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
;; FP compare modes need the unordered-aware condition reversal.
8351 if (mode == CCFPmode || mode == CCFPEmode)
8352 rc = reverse_condition_maybe_unordered (rc);
8354 rc = reverse_condition (rc);
8355 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8357 [(set_attr "conds" "use")
8358 (set_attr "type" "multiple")
8359 (set_attr "length" "8")]
;; op0 = (cond ? 1 : 0) | op3.  Alternative 0 ties op0 to op3 so one
;; conditional ORR suffices; the split only fires when op0 != op3.
8362 (define_insn_and_split "*ior_scc"
8363 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8364 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8365 [(match_operand 2 "cc_register" "") (const_int 0)])
8366 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8371 "&& reload_completed
8372 && REGNO (operands [0]) != REGNO (operands[3])"
8373 ;; && which_alternative == 1
8374 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8375 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8376 (cond_exec (match_dup 4) (set (match_dup 0)
8377 (ior:SI (match_dup 3) (const_int 1))))]
8379 machine_mode mode = GET_MODE (operands[2]);
8380 enum rtx_code rc = GET_CODE (operands[1]);
8382 /* Note that operands[4] is the same as operands[1],
8383 but with VOIDmode as the result. */
8384 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8385 if (mode == CCFPmode || mode == CCFPEmode)
8386 rc = reverse_condition_maybe_unordered (rc);
8388 rc = reverse_condition (rc);
8389 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8391 [(set_attr "conds" "use")
8392 (set_attr "length" "4,8")
8393 (set_attr "type" "logic_imm,multiple")]
8396 ; A series of splitters for the compare_scc pattern below. Note that
8397 ; order is important.
;; x < 0  ->  logical shift right by 31 (extract the sign bit).
8399 [(set (match_operand:SI 0 "s_register_operand" "")
8400 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8402 (clobber (reg:CC CC_REGNUM))]
8403 "TARGET_32BIT && reload_completed"
8404 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; x >= 0  ->  bitwise NOT, then shift the inverted sign bit down.
8407 [(set (match_operand:SI 0 "s_register_operand" "")
8408 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8410 (clobber (reg:CC CC_REGNUM))]
8411 "TARGET_32BIT && reload_completed"
8412 [(set (match_dup 0) (not:SI (match_dup 1)))
8413 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; x == 0 on ARMv5T+: clz yields 32 only for zero, so clz >> 5 gives
;; the 0/1 result without touching the flags.
8416 [(set (match_operand:SI 0 "s_register_operand" "")
8417 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8419 (clobber (reg:CC CC_REGNUM))]
8420 "arm_arch5t && TARGET_32BIT"
8421 [(set (match_dup 0) (clz:SI (match_dup 1)))
8422 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Fallback x == 0: compute 1 - x with flags set, then conditionally
;; clear the result on unsigned-less-than.
8426 [(set (match_operand:SI 0 "s_register_operand" "")
8427 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8429 (clobber (reg:CC CC_REGNUM))]
8430 "TARGET_32BIT && reload_completed"
8432 [(set (reg:CC CC_REGNUM)
8433 (compare:CC (const_int 1) (match_dup 1)))
8435 (minus:SI (const_int 1) (match_dup 1)))])
8436 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8437 (set (match_dup 0) (const_int 0)))])
;; x != const: add the negated constant (operand 3, built below) with
;; flags set, then force the result to 1 when NE.
8440 [(set (match_operand:SI 0 "s_register_operand" "")
8441 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8442 (match_operand:SI 2 "const_int_operand" "")))
8443 (clobber (reg:CC CC_REGNUM))]
8444 "TARGET_32BIT && reload_completed"
8446 [(set (reg:CC CC_REGNUM)
8447 (compare:CC (match_dup 1) (match_dup 2)))
8448 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8449 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8450 (set (match_dup 0) (const_int 1)))]
8452 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
;; x != y: flag-setting subtract, then force the result to 1 when NE.
8456 [(set (match_operand:SI 0 "s_register_operand" "")
8457 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8458 (match_operand:SI 2 "arm_add_operand" "")))
8459 (clobber (reg:CC CC_REGNUM))]
8460 "TARGET_32BIT && reload_completed"
8462 [(set (reg:CC_NOOV CC_REGNUM)
8463 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8465 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8466 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8467 (set (match_dup 0) (const_int 1)))])
;; General scc: emit the compare, then conditionally move 0 then 1 into
;; op0.  Operands 4/5 (the reversed and direct conditions on the CC
;; register) are constructed in the preparation statements.
8469 (define_insn_and_split "*compare_scc"
8470 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8471 (match_operator:SI 1 "arm_comparison_operator"
8472 [(match_operand:SI 2 "s_register_operand" "r,r")
8473 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8474 (clobber (reg:CC CC_REGNUM))]
8477 "&& reload_completed"
8478 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8479 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8480 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8483 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8484 operands[2], operands[3]);
8485 enum rtx_code rc = GET_CODE (operands[1]);
8487 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8489 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; FP compare modes need unordered-aware reversal for the inverse.
8490 if (mode == CCFPmode || mode == CCFPEmode)
8491 rc = reverse_condition_maybe_unordered (rc);
8493 rc = reverse_condition (rc);
8494 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8496 [(set_attr "type" "multiple")]
8499 ;; Attempt to improve the sequence generated by the compare_scc splitters
8500 ;; not to use conditional execution.
8502 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
;; peephole2: replace cmp + two cond_exec sets with clz/lsr once the
;; CC register is known dead.
8506 [(set (reg:CC CC_REGNUM)
8507 (compare:CC (match_operand:SI 1 "register_operand" "")
8509 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8510 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8511 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8512 (set (match_dup 0) (const_int 1)))]
8513 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8514 [(set (match_dup 0) (clz:SI (match_dup 1)))
8515 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8518 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
;; Pre-ARMv5 variant: negate with flags into a scratch (operand 2),
;; then form the boolean with an add-with-carry (geu of CC).
8522 [(set (reg:CC CC_REGNUM)
8523 (compare:CC (match_operand:SI 1 "register_operand" "")
8525 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8526 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8527 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8528 (set (match_dup 0) (const_int 1)))
8529 (match_scratch:SI 2 "r")]
8530 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8532 [(set (reg:CC CC_REGNUM)
8533 (compare:CC (const_int 0) (match_dup 1)))
8534 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8536 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8537 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8540 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8541 ;; sub Rd, Reg1, reg2
;; ARMv5 reg/imm equality via sub + clz + lsr; suppressed when Thumb-2
;; is optimizing for size.
8545 [(set (reg:CC CC_REGNUM)
8546 (compare:CC (match_operand:SI 1 "register_operand" "")
8547 (match_operand:SI 2 "arm_rhs_operand" "")))
8548 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8549 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8550 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8551 (set (match_dup 0) (const_int 1)))]
8552 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8553 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8554 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8555 (set (match_dup 0) (clz:SI (match_dup 0)))
8556 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8560 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8561 ;; sub T1, Reg1, reg2
;; Fallback equality: operand 4 (built below) is reg1 - reg2, or
;; reg1 + (-imm) for a constant; compute it into scratch 3, negate with
;; flags, then add-with-carry to produce the 0/1 result.
8565 [(set (reg:CC CC_REGNUM)
8566 (compare:CC (match_operand:SI 1 "register_operand" "")
8567 (match_operand:SI 2 "arm_rhs_operand" "")))
8568 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8569 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8570 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8571 (set (match_dup 0) (const_int 1)))
8572 (match_scratch:SI 3 "r")]
8573 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8574 [(set (match_dup 3) (match_dup 4))
8576 [(set (reg:CC CC_REGNUM)
8577 (compare:CC (const_int 0) (match_dup 3)))
8578 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8580 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8581 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8583 if (CONST_INT_P (operands[2]))
8584 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8586 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move on an existing CC result: emit predicated mov in
;; one or both directions; the equality operator (operand 3) decides
;; which arm takes the direct condition, and tied alternatives skip the
;; redundant move.
8589 (define_insn "*cond_move"
8590 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8591 (if_then_else:SI (match_operator 3 "equality_operator"
8592 [(match_operator 4 "arm_comparison_operator"
8593 [(match_operand 5 "cc_register" "") (const_int 0)])
8595 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8596 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8599 if (GET_CODE (operands[3]) == NE)
8601 if (which_alternative != 1)
8602 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8603 if (which_alternative != 0)
8604 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8607 if (which_alternative != 0)
8608 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8609 if (which_alternative != 1)
8610 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8613 [(set_attr "conds" "use")
8614 (set_attr_alternative "type"
8615 [(if_then_else (match_operand 2 "const_int_operand" "")
8616 (const_string "mov_imm")
8617 (const_string "mov_reg"))
8618 (if_then_else (match_operand 1 "const_int_operand" "")
8619 (const_string "mov_imm")
8620 (const_string "mov_reg"))
8621 (const_string "multiple")])
8622 (set_attr "length" "4,4,8")]
;; op0 = op1 <op5> (op2 <cmp4> op3 ? 1 : 0).  Special cases: LT against
;; zero becomes an "lsr #31" shifted operand; AND conditionally clears
;; op0; MINUS uses rsb; otherwise a conditional "#1" op finishes.
8625 (define_insn "*cond_arith"
8626 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8627 (match_operator:SI 5 "shiftable_operator"
8628 [(match_operator:SI 4 "arm_comparison_operator"
8629 [(match_operand:SI 2 "s_register_operand" "r,r")
8630 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8631 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8632 (clobber (reg:CC CC_REGNUM))]
8635 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8636 return \"%i5\\t%0, %1, %2, lsr #31\";
8638 output_asm_insn (\"cmp\\t%2, %3\", operands);
8639 if (GET_CODE (operands[5]) == AND)
8640 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8641 else if (GET_CODE (operands[5]) == MINUS)
8642 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8643 else if (which_alternative != 0)
8644 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8645 return \"%i5%d4\\t%0, %1, #1\";
8647 [(set_attr "conds" "clob")
8648 (set_attr "length" "12")
8649 (set_attr "type" "multiple")]
;; op0 = op1 - (op2 <cmp4> op3 ? 1 : 0): cmp, optional untied-operand
;; move, then a conditional "sub ..., #1".
8652 (define_insn "*cond_sub"
8653 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8654 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8655 (match_operator:SI 4 "arm_comparison_operator"
8656 [(match_operand:SI 2 "s_register_operand" "r,r")
8657 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8658 (clobber (reg:CC CC_REGNUM))]
8661 output_asm_insn (\"cmp\\t%2, %3\", operands);
8662 if (which_alternative != 0)
8663 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8664 return \"sub%d4\\t%0, %1, #1\";
8666 [(set_attr "conds" "clob")
8667 (set_attr "length" "8,12")
8668 (set_attr "type" "multiple")]
;; Combine two comparisons into one dominant CC result with a compare
;; followed by a conditional compare (an IT block on Thumb-2).  The
;; cmp1/cmp2 string tables choose cmp vs cmn per operand (cmn when the
;; constant fits negated), indexed per-alternative via cmp_idx; "swap"
;; reverses emission order when the second comparison dominates.
8671 (define_insn "*cmp_ite0"
8672 [(set (match_operand 6 "dominant_cc_register" "")
8675 (match_operator 4 "arm_comparison_operator"
8676 [(match_operand:SI 0 "s_register_operand"
8677 "l,l,l,r,r,r,r,r,r")
8678 (match_operand:SI 1 "arm_add_operand"
8679 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8680 (match_operator:SI 5 "arm_comparison_operator"
8681 [(match_operand:SI 2 "s_register_operand"
8682 "l,r,r,l,l,r,r,r,r")
8683 (match_operand:SI 3 "arm_add_operand"
8684 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; Conditional-compare strings (predicated second compare).
8690 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8692 {\"cmp%d5\\t%0, %1\",
8693 \"cmp%d4\\t%2, %3\"},
8694 {\"cmn%d5\\t%0, #%n1\",
8695 \"cmp%d4\\t%2, %3\"},
8696 {\"cmp%d5\\t%0, %1\",
8697 \"cmn%d4\\t%2, #%n3\"},
8698 {\"cmn%d5\\t%0, #%n1\",
8699 \"cmn%d4\\t%2, #%n3\"}
;; Unconditional first-compare strings.
8701 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8706 \"cmn\\t%0, #%n1\"},
8707 {\"cmn\\t%2, #%n3\",
8709 {\"cmn\\t%2, #%n3\",
8712 static const char * const ite[2] =
;; Map each constraint alternative to its cmp/cmn pairing.
8717 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8718 CMP_CMP, CMN_CMP, CMP_CMP,
8719 CMN_CMP, CMP_CMN, CMN_CMN};
8721 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8723 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8724 if (TARGET_THUMB2) {
8725 output_asm_insn (ite[swap], operands);
8727 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8730 [(set_attr "conds" "set")
8731 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8732 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8733 (set_attr "type" "multiple")
8734 (set_attr_alternative "length"
8740 (if_then_else (eq_attr "is_thumb" "no")
8743 (if_then_else (eq_attr "is_thumb" "no")
8746 (if_then_else (eq_attr "is_thumb" "no")
8749 (if_then_else (eq_attr "is_thumb" "no")
;; Variant of *cmp_ite0: dominance is tested against the REVERSED first
;; comparison, so the conditional compare uses inverted (%D) predicates
;; in cmp2 when the order is swapped.
8754 (define_insn "*cmp_ite1"
8755 [(set (match_operand 6 "dominant_cc_register" "")
8758 (match_operator 4 "arm_comparison_operator"
8759 [(match_operand:SI 0 "s_register_operand"
8760 "l,l,l,r,r,r,r,r,r")
8761 (match_operand:SI 1 "arm_add_operand"
8762 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8763 (match_operator:SI 5 "arm_comparison_operator"
8764 [(match_operand:SI 2 "s_register_operand"
8765 "l,r,r,l,l,r,r,r,r")
8766 (match_operand:SI 3 "arm_add_operand"
8767 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8773 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8777 {\"cmn\\t%0, #%n1\",
8780 \"cmn\\t%2, #%n3\"},
8781 {\"cmn\\t%0, #%n1\",
8784 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8786 {\"cmp%d4\\t%2, %3\",
8787 \"cmp%D5\\t%0, %1\"},
8788 {\"cmp%d4\\t%2, %3\",
8789 \"cmn%D5\\t%0, #%n1\"},
8790 {\"cmn%d4\\t%2, #%n3\",
8791 \"cmp%D5\\t%0, %1\"},
8792 {\"cmn%d4\\t%2, #%n3\",
8793 \"cmn%D5\\t%0, #%n1\"}
8795 static const char * const ite[2] =
8800 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8801 CMP_CMP, CMN_CMP, CMP_CMP,
8802 CMN_CMP, CMP_CMN, CMN_CMN};
8804 comparison_dominates_p (GET_CODE (operands[5]),
8805 reverse_condition (GET_CODE (operands[4])));
8807 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8808 if (TARGET_THUMB2) {
8809 output_asm_insn (ite[swap], operands);
8811 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8814 [(set_attr "conds" "set")
8815 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8816 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8817 (set_attr_alternative "length"
8823 (if_then_else (eq_attr "is_thumb" "no")
8826 (if_then_else (eq_attr "is_thumb" "no")
8829 (if_then_else (eq_attr "is_thumb" "no")
8832 (if_then_else (eq_attr "is_thumb" "no")
8835 (set_attr "type" "multiple")]
;; AND of two comparisons folded into one dominant CC result: compare
;; then conditional compare, with the same cmp/cmn string-table scheme
;; as *cmp_ite0 (ten alternatives here, including an all-register one).
8838 (define_insn "*cmp_and"
8839 [(set (match_operand 6 "dominant_cc_register" "")
8842 (match_operator 4 "arm_comparison_operator"
8843 [(match_operand:SI 0 "s_register_operand"
8844 "l,l,l,r,r,r,r,r,r,r")
8845 (match_operand:SI 1 "arm_add_operand"
8846 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8847 (match_operator:SI 5 "arm_comparison_operator"
8848 [(match_operand:SI 2 "s_register_operand"
8849 "l,r,r,l,l,r,r,r,r,r")
8850 (match_operand:SI 3 "arm_add_operand"
8851 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8856 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8858 {\"cmp%d5\\t%0, %1\",
8859 \"cmp%d4\\t%2, %3\"},
8860 {\"cmn%d5\\t%0, #%n1\",
8861 \"cmp%d4\\t%2, %3\"},
8862 {\"cmp%d5\\t%0, %1\",
8863 \"cmn%d4\\t%2, #%n3\"},
8864 {\"cmn%d5\\t%0, #%n1\",
8865 \"cmn%d4\\t%2, #%n3\"}
8867 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8872 \"cmn\\t%0, #%n1\"},
8873 {\"cmn\\t%2, #%n3\",
8875 {\"cmn\\t%2, #%n3\",
8878 static const char *const ite[2] =
8883 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8884 CMP_CMP, CMN_CMP, CMP_CMP,
8885 CMP_CMP, CMN_CMP, CMP_CMN,
8888 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8890 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8891 if (TARGET_THUMB2) {
8892 output_asm_insn (ite[swap], operands);
8894 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8897 [(set_attr "conds" "set")
8898 (set_attr "predicable" "no")
8899 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8900 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8901 (set_attr_alternative "length"
8908 (if_then_else (eq_attr "is_thumb" "no")
8911 (if_then_else (eq_attr "is_thumb" "no")
8914 (if_then_else (eq_attr "is_thumb" "no")
8917 (if_then_else (eq_attr "is_thumb" "no")
8920 (set_attr "type" "multiple")]
;; IOR counterpart of *cmp_and: the conditional second compare runs
;; under the INVERTED (%D) predicate of the first comparison.
8923 (define_insn "*cmp_ior"
8924 [(set (match_operand 6 "dominant_cc_register" "")
8927 (match_operator 4 "arm_comparison_operator"
8928 [(match_operand:SI 0 "s_register_operand"
8929 "l,l,l,r,r,r,r,r,r,r")
8930 (match_operand:SI 1 "arm_add_operand"
8931 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8932 (match_operator:SI 5 "arm_comparison_operator"
8933 [(match_operand:SI 2 "s_register_operand"
8934 "l,r,r,l,l,r,r,r,r,r")
8935 (match_operand:SI 3 "arm_add_operand"
8936 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8941 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8945 {\"cmn\\t%0, #%n1\",
8948 \"cmn\\t%2, #%n3\"},
8949 {\"cmn\\t%0, #%n1\",
8952 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8954 {\"cmp%D4\\t%2, %3\",
8955 \"cmp%D5\\t%0, %1\"},
8956 {\"cmp%D4\\t%2, %3\",
8957 \"cmn%D5\\t%0, #%n1\"},
8958 {\"cmn%D4\\t%2, #%n3\",
8959 \"cmp%D5\\t%0, %1\"},
8960 {\"cmn%D4\\t%2, #%n3\",
8961 \"cmn%D5\\t%0, #%n1\"}
8963 static const char *const ite[2] =
8968 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8969 CMP_CMP, CMN_CMP, CMP_CMP,
8970 CMP_CMP, CMN_CMP, CMP_CMN,
8973 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8975 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8976 if (TARGET_THUMB2) {
8977 output_asm_insn (ite[swap], operands);
8979 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8983 [(set_attr "conds" "set")
8984 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8985 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8986 (set_attr_alternative "length"
8993 (if_then_else (eq_attr "is_thumb" "no")
8996 (if_then_else (eq_attr "is_thumb" "no")
8999 (if_then_else (eq_attr "is_thumb" "no")
9002 (if_then_else (eq_attr "is_thumb" "no")
9005 (set_attr "type" "multiple")]
;; op0 = scc(op3) | scc(op6): split into a dominance-mode combined
;; compare (operand 7, a CC reg in the mode chosen by
;; arm_select_dominance_cc_mode) followed by a single NE scc.
9008 (define_insn_and_split "*ior_scc_scc"
9009 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9010 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9011 [(match_operand:SI 1 "s_register_operand" "l,r")
9012 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9013 (match_operator:SI 6 "arm_comparison_operator"
9014 [(match_operand:SI 4 "s_register_operand" "l,r")
9015 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9016 (clobber (reg:CC CC_REGNUM))]
9018 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9021 "TARGET_32BIT && reload_completed"
9025 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9026 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9028 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9030 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9033 [(set_attr "conds" "clob")
9034 (set_attr "enabled_for_short_it" "yes,no")
9035 (set_attr "length" "16")
9036 (set_attr "type" "multiple")]
9039 ; If the above pattern is followed by a CMP insn, then the compare is
9040 ; redundant, since we can rework the conditional instruction that follows.
;; Same as *ior_scc_scc but the dominant CC register (operand 0) is
;; already live, so the trailing compare is folded away.
9041 (define_insn_and_split "*ior_scc_scc_cmp"
9042 [(set (match_operand 0 "dominant_cc_register" "")
9043 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9044 [(match_operand:SI 1 "s_register_operand" "l,r")
9045 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9046 (match_operator:SI 6 "arm_comparison_operator"
9047 [(match_operand:SI 4 "s_register_operand" "l,r")
9048 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9050 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9051 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9052 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9055 "TARGET_32BIT && reload_completed"
9059 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9060 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9062 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9064 [(set_attr "conds" "set")
9065 (set_attr "enabled_for_short_it" "yes,no")
9066 (set_attr "length" "16")
9067 (set_attr "type" "multiple")]
;; AND counterpart of *ior_scc_scc, using DOM_CC_X_AND_Y dominance.
9070 (define_insn_and_split "*and_scc_scc"
9071 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9072 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9073 [(match_operand:SI 1 "s_register_operand" "l,r")
9074 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9075 (match_operator:SI 6 "arm_comparison_operator"
9076 [(match_operand:SI 4 "s_register_operand" "l,r")
9077 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9078 (clobber (reg:CC CC_REGNUM))]
9080 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9083 "TARGET_32BIT && reload_completed
9084 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9089 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9090 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9092 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9094 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9097 [(set_attr "conds" "clob")
9098 (set_attr "enabled_for_short_it" "yes,no")
9099 (set_attr "length" "16")
9100 (set_attr "type" "multiple")]
9103 ; If the above pattern is followed by a CMP insn, then the compare is
9104 ; redundant, since we can rework the conditional instruction that follows.
9105 (define_insn_and_split "*and_scc_scc_cmp"
9106 [(set (match_operand 0 "dominant_cc_register" "")
9107 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9108 [(match_operand:SI 1 "s_register_operand" "l,r")
9109 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9110 (match_operator:SI 6 "arm_comparison_operator"
9111 [(match_operand:SI 4 "s_register_operand" "l,r")
9112 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9114 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9115 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9116 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9119 "TARGET_32BIT && reload_completed"
9123 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9124 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9126 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9128 [(set_attr "conds" "set")
9129 (set_attr "enabled_for_short_it" "yes,no")
9130 (set_attr "length" "16")
9131 (set_attr "type" "multiple")]
9134 ;; If there is no dominance in the comparison, then we can still save an
9135 ;; instruction in the AND case, since we can know that the second compare
9136 ;; need only zero the value if false (if true, then the value is already
9138 (define_insn_and_split "*and_scc_scc_nodom"
9139 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9140 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9141 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9142 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9143 (match_operator:SI 6 "arm_comparison_operator"
9144 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9145 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9146 (clobber (reg:CC CC_REGNUM))]
9148 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9151 "TARGET_32BIT && reload_completed"
9152 [(parallel [(set (match_dup 0)
9153 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9154 (clobber (reg:CC CC_REGNUM))])
9155 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9157 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9160 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9161 operands[4], operands[5]),
9163 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9165 [(set_attr "conds" "clob")
9166 (set_attr "length" "20")
9167 (set_attr "type" "multiple")]
;; Two define_split fragments follow.  Each splits a CC_NOOV compare of
;; (ior (and reg ...) (comparison ...)) into an IOR into a scratch plus a
;; compare of (and scratch 1), so only the low bit is tested.
;; NOTE(review): the opening "(define_split" lines and several interior
;; lines were lost in extraction -- treat these fragments as read-only.
9171 [(set (reg:CC_NOOV CC_REGNUM)
9172 (compare:CC_NOOV (ior:SI
9173 (and:SI (match_operand:SI 0 "s_register_operand" "")
9175 (match_operator:SI 1 "arm_comparison_operator"
9176 [(match_operand:SI 2 "s_register_operand" "")
9177 (match_operand:SI 3 "arm_add_operand" "")]))
9179 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9182 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9184 (set (reg:CC_NOOV CC_REGNUM)
9185 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Same split with the IOR operands in the opposite order.
9190 [(set (reg:CC_NOOV CC_REGNUM)
9191 (compare:CC_NOOV (ior:SI
9192 (match_operator:SI 1 "arm_comparison_operator"
9193 [(match_operand:SI 2 "s_register_operand" "")
9194 (match_operand:SI 3 "arm_add_operand" "")])
9195 (and:SI (match_operand:SI 0 "s_register_operand" "")
9198 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9201 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9203 (set (reg:CC_NOOV CC_REGNUM)
9204 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9207 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: 0 - (comparison) -> -1/0 in a register.  The split picks the
;; cheapest expansion: asr #31 for LT against 0, subs+mvnne for NE, and a
;; generic cmp + two conditional moves otherwise; clobbers the flags.
9209 (define_insn_and_split "*negscc"
9210 [(set (match_operand:SI 0 "s_register_operand" "=r")
9211 (neg:SI (match_operator 3 "arm_comparison_operator"
9212 [(match_operand:SI 1 "s_register_operand" "r")
9213 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9214 (clobber (reg:CC CC_REGNUM))]
9217 "&& reload_completed"
9220 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9222 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9224 /* Emit mov\\t%0, %1, asr #31 */
9225 emit_insn (gen_rtx_SET (operands[0],
9226 gen_rtx_ASHIFTRT (SImode,
9231 else if (GET_CODE (operands[3]) == NE)
9233 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9234 if (CONST_INT_P (operands[2]))
9235 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9236 gen_int_mode (-INTVAL (operands[2]),
9239 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9241 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9245 gen_rtx_SET (operands[0],
9251 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9252 emit_insn (gen_rtx_SET (cc_reg,
9253 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9254 enum rtx_code rc = GET_CODE (operands[3]);
9256 rc = reverse_condition (rc);
9257 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9262 gen_rtx_SET (operands[0], const0_rtx)));
9263 rc = GET_CODE (operands[3]);
9264 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9269 gen_rtx_SET (operands[0],
9275 [(set_attr "conds" "clob")
9276 (set_attr "length" "12")
9277 (set_attr "type" "multiple")]
;; movcond_addsi: conditional move whose comparison is against the result
;; of an addition; split into a CC_NOOV-setting add-compare followed by two
;; predicated moves.  The swap below canonicalises so the false arm stays
;; in place when it already occupies the destination.
;; NOTE(review): interior lines of this pattern are missing from the
;; extraction -- do not re-flow.
9280 (define_insn_and_split "movcond_addsi"
9281 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9283 (match_operator 5 "comparison_operator"
9284 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9285 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9287 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9288 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9289 (clobber (reg:CC CC_REGNUM))]
9292 "&& reload_completed"
9293 [(set (reg:CC_NOOV CC_REGNUM)
9295 (plus:SI (match_dup 3)
9298 (set (match_dup 0) (match_dup 1))
9299 (cond_exec (match_dup 6)
9300 (set (match_dup 0) (match_dup 2)))]
9303 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9304 operands[3], operands[4]);
9305 enum rtx_code rc = GET_CODE (operands[5]);
9306 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9307 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9308 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9309 rc = reverse_condition (rc);
9311 std::swap (operands[1], operands[2]);
9313 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9316 [(set_attr "conds" "clob")
9317 (set_attr "enabled_for_short_it" "no,yes,yes")
9318 (set_attr "type" "multiple")]
;; movcond: general conditional move emitting its own compare.  Special
;; cases LT/GE against zero, which can be done with and/bic of an
;; asr #31 mask instead of cmp + predicated moves.
9321 (define_insn "movcond"
9322 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9324 (match_operator 5 "arm_comparison_operator"
9325 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9326 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9327 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9328 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9329 (clobber (reg:CC CC_REGNUM))]
9332 if (GET_CODE (operands[5]) == LT
9333 && (operands[4] == const0_rtx))
9335 if (which_alternative != 1 && REG_P (operands[1]))
9337 if (operands[2] == const0_rtx)
9338 return \"and\\t%0, %1, %3, asr #31\";
9339 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9341 else if (which_alternative != 0 && REG_P (operands[2]))
9343 if (operands[1] == const0_rtx)
9344 return \"bic\\t%0, %2, %3, asr #31\";
9345 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9347 /* The only case that falls through to here is when both ops 1 & 2
9351 if (GET_CODE (operands[5]) == GE
9352 && (operands[4] == const0_rtx))
9354 if (which_alternative != 1 && REG_P (operands[1]))
9356 if (operands[2] == const0_rtx)
9357 return \"bic\\t%0, %1, %3, asr #31\";
9358 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9360 else if (which_alternative != 0 && REG_P (operands[2]))
9362 if (operands[1] == const0_rtx)
9363 return \"and\\t%0, %2, %3, asr #31\";
9364 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9366 /* The only case that falls through to here is when both ops 1 & 2
;; Generic path: cmn for negated out-of-range constants, else cmp,
;; then a predicated mov per live alternative.
9369 if (CONST_INT_P (operands[4])
9370 && !const_ok_for_arm (INTVAL (operands[4])))
9371 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9373 output_asm_insn (\"cmp\\t%3, %4\", operands);
9374 if (which_alternative != 0)
9375 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9376 if (which_alternative != 1)
9377 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9380 [(set_attr "conds" "clob")
9381 (set_attr "length" "8,8,12")
9382 (set_attr "type" "multiple")]
9385 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; Conditional-move family.  "*ifcompare_*" patterns perform the compare
;; themselves and clobber CC_REGNUM; the matching "*if_*" patterns reuse a
;; CC register already set by an earlier compare ("conds" "use").
;; NOTE(review): output templates of several *ifcompare_* patterns were
;; lost in extraction; only the attribute lists survive.
9387 (define_insn "*ifcompare_plus_move"
9388 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9389 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9390 [(match_operand:SI 4 "s_register_operand" "r,r")
9391 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9393 (match_operand:SI 2 "s_register_operand" "r,r")
9394 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9395 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9396 (clobber (reg:CC CC_REGNUM))]
9399 [(set_attr "conds" "clob")
9400 (set_attr "length" "8,12")
9401 (set_attr "type" "multiple")]
;; Predicated add (or sub of the negated constant, "#%n3") for the true
;; arm, plus a mov for the false arm when it is not already in place.
9404 (define_insn "*if_plus_move"
9405 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9407 (match_operator 4 "arm_comparison_operator"
9408 [(match_operand 5 "cc_register" "") (const_int 0)])
9410 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9411 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9412 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9416 sub%d4\\t%0, %2, #%n3
9417 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9418 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9419 [(set_attr "conds" "use")
9420 (set_attr "length" "4,4,8,8")
9421 (set_attr_alternative "type"
9422 [(if_then_else (match_operand 3 "const_int_operand" "")
9423 (const_string "alu_imm" )
9424 (const_string "alu_sreg"))
9425 (const_string "alu_imm")
9426 (const_string "multiple")
9427 (const_string "multiple")])]
;; As *ifcompare_plus_move but with the arms swapped (plus on false arm).
9430 (define_insn "*ifcompare_move_plus"
9431 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9432 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9433 [(match_operand:SI 4 "s_register_operand" "r,r")
9434 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9435 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9437 (match_operand:SI 2 "s_register_operand" "r,r")
9438 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9439 (clobber (reg:CC CC_REGNUM))]
9442 [(set_attr "conds" "clob")
9443 (set_attr "length" "8,12")
9444 (set_attr "type" "multiple")]
9447 (define_insn "*if_move_plus"
9448 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9450 (match_operator 4 "arm_comparison_operator"
9451 [(match_operand 5 "cc_register" "") (const_int 0)])
9452 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9454 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9455 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9459 sub%D4\\t%0, %2, #%n3
9460 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9461 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9462 [(set_attr "conds" "use")
9463 (set_attr "length" "4,4,8,8")
9464 (set_attr_alternative "type"
9465 [(if_then_else (match_operand 3 "const_int_operand" "")
9466 (const_string "alu_imm" )
9467 (const_string "alu_sreg"))
9468 (const_string "alu_imm")
9469 (const_string "multiple")
9470 (const_string "multiple")])]
;; Both arms are shiftable-operator results; one op per arm, predicated.
9473 (define_insn "*ifcompare_arith_arith"
9474 [(set (match_operand:SI 0 "s_register_operand" "=r")
9475 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9476 [(match_operand:SI 5 "s_register_operand" "r")
9477 (match_operand:SI 6 "arm_add_operand" "rIL")])
9478 (match_operator:SI 8 "shiftable_operator"
9479 [(match_operand:SI 1 "s_register_operand" "r")
9480 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9481 (match_operator:SI 7 "shiftable_operator"
9482 [(match_operand:SI 3 "s_register_operand" "r")
9483 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9484 (clobber (reg:CC CC_REGNUM))]
9487 [(set_attr "conds" "clob")
9488 (set_attr "length" "12")
9489 (set_attr "type" "multiple")]
9492 (define_insn "*if_arith_arith"
9493 [(set (match_operand:SI 0 "s_register_operand" "=r")
9494 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9495 [(match_operand 8 "cc_register" "") (const_int 0)])
9496 (match_operator:SI 6 "shiftable_operator"
9497 [(match_operand:SI 1 "s_register_operand" "r")
9498 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9499 (match_operator:SI 7 "shiftable_operator"
9500 [(match_operand:SI 3 "s_register_operand" "r")
9501 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9503 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9504 [(set_attr "conds" "use")
9505 (set_attr "length" "8")
9506 (set_attr "type" "multiple")]
;; True arm is an arithmetic op, false arm a plain value.  For LT/GE
;; against zero with everything in registers, an asr #31 mask lets this
;; be done in two instructions without predication.
9509 (define_insn "*ifcompare_arith_move"
9510 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9511 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9512 [(match_operand:SI 2 "s_register_operand" "r,r")
9513 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9514 (match_operator:SI 7 "shiftable_operator"
9515 [(match_operand:SI 4 "s_register_operand" "r,r")
9516 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9517 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9518 (clobber (reg:CC CC_REGNUM))]
9521 /* If we have an operation where (op x 0) is the identity operation and
9522 the conditional operator is LT or GE and we are comparing against zero and
9523 everything is in registers then we can do this in two instructions. */
9524 if (operands[3] == const0_rtx
9525 && GET_CODE (operands[7]) != AND
9526 && REG_P (operands[5])
9527 && REG_P (operands[1])
9528 && REGNO (operands[1]) == REGNO (operands[4])
9529 && REGNO (operands[4]) != REGNO (operands[0]))
9531 if (GET_CODE (operands[6]) == LT)
9532 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9533 else if (GET_CODE (operands[6]) == GE)
9534 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9536 if (CONST_INT_P (operands[3])
9537 && !const_ok_for_arm (INTVAL (operands[3])))
9538 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9540 output_asm_insn (\"cmp\\t%2, %3\", operands);
9541 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9542 if (which_alternative != 0)
9543 return \"mov%D6\\t%0, %1\";
9546 [(set_attr "conds" "clob")
9547 (set_attr "length" "8,12")
9548 (set_attr "type" "multiple")]
9551 (define_insn "*if_arith_move"
9552 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9553 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9554 [(match_operand 6 "cc_register" "") (const_int 0)])
9555 (match_operator:SI 5 "shiftable_operator"
9556 [(match_operand:SI 2 "s_register_operand" "r,r")
9557 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9558 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9562 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9563 [(set_attr "conds" "use")
9564 (set_attr "length" "4,8")
9565 (set_attr_alternative "type"
9566 [(if_then_else (match_operand 3 "const_int_operand" "")
9567 (const_string "alu_shift_imm" )
9568 (const_string "alu_shift_reg"))
9569 (const_string "multiple")])]
;; Mirror of *ifcompare_arith_move: plain value on the true arm, arithmetic
;; op on the false arm (note the inverted GE/LT handling and %D6 suffix).
;; NOTE(review): interior lines are missing from the extraction here.
9572 (define_insn "*ifcompare_move_arith"
9573 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9574 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9575 [(match_operand:SI 4 "s_register_operand" "r,r")
9576 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9577 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9578 (match_operator:SI 7 "shiftable_operator"
9579 [(match_operand:SI 2 "s_register_operand" "r,r")
9580 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9581 (clobber (reg:CC CC_REGNUM))]
9584 /* If we have an operation where (op x 0) is the identity operation and
9585 the conditional operator is LT or GE and we are comparing against zero and
9586 everything is in registers then we can do this in two instructions */
9587 if (operands[5] == const0_rtx
9588 && GET_CODE (operands[7]) != AND
9589 && REG_P (operands[3])
9590 && REG_P (operands[1])
9591 && REGNO (operands[1]) == REGNO (operands[2])
9592 && REGNO (operands[2]) != REGNO (operands[0]))
9594 if (GET_CODE (operands[6]) == GE)
9595 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9596 else if (GET_CODE (operands[6]) == LT)
9597 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9600 if (CONST_INT_P (operands[5])
9601 && !const_ok_for_arm (INTVAL (operands[5])))
9602 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9604 output_asm_insn (\"cmp\\t%4, %5\", operands);
9606 if (which_alternative != 0)
9607 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9608 return \"%I7%D6\\t%0, %2, %3\";
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,12")
9612 (set_attr "type" "multiple")]
;; CC-reusing form of the above.
9615 (define_insn "*if_move_arith"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9618 (match_operator 4 "arm_comparison_operator"
9619 [(match_operand 6 "cc_register" "") (const_int 0)])
9620 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9621 (match_operator:SI 5 "shiftable_operator"
9622 [(match_operand:SI 2 "s_register_operand" "r,r")
9623 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9627 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9628 [(set_attr "conds" "use")
9629 (set_attr "length" "4,8")
9630 (set_attr_alternative "type"
9631 [(if_then_else (match_operand 3 "const_int_operand" "")
9632 (const_string "alu_shift_imm" )
9633 (const_string "alu_shift_reg"))
9634 (const_string "multiple")])]
;; Conditional move where the false arm is a bitwise NOT; compares itself.
9637 (define_insn "*ifcompare_move_not"
9638 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9640 (match_operator 5 "arm_comparison_operator"
9641 [(match_operand:SI 3 "s_register_operand" "r,r")
9642 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9643 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9645 (match_operand:SI 2 "s_register_operand" "r,r"))))
9646 (clobber (reg:CC CC_REGNUM))]
9649 [(set_attr "conds" "clob")
9650 (set_attr "length" "8,12")
9651 (set_attr "type" "multiple")]
;; *if_move_not: CC-reusing conditional move whose false arm is a bitwise
;; NOT (mvn).  Alternative 0 needs only the predicated mvn (4 bytes); the
;; others first place operand 1 with a predicated mov/mvn (8 bytes).
;; Fix: the "type" attribute was defined twice (a scalar "mvn_reg" and the
;; per-alternative list).  An insn attribute may be specified only once,
;; so keep the per-alternative form, matching the sibling *if_not_move.
9654 (define_insn "*if_move_not"
9655 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9657 (match_operator 4 "arm_comparison_operator"
9658 [(match_operand 3 "cc_register" "") (const_int 0)])
9659 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9660 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9664 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9665 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9666 [(set_attr "conds" "use")
9668 (set_attr "length" "4,8,8")
9669 (set_attr "type" "mvn_reg,multiple,multiple")]
;; Conditional moves involving NOT and shift operands.  As elsewhere in
;; this section, *ifcompare_* forms emit their own compare and clobber CC;
;; *if_* forms predicate on an existing CC register.
;; NOTE(review): output templates of the *ifcompare_* patterns and some
;; interior lines were lost in extraction -- keep text byte-for-byte.
9672 (define_insn "*ifcompare_not_move"
9673 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9675 (match_operator 5 "arm_comparison_operator"
9676 [(match_operand:SI 3 "s_register_operand" "r,r")
9677 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9679 (match_operand:SI 2 "s_register_operand" "r,r"))
9680 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9681 (clobber (reg:CC CC_REGNUM))]
9684 [(set_attr "conds" "clob")
9685 (set_attr "length" "8,12")
9686 (set_attr "type" "multiple")]
;; NOT on the true arm; predicated mvn, with mov/mvn of operand 1 first
;; when it is not already in the destination.
9689 (define_insn "*if_not_move"
9690 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9692 (match_operator 4 "arm_comparison_operator"
9693 [(match_operand 3 "cc_register" "") (const_int 0)])
9694 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9695 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9699 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9700 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9701 [(set_attr "conds" "use")
9702 (set_attr "type" "mvn_reg,multiple,multiple")
9703 (set_attr "length" "4,8,8")]
;; Shift result on the true arm, plain value on the false arm.
9706 (define_insn "*ifcompare_shift_move"
9707 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9709 (match_operator 6 "arm_comparison_operator"
9710 [(match_operand:SI 4 "s_register_operand" "r,r")
9711 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9712 (match_operator:SI 7 "shift_operator"
9713 [(match_operand:SI 2 "s_register_operand" "r,r")
9714 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9715 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9716 (clobber (reg:CC CC_REGNUM))]
9719 [(set_attr "conds" "clob")
9720 (set_attr "length" "8,12")
9721 (set_attr "type" "multiple")]
9724 (define_insn "*if_shift_move"
9725 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9727 (match_operator 5 "arm_comparison_operator"
9728 [(match_operand 6 "cc_register" "") (const_int 0)])
9729 (match_operator:SI 4 "shift_operator"
9730 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9731 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9732 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9736 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9737 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9738 [(set_attr "conds" "use")
9739 (set_attr "shift" "2")
9740 (set_attr "length" "4,8,8")
9741 (set_attr_alternative "type"
9742 [(if_then_else (match_operand 3 "const_int_operand" "")
9743 (const_string "mov_shift" )
9744 (const_string "mov_shift_reg"))
9745 (const_string "multiple")
9746 (const_string "multiple")])]
;; Mirror: shift result on the false arm.
9749 (define_insn "*ifcompare_move_shift"
9750 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9752 (match_operator 6 "arm_comparison_operator"
9753 [(match_operand:SI 4 "s_register_operand" "r,r")
9754 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9755 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9756 (match_operator:SI 7 "shift_operator"
9757 [(match_operand:SI 2 "s_register_operand" "r,r")
9758 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9759 (clobber (reg:CC CC_REGNUM))]
9762 [(set_attr "conds" "clob")
9763 (set_attr "length" "8,12")
9764 (set_attr "type" "multiple")]
9767 (define_insn "*if_move_shift"
9768 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9770 (match_operator 5 "arm_comparison_operator"
9771 [(match_operand 6 "cc_register" "") (const_int 0)])
9772 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9773 (match_operator:SI 4 "shift_operator"
9774 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9775 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9779 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9780 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9781 [(set_attr "conds" "use")
9782 (set_attr "shift" "2")
9783 (set_attr "length" "4,8,8")
9784 (set_attr_alternative "type"
9785 [(if_then_else (match_operand 3 "const_int_operand" "")
9786 (const_string "mov_shift" )
9787 (const_string "mov_shift_reg"))
9788 (const_string "multiple")
9789 (const_string "multiple")])]
;; Both arms are shift results.
9792 (define_insn "*ifcompare_shift_shift"
9793 [(set (match_operand:SI 0 "s_register_operand" "=r")
9795 (match_operator 7 "arm_comparison_operator"
9796 [(match_operand:SI 5 "s_register_operand" "r")
9797 (match_operand:SI 6 "arm_add_operand" "rIL")])
9798 (match_operator:SI 8 "shift_operator"
9799 [(match_operand:SI 1 "s_register_operand" "r")
9800 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9801 (match_operator:SI 9 "shift_operator"
9802 [(match_operand:SI 3 "s_register_operand" "r")
9803 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9804 (clobber (reg:CC CC_REGNUM))]
9807 [(set_attr "conds" "clob")
9808 (set_attr "length" "12")
9809 (set_attr "type" "multiple")]
9812 (define_insn "*if_shift_shift"
9813 [(set (match_operand:SI 0 "s_register_operand" "=r")
9815 (match_operator 5 "arm_comparison_operator"
9816 [(match_operand 8 "cc_register" "") (const_int 0)])
9817 (match_operator:SI 6 "shift_operator"
9818 [(match_operand:SI 1 "s_register_operand" "r")
9819 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9820 (match_operator:SI 7 "shift_operator"
9821 [(match_operand:SI 3 "s_register_operand" "r")
9822 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9824 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9825 [(set_attr "conds" "use")
9826 (set_attr "shift" "1")
9827 (set_attr "length" "8")
9828 (set (attr "type") (if_then_else
9829 (and (match_operand 2 "const_int_operand" "")
9830 (match_operand 4 "const_int_operand" ""))
9831 (const_string "mov_shift")
9832 (const_string "mov_shift_reg")))]
;; Conditional moves combining NOT/negation with arithmetic arms, and the
;; neg-move pair at the end which splits into a single cond_exec'd RSB.
;; NOTE(review): interior lines (templates, conditions) missing in the
;; extraction -- keep surviving text byte-for-byte.
9835 (define_insn "*ifcompare_not_arith"
9836 [(set (match_operand:SI 0 "s_register_operand" "=r")
9838 (match_operator 6 "arm_comparison_operator"
9839 [(match_operand:SI 4 "s_register_operand" "r")
9840 (match_operand:SI 5 "arm_add_operand" "rIL")])
9841 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9842 (match_operator:SI 7 "shiftable_operator"
9843 [(match_operand:SI 2 "s_register_operand" "r")
9844 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9845 (clobber (reg:CC CC_REGNUM))]
9848 [(set_attr "conds" "clob")
9849 (set_attr "length" "12")
9850 (set_attr "type" "multiple")]
9853 (define_insn "*if_not_arith"
9854 [(set (match_operand:SI 0 "s_register_operand" "=r")
9856 (match_operator 5 "arm_comparison_operator"
9857 [(match_operand 4 "cc_register" "") (const_int 0)])
9858 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9859 (match_operator:SI 6 "shiftable_operator"
9860 [(match_operand:SI 2 "s_register_operand" "r")
9861 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9863 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9864 [(set_attr "conds" "use")
9865 (set_attr "type" "mvn_reg")
9866 (set_attr "length" "8")]
;; Mirror: arithmetic op on the true arm, NOT on the false arm.
9869 (define_insn "*ifcompare_arith_not"
9870 [(set (match_operand:SI 0 "s_register_operand" "=r")
9872 (match_operator 6 "arm_comparison_operator"
9873 [(match_operand:SI 4 "s_register_operand" "r")
9874 (match_operand:SI 5 "arm_add_operand" "rIL")])
9875 (match_operator:SI 7 "shiftable_operator"
9876 [(match_operand:SI 2 "s_register_operand" "r")
9877 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9878 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9879 (clobber (reg:CC CC_REGNUM))]
9882 [(set_attr "conds" "clob")
9883 (set_attr "length" "12")
9884 (set_attr "type" "multiple")]
9887 (define_insn "*if_arith_not"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r")
9890 (match_operator 5 "arm_comparison_operator"
9891 [(match_operand 4 "cc_register" "") (const_int 0)])
9892 (match_operator:SI 6 "shiftable_operator"
9893 [(match_operand:SI 2 "s_register_operand" "r")
9894 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9895 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9897 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9898 [(set_attr "conds" "use")
9899 (set_attr "type" "multiple")
9900 (set_attr "length" "8")]
;; Negation on the true arm.
9903 (define_insn "*ifcompare_neg_move"
9904 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9906 (match_operator 5 "arm_comparison_operator"
9907 [(match_operand:SI 3 "s_register_operand" "r,r")
9908 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9909 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9910 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9911 (clobber (reg:CC CC_REGNUM))]
9914 [(set_attr "conds" "clob")
9915 (set_attr "length" "8,12")
9916 (set_attr "type" "multiple")]
;; CC-reusing neg-move: operand 1 is tied to the destination ("0,0"), so
;; after reload this collapses to one cond_exec'd negate.
9919 (define_insn_and_split "*if_neg_move"
9920 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9922 (match_operator 4 "arm_comparison_operator"
9923 [(match_operand 3 "cc_register" "") (const_int 0)])
9924 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9925 (match_operand:SI 1 "s_register_operand" "0,0")))]
9928 "&& reload_completed"
9929 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9930 (set (match_dup 0) (neg:SI (match_dup 2))))]
9932 [(set_attr "conds" "use")
9933 (set_attr "length" "4")
9934 (set_attr "arch" "t2,32")
9935 (set_attr "enabled_for_short_it" "yes,no")
9936 (set_attr "type" "logic_shift_imm")]
9939 (define_insn "*ifcompare_move_neg"
9940 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9942 (match_operator 5 "arm_comparison_operator"
9943 [(match_operand:SI 3 "s_register_operand" "r,r")
9944 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9945 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9946 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9947 (clobber (reg:CC CC_REGNUM))]
9950 [(set_attr "conds" "clob")
9951 (set_attr "length" "8,12")
9952 (set_attr "type" "multiple")]
;; As *if_neg_move with the arms swapped: the split must therefore
;; reverse the condition (with unordered handling for FP CC modes).
9955 (define_insn_and_split "*if_move_neg"
9956 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9958 (match_operator 4 "arm_comparison_operator"
9959 [(match_operand 3 "cc_register" "") (const_int 0)])
9960 (match_operand:SI 1 "s_register_operand" "0,0")
9961 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9964 "&& reload_completed"
9965 [(cond_exec (match_dup 5)
9966 (set (match_dup 0) (neg:SI (match_dup 2))))]
9968 machine_mode mode = GET_MODE (operands[3]);
9969 rtx_code rc = GET_CODE (operands[4]);
9971 if (mode == CCFPmode || mode == CCFPEmode)
9972 rc = reverse_condition_maybe_unordered (rc);
9974 rc = reverse_condition (rc);
9976 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9978 [(set_attr "conds" "use")
9979 (set_attr "length" "4")
9980 (set_attr "arch" "t2,32")
9981 (set_attr "enabled_for_short_it" "yes,no")
9982 (set_attr "type" "logic_shift_imm")]
;; *arith_adjacentmem: apply a shiftable operator to two adjacent memory
;; words, loading both with a single ldm (ldmia/ldmib/ldmda chosen from
;; the offsets) into the destination and a scratch, then one ALU op.
;; Falls back to two ldr when the shared offset cannot be materialised.
;; NOTE(review): several interior lines (ldm[0] setup, else branches) are
;; missing from the extraction -- keep surviving text byte-for-byte.
9985 (define_insn "*arith_adjacentmem"
9986 [(set (match_operand:SI 0 "s_register_operand" "=r")
9987 (match_operator:SI 1 "shiftable_operator"
9988 [(match_operand:SI 2 "memory_operand" "m")
9989 (match_operand:SI 3 "memory_operand" "m")]))
9990 (clobber (match_scratch:SI 4 "=r"))]
9991 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9997 HOST_WIDE_INT val1 = 0, val2 = 0;
;; ldm register list must be ascending: order dest/scratch by regno.
9999 if (REGNO (operands[0]) > REGNO (operands[4]))
10001 ldm[1] = operands[4];
10002 ldm[2] = operands[0];
10006 ldm[1] = operands[0];
10007 ldm[2] = operands[4];
10010 base_reg = XEXP (operands[2], 0);
;; Peel reg+offset addresses down to the base register and offsets.
10012 if (!REG_P (base_reg))
10014 val1 = INTVAL (XEXP (base_reg, 1));
10015 base_reg = XEXP (base_reg, 0);
10018 if (!REG_P (XEXP (operands[3], 0)))
10019 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10021 arith[0] = operands[0];
10022 arith[3] = operands[1];
10036 if (val1 !=0 && val2 != 0)
10040 if (val1 == 4 || val2 == 4)
10041 /* Other val must be 8, since we know they are adjacent and neither
10043 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10044 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10046 ldm[0] = ops[0] = operands[4];
10048 ops[2] = GEN_INT (val1);
10049 output_add_immediate (ops);
10051 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10053 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10057 /* Offset is out of range for a single add, so use two ldr. */
10060 ops[2] = GEN_INT (val1);
10061 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10063 ops[2] = GEN_INT (val2);
10064 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10067 else if (val1 != 0)
10070 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10072 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10077 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10079 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
;; Finally the ALU op itself (%I3 expands the operator mnemonic).
10081 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10084 [(set_attr "length" "12")
10085 (set_attr "predicable" "yes")
10086 (set_attr "type" "load_4")]
10089 ; This pattern is never tried by combine, so do it as a peephole
10092 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10093 (match_operand:SI 1 "arm_general_register_operand" ""))
10094 (set (reg:CC CC_REGNUM)
10095 (compare:CC (match_dup 1) (const_int 0)))]
10097 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10098 (set (match_dup 0) (match_dup 1))])]
10103 [(set (match_operand:SI 0 "s_register_operand" "")
10104 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10106 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10107 [(match_operand:SI 3 "s_register_operand" "")
10108 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10109 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10111 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10112 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10117 ;; This split can be used because CC_Z mode implies that the following
10118 ;; branch will be an equality, or an unsigned inequality, so the sign
10119 ;; extension is not needed.
10122 [(set (reg:CC_Z CC_REGNUM)
10124 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10126 (match_operand 1 "const_int_operand" "")))
10127 (clobber (match_scratch:SI 2 ""))]
10129 && ((UINTVAL (operands[1]))
10130 == ((UINTVAL (operands[1])) >> 24) << 24)"
10131 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10132 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10134 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10137 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Standard "prologue" expander: emits the function prologue via
;; arm_expand_prologue () or thumb1_expand_prologue () (the target-selection
;; conditionals between the two calls are elided from this view).
10139 (define_expand "prologue"
10140 [(clobber (const_int 0))]
10143 arm_expand_prologue ();
10145 thumb1_expand_prologue ();
;; Standard "epilogue" expander.  For eh_return functions, r2 is forced
;; live first (it carries the stack adjustment).  Then one of: a Thumb-1
;; epilogue followed by a VUNSPEC_EPILOGUE return jump, a bare return insn
;; when HAVE_return allows it, or a full 32-bit epilogue.
10150 (define_expand "epilogue"
10151 [(clobber (const_int 0))]
10154 if (crtl->calls_eh_return)
10155 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10158 thumb1_expand_epilogue ();
10159 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10160 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10162 else if (HAVE_return)
10164 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10165 no need for explicit testing again. */
10166 emit_jump_insn (gen_return ());
10168 else if (TARGET_32BIT)
10170 arm_expand_epilogue (true);
10176 ;; Note - although unspec_volatile's USE all hard registers,
10177 ;; USEs are ignored after reload has completed. Thus we need
10178 ;; to add an unspec of the link register to ensure that flow
10179 ;; does not think that it is unused by the sibcall branch that
10180 ;; will replace the standard function epilogue.
;; Epilogue emitted before a sibling call: restores state without
;; returning (arm_expand_epilogue (false)).
10181 (define_expand "sibcall_epilogue"
10182 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10183 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10186 arm_expand_epilogue (false);
;; Exception-handler epilogue: operand 1 is the stack-pointer offset saved
;; in cfun->machine; the handler address (operand 2) is forced into r2 if
;; it is not already there.  Resetting func_type makes the epilogue be
;; recomputed for the EH path (see the in-line "hack" comment).
10191 (define_expand "eh_epilogue"
10192 [(use (match_operand:SI 0 "register_operand"))
10193 (use (match_operand:SI 1 "register_operand"))
10194 (use (match_operand:SI 2 "register_operand"))]
10198 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10199 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10201 rtx ra = gen_rtx_REG (Pmode, 2);
10203 emit_move_insn (ra, operands[2]);
10206 /* This is a hack -- we may have crystalized the function type too
10208 cfun->machine->func_type = 0;
10212 ;; This split is only used during output to reduce the number of patterns
10213 ;; that need assembler instructions adding to them. We allowed the setting
10214 ;; of the conditions to be implicit during rtl generation so that
10215 ;; the conditional compare patterns would work. However this conflicts to
10216 ;; some extent with the conditional data operations, so we have to split them
10219 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10220 ;; conditional execution sufficient?
;; Split a conditional move into an explicit compare (operands 5/6, built
;; below) plus a cond_exec guarded by operand 7.  The condition is REVERSED
;; (reverse_condition / reverse_condition_maybe_unordered for FP modes), so
;; the guarded set supplies the else-arm, operand 4.  The then-arm of the
;; if_then_else (the line between 10225 and 10227) is elided in this view.
10223 [(set (match_operand:SI 0 "s_register_operand" "")
10224 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10225 [(match_operand 2 "" "") (match_operand 3 "" "")])
10227 (match_operand 4 "" "")))
10228 (clobber (reg:CC CC_REGNUM))]
10229 "TARGET_ARM && reload_completed"
10230 [(set (match_dup 5) (match_dup 6))
10231 (cond_exec (match_dup 7)
10232 (set (match_dup 0) (match_dup 4)))]
10235 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10236 operands[2], operands[3]);
10237 enum rtx_code rc = GET_CODE (operands[1]);
10239 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10240 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10241 if (mode == CCFPmode || mode == CCFPEmode)
10242 rc = reverse_condition_maybe_unordered (rc);
10244 rc = reverse_condition (rc);
10246 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; Companion split to the one above (its define_split opener is elided):
;; here the original condition (operator 1) guards the cond_exec directly,
;; so the guarded set supplies the THEN-arm, operand 4.  No condition
;; reversal is needed; only the compare operands 5/6 are synthesized.
10251 [(set (match_operand:SI 0 "s_register_operand" "")
10252 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10253 [(match_operand 2 "" "") (match_operand 3 "" "")])
10254 (match_operand 4 "" "")
10256 (clobber (reg:CC CC_REGNUM))]
10257 "TARGET_ARM && reload_completed"
10258 [(set (match_dup 5) (match_dup 6))
10259 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10260 (set (match_dup 0) (match_dup 4)))]
10263 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10264 operands[2], operands[3]);
10266 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10267 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Split a full conditional move with both arms (its define_split opener is
;; elided): emit the compare into operand 6, then two cond_execs — the
;; original condition guards the then-arm (operand 4) and the reversed
;; condition (operand 8, built below) guards the else-arm (operand 5).
10272 [(set (match_operand:SI 0 "s_register_operand" "")
10273 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10274 [(match_operand 2 "" "") (match_operand 3 "" "")])
10275 (match_operand 4 "" "")
10276 (match_operand 5 "" "")))
10277 (clobber (reg:CC CC_REGNUM))]
10278 "TARGET_ARM && reload_completed"
10279 [(set (match_dup 6) (match_dup 7))
10280 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10281 (set (match_dup 0) (match_dup 4)))
10282 (cond_exec (match_dup 8)
10283 (set (match_dup 0) (match_dup 5)))]
10286 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10287 operands[2], operands[3]);
10288 enum rtx_code rc = GET_CODE (operands[1]);
10290 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10291 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10292 if (mode == CCFPmode || mode == CCFPEmode)
10293 rc = reverse_condition_maybe_unordered (rc);
10295 rc = reverse_condition (rc);
10297 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; As above, but the else-arm is the bitwise NOT of operand 5 (the not:SI
;; wrapper around operand 5, between lines 10306 and 10308, is elided):
;; compare, cond_exec the then-arm (operand 4), then cond_exec
;; (not operand5) under the reversed condition (operand 8).
10302 [(set (match_operand:SI 0 "s_register_operand" "")
10303 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10304 [(match_operand:SI 2 "s_register_operand" "")
10305 (match_operand:SI 3 "arm_add_operand" "")])
10306 (match_operand:SI 4 "arm_rhs_operand" "")
10308 (match_operand:SI 5 "s_register_operand" ""))))
10309 (clobber (reg:CC CC_REGNUM))]
10310 "TARGET_ARM && reload_completed"
10311 [(set (match_dup 6) (match_dup 7))
10312 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10313 (set (match_dup 0) (match_dup 4)))
10314 (cond_exec (match_dup 8)
10315 (set (match_dup 0) (not:SI (match_dup 5))))]
10318 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10319 operands[2], operands[3]);
10320 enum rtx_code rc = GET_CODE (operands[1]);
10322 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10323 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10324 if (mode == CCFPmode || mode == CCFPEmode)
10325 rc = reverse_condition_maybe_unordered (rc);
10327 rc = reverse_condition (rc);
10329 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional "move or move-negated": select operand 1 when the condition
;; (operator 4 on the CC register) holds, otherwise NOT of operand 2.
;; Alternative 0 ties operand 1 to the destination (mvn%D4 only, length 4);
;; alternative 1 needs the two-insn mov%d4/mvn%D4 sequence (length 8).
;; The first output template line is elided in this view.
10333 (define_insn "*cond_move_not"
10334 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10335 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10336 [(match_operand 3 "cc_register" "") (const_int 0)])
10337 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10339 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10343 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10344 [(set_attr "conds" "use")
10345 (set_attr "type" "mvn_reg,multiple")
10346 (set_attr "length" "4,8")]
10349 ;; The next two patterns occur when an AND operation is followed by a
10350 ;; scc insn sequence
;; One-bit sign_extract: operand 2 (the bit position) is turned into the
;; mask 1<<pos, then "ands" tests it and "mvnne" writes -1 when the bit is
;; set (0 otherwise, from the ands itself per the visible two-insn length).
10352 (define_insn "*sign_extract_onebit"
10353 [(set (match_operand:SI 0 "s_register_operand" "=r")
10354 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10356 (match_operand:SI 2 "const_int_operand" "n")))
10357 (clobber (reg:CC CC_REGNUM))]
10360 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10361 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10362 return \"mvnne\\t%0, #0\";
10364 [(set_attr "conds" "clob")
10365 (set_attr "length" "8")
10366 (set_attr "type" "multiple")]
;; Negated form of the above: tst the single bit, then mvneq/movne give
;; -1 when the bit is clear and 0 when it is set (three insns, length 12).
10369 (define_insn "*not_signextract_onebit"
10370 [(set (match_operand:SI 0 "s_register_operand" "=r")
10372 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10374 (match_operand:SI 2 "const_int_operand" "n"))))
10375 (clobber (reg:CC CC_REGNUM))]
10378 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10379 output_asm_insn (\"tst\\t%1, %2\", operands);
10380 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10381 return \"movne\\t%0, #0\";
10383 [(set_attr "conds" "clob")
10384 (set_attr "length" "12")
10385 (set_attr "type" "multiple")]
10387 ;; ??? The above patterns need auditing for Thumb-2
10389 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10390 ;; expressions. For simplicity, the first register is also in the unspec
10392 ;; To avoid the usage of GNU extension, the length attribute is computed
10393 ;; in a C function arm_attr_length_push_multi.
;; Emits "str rX, [sp, #-4]!" for a single register on ARM (faster on
;; StrongARM), otherwise builds a "push {r..., ...}" string by appending
;; each register name from the parallel into a local buffer.
10394 (define_insn "*push_multi"
10395 [(match_parallel 2 "multi_register_push"
10396 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10397 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10398 UNSPEC_PUSH_MULT))])]
10402 int num_saves = XVECLEN (operands[2], 0);
10404 /* For the StrongARM at least it is faster to
10405 use STR to store only a single register.
10406 In Thumb mode always use push, and the assembler will pick
10407 something appropriate. */
10408 if (num_saves == 1 && TARGET_ARM)
10409 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10416 strcpy (pattern, \"push%?\\t{%1\");
10418 strcpy (pattern, \"push\\t{%1\");
10420 for (i = 1; i < num_saves; i++)
10422 strcat (pattern, \", %|\");
10424 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10427 strcat (pattern, \"}\");
10428 output_asm_insn (pattern, operands);
10433 [(set_attr "type" "store_16")
10434 (set (attr "length")
10435 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Zero-length scheduling barrier: a blockage store tying two registers
;; together so the scheduler cannot move memory accesses across it.
10438 (define_insn "stack_tie"
10439 [(set (mem:BLK (scratch))
10440 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10441 (match_operand:SI 1 "s_register_operand" "rk")]
10445 [(set_attr "length" "0")
10446 (set_attr "type" "block")]
10449 ;; Pop (as used in epilogue RTL)
;; LDM with base-register writeback: the parallel's first element updates
;; the base (operand 1 += operand 2), the rest load registers from it.
;; Assembly text is produced by arm_output_multireg_pop; length comes from
;; arm_attr_length_pop_multi.  Only active during/after reload.
10451 (define_insn "*load_multiple_with_writeback"
10452 [(match_parallel 0 "load_multiple_operation"
10453 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10454 (plus:SI (match_dup 1)
10455 (match_operand:SI 2 "const_int_I_operand" "I")))
10456 (set (match_operand:SI 3 "s_register_operand" "=rk")
10457 (mem:SI (match_dup 1)))
10459 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10462 arm_output_multireg_pop (operands, /*return_pc=*/false,
10463 /*cond=*/const_true_rtx,
10469 [(set_attr "type" "load_16")
10470 (set_attr "predicable" "yes")
10471 (set (attr "length")
10472 (symbol_ref "arm_attr_length_pop_multi (operands,
10473 /*return_pc=*/false,
10474 /*write_back_p=*/true)"))]
10477 ;; Pop with return (as used in epilogue RTL)
10479 ;; This instruction is generated when the registers are popped at the end of
10480 ;; epilogue. Here, instead of popping the value into LR and then generating
10481 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
;; Pop-and-return WITH base writeback: like *load_multiple_with_writeback
;; but the parallel is a pop_multiple_return (PC is in the register list),
;; so arm_output_multireg_pop is called with return_pc=true.
10483 (define_insn "*pop_multiple_with_writeback_and_return"
10484 [(match_parallel 0 "pop_multiple_return"
10486 (set (match_operand:SI 1 "s_register_operand" "+rk")
10487 (plus:SI (match_dup 1)
10488 (match_operand:SI 2 "const_int_I_operand" "I")))
10489 (set (match_operand:SI 3 "s_register_operand" "=rk")
10490 (mem:SI (match_dup 1)))
10492 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10495 arm_output_multireg_pop (operands, /*return_pc=*/true,
10496 /*cond=*/const_true_rtx,
10502 [(set_attr "type" "load_16")
10503 (set_attr "predicable" "yes")
10504 (set (attr "length")
10505 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10506 /*write_back_p=*/true)"))]
;; Pop-and-return WITHOUT base writeback (write_back_p=false in the length
;; computation); otherwise the same as the pattern above.
10509 (define_insn "*pop_multiple_with_return"
10510 [(match_parallel 0 "pop_multiple_return"
10512 (set (match_operand:SI 2 "s_register_operand" "=rk")
10513 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10515 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10518 arm_output_multireg_pop (operands, /*return_pc=*/true,
10519 /*cond=*/const_true_rtx,
10525 [(set_attr "type" "load_16")
10526 (set_attr "predicable" "yes")
10527 (set (attr "length")
10528 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10529 /*write_back_p=*/false)"))]
10532 ;; Load into PC and return
;; Single-register return: pop the return address straight into PC with a
;; post-incremented load, "ldr pc, [rN], #4".
10533 (define_insn "*ldr_with_return"
10535 (set (reg:SI PC_REGNUM)
10536 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10537 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10538 "ldr%?\t%|pc, [%0], #4"
10539 [(set_attr "type" "load_4")
10540 (set_attr "predicable" "yes")]
10542 ;; Pop for floating point registers (as used in epilogue RTL)
;; Pop a run of consecutive VFP double registers with base writeback:
;; builds "vldm <base>!, {%P0[-%P1]}" into a local buffer, using the first
;; and (when more than one) last D-register of the parallel as the range.
;; Never predicated (conds unconditional).
10543 (define_insn "*vfp_pop_multiple_with_writeback"
10544 [(match_parallel 0 "pop_multiple_fp"
10545 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10546 (plus:SI (match_dup 1)
10547 (match_operand:SI 2 "const_int_I_operand" "I")))
10548 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10549 (mem:DF (match_dup 1)))])]
10550 "TARGET_32BIT && TARGET_HARD_FLOAT"
10553 int num_regs = XVECLEN (operands[0], 0);
10556 strcpy (pattern, \"vldm\\t\");
10557 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10558 strcat (pattern, \"!, {\");
10559 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10560 strcat (pattern, \"%P0\");
10561 if ((num_regs - 1) > 1)
10563 strcat (pattern, \"-%P1\");
10564 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10567 strcat (pattern, \"}\");
10568 output_asm_insn (pattern, op_list);
10572 [(set_attr "type" "load_16")
10573 (set_attr "conds" "unconditional")
10574 (set_attr "predicable" "no")]
10577 ;; Special patterns for dealing with the constant pool
;; Emit a 32-bit (word) alignment directive into the output stream.
10579 (define_insn "align_4"
10580 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10583 assemble_align (32);
10586 [(set_attr "type" "no_insn")]
;; Emit a 64-bit (doubleword) alignment directive.
10589 (define_insn "align_8"
10590 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10593 assemble_align (64);
10596 [(set_attr "type" "no_insn")]
;; Marks the end of a constant-pool fragment (clears making_const_table).
10599 (define_insn "consttable_end"
10600 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10603 making_const_table = FALSE;
10606 [(set_attr "type" "no_insn")]
;; One-byte pool entry, padded with three zero bytes to keep the pool
;; word-aligned (length 4).
10609 (define_insn "consttable_1"
10610 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10613 making_const_table = TRUE;
10614 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10615 assemble_zeros (3);
10618 [(set_attr "length" "4")
10619 (set_attr "type" "no_insn")]
;; Two-byte pool entry: FP16 constants go through arm_emit_fp16_const,
;; everything else is emitted as a 2-byte integer plus 2 bytes of padding
;; (total length 4).
10622 (define_insn "consttable_2"
10623 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10627 rtx x = operands[0];
10628 making_const_table = TRUE;
10629 switch (GET_MODE_CLASS (GET_MODE (x)))
10632 arm_emit_fp16_const (x);
10635 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10636 assemble_zeros (2);
10641 [(set_attr "length" "4")
10642 (set_attr "type" "no_insn")]
;; Four-byte pool entry: real constants via assemble_real; otherwise strip
;; a stray HIGH wrapper (see in-line comment) and emit a 4-byte integer,
;; keeping any symbol refs marked as used.
10645 (define_insn "consttable_4"
10646 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10650 rtx x = operands[0];
10651 making_const_table = TRUE;
10652 scalar_float_mode float_mode;
10653 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10654 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10657 /* XXX: Sometimes gcc does something really dumb and ends up with
10658 a HIGH in a constant pool entry, usually because it's trying to
10659 load into a VFP register. We know this will always be used in
10660 combination with a LO_SUM which ignores the high bits, so just
10661 strip off the HIGH. */
10662 if (GET_CODE (x) == HIGH)
10664 assemble_integer (x, 4, BITS_PER_WORD, 1);
10665 mark_symbol_refs_as_used (x);
10669 [(set_attr "length" "4")
10670 (set_attr "type" "no_insn")]
;; Eight-byte pool entry: float via assemble_real, integer otherwise.
10673 (define_insn "consttable_8"
10674 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10678 making_const_table = TRUE;
10679 scalar_float_mode float_mode;
10680 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10681 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10682 float_mode, BITS_PER_WORD);
10684 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10687 [(set_attr "length" "8")
10688 (set_attr "type" "no_insn")]
;; Sixteen-byte pool entry: same scheme as consttable_8.
10691 (define_insn "consttable_16"
10692 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10696 making_const_table = TRUE;
10697 scalar_float_mode float_mode;
10698 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10699 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10700 float_mode, BITS_PER_WORD);
10702 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10705 [(set_attr "length" "16")
10706 (set_attr "type" "no_insn")]
10709 ;; V5 Instructions,
;; Count-leading-zeros via the CLZ instruction (ARMv5T and later).
10711 (define_insn "clzsi2"
10712 [(set (match_operand:SI 0 "s_register_operand" "=r")
10713 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10714 "TARGET_32BIT && arm_arch5t"
10716 [(set_attr "predicable" "yes")
10717 (set_attr "type" "clz")])
;; Bit-reverse via RBIT, modelled as UNSPEC_RBIT (Thumb-2-capable cores).
10719 (define_insn "rbitsi2"
10720 [(set (match_operand:SI 0 "s_register_operand" "=r")
10721 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10722 "TARGET_32BIT && arm_arch_thumb2"
10724 [(set_attr "predicable" "yes")
10725 (set_attr "type" "clz")])
10727 ;; Keep this as a CTZ expression until after reload and then split
10728 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10729 ;; to fold with any other expression.
;; ctz(x) == clz(rbit(x)); the post-reload split emits the two insns,
;; reusing operand 0 as the intermediate.
10731 (define_insn_and_split "ctzsi2"
10732 [(set (match_operand:SI 0 "s_register_operand" "=r")
10733 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10734 "TARGET_32BIT && arm_arch_thumb2"
10736 "&& reload_completed"
10739 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10740 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10744 ;; V5E instructions.
;; Data prefetch (PLD); operands 1 and 2 are the standard rw/locality
;; hints of the "prefetch" RTX and are not used in the output template
;; (which is elided in this view).
10746 (define_insn "prefetch"
10747 [(prefetch (match_operand:SI 0 "address_operand" "p")
10748 (match_operand:SI 1 "" "")
10749 (match_operand:SI 2 "" ""))]
10750 "TARGET_32BIT && arm_arch5te"
10752 [(set_attr "type" "load_4")]
10755 ;; General predication pattern
;; Conditional-execution template (opener elided; presumably a
;; define_cond_exec): any comparison of a CC register may guard an insn,
;; except volatile-touching insns when TARGET_NO_VOLATILE_CE is set.
10758 [(match_operator 0 "arm_comparison_operator"
10759 [(match_operand 1 "cc_register" "")
10762 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10764 [(set_attr "predicated" "yes")]
;; Zero-length marker that keeps a register artificially live (used by the
;; epilogue expander above for the eh_return r2 value).
10767 (define_insn "force_register_use"
10768 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10771 [(set_attr "length" "0")
10772 (set_attr "type" "no_insn")]
10776 ;; Patterns for exception handling
;; eh_return expander: operand 0 is the EH stack adjustment; dispatches to
;; the ARM or Thumb-1 variant (the selection conditional is elided here).
10778 (define_expand "eh_return"
10779 [(use (match_operand 0 "general_operand"))]
10784 emit_insn (gen_arm_eh_return (operands[0]));
10786 emit_insn (gen_thumb_eh_return (operands[0]));
10791 ;; We can't expand this before we know where the link register is stored.
;; Post-reload split stores the EH handler address into the saved return
;; address slot via arm_set_return_address, using operand 1 as scratch.
10792 (define_insn_and_split "arm_eh_return"
10793 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10795 (clobber (match_scratch:SI 1 "=&r"))]
10798 "&& reload_completed"
10802 arm_set_return_address (operands[0], operands[1]);
;; Read the thread pointer from CP15 (TPIDRURO, c13/c0/3) on cores with a
;; hardware thread register.
10810 (define_insn "load_tp_hard"
10811 [(set (match_operand:SI 0 "register_operand" "=r")
10812 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10814 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10815 [(set_attr "predicable" "yes")
10816 (set_attr "type" "mrs")]
10819 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software thread-pointer read for FDPIC: calls __aeabi_read_tp, which
;; returns in r0 and additionally clobbers the FDPIC register (r9).
10820 (define_insn "load_tp_soft_fdpic"
10821 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10822 (clobber (reg:SI FDPIC_REGNUM))
10823 (clobber (reg:SI LR_REGNUM))
10824 (clobber (reg:SI IP_REGNUM))
10825 (clobber (reg:CC CC_REGNUM))]
10826 "TARGET_SOFT_TP && TARGET_FDPIC"
10827 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10828 [(set_attr "conds" "clob")
10829 (set_attr "type" "branch")]
10832 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Non-FDPIC software thread-pointer read; same call, no r9 clobber.
10833 (define_insn "load_tp_soft"
10834 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10835 (clobber (reg:SI LR_REGNUM))
10836 (clobber (reg:SI IP_REGNUM))
10837 (clobber (reg:CC CC_REGNUM))]
10838 "TARGET_SOFT_TP && !TARGET_FDPIC"
10839 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10840 [(set_attr "conds" "clob")
10841 (set_attr "type" "branch")]
10844 ;; tls descriptor call
;; TLS descriptor call: emits the "LPIC<n>" label for the descriptor
;; relocation, then "bl <sym>(tlscall)"; result lands in r0.
10845 (define_insn "tlscall"
10846 [(set (reg:SI R0_REGNUM)
10847 (unspec:SI [(reg:SI R0_REGNUM)
10848 (match_operand:SI 0 "" "X")
10849 (match_operand 1 "" "")] UNSPEC_TLS))
10850 (clobber (reg:SI R1_REGNUM))
10851 (clobber (reg:SI LR_REGNUM))
10852 (clobber (reg:SI CC_REGNUM))]
10855 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10856 INTVAL (operands[1]));
10857 return "bl\\t%c0(tlscall)";
10859 [(set_attr "conds" "clob")
10860 (set_attr "length" "4")
10861 (set_attr "type" "branch")]
10864 ;; For thread pointer builtin
;; __builtin_thread_pointer expander: delegates to arm_load_tp.
10865 (define_expand "get_thread_pointersi"
10866 [(match_operand:SI 0 "s_register_operand")]
10870 arm_load_tp (operands[0]);
10876 ;; We only care about the lower 16 bits of the constant
10877 ;; being inserted into the upper 16 bits of the register.
;; MOVT-style insert of a constant into the top 16 bits of operand 0
;; (zero_extract destination); the extract size/position lines and output
;; templates are elided in this view.  Alternatives cover 32-bit cores and
;; ARMv8-M baseline.
10878 (define_insn "*arm_movtas_ze"
10879 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10882 (match_operand:SI 1 "const_int_operand" ""))]
10887 [(set_attr "arch" "32,v8mb")
10888 (set_attr "predicable" "yes")
10889 (set_attr "length" "4")
10890 (set_attr "type" "alu_sreg")]
;; 32-bit byte swap via the REV instruction; alternatives for Thumb-1,
;; Thumb-2 and ARM encodings (templates elided in this view).
10893 (define_insn "*arm_rev"
10894 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10895 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10901 [(set_attr "arch" "t1,t2,32")
10902 (set_attr "length" "2,2,4")
10903 (set_attr "predicable" "no,yes,yes")
10904 (set_attr "type" "rev")]
;; Byte-swap fallback for pre-ARMv6 ARM cores (no REV instruction): the
;; classic eor/ror/bic sequence using temporaries 2 and 3.  Several RTL
;; lines of the sequence are elided in this view; the shift amounts and
;; some intermediate sets are not visible, so the steps below are partial.
10907 (define_expand "arm_legacy_rev"
10908 [(set (match_operand:SI 2 "s_register_operand")
10909 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10913 (lshiftrt:SI (match_dup 2)
10915 (set (match_operand:SI 3 "s_register_operand")
10916 (rotatert:SI (match_dup 1)
10919 (and:SI (match_dup 2)
10920 (const_int -65281)))
10921 (set (match_operand:SI 0 "s_register_operand")
10922 (xor:SI (match_dup 3)
10928 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte-swap fallback using shifts/rotates and ORs over the
;; temporaries in operands 2-5 (again, several lines elided).
10929 (define_expand "thumb_legacy_rev"
10930 [(set (match_operand:SI 2 "s_register_operand")
10931 (ashift:SI (match_operand:SI 1 "s_register_operand")
10933 (set (match_operand:SI 3 "s_register_operand")
10934 (lshiftrt:SI (match_dup 1)
10937 (ior:SI (match_dup 3)
10939 (set (match_operand:SI 4 "s_register_operand")
10941 (set (match_operand:SI 5 "s_register_operand")
10942 (rotatert:SI (match_dup 1)
10945 (ashift:SI (match_dup 5)
10948 (lshiftrt:SI (match_dup 5)
10951 (ior:SI (match_dup 5)
10954 (rotatert:SI (match_dup 5)
10956 (set (match_operand:SI 0 "s_register_operand")
10957 (ior:SI (match_dup 5)
10963 ;; ARM-specific expansion of signed mod by power of 2
10964 ;; using conditional negate.
10965 ;; For r0 % n where n is a power of 2 produce:
10967 ;; and r0, r0, #(n - 1)
10968 ;; and r1, r1, #(n - 1)
10969 ;; rsbpl r0, r1, #0
;; Only handles op1 % (power of 2); bails out (code elided) for any other
;; divisor so the generic expansion is used instead.
10971 (define_expand "modsi3"
10972 [(match_operand:SI 0 "register_operand")
10973 (match_operand:SI 1 "register_operand")
10974 (match_operand:SI 2 "const_int_operand")]
10977 HOST_WIDE_INT val = INTVAL (operands[2]);
10980 || exact_log2 (val) <= 0)
10983 rtx mask = GEN_INT (val - 1);
10985 /* In the special case of x0 % 2 we can do the even shorter:
10988 rsblt r0, r0, #0. */
;; x % 2 path: compare op1 with 0, mask to one bit, and conditionally
;; negate the masked value when op1 < 0 (IF_THEN_ELSE, not COND_EXEC —
;; see the comment further down).
10992 rtx cc_reg = arm_gen_compare_reg (LT,
10993 operands[1], const0_rtx, NULL_RTX);
10994 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10995 rtx masked = gen_reg_rtx (SImode);
10997 emit_insn (gen_andsi3 (masked, operands[1], mask));
10998 emit_move_insn (operands[0],
10999 gen_rtx_IF_THEN_ELSE (SImode, cond,
11000 gen_rtx_NEG (SImode,
;; General power-of-2 path: compute -op1 with flags (subsi3_compare0),
;; mask both op1 and -op1, then select the (negated) masked negative
;; value when op1 was negative, per the header comment's rsbpl sequence.
11006 rtx neg_op = gen_reg_rtx (SImode);
11007 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11010 /* Extract the condition register and mode. */
11011 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11012 rtx cc_reg = SET_DEST (cmp);
11013 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11015 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11017 rtx masked_neg = gen_reg_rtx (SImode);
11018 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11020 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11021 during expand does not always work. Do an IF_THEN_ELSE instead. */
11022 emit_move_insn (operands[0],
11023 gen_rtx_IF_THEN_ELSE (SImode, cond,
11024 gen_rtx_NEG (SImode, masked_neg),
;; 32-bit byte-swap expander.  On pre-ARMv6 (no REV) it falls back to the
;; legacy multi-insn expanders above, allocating the needed temporaries;
;; the Thumb-1 path needs four temporaries, the ARM path fewer (its call
;; is truncated in this view).  With arm_arch6 the insn pattern matches
;; directly; at -Os without ARMv6 the expander is disabled entirely.
11032 (define_expand "bswapsi2"
11033 [(set (match_operand:SI 0 "s_register_operand")
11034 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11035 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11039 rtx op2 = gen_reg_rtx (SImode);
11040 rtx op3 = gen_reg_rtx (SImode);
11044 rtx op4 = gen_reg_rtx (SImode);
11045 rtx op5 = gen_reg_rtx (SImode);
11047 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11048 op2, op3, op4, op5));
11052 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11061 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11062 ;; and unsigned variants, respectively. For rev16, expose
11063 ;; byte-swapping in the lower 16 bits only.
;; Sign-extending 16-bit byte swap (REVSH); t1/t2/ARM alternatives,
;; output templates elided in this view.
11064 (define_insn "*arm_revsh"
11065 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11066 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11072 [(set_attr "arch" "t1,t2,32")
11073 (set_attr "length" "2,2,4")
11074 (set_attr "type" "rev")]
;; HImode byte swap (REV16 semantics on the low halfword).
11077 (define_insn "*arm_rev16"
11078 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11079 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11085 [(set_attr "arch" "t1,t2,32")
11086 (set_attr "length" "2,2,4")
11087 (set_attr "type" "rev")]
11090 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11091 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11092 ;; each valid permutation.
;; REV16 on a full SImode value, matched as the shift-and-mask IOR form;
;; the mask immediates are validated by the aarch_rev16_* predicates.
11094 (define_insn "arm_rev16si2"
11095 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11096 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11098 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11099 (and:SI (lshiftrt:SI (match_dup 1)
11101 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11103 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11104 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11106 [(set_attr "arch" "t1,t2,32")
11107 (set_attr "length" "2,2,4")
11108 (set_attr "type" "rev")]
;; Same as arm_rev16si2 with the IOR operands in the other order (no
;; canonical ordering exists for this shape — see comment above).
11111 (define_insn "arm_rev16si2_alt"
11112 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11113 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11115 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11116 (and:SI (ashift:SI (match_dup 1)
11118 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11120 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11121 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11123 [(set_attr "arch" "t1,t2,32")
11124 (set_attr "length" "2,2,4")
11125 (set_attr "type" "rev")]
;; 16-bit byte-swap expander (condition and body elided in this view).
11128 (define_expand "bswaphi2"
11129 [(set (match_operand:HI 0 "s_register_operand")
11130 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11135 ;; Patterns for LDRD/STRD in Thumb2 mode
;; LDRD with immediate offset: two adjacent SImode loads (operand 4 must
;; equal operand 2 + 4) fused into one ldrd, after reload only, with the
;; register pairing checked by operands_ok_ldrd_strd.
11137 (define_insn "*thumb2_ldrd"
11138 [(set (match_operand:SI 0 "s_register_operand" "=r")
11139 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11140 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11141 (set (match_operand:SI 3 "s_register_operand" "=r")
11142 (mem:SI (plus:SI (match_dup 1)
11143 (match_operand:SI 4 "const_int_operand" ""))))]
11144 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11145 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11146 && (operands_ok_ldrd_strd (operands[0], operands[3],
11147 operands[1], INTVAL (operands[2]),
11149 "ldrd%?\t%0, %3, [%1, %2]"
11150 [(set_attr "type" "load_8")
11151 (set_attr "predicable" "yes")])
;; LDRD at offsets 0 and +4 from the base register.
11153 (define_insn "*thumb2_ldrd_base"
11154 [(set (match_operand:SI 0 "s_register_operand" "=r")
11155 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11156 (set (match_operand:SI 2 "s_register_operand" "=r")
11157 (mem:SI (plus:SI (match_dup 1)
11159 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11160 && (operands_ok_ldrd_strd (operands[0], operands[2],
11161 operands[1], 0, false, true))"
11162 "ldrd%?\t%0, %2, [%1]"
11163 [(set_attr "type" "load_8")
11164 (set_attr "predicable" "yes")])
;; LDRD at offsets -4 and 0 from the base register.
11166 (define_insn "*thumb2_ldrd_base_neg"
11167 [(set (match_operand:SI 0 "s_register_operand" "=r")
11168 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11170 (set (match_operand:SI 2 "s_register_operand" "=r")
11171 (mem:SI (match_dup 1)))]
11172 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11173 && (operands_ok_ldrd_strd (operands[0], operands[2],
11174 operands[1], -4, false, true))"
11175 "ldrd%?\t%0, %2, [%1, #-4]"
11176 [(set_attr "type" "load_8")
11177 (set_attr "predicable" "yes")])
;; STRD with immediate offset — mirror of *thumb2_ldrd for stores.
11179 (define_insn "*thumb2_strd"
11180 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11181 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11182 (match_operand:SI 2 "s_register_operand" "r"))
11183 (set (mem:SI (plus:SI (match_dup 0)
11184 (match_operand:SI 3 "const_int_operand" "")))
11185 (match_operand:SI 4 "s_register_operand" "r"))]
11186 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11187 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11188 && (operands_ok_ldrd_strd (operands[2], operands[4],
11189 operands[0], INTVAL (operands[1]),
11191 "strd%?\t%2, %4, [%0, %1]"
11192 [(set_attr "type" "store_8")
11193 (set_attr "predicable" "yes")])
;; STRD at offsets 0 and +4 from the base register.
11195 (define_insn "*thumb2_strd_base"
11196 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11197 (match_operand:SI 1 "s_register_operand" "r"))
11198 (set (mem:SI (plus:SI (match_dup 0)
11200 (match_operand:SI 2 "s_register_operand" "r"))]
11201 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11202 && (operands_ok_ldrd_strd (operands[1], operands[2],
11203 operands[0], 0, false, false))"
11204 "strd%?\t%1, %2, [%0]"
11205 [(set_attr "type" "store_8")
11206 (set_attr "predicable" "yes")])
;; STRD at offsets -4 and 0 from the base register.
11208 (define_insn "*thumb2_strd_base_neg"
11209 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11211 (match_operand:SI 1 "s_register_operand" "r"))
11212 (set (mem:SI (match_dup 0))
11213 (match_operand:SI 2 "s_register_operand" "r"))]
11214 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11215 && (operands_ok_ldrd_strd (operands[1], operands[2],
11216 operands[0], -4, false, false))"
11217 "strd%?\t%1, %2, [%0, #-4]"
11218 [(set_attr "type" "store_8")
11219 (set_attr "predicable" "yes")])
11221 ;; ARMv8 CRC32 instructions.
;; Iterator-generated CRC32/CRC32C insns: <crc_variant> selects the
;; mnemonic and <crc_mode> the width of the data operand.  Never
;; conditionalised (conds unconditional).
11222 (define_insn "arm_<crc_variant>"
11223 [(set (match_operand:SI 0 "s_register_operand" "=r")
11224 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11225 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11228 "<crc_variant>\\t%0, %1, %2"
11229 [(set_attr "type" "crc")
11230 (set_attr "conds" "unconditional")]
11233 ;; Load the load/store double peephole optimizations.
11234 (include "ldrdstrd.md")
11236 ;; Load the load/store multiple patterns
11237 (include "ldmstm.md")
11239 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11240 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11241 ;; The operands are validated through the load_multiple_operation
11242 ;; match_parallel predicate rather than through constraints so enable it only
;; LDM without writeback for long register lists; assembly produced by
;; arm_output_multireg_pop (return_pc=false), post-reload only.
11244 (define_insn "*load_multiple"
11245 [(match_parallel 0 "load_multiple_operation"
11246 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11247 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11249 "TARGET_32BIT && reload_completed"
11252 arm_output_multireg_pop (operands, /*return_pc=*/false,
11253 /*cond=*/const_true_rtx,
11259 [(set_attr "predicable" "yes")]
;; Soft-float single-precision copysign: op0 = magnitude of op1 with the
;; sign of op2.  Start by copying operand 2, then use a Thumb-2 bitfield
;; insert (insv_t2, width 31 at bit 0) to overwrite everything except
;; the sign bit with bits [30:0] of operand 1; subregs reinterpret the
;; SFmode values as SImode for the insert.  Requires arm_arch_thumb2 for
;; the BFI instruction.
11262 (define_expand "copysignsf3"
11263 [(match_operand:SF 0 "register_operand")
11264 (match_operand:SF 1 "register_operand")
11265 (match_operand:SF 2 "register_operand")]
11266 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11268 emit_move_insn (operands[0], operands[2]);
11269 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11270 GEN_INT (31), GEN_INT (0),
11271 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; Soft-float double-precision copysign: op0 = magnitude of op1 with the
;; sign of op2.  The low word is copied straight from operand 1; the
;; high word is operand 1's high word with bit 31 replaced by operand
;; 2's sign, extracted by a logical shift right of 31 and inserted with
;; a one-bit insv_t2 at position 31.
11276 (define_expand "copysigndf3"
11277 [(match_operand:DF 0 "register_operand")
11278 (match_operand:DF 1 "register_operand")
11279 (match_operand:DF 2 "register_operand")]
11280 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11282 rtx op0_low = gen_lowpart (SImode, operands[0]);
11283 rtx op0_high = gen_highpart (SImode, operands[0]);
11284 rtx op1_low = gen_lowpart (SImode, operands[1]);
11285 rtx op1_high = gen_highpart (SImode, operands[1]);
11286 rtx op2_high = gen_highpart (SImode, operands[2]);
11288 rtx scratch1 = gen_reg_rtx (SImode);
11289 rtx scratch2 = gen_reg_rtx (SImode);
/* scratch1 holds op2's sign word, scratch2 the result's high word.  */
11290 emit_move_insn (scratch1, op2_high);
11291 emit_move_insn (scratch2, op1_high);
/* scratch1 = op2_high >> 31 (logical), i.e. just the sign bit; then
   insert that single bit at position 31 of scratch2.  */
11293 emit_insn(gen_rtx_SET(scratch1,
11294 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11295 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11296 emit_move_insn (op0_low, op1_low);
11297 emit_move_insn (op0_high, scratch2);
11303 ;; movmisalign patterns for HImode and SImode.
;; Loads go through gen_unaligned_loadhiu/gen_unaligned_loadsi (HImode
;; uses an SImode temporary and narrows afterwards); everything else
;; falls through to unaligned_store<mode>.
;; NOTE(review): the expander's enabling condition (presumably
;; unaligned_access) is on a line not visible in this extract.
11304 (define_expand "movmisalign<mode>"
11305 [(match_operand:HSI 0 "general_operand")
11306 (match_operand:HSI 1 "general_operand")]
11309 /* This pattern is not permitted to fail during expansion: if both arguments
11310 are non-registers (e.g. memory := constant), force operand 1 into a
11312 rtx (* gen_unaligned_load)(rtx, rtx);
/* tmp_dest is the load target; replaced by an SImode temp for HImode.  */
11313 rtx tmp_dest = operands[0];
11314 if (!s_register_operand (operands[0], <MODE>mode)
11315 && !s_register_operand (operands[1], <MODE>mode))
11316 operands[1] = force_reg (<MODE>mode, operands[1]);
11318 if (<MODE>mode == HImode)
11320 gen_unaligned_load = gen_unaligned_loadhiu;
11321 tmp_dest = gen_reg_rtx (SImode);
11324 gen_unaligned_load = gen_unaligned_loadsi;
11326 if (MEM_P (operands[1]))
11328 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
/* HImode loaded zero-extended into SImode; narrow back to op0.  */
11329 if (<MODE>mode == HImode)
11330 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest))
11333 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor data-processing builtin (cdp/cdp2, selected by the CDPI
;; iterator).  All six operands are immediates; each is range-checked at
;; output time by arm_const_bounds against its instruction field width.
11338 (define_insn "arm_<cdp>"
11339 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11340 (match_operand:SI 1 "immediate_operand" "n")
11341 (match_operand:SI 2 "immediate_operand" "n")
11342 (match_operand:SI 3 "immediate_operand" "n")
11343 (match_operand:SI 4 "immediate_operand" "n")
11344 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11345 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
/* op0 = coprocessor number, op1 = opc1, ops 2-4 = CRd/CRn/CRm register
   numbers, op5 = opc2; %c prints the constant bare, CR%c forms the
   coprocessor register name.  */
11347 arm_const_bounds (operands[0], 0, 16);
11348 arm_const_bounds (operands[1], 0, 16);
11349 arm_const_bounds (operands[2], 0, (1 << 5));
11350 arm_const_bounds (operands[3], 0, (1 << 5));
11351 arm_const_bounds (operands[4], 0, (1 << 5));
11352 arm_const_bounds (operands[5], 0, 8);
11353 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11355 [(set_attr "length" "4")
11356 (set_attr "type" "coproc")])
;; Anonymous insn for the ldc/ldc2 (and l-variant) builtins, matched
;; after the arm_<ldc> expander below has legitimized the address into a
;; 'Uz' memory operand.  op0 = coprocessor number, op1 = CRd, op2 = the
;; memory source.
11358 (define_insn "*ldc"
11359 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11360 (match_operand:SI 1 "immediate_operand" "n")
11361 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11362 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11364 arm_const_bounds (operands[0], 0, 16);
11365 arm_const_bounds (operands[1], 0, (1 << 5));
11366 return "<ldc>\\tp%c0, CR%c1, %2";
11368 [(set_attr "length" "4")
11369 (set_attr "type" "coproc")])
;; Anonymous insn for the stc/stc2 (and l-variant) builtins, matched
;; after the arm_<stc> expander below.  op0 = coprocessor number,
;; op1 = CRd, op2 = the memory destination (hence the "=" modifier on
;; its 'Uz' constraint even though it appears inside the unspec).
11371 (define_insn "*stc"
11372 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11373 (match_operand:SI 1 "immediate_operand" "n")
11374 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11375 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11377 arm_const_bounds (operands[0], 0, 16);
11378 arm_const_bounds (operands[1], 0, (1 << 5));
11379 return "<stc>\\tp%c0, CR%c1, %2";
11381 [(set_attr "length" "4")
11382 (set_attr "type" "coproc")])
;; Named expander for the ldc builtins: takes a register base address
;; (mem:SI of an s_register_operand) and lets the *ldc insn above match
;; the resulting pattern.
11384 (define_expand "arm_<ldc>"
11385 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11386 (match_operand:SI 1 "immediate_operand")
11387 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11388 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Named expander for the stc builtins, mirror of arm_<ldc>: register
;; base address, matched by the *stc insn above.
11390 (define_expand "arm_<stc>"
11391 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11392 (match_operand:SI 1 "immediate_operand")
11393 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11394 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Move a core register to a coprocessor register (mcr/mcr2 via the MCRI
;; iterator).  op2 is the source core register; the extra (use
;; (match_dup 2)) keeps it live alongside the unspec_volatile.
11396 (define_insn "arm_<mcr>"
11397 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11398 (match_operand:SI 1 "immediate_operand" "n")
11399 (match_operand:SI 2 "s_register_operand" "r")
11400 (match_operand:SI 3 "immediate_operand" "n")
11401 (match_operand:SI 4 "immediate_operand" "n")
11402 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11403 (use (match_dup 2))]
11404 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
/* op0 = coprocessor number, op1 = opc1, op3/op4 = CRn/CRm, op5 = opc2;
   each immediate is range-checked before the template is emitted.  */
11406 arm_const_bounds (operands[0], 0, 16);
11407 arm_const_bounds (operands[1], 0, 8);
11408 arm_const_bounds (operands[3], 0, (1 << 5));
11409 arm_const_bounds (operands[4], 0, (1 << 5));
11410 arm_const_bounds (operands[5], 0, 8);
11411 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11413 [(set_attr "length" "4")
11414 (set_attr "type" "coproc")])
;; Move a coprocessor register to a core register (mrc/mrc2 via the MRCI
;; iterator).  The result lands in op0; op1 = coprocessor number,
;; op2 = opc1, op3/op4 = CRn/CRm, op5 = opc2.
11416 (define_insn "arm_<mrc>"
11417 [(set (match_operand:SI 0 "s_register_operand" "=r")
11418 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11419 (match_operand:SI 2 "immediate_operand" "n")
11420 (match_operand:SI 3 "immediate_operand" "n")
11421 (match_operand:SI 4 "immediate_operand" "n")
11422 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11423 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11425 arm_const_bounds (operands[1], 0, 16);
11426 arm_const_bounds (operands[2], 0, 8);
11427 arm_const_bounds (operands[3], 0, (1 << 5));
11428 arm_const_bounds (operands[4], 0, (1 << 5));
11429 arm_const_bounds (operands[5], 0, 8);
11430 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11432 [(set_attr "length" "4")
11433 (set_attr "type" "coproc")])
;; Move a core register pair to a coprocessor (mcrr/mcrr2 via the MCRRI
;; iterator).  op2 is a DImode source printed as its low (%Q2) and high
;; (%R2) words; the (use (match_dup 2)) keeps the pair live.
11435 (define_insn "arm_<mcrr>"
11436 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11437 (match_operand:SI 1 "immediate_operand" "n")
11438 (match_operand:DI 2 "s_register_operand" "r")
11439 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11440 (use (match_dup 2))]
11441 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11443 arm_const_bounds (operands[0], 0, 16);
11444 arm_const_bounds (operands[1], 0, 8);
11445 arm_const_bounds (operands[3], 0, (1 << 5));
11446 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11448 [(set_attr "length" "4")
11449 (set_attr "type" "coproc")])
;; Move a coprocessor value to a core register pair (mrrc/mrrc2 via the
;; MRRCI iterator).  The DImode result op0 is printed as its low (%Q0)
;; and high (%R0) words; op1 = coprocessor number, op2 = opc1,
;; op3 = CRm.
11451 (define_insn "arm_<mrrc>"
11452 [(set (match_operand:DI 0 "s_register_operand" "=r")
11453 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11454 (match_operand:SI 2 "immediate_operand" "n")
11455 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11456 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11458 arm_const_bounds (operands[1], 0, 16);
11459 arm_const_bounds (operands[2], 0, 8);
11460 arm_const_bounds (operands[3], 0, (1 << 5));
11461 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11463 [(set_attr "length" "4")
11464 (set_attr "type" "coproc")])
;; Expand a speculative-execution barrier.  On Armv7/Armv8 the
;; *speculation_barrier_insn pattern below is emitted; on anything older
;; we fall back to the libgcc helper via
;; arm_emit_speculation_barrier_function.
11466 (define_expand "speculation_barrier"
11467 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11470 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11471 have a usable barrier (and probably don't need one in practice).
11472 But to be safe if such code is run on later architectures, call a
11473 helper function in libgcc that will do the thing for the active
11475 if (!(arm_arch7 || arm_arch8))
11477 arm_emit_speculation_barrier_function ();
11483 ;; Generate a hard speculation barrier when we have not enabled speculation
;; tracking.  Only available on Armv7/Armv8.
;; NOTE(review): the output template is on a line not visible in this
;; extract; the "length" of 8 indicates two 4-byte instructions.
11485 (define_insn "*speculation_barrier_insn"
11486 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11487 "arm_arch7 || arm_arch8"
11489 [(set_attr "type" "block")
11490 (set_attr "length" "8")]
11493 ;; Vector bits common to IWMMXT and Neon
11494 (include "vec-common.md")
11495 ;; Load the Intel Wireless Multimedia Extension patterns
11496 (include "iwmmxt.md")
11497 ;; Load the VFP co-processor patterns
11499 ;; Thumb-1 patterns
11500 (include "thumb1.md")
11501 ;; Thumb-2 patterns
11502 (include "thumb2.md")
11504 (include "neon.md")
11506 (include "crypto.md")
11507 ;; Synchronization Primitives
11508 (include "sync.md")
11509 ;; Fixed-point patterns
11510 (include "arm-fixed.md")