1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
;; NOTE(review): the enclosing (define_constants ...) opener is not visible
;; here; the vector below maps symbolic names to ARM hard-register numbers
;; used throughout this machine description.
[(R0_REGNUM 0) ; First CORE register
 (R1_REGNUM 1) ; Second CORE register
 (R4_REGNUM 4) ; Fifth CORE register
 (FDPIC_REGNUM 9) ; FDPIC register
 (IP_REGNUM 12) ; Scratch register
 (SP_REGNUM 13) ; Stack pointer
 (LR_REGNUM 14) ; Return address register
 (PC_REGNUM 15) ; Program counter
 (LAST_ARM_REGNUM 15) ;
 (CC_REGNUM 100) ; Condition code pseudo register
 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
;; Processor type. This is created automatically from arm-cores.def.
;; It defines the "tune" attribute tested by the scheduling attributes below.
(include "arm-tune.md")
;; Instruction classification types
; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
; generating ARM code. This is used to control the length of some insn
; patterns that share the same RTL in both ARM and Thumb code.
(define_attr "is_thumb" "yes,no"
  (const (if_then_else (symbol_ref "TARGET_THUMB")
	 (const_string "yes") (const_string "no"))))
; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
(define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
(define_attr "is_thumb1" "yes,no"
  (const (if_then_else (symbol_ref "TARGET_THUMB1")
	 (const_string "yes") (const_string "no"))))
; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
; The arm_restrict_it flag enables the "short IT" feature which
; restricts IT blocks to a single 16-bit instruction.
; This attribute should only be used on 16-bit Thumb-2 instructions
; which may be predicated (the "predicable" attribute must be set).
(define_attr "predicable_short_it" "no,yes" (const_string "no"))
; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
; This attribute should only be used on instructions which may emit
; an IT block in their expansion which is not a short IT.
(define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
;; Operand number of an input operand that is shifted. Zero if the
;; given instruction does not shift one of its input operands.
(define_attr "shift" "" (const_int 0))
;; [For compatibility with AArch64 in pipeline models]
;; Attribute that specifies whether or not the instruction touches fp
(define_attr "fp" "no,yes" (const_string "no"))
; Floating Point Unit. If we only have floating point emulation, then there
; is no point in scheduling the floating point insns. (Well, for best
; performance we should try and group them together).
(define_attr "fpu" "none,vfp"
  (const (symbol_ref "arm_fpu_attr")))
; Predicated means that the insn form is conditionally executed based on a
; predicate. We default to 'no' because no Thumb patterns match this rule
; and not all ARM insns do.
(define_attr "predicated" "yes,no" (const_string "no"))
; LENGTH of an instruction (in bytes)
;; NOTE(review): the default-value expression of "length" is not visible in
;; this chunk.
(define_attr "length" ""
; The architecture which supports the instruction (or alternative).
; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
; Baseline. This attribute is used to compute attribute "enabled",
; use type "any" to enable an alternative in all cases.
(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
  (const_string "any"))
;; NOTE(review): several (const_string "yes") result arms of this cond are
;; not visible in this chunk; each test below selects an alternative when
;; the current target matches that architecture class.
(define_attr "arch_enabled" "no,yes"
  (cond [(eq_attr "arch" "any")
	 (and (eq_attr "arch" "a")
	      (match_test "TARGET_ARM"))
	 (and (eq_attr "arch" "t")
	      (match_test "TARGET_THUMB"))
	 (and (eq_attr "arch" "t1")
	      (match_test "TARGET_THUMB1"))
	 (and (eq_attr "arch" "t2")
	      (match_test "TARGET_THUMB2"))
	 (and (eq_attr "arch" "32")
	      (match_test "TARGET_32BIT"))
	 (and (eq_attr "arch" "v6")
	      (match_test "TARGET_32BIT && arm_arch6"))
	 (and (eq_attr "arch" "nov6")
	      (match_test "TARGET_32BIT && !arm_arch6"))
	 (and (eq_attr "arch" "v6t2")
	      (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
	 (and (eq_attr "arch" "v8mb")
	      (match_test "TARGET_THUMB1 && arm_arch8"))
	 (and (eq_attr "arch" "iwmmxt2")
	      (match_test "TARGET_REALLY_IWMMXT2"))
	 (and (eq_attr "arch" "armv6_or_vfpv3")
	      (match_test "arm_arch6 || TARGET_VFP3"))
	 (and (eq_attr "arch" "neon")
	      (match_test "TARGET_NEON"))
	]
	(const_string "no")))
;; "opt" marks an alternative as preferred when optimizing for speed or for
;; size; "any" means the alternative is usable regardless of -O level.
(define_attr "opt" "any,speed,size"
  (const_string "any"))
;; NOTE(review): some (const_string "yes") arms of this cond are not visible
;; in this chunk.
(define_attr "opt_enabled" "no,yes"
  (cond [(eq_attr "opt" "any")
	 (and (eq_attr "opt" "speed")
	      (match_test "optimize_function_for_speed_p (cfun)"))
	 (and (eq_attr "opt" "size")
	      (match_test "optimize_function_for_size_p (cfun)"))
	 (const_string "yes")]
	(const_string "no")))
;; "yes" when the alternative loads a constant from the literal pool
;; (FP loads whose source operand is constant).
(define_attr "use_literal_pool" "no,yes"
  (cond [(and (eq_attr "type" "f_loads,f_loadd")
	      (match_test "CONSTANT_P (operands[1])"))
	 (const_string "yes")]
	(const_string "no")))
; Enable all alternatives that are both arch_enabled and insn_enabled.
; FIXME:: opt_enabled has been temporarily removed till the time we have
; an attribute that allows the use of such alternatives.
; This depends on caching of speed_p, size_p on a per
; alternative basis. The problem is that the enabled attribute
; cannot depend on any state that is not cached or is not constant
; for a compilation unit. We probably need a generic "hot/cold"
; alternative which if implemented can help with this. We disable this
; until such a time as this is implemented and / or the improvements or
; regressions with removing this attribute are double checked.
; See ashldi3_neon and <shift>di3_neon in neon.md.
;; NOTE(review): the (const_string "no") result arms of this cond are not
;; visible in this chunk.
(define_attr "enabled" "no,yes"
  (cond [(and (eq_attr "predicable_short_it" "no")
	      (and (eq_attr "predicated" "yes")
		   (match_test "arm_restrict_it")))
	 (and (eq_attr "enabled_for_short_it" "no")
	      (match_test "arm_restrict_it"))
	 (eq_attr "arch_enabled" "no")
	]
	(const_string "yes")))
; POOL_RANGE is how far away from a constant pool entry that this insn
; can be placed. If the distance is zero, then this insn will never
; reference the pool.
; Note that for Thumb constant pools the PC value is rounded down to the
; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
; Thumb insns) should be set to <max_range> - 2.
; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
; before its address. It is set to <max_range> - (8 + <data_size>).
(define_attr "arm_pool_range" "" (const_int 0))
(define_attr "thumb2_pool_range" "" (const_int 0))
(define_attr "arm_neg_pool_range" "" (const_int 0))
(define_attr "thumb2_neg_pool_range" "" (const_int 0))
;; Select the ARM or Thumb-2 variant of the range, keyed off "is_thumb".
(define_attr "pool_range" ""
  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
	(attr "arm_pool_range")))
(define_attr "neg_pool_range" ""
  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
	(attr "arm_neg_pool_range")))
; An assembler sequence may clobber the condition codes without us knowing.
; If such an insn references the pool, then we have no way of knowing how,
; so use the most conservative value for pool_range.
(define_asm_attributes
 [(set_attr "conds" "clob")
  (set_attr "length" "4")
  (set_attr "pool_range" "250")])

; Load scheduling, set from the arm_ld_sched variable
; initialized by arm_option_override()
(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
; condition codes: this one is used by final_prescan_insn to speed up
; conditionalizing instructions. It saves having to scan the rtl to see if
; it uses or alters the condition codes.

; USE means that the condition codes are used by the insn in the process of
; outputting code, this means (at present) that we can't use the insn in

; SET means that the purpose of the insn is to set the condition codes in a
; well defined manner.

; CLOB means that the condition codes are altered in an undefined manner, if
; they are altered at all

; UNCONDITIONAL means the instruction cannot be conditionally executed and
; that the instruction does not use or alter the condition codes.

; NOCOND means that the instruction does not use or alter the condition
; codes but can be converted into a conditionally executed instruction.

;; NOTE(review): the opening if_then_else of the default expression is not
;; visible in this chunk.
(define_attr "conds" "use,set,clob,unconditional,nocond"
	(ior (eq_attr "is_thumb1" "yes")
	     (eq_attr "type" "call"))
	(const_string "clob")
	(if_then_else (eq_attr "is_neon_type" "no")
	(const_string "nocond")
	(const_string "unconditional"))))

; Predicable means that the insn can be conditionally executed based on
; an automatically added predicate (additional patterns are generated by
; gen...). We default to 'no' because no Thumb patterns match this rule
; and not all ARM patterns do.
(define_attr "predicable" "no,yes" (const_string "no"))
; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
; have one. Later ones, such as StrongARM, have write-back caches, so don't
; suffer blockages enough to warrant modelling this (and it can adversely
; affect the schedule).
(define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))

; WRITE_CONFLICT implies that a read following an unrelated write is likely
; to stall the processor. Used with model_wbuf above.
;; NOTE(review): the type list and the "yes" arm of this if_then_else are
;; not visible in this chunk.
(define_attr "write_conflict" "no,yes"
  (if_then_else (eq_attr "type"
		(const_string "no")))
; Classify the insns into those that take one cycle and those that take more
; than one on the main cpu execution unit.
(define_attr "core_cycles" "single,multi"
  (if_then_else (eq_attr "type"
    "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
    alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
    alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
    logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
    logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
    wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
    wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
    wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
    wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
    wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
    wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
    wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
    wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
    wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
    wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
    wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
    (const_string "single")
    (const_string "multi")))

;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
;; distant label. Only applicable to Thumb code.
(define_attr "far_jump" "yes,no" (const_string "no"))


;; The number of machine instructions this pattern expands to.
;; Used for Thumb-2 conditional execution.
(define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
;; Per-core pipeline (scheduling) descriptions; the matching one is selected
;; via the "tune" attribute at scheduling time.
(include "marvell-f-iwmmxt.md")
(include "arm-generic.md")
(include "arm926ejs.md")
(include "arm1020e.md")
(include "arm1026ejs.md")
(include "arm1136jfs.md")
(include "fa606te.md")
(include "fa626te.md")
(include "fmp626.md")
(include "fa726te.md")
(include "cortex-a5.md")
(include "cortex-a7.md")
(include "cortex-a8.md")
(include "cortex-a9.md")
(include "cortex-a15.md")
(include "cortex-a17.md")
(include "cortex-a53.md")
(include "cortex-a57.md")
(include "cortex-r4.md")
(include "cortex-r4f.md")
(include "cortex-m7.md")
(include "cortex-m4.md")
(include "cortex-m4-fpu.md")
(include "exynos-m1.md")
(include "marvell-pj4.md")
(include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
;; Signed add with overflow check: compute the sum setting the V flag, then
;; branch to operand 3 when overflow (NE on CC_V) occurred.
(define_expand "addv<mode>4"
  [(match_operand:SIDI 0 "register_operand")
   (match_operand:SIDI 1 "register_operand")
   (match_operand:SIDI 2 "register_operand")
   (match_operand 3 "")]
  emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);

;; Unsigned 32-bit add with overflow check: carry out (LTU on CC_C) means
;; the unsigned sum wrapped.
(define_expand "uaddvsi4"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "s_register_operand")
   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);

;; Unsigned 64-bit variant of the above.
(define_expand "uaddvdi4"
  [(match_operand:DI 0 "s_register_operand")
   (match_operand:DI 1 "s_register_operand")
   (match_operand:DI 2 "s_register_operand")
   (match_operand 3 "")]
  emit_insn (gen_adddi3_compareC (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
;; 32-bit add expander.  Constants that are not directly encodable are
;; synthesized via arm_split_constant.
(define_expand "addsi3"
  [(set (match_operand:SI 0 "s_register_operand")
	(plus:SI (match_operand:SI 1 "s_register_operand")
		 (match_operand:SI 2 "reg_or_int_operand")))]
  if (TARGET_32BIT && CONST_INT_P (operands[2]))
      arm_split_constant (PLUS, SImode, NULL_RTX,
			  INTVAL (operands[2]), operands[0], operands[1],
			  optimize && can_create_pseudo_p ());

; If there is a scratch available, this will be faster than synthesizing the
;; NOTE(review): the peephole2 opener and part of its condition/template are
;; not visible here.  The visible transform loads the (bitwise-not-encodable)
;; constant into the scratch and adds it as a register.
  [(match_scratch:SI 3 "r")
   (set (match_operand:SI 0 "arm_general_register_operand" "")
	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
		 (match_operand:SI 2 "const_int_operand" "")))]
   !(const_ok_for_arm (INTVAL (operands[2]))
     || const_ok_for_arm (-INTVAL (operands[2])))
    && const_ok_for_arm (~INTVAL (operands[2]))"
  [(set (match_dup 3) (match_dup 2))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
;; The r/r/k alternative is required when reloading the address
;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
;; put the duplicated register first, and not try the commutative version.
;; NOTE(review): most of the output template and part of the split condition
;; are missing from this chunk; the split synthesizes non-encodable constant
;; addends via arm_split_constant.
(define_insn_and_split "*arm_addsi3"
  [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
	(plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
		 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
   subw%?\\t%0, %1, #%n2
   subw%?\\t%0, %1, #%n2
   && CONST_INT_P (operands[2])
   && !const_ok_for_op (INTVAL (operands[2]), PLUS)
   && (reload_completed || !arm_eliminable_register (operands[1]))"
  [(clobber (const_int 0))]
  arm_split_constant (PLUS, SImode, curr_insn,
		      INTVAL (operands[2]), operands[0],
  [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
   (set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
   (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
		      (const_string "alu_imm")
		      (const_string "alu_sreg")))
;; 64-bit add that also sets the V (signed-overflow) flag, computed by
;; comparing the infinite-precision (TImode) sum with the truncated one.
(define_insn "adddi3_compareV"
  [(set (reg:CC_V CC_REGNUM)
	   (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
	   (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
	 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
   (set (match_operand:DI 0 "s_register_operand" "=&r")
	(plus:DI (match_dup 1) (match_dup 2)))]
  "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
  [(set_attr "conds" "set")
   (set_attr "length" "8")
   (set_attr "type" "multiple")]

;; 32-bit analogue of the above, using a DImode widening compare.
(define_insn "addsi3_compareV"
  [(set (reg:CC_V CC_REGNUM)
	   (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
	   (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
	 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
   (set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "adds%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]

;; 64-bit add setting the C (unsigned-carry) flag.
(define_insn "adddi3_compareC"
  [(set (reg:CC_C CC_REGNUM)
	   (match_operand:DI 1 "register_operand" "r")
	   (match_operand:DI 2 "register_operand" "r"))
   (set (match_operand:DI 0 "register_operand" "=&r")
	(plus:DI (match_dup 1) (match_dup 2)))]
  "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
  [(set_attr "conds" "set")
   (set_attr "length" "8")
   (set_attr "type" "multiple")]
;; Add that also compares the result against zero (CC_NOOV mode).
(define_insn "addsi3_compare0"
  [(set (reg:CC_NOOV CC_REGNUM)
	 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
		  (match_operand:SI 2 "arm_add_operand" "I,L,r"))
   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
   subs%?\\t%0, %1, #%n2
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]

;; As above but the sum itself is discarded (compare only).
(define_insn "*addsi3_compare0_scratch"
  [(set (reg:CC_NOOV CC_REGNUM)
	 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
		  (match_operand:SI 1 "arm_add_operand" "I,L, r"))
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]

;; Compare a register against the negation of another (CMN form).
(define_insn "*compare_negsi_si"
  [(set (reg:CC_Z CC_REGNUM)
	 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
	 (match_operand:SI 1 "s_register_operand" "l,r")))]
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")
   (set_attr "predicable_short_it" "yes,no")
   (set_attr "type" "alus_sreg")]
;; This is the canonicalization of subsi3_compare when the
;; addend is a constant.
;; operands[2] must equal -operands[3] (checked in the condition below);
;; the output chooses between subs #-imm and adds #imm.
(define_insn "cmpsi2_addneg"
  [(set (reg:CC CC_REGNUM)
	 (match_operand:SI 1 "s_register_operand" "r,r")
	 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(plus:SI (match_dup 1)
		 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
   && (INTVAL (operands[2])
       == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
  /* For 0 and INT_MIN it is essential that we use subs, as adds will result
     in different condition codes (like cmn rather than like cmp), so that
     alternative comes first.  Both alternatives can match for any 0x??000000
     where except for 0 and INT_MIN it doesn't matter what we choose, and also
     for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
  if (which_alternative == 0 && operands[3] != const1_rtx)
    return "subs%?\\t%0, %1, #%n3";
  return "adds%?\\t%0, %1, %3";
  [(set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
;; Convert the sequence
;;  cmn	rd, #1	(equivalent to cmp rd, #-1)
;;  bcs	dest	((unsigned)rn >= 1)
;; similarly for the beq variant using bcc.
;; This is a common looping idiom (while (n--))
;; NOTE(review): the peephole2 opener, the replacement parallel's head and
;; the GEU/LTU code chosen in the final C fragment are not visible here.
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
   (set (match_operand 2 "cc_register" "")
	(compare (match_dup 0) (const_int -1)))
	(if_then_else (match_operator 3 "equality_operator"
		       [(match_dup 2) (const_int 0)])
		      (match_operand 4 "" "")
		      (match_operand 5 "" "")))]
  "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
		     (match_dup 1) (const_int 1)))
	      (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
	(if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
  "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
   operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
				 operands[2], const0_rtx);"
;; The next four insns work because they compare the result with one of
;; the operands, and we know that the use of the condition code is
;; either GEU or LTU, so we can use the carry flag from the addition
;; instead of doing the compare a second time.
(define_insn "addsi3_compare_op1"
  [(set (reg:CC_C CC_REGNUM)
	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
	(plus:SI (match_dup 1) (match_dup 2)))]
   subs%?\\t%0, %1, #%n2
   subs%?\\t%0, %0, #%n2
   subs%?\\t%0, %1, #%n2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,t2,t2,t2,*,*")
   (set_attr "length" "2,2,2,2,4,4")
    (if_then_else (match_operand 2 "const_int_operand")
		  (const_string "alu_imm")
		  (const_string "alu_sreg")))]

;; As addsi3_compare_op1 but the compare uses the second operand.
(define_insn "*addsi3_compare_op2"
  [(set (reg:CC_C CC_REGNUM)
	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
   subs%?\\t%0, %1, #%n2
   subs%?\\t%0, %0, #%n2
   subs%?\\t%0, %1, #%n2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,t2,t2,t2,*,*")
   (set_attr "length" "2,2,2,2,4,4")
    (if_then_else (match_operand 2 "const_int_operand")
		  (const_string "alu_imm")
		  (const_string "alu_sreg")))]
;; Compare-only forms: the sum is discarded, only the carry-relevant flags
;; are produced (CMN-style).
(define_insn "*compare_addsi2_op0"
  [(set (reg:CC_C CC_REGNUM)
	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
		  (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,t2,*,*")
   (set_attr "predicable_short_it" "yes,yes,no,no")
   (set_attr "length" "2,2,4,4")
    (if_then_else (match_operand 1 "const_int_operand")
		  (const_string "alu_imm")
		  (const_string "alu_sreg")))]

;; As above, compared against the other operand.
(define_insn "*compare_addsi2_op1"
  [(set (reg:CC_C CC_REGNUM)
	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
		  (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,t2,*,*")
   (set_attr "predicable_short_it" "yes,yes,no,no")
   (set_attr "length" "2,2,4,4")
    (if_then_else (match_operand 1 "const_int_operand")
		  (const_string "alu_imm")
		  (const_string "alu_sreg")))]
;; Add-with-carry: operand 3 matches the incoming carry (arm_carry_operation);
;; the K alternative uses sbc with the bitwise-inverted immediate (%B2).
(define_insn "addsi3_carryin"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
			  (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
		 (match_operand:SI 3 "arm_carry_operation" "")))]
   sbc%?\\t%0, %1, #%B2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,*,*")
   (set_attr "length" "4")
   (set_attr "predicable_short_it" "yes,no,no")
   (set_attr "type" "adc_reg,adc_reg,adc_imm")]

;; Canonicalization of the above when the immediate is zero.
(define_insn "add0si3_carryin"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (match_operand:SI 2 "arm_carry_operation" "")
		 (match_operand:SI 1 "arm_not_operand" "r")))]
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "length" "4")
   (set_attr "type" "adc_imm")]

;; Commuted form: carry operand appears first inside the inner plus.
(define_insn "*addsi3_carryin_alt2"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
	(plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
			  (match_operand:SI 1 "s_register_operand" "%l,r,r"))
		 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
   sbc%?\\t%0, %1, #%B2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,*,*")
   (set_attr "length" "4")
   (set_attr "predicable_short_it" "yes,no,no")
   (set_attr "type" "adc_reg,adc_reg,adc_imm")]
;; Add-with-carry where one addend is a shifted register (%S2 prints the
;; shift operator and amount).
(define_insn "*addsi3_carryin_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	 (match_operator:SI 2 "shift_operator"
	  [(match_operand:SI 3 "s_register_operand" "r,r")
	   (match_operand:SI 4 "shift_amount_operand" "M,r")])
	 (match_operand:SI 5 "arm_carry_operation" ""))
	 (match_operand:SI 1 "s_register_operand" "r,r")))]
  "adc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "arch" "32,a")
   (set_attr "shift" "3")
   (set_attr "predicable" "yes")
   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
		      (const_string "alu_shift_imm")
		      (const_string "alu_shift_reg")))]

;; adcs variant: also sets the flags (and therefore clobbers CC).
(define_insn "*addsi3_carryin_clobercc"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
			  (match_operand:SI 2 "arm_rhs_operand" "rI"))
		 (match_operand:SI 3 "arm_carry_operation" "")))
   (clobber (reg:CC CC_REGNUM))]
  "adcs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "adcs_reg")]
;; Signed subtract with overflow check: branch to operand 3 on V set.
(define_expand "subv<mode>4"
  [(match_operand:SIDI 0 "register_operand")
   (match_operand:SIDI 1 "register_operand")
   (match_operand:SIDI 2 "register_operand")
   (match_operand 3 "")]
  emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);

;; Unsigned subtract with overflow (borrow) check: branch on LTU.
(define_expand "usubv<mode>4"
  [(match_operand:SIDI 0 "register_operand")
   (match_operand:SIDI 1 "register_operand")
   (match_operand:SIDI 2 "register_operand")
   (match_operand 3 "")]
  emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);

;; 64-bit subtract also setting the full CC flags (subs/sbcs pair).
(define_insn "subdi3_compare1"
  [(set (reg:CC CC_REGNUM)
	 (match_operand:DI 1 "s_register_operand" "r")
	 (match_operand:DI 2 "s_register_operand" "r")))
   (set (match_operand:DI 0 "s_register_operand" "=&r")
	(minus:DI (match_dup 1) (match_dup 2)))]
  "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
  [(set_attr "conds" "set")
   (set_attr "length" "8")
   (set_attr "type" "multiple")]
;; 32-bit subtract setting the full CC flags.
(define_insn "subsi3_compare1"
  [(set (reg:CC CC_REGNUM)
	 (match_operand:SI 1 "register_operand" "r")
	 (match_operand:SI 2 "register_operand" "r")))
   (set (match_operand:SI 0 "register_operand" "=r")
	(minus:SI (match_dup 1) (match_dup 2)))]
  "subs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]

;; Subtract with borrow (operand 3 is the incoming borrow).  The Pz
;; alternative handles minuend zero via the "sbc rd, rm, rm, lsl #1" trick.
(define_insn "subsi3_carryin"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
			    (match_operand:SI 2 "s_register_operand" "r,r,r"))
		  (match_operand:SI 3 "arm_borrow_operation" "")))]
   sbc%?\\t%0, %2, %2, lsl #1"
  [(set_attr "conds" "use")
   (set_attr "arch" "*,a,t2")
   (set_attr "predicable" "yes")
   (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
;; Compare with borrow-in, producing only the extended flags; the scratch
;; destination register absorbs the unwanted arithmetic result.
(define_insn "cmpsi3_carryin_<CC_EXTEND>out"
  [(set (reg:<CC_EXTEND> CC_REGNUM)
	(compare:<CC_EXTEND>
	 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
		  (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
   (clobber (match_scratch:SI 0 "=l,r"))]
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")
   (set_attr "type" "adc_reg")]

;; Similar to the above, but handling a constant which has a different
;; canonicalization.
(define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
  [(set (reg:<CC_EXTEND> CC_REGNUM)
	(compare:<CC_EXTEND>
	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
		  (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
   (clobber (match_scratch:SI 0 "=l,r"))]
   adcs\\t%0, %1, #%B2"
  [(set_attr "conds" "set")
   (set_attr "type" "adc_imm")]

;; Further canonicalization when the constant is zero.
(define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
  [(set (reg:<CC_EXTEND> CC_REGNUM)
	(compare:<CC_EXTEND>
	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
	 (match_operand:DI 2 "arm_borrow_operation" "")))
   (clobber (match_scratch:SI 0 "=l,r"))]
  [(set_attr "conds" "set")
   (set_attr "type" "adc_imm")]
;; *subsi3_carryin_const: subtract-with-borrow where operand 2 is a
;; negated immediate (constraint L); emitted as SBC with #%n2 (the
;; negation of the constant).  Consumes the carry flag (conds "use").
1063 (define_insn "*subsi3_carryin_const"
1064 [(set (match_operand:SI 0 "s_register_operand" "=r")
1066 (match_operand:SI 1 "s_register_operand" "r")
1067 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1068 (match_operand:SI 3 "arm_borrow_operation" "")))]
1070 "sbc\\t%0, %1, #%n2"
1071 [(set_attr "conds" "use")
1072 (set_attr "type" "adc_imm")]
;; *subsi3_carryin_const0: the constant-zero form — operand 1 minus the
;; borrow only.  NOTE(review): the output template line is missing from
;; this extraction.
1075 (define_insn "*subsi3_carryin_const0"
1076 [(set (match_operand:SI 0 "s_register_operand" "=r")
1077 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1078 (match_operand:SI 2 "arm_borrow_operation" "")))]
1081 [(set_attr "conds" "use")
1082 (set_attr "type" "adc_imm")]
;; *subsi3_carryin_shift: (op1 - shifted(op3)) - borrow, emitted as SBC
;; with a shifter operand (%S2 prints the shift).  The "type" attribute
;; selects alu_shift_imm vs alu_shift_reg depending on whether the shift
;; amount (operand 4) is a constant.
1085 (define_insn "*subsi3_carryin_shift"
1086 [(set (match_operand:SI 0 "s_register_operand" "=r")
1088 (match_operand:SI 1 "s_register_operand" "r")
1089 (match_operator:SI 2 "shift_operator"
1090 [(match_operand:SI 3 "s_register_operand" "r")
1091 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1092 (match_operand:SI 5 "arm_borrow_operation" "")))]
1094 "sbc%?\\t%0, %1, %3%S2"
1095 [(set_attr "conds" "use")
1096 (set_attr "predicable" "yes")
1097 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1098 (const_string "alu_shift_imm")
1099 (const_string "alu_shift_reg")))]
;; *subsi3_carryin_shift_alt: alternate association of the same
;; computation — (op1 - borrow) - shifted(op3); same SBC output.
1102 (define_insn "*subsi3_carryin_shift_alt"
1103 [(set (match_operand:SI 0 "s_register_operand" "=r")
1105 (match_operand:SI 1 "s_register_operand" "r")
1106 (match_operand:SI 5 "arm_borrow_operation" ""))
1107 (match_operator:SI 2 "shift_operator"
1108 [(match_operand:SI 3 "s_register_operand" "r")
1109 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1111 "sbc%?\\t%0, %1, %3%S2"
1112 [(set_attr "conds" "use")
1113 (set_attr "predicable" "yes")
1114 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1115 (const_string "alu_shift_imm")
1116 (const_string "alu_shift_reg")))]
;; *rsbsi3_carryin_shift: reverse-subtract form — (shifted(op3) - op1)
;; minus the borrow; emitted as RSC (Arm-only mnemonic).
1119 (define_insn "*rsbsi3_carryin_shift"
1120 [(set (match_operand:SI 0 "s_register_operand" "=r")
1122 (match_operator:SI 2 "shift_operator"
1123 [(match_operand:SI 3 "s_register_operand" "r")
1124 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1125 (match_operand:SI 1 "s_register_operand" "r"))
1126 (match_operand:SI 5 "arm_borrow_operation" "")))]
1128 "rsc%?\\t%0, %1, %3%S2"
1129 [(set_attr "conds" "use")
1130 (set_attr "predicable" "yes")
1131 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1132 (const_string "alu_shift_imm")
1133 (const_string "alu_shift_reg")))]
;; *rsbsi3_carryin_shift_alt: alternate association of the RSC form —
;; (shifted(op3) - borrow) - op1.
1136 (define_insn "*rsbsi3_carryin_shift_alt"
1137 [(set (match_operand:SI 0 "s_register_operand" "=r")
1139 (match_operator:SI 2 "shift_operator"
1140 [(match_operand:SI 3 "s_register_operand" "r")
1141 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1142 (match_operand:SI 5 "arm_borrow_operation" ""))
1143 (match_operand:SI 1 "s_register_operand" "r")))]
1145 "rsc%?\\t%0, %1, %3%S2"
1146 [(set_attr "conds" "use")
1147 (set_attr "predicable" "yes")
1148 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1149 (const_string "alu_shift_imm")
1150 (const_string "alu_shift_reg")))]
1153 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
;; Split: rewrites (const << y) + (-1) as a MVN of a shifted value by
;; materializing ~(const - 1) into the scratch (operand 3) first; the
;; preparation statement below computes that constant.
1155 [(set (match_operand:SI 0 "s_register_operand" "")
1156 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1157 (match_operand:SI 2 "s_register_operand" ""))
1159 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1161 [(set (match_dup 3) (match_dup 1))
1162 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1164 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; addsf3/adddf3: standard-named FP add expanders; gated on hard-float
;; (and, for DF, on having double-precision VFP).
1167 (define_expand "addsf3"
1168 [(set (match_operand:SF 0 "s_register_operand")
1169 (plus:SF (match_operand:SF 1 "s_register_operand")
1170 (match_operand:SF 2 "s_register_operand")))]
1171 "TARGET_32BIT && TARGET_HARD_FLOAT"
1175 (define_expand "adddf3"
1176 [(set (match_operand:DF 0 "s_register_operand")
1177 (plus:DF (match_operand:DF 1 "s_register_operand")
1178 (match_operand:DF 2 "s_register_operand")))]
1179 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; subdi3: DImode subtraction expander.  Decomposes the 64-bit subtract
;; into an SImode low-half subtract that sets the carry/borrow flag,
;; followed by an SImode subtract-with-borrow on the high half.  The
;; low-half insn chosen depends on lo_op1: NEG-with-compare when zero,
;; RSB-immediate-compare for other constants (CC_RSB mode), otherwise a
;; plain subsi3_compare (CC mode).  The borrow condition is LTU on the
;; chosen CC register.  NOTE(review): several interior lines (variable
;; declarations such as cc_reg/condition, closing braces, and some else
;; arms) are missing from this extraction.
1183 (define_expand "subdi3"
1185 [(set (match_operand:DI 0 "s_register_operand")
1186 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1187 (match_operand:DI 2 "s_register_operand")))
1188 (clobber (reg:CC CC_REGNUM))])]
1193 if (!REG_P (operands[1]))
1194 operands[1] = force_reg (DImode, operands[1]);
1198 rtx lo_result, hi_result, lo_dest, hi_dest;
1199 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1202 /* Since operands[1] may be an integer, pass it second, so that
1203 any necessary simplifications will be done on the decomposed
1205 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1207 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1208 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1210 if (!arm_rhs_operand (lo_op1, SImode))
1211 lo_op1 = force_reg (SImode, lo_op1);
1213 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1214 || !arm_rhs_operand (hi_op1, SImode))
1215 hi_op1 = force_reg (SImode, hi_op1);
1218 if (lo_op1 == const0_rtx)
1220 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1221 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1223 else if (CONST_INT_P (lo_op1))
1225 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1226 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1227 GEN_INT (~UINTVAL (lo_op1))));
1231 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1232 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1235 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1237 if (hi_op1 == const0_rtx)
1238 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1240 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1242 if (lo_result != lo_dest)
1243 emit_move_insn (lo_result, lo_dest);
1245 if (hi_result != hi_dest)
1246 emit_move_insn (hi_result, hi_dest);
;; subsi3: SImode subtraction expander.  When operand 1 is a constant, on
;; 32-bit targets it is either forced into a register (when early constant
;; splitting is not wanted) or split via arm_split_constant; Thumb-1
;; always forces the constant into a register.
1253 (define_expand "subsi3"
1254 [(set (match_operand:SI 0 "s_register_operand")
1255 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1256 (match_operand:SI 2 "s_register_operand")))]
1259 if (CONST_INT_P (operands[1]))
1263 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1264 operands[1] = force_reg (SImode, operands[1]);
1267 arm_split_constant (MINUS, SImode, NULL_RTX,
1268 INTVAL (operands[1]), operands[0],
1270 optimize && can_create_pseudo_p ());
1274 else /* TARGET_THUMB1 */
1275 operands[1] = force_reg (SImode, operands[1]);
1280 ; ??? Check Thumb-2 split length
;; *arm_subsi3_insn: SImode subtract insn with nine alternatives covering
;; narrow Thumb-2 encodings (l/Py/Pd/Pz), immediate and register forms,
;; stack-pointer-relative forms (k), and a 16-byte worst case (?n) that
;; the split below expands via arm_split_constant when the constant is
;; not encodable.  NOTE(review): the assembler templates for the
;; alternatives are missing from this extraction.
1281 (define_insn_and_split "*arm_subsi3_insn"
1282 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1283 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1284 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1296 "&& (CONST_INT_P (operands[1])
1297 && !const_ok_for_arm (INTVAL (operands[1])))"
1298 [(clobber (const_int 0))]
1300 arm_split_constant (MINUS, SImode, curr_insn,
1301 INTVAL (operands[1]), operands[0], operands[2], 0);
1304 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1305 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1306 (set_attr "predicable" "yes")
1307 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1308 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
;; Peephole/split: (const - reg) where the constant is not directly
;; encodable but its complement is — materialize the constant into a
;; scratch register first, then subtract.
1312 [(match_scratch:SI 3 "r")
1313 (set (match_operand:SI 0 "arm_general_register_operand" "")
1314 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1315 (match_operand:SI 2 "arm_general_register_operand" "")))]
1317 && !const_ok_for_arm (INTVAL (operands[1]))
1318 && const_ok_for_arm (~INTVAL (operands[1]))"
1319 [(set (match_dup 3) (match_dup 1))
1320 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; subsi3_compare0: subtract setting CC_NOOV (flags valid except
;; overflow); visible template is the RSBS alternative for the
;; immediate-first operand order.
1324 (define_insn "subsi3_compare0"
1325 [(set (reg:CC_NOOV CC_REGNUM)
1327 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1328 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1330 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1331 (minus:SI (match_dup 1) (match_dup 2)))]
1336 rsbs%?\\t%0, %2, %1"
1337 [(set_attr "conds" "set")
1338 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;; subsi3_compare: as above but sets the full CC mode (compare of the
;; original operands rather than the result against zero).
1341 (define_insn "subsi3_compare"
1342 [(set (reg:CC CC_REGNUM)
1343 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1344 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1345 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1346 (minus:SI (match_dup 1) (match_dup 2)))]
1351 rsbs%?\\t%0, %2, %1"
1352 [(set_attr "conds" "set")
1353 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1356 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1357 ;; rather than (0 cmp reg). This gives the same results for unsigned
1358 ;; and equality compares which is what we mostly need here.
;; rsb_imm_compare: RSBS of an immediate minus a register, with the CC
;; expressed in CC_RSB mode as (~reg compared with ~imm); the insn
;; condition checks operand 3 really is the complement of operand 1.
1359 (define_insn "rsb_imm_compare"
1360 [(set (reg:CC_RSB CC_REGNUM)
1361 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1362 (match_operand 3 "const_int_operand" "")))
1363 (set (match_operand:SI 0 "s_register_operand" "=r")
1364 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1366 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1368 [(set_attr "conds" "set")
1369 (set_attr "type" "alus_imm")]
1372 ;; Similarly, but the result is unused.
;; rsb_imm_compare_scratch: flags-only variant — the subtraction result
;; goes to a clobbered scratch; %B1 prints the complemented immediate.
1373 (define_insn "rsb_imm_compare_scratch"
1374 [(set (reg:CC_RSB CC_REGNUM)
1375 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1376 (match_operand 1 "arm_not_immediate_operand" "K")))
1377 (clobber (match_scratch:SI 0 "=r"))]
1379 "rsbs\\t%0, %2, #%B1"
1380 [(set_attr "conds" "set")
1381 (set_attr "type" "alus_imm")]
1384 ;; Compare the sum of a value plus a carry against a constant. Uses
1385 ;; RSC, so the result is swapped. Only available on Arm
;; rscsi3_<CC_EXTEND>out_scratch: flags (CC_SWP, i.e. swapped-operand
;; compare) from (extend(reg) + borrow) vs immediate; result discarded.
;; NOTE(review): the output template and condition lines are missing from
;; this extraction.
1386 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
1387 [(set (reg:CC_SWP CC_REGNUM)
1389 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
1390 (match_operand:DI 3 "arm_borrow_operation" ""))
1391 (match_operand 1 "arm_immediate_operand" "I")))
1392 (clobber (match_scratch:SI 0 "=r"))]
1395 [(set_attr "conds" "set")
1396 (set_attr "type" "alus_imm")]
;; subsf3/subdf3: standard-named FP subtract expanders; hard-float only,
;; DF additionally requires double-precision VFP.
1399 (define_expand "subsf3"
1400 [(set (match_operand:SF 0 "s_register_operand")
1401 (minus:SF (match_operand:SF 1 "s_register_operand")
1402 (match_operand:SF 2 "s_register_operand")))]
1403 "TARGET_32BIT && TARGET_HARD_FLOAT"
1407 (define_expand "subdf3"
1408 [(set (match_operand:DF 0 "s_register_operand")
1409 (minus:DF (match_operand:DF 1 "s_register_operand")
1410 (match_operand:DF 2 "s_register_operand")))]
1411 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1416 ;; Multiplication insns
;; mulhi3: HImode multiply, implemented by doing a DSP 16x16->32
;; multiply (mulhisi3) into an SI temporary and moving its low half out.
1418 (define_expand "mulhi3"
1419 [(set (match_operand:HI 0 "s_register_operand")
1420 (mult:HI (match_operand:HI 1 "s_register_operand")
1421 (match_operand:HI 2 "s_register_operand")))]
1422 "TARGET_DSP_MULTIPLY"
1425 rtx result = gen_reg_rtx (SImode);
1426 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1427 emit_move_insn (operands[0], gen_lowpart (HImode, result));
;; mulsi3: SImode multiply expander (note operands 2 and 1 are swapped in
;; the mult rtx; MUL's operand-overlap rules are handled by the insn).
1432 (define_expand "mulsi3"
1433 [(set (match_operand:SI 0 "s_register_operand")
1434 (mult:SI (match_operand:SI 2 "s_register_operand")
1435 (match_operand:SI 1 "s_register_operand")))]
1440 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
;; MUL insn: pre-v6 alternatives use earlyclobber (&r) so the result does
;; not overlap operand 2, per the restriction described above; v6 and
;; Thumb-2 alternatives have no such restriction.
1442 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1443 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1444 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1446 "mul%?\\t%0, %2, %1"
1447 [(set_attr "type" "mul")
1448 (set_attr "predicable" "yes")
1449 (set_attr "arch" "t2,v6,nov6,nov6")
1450 (set_attr "length" "4")
1451 (set_attr "predicable_short_it" "yes,no,*,*")]
1454 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
1455 ;; reusing the same register.
;; MLA: multiply-accumulate (op3 * op2 + op1); earlyclobber on pre-v6
;; alternatives for the same overlap restriction as MUL.
1458 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1460 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1461 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1462 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1464 "mla%?\\t%0, %3, %2, %1"
1465 [(set_attr "type" "mla")
1466 (set_attr "predicable" "yes")
1467 (set_attr "arch" "v6,nov6,nov6,nov6")]
;; MLS: multiply-subtract (op1 - op3 * op2); requires Thumb-2-capable
;; architecture (arm_arch_thumb2).
1471 [(set (match_operand:SI 0 "s_register_operand" "=r")
1473 (match_operand:SI 1 "s_register_operand" "r")
1474 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1475 (match_operand:SI 2 "s_register_operand" "r"))))]
1476 "TARGET_32BIT && arm_arch_thumb2"
1477 "mls%?\\t%0, %3, %2, %1"
1478 [(set_attr "type" "mla")
1479 (set_attr "predicable" "yes")]
;; *mulsi3_compare0: MULS — multiply that also sets flags (CC_NOOV),
;; keeping the result.  Pre-v6 only (v6 forbids flag-setting MUL except
;; for size, see the _v6 variant).
1482 (define_insn "*mulsi3_compare0"
1483 [(set (reg:CC_NOOV CC_REGNUM)
1484 (compare:CC_NOOV (mult:SI
1485 (match_operand:SI 2 "s_register_operand" "r,r")
1486 (match_operand:SI 1 "s_register_operand" "%0,r"))
1488 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1489 (mult:SI (match_dup 2) (match_dup 1)))]
1490 "TARGET_ARM && !arm_arch6"
1491 "muls%?\\t%0, %2, %1"
1492 [(set_attr "conds" "set")
1493 (set_attr "type" "muls")]
;; *mulsi3_compare0_v6: same, allowed on v6 only when optimizing for size.
1496 (define_insn "*mulsi3_compare0_v6"
1497 [(set (reg:CC_NOOV CC_REGNUM)
1498 (compare:CC_NOOV (mult:SI
1499 (match_operand:SI 2 "s_register_operand" "r")
1500 (match_operand:SI 1 "s_register_operand" "r"))
1502 (set (match_operand:SI 0 "s_register_operand" "=r")
1503 (mult:SI (match_dup 2) (match_dup 1)))]
1504 "TARGET_ARM && arm_arch6 && optimize_size"
1505 "muls%?\\t%0, %2, %1"
1506 [(set_attr "conds" "set")
1507 (set_attr "type" "muls")]
;; *mulsi_compare0_scratch: MULS for flags only — product discarded into
;; a clobbered scratch.  Pre-v6.
1510 (define_insn "*mulsi_compare0_scratch"
1511 [(set (reg:CC_NOOV CC_REGNUM)
1512 (compare:CC_NOOV (mult:SI
1513 (match_operand:SI 2 "s_register_operand" "r,r")
1514 (match_operand:SI 1 "s_register_operand" "%0,r"))
1516 (clobber (match_scratch:SI 0 "=&r,&r"))]
1517 "TARGET_ARM && !arm_arch6"
1518 "muls%?\\t%0, %2, %1"
1519 [(set_attr "conds" "set")
1520 (set_attr "type" "muls")]
;; *mulsi_compare0_scratch_v6: flags-only MULS for v6, size-optimized only.
1523 (define_insn "*mulsi_compare0_scratch_v6"
1524 [(set (reg:CC_NOOV CC_REGNUM)
1525 (compare:CC_NOOV (mult:SI
1526 (match_operand:SI 2 "s_register_operand" "r")
1527 (match_operand:SI 1 "s_register_operand" "r"))
1529 (clobber (match_scratch:SI 0 "=r"))]
1530 "TARGET_ARM && arm_arch6 && optimize_size"
1531 "muls%?\\t%0, %2, %1"
1532 [(set_attr "conds" "set")
1533 (set_attr "type" "muls")]
;; *mulsi3addsi_compare0: MLAS — multiply-accumulate that also sets flags
;; (CC_NOOV), keeping the result.  Pre-v6 per the condition; the v6
;; variant below is size-optimized only.
1536 (define_insn "*mulsi3addsi_compare0"
1537 [(set (reg:CC_NOOV CC_REGNUM)
1540 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1541 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1542 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1544 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1545 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1547 "TARGET_ARM && arm_arch6"
1548 "mlas%?\\t%0, %2, %1, %3"
1549 [(set_attr "conds" "set")
1550 (set_attr "type" "mlas")]
;; *mulsi3addsi_compare0_v6: flag-setting MLA on v6, size-optimized only.
1553 (define_insn "*mulsi3addsi_compare0_v6"
1554 [(set (reg:CC_NOOV CC_REGNUM)
1557 (match_operand:SI 2 "s_register_operand" "r")
1558 (match_operand:SI 1 "s_register_operand" "r"))
1559 (match_operand:SI 3 "s_register_operand" "r"))
1561 (set (match_operand:SI 0 "s_register_operand" "=r")
1562 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1564 "TARGET_ARM && arm_arch6 && optimize_size"
1565 "mlas%?\\t%0, %2, %1, %3"
1566 [(set_attr "conds" "set")
1567 (set_attr "type" "mlas")]
;; *mulsi3addsi_compare0_scratch: MLAS for flags only — accumulated
;; product discarded into a clobbered scratch.  Pre-v6.
1570 (define_insn "*mulsi3addsi_compare0_scratch"
1571 [(set (reg:CC_NOOV CC_REGNUM)
1574 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1575 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1576 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1578 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1579 "TARGET_ARM && !arm_arch6"
1580 "mlas%?\\t%0, %2, %1, %3"
1581 [(set_attr "conds" "set")
1582 (set_attr "type" "mlas")]
;; *mulsi3addsi_compare0_scratch_v6: flags-only MLAS for v6, -Os only.
1585 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1586 [(set (reg:CC_NOOV CC_REGNUM)
1589 (match_operand:SI 2 "s_register_operand" "r")
1590 (match_operand:SI 1 "s_register_operand" "r"))
1591 (match_operand:SI 3 "s_register_operand" "r"))
1593 (clobber (match_scratch:SI 0 "=r"))]
1594 "TARGET_ARM && arm_arch6 && optimize_size"
1595 "mlas%?\\t%0, %2, %1, %3"
1596 [(set_attr "conds" "set")
1597 (set_attr "type" "mlas")]
1600 ;; 32x32->64 widening multiply.
1601 ;; The only difference between the v3-5 and v6+ versions is the requirement
1602 ;; that the output does not overlap with either input.
;; <Us>mulsidi3: signed/unsigned 32x32->64 multiply expander; splits the
;; DImode destination into low/high SImode halves and emits <US>mull.
1604 (define_expand "<Us>mulsidi3"
1605 [(set (match_operand:DI 0 "s_register_operand")
1607 (SE:DI (match_operand:SI 1 "s_register_operand"))
1608 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1611 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1612 gen_highpart (SImode, operands[0]),
1613 operands[1], operands[2]));
;; <US>mull: UMULL/SMULL — operand 0 gets the low word, operand 1 the
;; high word; the second (nov6) alternative earlyclobbers both outputs.
1618 (define_insn "<US>mull"
1619 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1621 (match_operand:SI 2 "s_register_operand" "%r,r")
1622 (match_operand:SI 3 "s_register_operand" "r,r")))
1623 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1626 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1629 "<US>mull%?\\t%0, %1, %2, %3"
1630 [(set_attr "type" "umull")
1631 (set_attr "predicable" "yes")
1632 (set_attr "arch" "v6,nov6")]
;; <Us>maddsidi4: 64-bit multiply-accumulate expander — splits both the
;; destination and the DImode accumulator (operand 3) and emits <US>mlal.
1635 (define_expand "<Us>maddsidi4"
1636 [(set (match_operand:DI 0 "s_register_operand")
1639 (SE:DI (match_operand:SI 1 "s_register_operand"))
1640 (SE:DI (match_operand:SI 2 "s_register_operand")))
1641 (match_operand:DI 3 "s_register_operand")))]
1644 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1645 gen_lowpart (SImode, operands[3]),
1646 gen_highpart (SImode, operands[0]),
1647 gen_highpart (SImode, operands[3]),
1648 operands[1], operands[2]));
;; <US>mlal: UMLAL/SMLAL — accumulator halves are tied to the outputs
;; ("0,0" / "2,2"); nov6 alternative earlyclobbers the outputs.
1653 (define_insn "<US>mlal"
1654 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1657 (match_operand:SI 4 "s_register_operand" "%r,r")
1658 (match_operand:SI 5 "s_register_operand" "r,r"))
1659 (match_operand:SI 1 "s_register_operand" "0,0")))
1660 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1665 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1666 (zero_extend:DI (match_dup 1)))
1668 (match_operand:SI 3 "s_register_operand" "2,2")))]
1670 "<US>mlal%?\\t%0, %2, %4, %5"
1671 [(set_attr "type" "umlal")
1672 (set_attr "predicable" "yes")
1673 (set_attr "arch" "v6,nov6")]
;; <US>mulsi3_highpart: expander producing only the high 32 bits of a
;; signed/unsigned 32x32->64 multiply; the low word goes to a scratch.
1676 (define_expand "<US>mulsi3_highpart"
1678 [(set (match_operand:SI 0 "s_register_operand")
1682 (SE:DI (match_operand:SI 1 "s_register_operand"))
1683 (SE:DI (match_operand:SI 2 "s_register_operand")))
1685 (clobber (match_scratch:SI 3 ""))])]
;; *<US>mull_high: the matching insn — UMULL/SMULL with the low word in a
;; clobbered scratch (%3) and the high word in operand 0.
1690 (define_insn "*<US>mull_high"
1691 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1695 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1696 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1698 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1700 "<US>mull%?\\t%3, %0, %2, %1"
1701 [(set_attr "type" "umull")
1702 (set_attr "predicable" "yes")
1703 (set_attr "arch" "v6,nov6,nov6")]
;; mulhisi3: DSP 16x16->32 signed multiply of the bottom halves (SMULBB).
1706 (define_insn "mulhisi3"
1707 [(set (match_operand:SI 0 "s_register_operand" "=r")
1708 (mult:SI (sign_extend:SI
1709 (match_operand:HI 1 "s_register_operand" "%r"))
1711 (match_operand:HI 2 "s_register_operand" "r"))))]
1712 "TARGET_DSP_MULTIPLY"
1713 "smulbb%?\\t%0, %1, %2"
1714 [(set_attr "type" "smulxy")
1715 (set_attr "predicable" "yes")]
;; *mulhisi3tb: top half of operand 1 (via ashiftrt) times bottom half of
;; operand 2 — SMULTB.
1718 (define_insn "*mulhisi3tb"
1719 [(set (match_operand:SI 0 "s_register_operand" "=r")
1720 (mult:SI (ashiftrt:SI
1721 (match_operand:SI 1 "s_register_operand" "r")
1724 (match_operand:HI 2 "s_register_operand" "r"))))]
1725 "TARGET_DSP_MULTIPLY"
1726 "smultb%?\\t%0, %1, %2"
1727 [(set_attr "type" "smulxy")
1728 (set_attr "predicable" "yes")]
;; *mulhisi3bt: bottom half of operand 1 times top half of operand 2 —
;; SMULBT.
1731 (define_insn "*mulhisi3bt"
1732 [(set (match_operand:SI 0 "s_register_operand" "=r")
1733 (mult:SI (sign_extend:SI
1734 (match_operand:HI 1 "s_register_operand" "r"))
1736 (match_operand:SI 2 "s_register_operand" "r")
1738 "TARGET_DSP_MULTIPLY"
1739 "smulbt%?\\t%0, %1, %2"
1740 [(set_attr "type" "smulxy")
1741 (set_attr "predicable" "yes")]
;; *mulhisi3tt: top halves of both operands — SMULTT.
1744 (define_insn "*mulhisi3tt"
1745 [(set (match_operand:SI 0 "s_register_operand" "=r")
1746 (mult:SI (ashiftrt:SI
1747 (match_operand:SI 1 "s_register_operand" "r")
1750 (match_operand:SI 2 "s_register_operand" "r")
1752 "TARGET_DSP_MULTIPLY"
1753 "smultt%?\\t%0, %1, %2"
1754 [(set_attr "type" "smulxy")
1755 (set_attr "predicable" "yes")]
;; maddhisi4: 16x16->32 multiply-accumulate, bottom halves — SMLABB.
1758 (define_insn "maddhisi4"
1759 [(set (match_operand:SI 0 "s_register_operand" "=r")
1760 (plus:SI (mult:SI (sign_extend:SI
1761 (match_operand:HI 1 "s_register_operand" "r"))
1763 (match_operand:HI 2 "s_register_operand" "r")))
1764 (match_operand:SI 3 "s_register_operand" "r")))]
1765 "TARGET_DSP_MULTIPLY"
1766 "smlabb%?\\t%0, %1, %2, %3"
1767 [(set_attr "type" "smlaxy")
1768 (set_attr "predicable" "yes")]
1771 ;; Note: there is no maddhisi4ibt because this one is canonical form
;; *maddhisi4tb: top half of op1 times bottom half of op2, plus op3 —
;; SMLATB.
1772 (define_insn "*maddhisi4tb"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1774 (plus:SI (mult:SI (ashiftrt:SI
1775 (match_operand:SI 1 "s_register_operand" "r")
1778 (match_operand:HI 2 "s_register_operand" "r")))
1779 (match_operand:SI 3 "s_register_operand" "r")))]
1780 "TARGET_DSP_MULTIPLY"
1781 "smlatb%?\\t%0, %1, %2, %3"
1782 [(set_attr "type" "smlaxy")
1783 (set_attr "predicable" "yes")]
;; *maddhisi4tt: top halves of both operands, plus op3 — SMLATT.
1786 (define_insn "*maddhisi4tt"
1787 [(set (match_operand:SI 0 "s_register_operand" "=r")
1788 (plus:SI (mult:SI (ashiftrt:SI
1789 (match_operand:SI 1 "s_register_operand" "r")
1792 (match_operand:SI 2 "s_register_operand" "r")
1794 (match_operand:SI 3 "s_register_operand" "r")))]
1795 "TARGET_DSP_MULTIPLY"
1796 "smlatt%?\\t%0, %1, %2, %3"
1797 [(set_attr "type" "smlaxy")
1798 (set_attr "predicable" "yes")]
;; maddhidi4: 16x16->64 multiply-accumulate into a DImode accumulator
;; tied to the destination ("0") — SMLALBB.
1801 (define_insn "maddhidi4"
1802 [(set (match_operand:DI 0 "s_register_operand" "=r")
1804 (mult:DI (sign_extend:DI
1805 (match_operand:HI 1 "s_register_operand" "r"))
1807 (match_operand:HI 2 "s_register_operand" "r")))
1808 (match_operand:DI 3 "s_register_operand" "0")))]
1809 "TARGET_DSP_MULTIPLY"
1810 "smlalbb%?\\t%Q0, %R0, %1, %2"
1811 [(set_attr "type" "smlalxy")
1812 (set_attr "predicable" "yes")])
1814 ;; Note: there is no maddhidi4ibt because this one is canonical form
;; *maddhidi4tb: 64-bit accumulate of (top half of op1 * bottom half of
;; op2) — SMLALTB; accumulator tied to the destination.
1815 (define_insn "*maddhidi4tb"
1816 [(set (match_operand:DI 0 "s_register_operand" "=r")
1818 (mult:DI (sign_extend:DI
1820 (match_operand:SI 1 "s_register_operand" "r")
1823 (match_operand:HI 2 "s_register_operand" "r")))
1824 (match_operand:DI 3 "s_register_operand" "0")))]
1825 "TARGET_DSP_MULTIPLY"
1826 "smlaltb%?\\t%Q0, %R0, %1, %2"
1827 [(set_attr "type" "smlalxy")
1828 (set_attr "predicable" "yes")])
;; *maddhidi4tt: 64-bit accumulate of (top halves of both operands) —
;; SMLALTT.
1830 (define_insn "*maddhidi4tt"
1831 [(set (match_operand:DI 0 "s_register_operand" "=r")
1833 (mult:DI (sign_extend:DI
1835 (match_operand:SI 1 "s_register_operand" "r")
1839 (match_operand:SI 2 "s_register_operand" "r")
1841 (match_operand:DI 3 "s_register_operand" "0")))]
1842 "TARGET_DSP_MULTIPLY"
1843 "smlaltt%?\\t%Q0, %R0, %1, %2"
1844 [(set_attr "type" "smlalxy")
1845 (set_attr "predicable" "yes")])
;; mulsf3/muldf3 and divsf3/divdf3: standard-named FP multiply/divide
;; expanders.  All require hard-float; the DF forms additionally require
;; double-precision VFP (note muldf3 spells this !TARGET_VFP_SINGLE while
;; divdf3 uses TARGET_VFP_DOUBLE).
1847 (define_expand "mulsf3"
1848 [(set (match_operand:SF 0 "s_register_operand")
1849 (mult:SF (match_operand:SF 1 "s_register_operand")
1850 (match_operand:SF 2 "s_register_operand")))]
1851 "TARGET_32BIT && TARGET_HARD_FLOAT"
1855 (define_expand "muldf3"
1856 [(set (match_operand:DF 0 "s_register_operand")
1857 (mult:DF (match_operand:DF 1 "s_register_operand")
1858 (match_operand:DF 2 "s_register_operand")))]
1859 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1865 (define_expand "divsf3"
1866 [(set (match_operand:SF 0 "s_register_operand")
1867 (div:SF (match_operand:SF 1 "s_register_operand")
1868 (match_operand:SF 2 "s_register_operand")))]
1869 "TARGET_32BIT && TARGET_HARD_FLOAT"
1872 (define_expand "divdf3"
1873 [(set (match_operand:DF 0 "s_register_operand")
1874 (div:DF (match_operand:DF 1 "s_register_operand")
1875 (match_operand:DF 2 "s_register_operand")))]
1876 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1880 ; Expand logical operations. The mid-end expander does not split off memory
1881 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1882 ; So an explicit expander is needed to generate better code.
;; <LOGICAL:optab>di3: DImode and/ior/xor expander — performs the
;; operation separately on the low and high SImode halves via
;; simplify_gen_binary, so constant halves fold immediately.
1884 (define_expand "<LOGICAL:optab>di3"
1885 [(set (match_operand:DI 0 "s_register_operand")
1886 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1887 (match_operand:DI 2 "arm_<optab>di_operand")))]
1890 rtx low = simplify_gen_binary (<CODE>, SImode,
1891 gen_lowpart (SImode, operands[1]),
1892 gen_lowpart (SImode, operands[2]));
1893 rtx high = simplify_gen_binary (<CODE>, SImode,
1894 gen_highpart (SImode, operands[1]),
1895 gen_highpart_mode (SImode, DImode,
1898 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1899 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
;; one_cmpldi2: DImode bitwise NOT expander, same half-by-half strategy
;; using simplify_gen_unary.
1904 (define_expand "one_cmpldi2"
1905 [(set (match_operand:DI 0 "s_register_operand")
1906 (not:DI (match_operand:DI 1 "s_register_operand")))]
1909 rtx low = simplify_gen_unary (NOT, SImode,
1910 gen_lowpart (SImode, operands[1]),
1912 rtx high = simplify_gen_unary (NOT, SImode,
1913 gen_highpart_mode (SImode, DImode,
1917 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1918 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1923 ;; Split DImode and, ior, xor operations. Simply perform the logical
1924 ;; operation on the upper and lower halves of the registers.
1925 ;; This is needed for atomic operations in arm_split_atomic_op.
1926 ;; Avoid splitting IWMMXT instructions.
;; define_split: after reload, rewrite a DImode logical op as two SImode
;; ops; the preparation statements compute the high/low subregs.  Note
;; operands[0..2] are reassigned to the low parts AFTER their high parts
;; are captured in operands[3..5] — order matters.
1928 [(set (match_operand:DI 0 "s_register_operand" "")
1929 (match_operator:DI 6 "logical_binary_operator"
1930 [(match_operand:DI 1 "s_register_operand" "")
1931 (match_operand:DI 2 "s_register_operand" "")]))]
1932 "TARGET_32BIT && reload_completed
1933 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1934 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1935 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1938 operands[3] = gen_highpart (SImode, operands[0]);
1939 operands[0] = gen_lowpart (SImode, operands[0]);
1940 operands[4] = gen_highpart (SImode, operands[1]);
1941 operands[1] = gen_lowpart (SImode, operands[1]);
1942 operands[5] = gen_highpart (SImode, operands[2]);
1943 operands[2] = gen_lowpart (SImode, operands[2]);
1947 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1948 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
;; define_split: DImode NOT -> two SImode NOTs, same subreg technique.
1950 [(set (match_operand:DI 0 "s_register_operand")
1951 (not:DI (match_operand:DI 1 "s_register_operand")))]
1953 [(set (match_dup 0) (not:SI (match_dup 1)))
1954 (set (match_dup 2) (not:SI (match_dup 3)))]
1957 operands[2] = gen_highpart (SImode, operands[0]);
1958 operands[0] = gen_lowpart (SImode, operands[0]);
1959 operands[3] = gen_highpart (SImode, operands[1]);
1960 operands[1] = gen_lowpart (SImode, operands[1]);
;; andsi3: SImode AND expander.  Special-cases a constant operand 2:
;;  - AND with 255 on v6+ becomes a QImode zero-extend (UXTB);
;;  - otherwise constants are forced to a register or split via
;;    arm_split_constant.
;; The Thumb-1 path additionally recognizes ~const < 256 (use BIC with a
;; register-loaded complement) and low-bit / high-bit mask constants of
;; the form (1<<i)-1, rewritten as an extzv or a shift-down/shift-up
;; pair; anything else forces the constant to a register.
;; NOTE(review): several closing braces and a few statements are missing
;; from this extraction.
1964 (define_expand "andsi3"
1965 [(set (match_operand:SI 0 "s_register_operand")
1966 (and:SI (match_operand:SI 1 "s_register_operand")
1967 (match_operand:SI 2 "reg_or_int_operand")))]
1972 if (CONST_INT_P (operands[2]))
1974 if (INTVAL (operands[2]) == 255 && arm_arch6)
1976 operands[1] = convert_to_mode (QImode, operands[1], 1);
1977 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1981 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1982 operands[2] = force_reg (SImode, operands[2]);
1985 arm_split_constant (AND, SImode, NULL_RTX,
1986 INTVAL (operands[2]), operands[0],
1988 optimize && can_create_pseudo_p ());
1994 else /* TARGET_THUMB1 */
1996 if (!CONST_INT_P (operands[2]))
1998 rtx tmp = force_reg (SImode, operands[2]);
1999 if (rtx_equal_p (operands[0], operands[1]))
2003 operands[2] = operands[1];
2011 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2013 operands[2] = force_reg (SImode,
2014 GEN_INT (~INTVAL (operands[2])));
2016 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2021 for (i = 9; i <= 31; i++)
2023 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2025 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2029 else if ((HOST_WIDE_INT_1 << i) - 1
2030 == ~INTVAL (operands[2]))
2032 rtx shift = GEN_INT (i);
2033 rtx reg = gen_reg_rtx (SImode);
2035 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2036 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2042 operands[2] = force_reg (SImode, operands[2]);
2048 ; ??? Check split length for Thumb-2
;; *arm_andsi3_insn: SImode AND insn; alternatives cover AND-immediate,
;; narrow Thumb-2, BIC with complemented immediate (#%B2), AND-register,
;; and a 16-byte constant-splitting case (?n) expanded by the split
;; below when neither the constant nor its complement is encodable.
2049 (define_insn_and_split "*arm_andsi3_insn"
2050 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2051 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2052 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2057 bic%?\\t%0, %1, #%B2
2061 && CONST_INT_P (operands[2])
2062 && !(const_ok_for_arm (INTVAL (operands[2]))
2063 || const_ok_for_arm (~INTVAL (operands[2])))"
2064 [(clobber (const_int 0))]
2066 arm_split_constant (AND, SImode, curr_insn,
2067 INTVAL (operands[2]), operands[0], operands[1], 0);
2070 [(set_attr "length" "4,4,4,4,16")
2071 (set_attr "predicable" "yes")
2072 (set_attr "predicable_short_it" "no,yes,no,no,no")
2073 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
;; *andsi3_compare0: flag-setting AND (ANDS / BICS / ANDS-register),
;; keeping the result; CC_NOOV because overflow is undefined for logicals.
2076 (define_insn "*andsi3_compare0"
2077 [(set (reg:CC_NOOV CC_REGNUM)
2079 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2080 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2082 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2083 (and:SI (match_dup 1) (match_dup 2)))]
2087 bics%?\\t%0, %1, #%B2
2088 ands%?\\t%0, %1, %2"
2089 [(set_attr "conds" "set")
2090 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; *andsi3_compare0_scratch: flags-only variant — TST for the immediate
;; case (scratch "X" unused), BICS into a real scratch for the
;; complemented-immediate case.
2093 (define_insn "*andsi3_compare0_scratch"
2094 [(set (reg:CC_NOOV CC_REGNUM)
2096 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2097 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2099 (clobber (match_scratch:SI 2 "=X,r,X"))]
2103 bics%?\\t%2, %0, #%B1
2105 [(set_attr "conds" "set")
2106 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; *zeroextractsi_compare0_scratch: test a bitfield (zero_extract) for
;; zero by emitting TST with the equivalent mask, computed in the output
;; statement as ((1 << width) - 1) << start.  The condition restricts
;; start/width so the mask is a valid ARM immediate.
2109 (define_insn "*zeroextractsi_compare0_scratch"
2110 [(set (reg:CC_NOOV CC_REGNUM)
2111 (compare:CC_NOOV (zero_extract:SI
2112 (match_operand:SI 0 "s_register_operand" "r")
2113 (match_operand 1 "const_int_operand" "n")
2114 (match_operand 2 "const_int_operand" "n"))
2117 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2118 && INTVAL (operands[1]) > 0
2119 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2120 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2122 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2123 << INTVAL (operands[2]));
2124 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2127 [(set_attr "conds" "set")
2128 (set_attr "predicable" "yes")
2129 (set_attr "type" "logics_imm")]
;; Set operand 0 to (bitfield != 0).  Split into ANDS with the field mask
;; plus a conditional move of 1, clobbering the condition codes.
;; NOTE(review): interior lines appear dropped by extraction (embedded
;; numbering skips) — verify against upstream arm.md.
2132 (define_insn_and_split "*ne_zeroextractsi"
2133 [(set (match_operand:SI 0 "s_register_operand" "=r")
2134 (ne:SI (zero_extract:SI
2135 (match_operand:SI 1 "s_register_operand" "r")
2136 (match_operand:SI 2 "const_int_operand" "n")
2137 (match_operand:SI 3 "const_int_operand" "n"))
2139 (clobber (reg:CC CC_REGNUM))]
2141 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2142 && INTVAL (operands[2]) > 0
2143 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2144 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2147 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2148 && INTVAL (operands[2]) > 0
2149 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2150 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2151 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2152 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2154 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2156 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2157 (match_dup 0) (const_int 1)))]
;; Replace width/position with the field mask for the split AND.
2159 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2160 << INTVAL (operands[3]));
2162 [(set_attr "conds" "clob")
2163 (set (attr "length")
2164 (if_then_else (eq_attr "is_thumb" "yes")
2167 (set_attr "type" "multiple")]
;; As *ne_zeroextractsi but for a field starting at bit 0 that can be
;; tested by shifting it left to the top of the register (ASHIFT + flags)
;; instead of AND with a mask.
;; NOTE(review): extraction appears to have dropped interior lines — verify
;; against upstream arm.md.
2170 (define_insn_and_split "*ne_zeroextractsi_shifted"
2171 [(set (match_operand:SI 0 "s_register_operand" "=r")
2172 (ne:SI (zero_extract:SI
2173 (match_operand:SI 1 "s_register_operand" "r")
2174 (match_operand:SI 2 "const_int_operand" "n")
2177 (clobber (reg:CC CC_REGNUM))]
2181 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2182 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2184 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2186 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2187 (match_dup 0) (const_int 1)))]
;; Shift count that moves the width-bit field to the top of the word.
2189 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2191 [(set_attr "conds" "clob")
2192 (set_attr "length" "8")
2193 (set_attr "type" "multiple")]
;; if_then_else on (bitfield != 0): ANDS with the field mask, then a
;; conditional move of operand 4 when the field is zero.  Operand 0 must
;; not overlap operand 4, since operand 0 is written before operand 4 is
;; consumed.
;; NOTE(review): interior lines appear missing (embedded numbering skips) —
;; verify against upstream arm.md.
2196 (define_insn_and_split "*ite_ne_zeroextractsi"
2197 [(set (match_operand:SI 0 "s_register_operand" "=r")
2198 (if_then_else:SI (ne (zero_extract:SI
2199 (match_operand:SI 1 "s_register_operand" "r")
2200 (match_operand:SI 2 "const_int_operand" "n")
2201 (match_operand:SI 3 "const_int_operand" "n"))
2203 (match_operand:SI 4 "arm_not_operand" "rIK")
2205 (clobber (reg:CC CC_REGNUM))]
2207 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2208 && INTVAL (operands[2]) > 0
2209 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2210 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2211 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2214 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2215 && INTVAL (operands[2]) > 0
2216 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2217 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2218 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2219 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2220 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2222 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2224 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2225 (match_dup 0) (match_dup 4)))]
;; Replace width/position with the field mask for the split AND.
2227 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2228 << INTVAL (operands[3]));
2230 [(set_attr "conds" "clob")
2231 (set_attr "length" "8")
2232 (set_attr "type" "multiple")]
;; Variant of *ite_ne_zeroextractsi for a bottom-anchored field: test via a
;; flag-setting left shift instead of ANDS, then conditionally move
;; operand 3 when the field is zero.  ARM mode only; operand 0 must not
;; overlap operand 3.
;; NOTE(review): interior lines appear missing (embedded numbering skips) —
;; verify against upstream arm.md.
2235 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2236 [(set (match_operand:SI 0 "s_register_operand" "=r")
2237 (if_then_else:SI (ne (zero_extract:SI
2238 (match_operand:SI 1 "s_register_operand" "r")
2239 (match_operand:SI 2 "const_int_operand" "n")
2242 (match_operand:SI 3 "arm_not_operand" "rIK")
2244 (clobber (reg:CC CC_REGNUM))]
2245 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2247 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2248 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2249 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2251 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2253 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2254 (match_dup 0) (match_dup 3)))]
;; Shift count that moves the field to the top of the word.
2256 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2258 [(set_attr "conds" "clob")
2259 (set_attr "length" "8")
2260 (set_attr "type" "multiple")]
2263 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
;; Two splits that rewrite a zero_extract (first) or sign_extract (second)
;; feeding a shiftable operator as a left shift into the scratch register
;; (operand 6) followed by a logical/arithmetic right shift, then the
;; operator applied to the shifted value.
;; NOTE(review): the opening (define_split ...) lines and some interior
;; lines appear to have been dropped by extraction — verify against
;; upstream arm.md.
2265 [(set (match_operand:SI 0 "s_register_operand" "")
2266 (match_operator:SI 1 "shiftable_operator"
2267 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2268 (match_operand:SI 3 "const_int_operand" "")
2269 (match_operand:SI 4 "const_int_operand" ""))
2270 (match_operand:SI 5 "s_register_operand" "")]))
2271 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2273 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2276 [(lshiftrt:SI (match_dup 6) (match_dup 4))
;; Convert (width, position) into (left-shift count, right-shift count).
2279 HOST_WIDE_INT temp = INTVAL (operands[3]);
2281 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2282 operands[4] = GEN_INT (32 - temp);
;; Same transformation for a sign-extended field: arithmetic right shift.
2287 [(set (match_operand:SI 0 "s_register_operand" "")
2288 (match_operator:SI 1 "shiftable_operator"
2289 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2290 (match_operand:SI 3 "const_int_operand" "")
2291 (match_operand:SI 4 "const_int_operand" ""))
2292 (match_operand:SI 5 "s_register_operand" "")]))
2293 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2295 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2298 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2301 HOST_WIDE_INT temp = INTVAL (operands[3]);
2303 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2304 operands[4] = GEN_INT (32 - temp);
2308 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2309 ;;; represented by the bitfield, then this will produce incorrect results.
2310 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2311 ;;; which have a real bit-field insert instruction, the truncation happens
2312 ;;; in the bit-field insert instruction itself. Since arm does not have a
2313 ;;; bit-field insert instruction, we would have to emit code here to truncate
2314 ;;; the value before we insert. This loses some of the advantage of having
2315 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Expander for bitfield insertion: store operand 3 into the field of
;; operand 0 described by width (operand 1) and start bit (operand 2).
;; Strategy, as visible below: Thumb-2 unaligned store / BFC / BFI fast
;; paths first, then generic mask-and-or sequences with several
;; special-case tricks for masks that are not valid ARM immediates.
;; NOTE(review): many interior lines (braces, FAIL/DONE statements,
;; else-arms) appear to have been dropped by extraction — the embedded
;; numbering skips values.  Verify against upstream arm.md.
2317 (define_expand "insv"
2318 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2319 (match_operand 1 "general_operand")
2320 (match_operand 2 "general_operand"))
2321 (match_operand 3 "reg_or_int_operand"))]
2322 "TARGET_ARM || arm_arch_thumb2"
2325 int start_bit = INTVAL (operands[2]);
2326 int width = INTVAL (operands[1]);
2327 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2328 rtx target, subtarget;
2330 if (arm_arch_thumb2)
2332 if (unaligned_access && MEM_P (operands[0])
2333 && s_register_operand (operands[3], GET_MODE (operands[3]))
2334 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2338 if (BYTES_BIG_ENDIAN)
2339 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2344 base_addr = adjust_address (operands[0], SImode,
2345 start_bit / BITS_PER_UNIT);
2346 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2350 rtx tmp = gen_reg_rtx (HImode);
2352 base_addr = adjust_address (operands[0], HImode,
2353 start_bit / BITS_PER_UNIT);
2354 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2355 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2359 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2361 bool use_bfi = TRUE;
2363 if (CONST_INT_P (operands[3]))
2365 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2369 emit_insn (gen_insv_zero (operands[0], operands[1],
2374 /* See if the set can be done with a single orr instruction. */
2375 if (val == mask && const_ok_for_arm (val << start_bit))
2381 if (!REG_P (operands[3]))
2382 operands[3] = force_reg (SImode, operands[3]);
2384 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2393 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2396 target = copy_rtx (operands[0]);
2397 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2398 subreg as the final target. */
2399 if (GET_CODE (target) == SUBREG)
2401 subtarget = gen_reg_rtx (SImode);
2402 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2403 < GET_MODE_SIZE (SImode))
2404 target = SUBREG_REG (target);
2409 if (CONST_INT_P (operands[3]))
2411 /* Since we are inserting a known constant, we may be able to
2412 reduce the number of bits that we have to clear so that
2413 the mask becomes simple. */
2414 /* ??? This code does not check to see if the new mask is actually
2415 simpler. It may not be. */
2416 rtx op1 = gen_reg_rtx (SImode);
2417 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2418 start of this pattern. */
2419 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2420 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2422 emit_insn (gen_andsi3 (op1, operands[0],
2423 gen_int_mode (~mask2, SImode)));
2424 emit_insn (gen_iorsi3 (subtarget, op1,
2425 gen_int_mode (op3_value << start_bit, SImode)));
2427 else if (start_bit == 0
2428 && !(const_ok_for_arm (mask)
2429 || const_ok_for_arm (~mask)))
2431 /* A Trick, since we are setting the bottom bits in the word,
2432 we can shift operand[3] up, operand[0] down, OR them together
2433 and rotate the result back again. This takes 3 insns, and
2434 the third might be mergeable into another op. */
2435 /* The shift up copes with the possibility that operand[3] is
2436 wider than the bitfield. */
2437 rtx op0 = gen_reg_rtx (SImode);
2438 rtx op1 = gen_reg_rtx (SImode);
2440 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2441 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2442 emit_insn (gen_iorsi3 (op1, op1, op0));
2443 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2445 else if ((width + start_bit == 32)
2446 && !(const_ok_for_arm (mask)
2447 || const_ok_for_arm (~mask)))
2449 /* Similar trick, but slightly less efficient. */
2451 rtx op0 = gen_reg_rtx (SImode);
2452 rtx op1 = gen_reg_rtx (SImode);
2454 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2455 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2456 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2457 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2461 rtx op0 = gen_int_mode (mask, SImode);
2462 rtx op1 = gen_reg_rtx (SImode);
2463 rtx op2 = gen_reg_rtx (SImode);
2465 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2467 rtx tmp = gen_reg_rtx (SImode);
2469 emit_insn (gen_movsi (tmp, op0));
2473 /* Mask out any bits in operand[3] that are not needed. */
2474 emit_insn (gen_andsi3 (op1, operands[3], op0));
2476 if (CONST_INT_P (op0)
2477 && (const_ok_for_arm (mask << start_bit)
2478 || const_ok_for_arm (~(mask << start_bit))))
2480 op0 = gen_int_mode (~(mask << start_bit), SImode);
2481 emit_insn (gen_andsi3 (op2, operands[0], op0));
2485 if (CONST_INT_P (op0))
2487 rtx tmp = gen_reg_rtx (SImode);
2489 emit_insn (gen_movsi (tmp, op0));
2494 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2496 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2500 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2502 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2505 if (subtarget != target)
2507 /* If TARGET is still a SUBREG, then it must be wider than a word,
2508 so we must be careful only to set the subword we were asked to. */
2509 if (GET_CODE (target) == SUBREG)
2510 emit_move_insn (target, subtarget);
2512 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; insv_zero: clear a bitfield (Thumb-2 BFC-style bitfield move of zero).
;; insv_t2: insert register operand 3 into a bitfield via BFI.
;; andsi_notsi_si: AND with complement, i.e. the BIC instruction.
;; NOTE(review): condition strings and some interior lines appear dropped
;; by extraction — verify against upstream arm.md.
2519 (define_insn "insv_zero"
2520 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2521 (match_operand:SI 1 "const_int_M_operand" "M")
2522 (match_operand:SI 2 "const_int_M_operand" "M"))
2526 [(set_attr "length" "4")
2527 (set_attr "predicable" "yes")
2528 (set_attr "type" "bfm")]
2531 (define_insn "insv_t2"
2532 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2533 (match_operand:SI 1 "const_int_M_operand" "M")
2534 (match_operand:SI 2 "const_int_M_operand" "M"))
2535 (match_operand:SI 3 "s_register_operand" "r"))]
2537 "bfi%?\t%0, %3, %2, %1"
2538 [(set_attr "length" "4")
2539 (set_attr "predicable" "yes")
2540 (set_attr "type" "bfm")]
2543 (define_insn "andsi_notsi_si"
2544 [(set (match_operand:SI 0 "s_register_operand" "=r")
2545 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2546 (match_operand:SI 1 "s_register_operand" "r")))]
2548 "bic%?\\t%0, %1, %2"
2549 [(set_attr "predicable" "yes")
2550 (set_attr "type" "logic_reg")]
;; BIC with a shifted second operand: AND of operand 1 with the complement
;; of a shifted register.  Type attribute depends on whether the shift
;; amount (operand 3) is an immediate or a register.
2553 (define_insn "andsi_not_shiftsi_si"
2554 [(set (match_operand:SI 0 "s_register_operand" "=r")
2555 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2556 [(match_operand:SI 2 "s_register_operand" "r")
2557 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2558 (match_operand:SI 1 "s_register_operand" "r")))]
2560 "bic%?\\t%0, %1, %2%S4"
2561 [(set_attr "predicable" "yes")
2562 (set_attr "shift" "2")
2563 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2564 (const_string "logic_shift_imm")
2565 (const_string "logic_shift_reg")))]
2568 ;; Shifted BICS pattern used only to set the CC status register, without
2569 ;; reusing the BICS output.  The pattern restricts the Thumb-2 shift operand
2570 ;; to an immediate, because BICS on Thumb-2 does not support shift by register.
;; BICS with shifted operand, result discarded into a scratch: only the
;; condition codes are wanted.  Thumb-2 is allowed only with an immediate
;; shift (operand 2 a CONST_INT), per the pattern condition.
;; NOTE(review): compare wrapper lines appear dropped by extraction —
;; verify against upstream arm.md.
2571 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2572 [(set (reg:CC_NOOV CC_REGNUM)
2574 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2575 [(match_operand:SI 1 "s_register_operand" "r")
2576 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2577 (match_operand:SI 3 "s_register_operand" "r"))
2579 (clobber (match_scratch:SI 4 "=r"))]
2580 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2581 "bics%?\\t%4, %3, %1%S0"
2582 [(set_attr "predicable" "yes")
2583 (set_attr "conds" "set")
2584 (set_attr "shift" "1")
2585 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2586 (const_string "logic_shift_imm")
2587 (const_string "logic_shift_reg")))]
2590 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2591 ;; getting reused later.
2592 (define_insn "andsi_not_shiftsi_si_scc"
2593 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2595 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2596 [(match_operand:SI 1 "s_register_operand" "r")
2597 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2598 (match_operand:SI 3 "s_register_operand" "r"))
2600 (set (match_operand:SI 4 "s_register_operand" "=r")
2601 (and:SI (not:SI (match_op_dup 0
2605 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2606 "bics%?\\t%4, %3, %1%S0"
2607 [(set_attr "predicable" "yes")
2608 (set_attr "conds" "set")
2609 (set_attr "shift" "1")
2610 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2611 (const_string "logic_shift_imm")
2612 (const_string "logic_shift_reg")))]
;; Flag-setting BIC: first form keeps the AND-NOT result in operand 0,
;; second (_scratch) form discards it into a match_scratch and only sets
;; the condition codes.
;; NOTE(review): compare wrapper/template lines appear dropped — verify
;; against upstream arm.md.
2615 (define_insn "*andsi_notsi_si_compare0"
2616 [(set (reg:CC_NOOV CC_REGNUM)
2618 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2619 (match_operand:SI 1 "s_register_operand" "r"))
2621 (set (match_operand:SI 0 "s_register_operand" "=r")
2622 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2625 [(set_attr "conds" "set")
2626 (set_attr "type" "logics_shift_reg")]
2629 (define_insn "*andsi_notsi_si_compare0_scratch"
2630 [(set (reg:CC_NOOV CC_REGNUM)
2632 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2633 (match_operand:SI 1 "s_register_operand" "r"))
2635 (clobber (match_scratch:SI 0 "=r"))]
2638 [(set_attr "conds" "set")
2639 (set_attr "type" "logics_shift_reg")]
;; Expander for 32-bit inclusive OR.  For a constant operand 2 on 32-bit
;; targets it either forces the constant into a register (when early
;; splitting is not wanted) or lets arm_split_constant synthesise the OR;
;; Thumb-1 always forces the constant into a register.
;; NOTE(review): braces/DONE lines appear dropped by extraction — verify
;; against upstream arm.md.
2642 (define_expand "iorsi3"
2643 [(set (match_operand:SI 0 "s_register_operand")
2644 (ior:SI (match_operand:SI 1 "s_register_operand")
2645 (match_operand:SI 2 "reg_or_int_operand")))]
2648 if (CONST_INT_P (operands[2]))
2652 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2653 operands[2] = force_reg (SImode, operands[2]);
2656 arm_split_constant (IOR, SImode, NULL_RTX,
2657 INTVAL (operands[2]), operands[0],
2659 optimize && can_create_pseudo_p ());
2663 else /* TARGET_THUMB1 */
2665 rtx tmp = force_reg (SImode, operands[2]);
2666 if (rtx_equal_p (operands[0], operands[1]))
2670 operands[2] = operands[1];
;; ORR insn with five alternatives (immediate, Thumb-2 short form, ORN
;; immediate, register, arbitrary constant); the last alternative is split
;; after the insn is matched by calling arm_split_constant.  The trailing
;; peephole-style split below loads an ORN-able constant into a scratch
;; register first.
;; NOTE(review): several lines (templates, condition strings, the opening
;; define of the second pattern) appear dropped by extraction — verify
;; against upstream arm.md.
2678 (define_insn_and_split "*iorsi3_insn"
2679 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2680 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2681 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2686 orn%?\\t%0, %1, #%B2
2690 && CONST_INT_P (operands[2])
2691 && !(const_ok_for_arm (INTVAL (operands[2]))
2692 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2693 [(clobber (const_int 0))]
2695 arm_split_constant (IOR, SImode, curr_insn,
2696 INTVAL (operands[2]), operands[0], operands[1], 0);
2699 [(set_attr "length" "4,4,4,4,16")
2700 (set_attr "arch" "32,t2,t2,32,32")
2701 (set_attr "predicable" "yes")
2702 (set_attr "predicable_short_it" "no,yes,no,no,no")
2703 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Split: when the constant is not ORR-able but its complement is, move the
;; constant into scratch operand 3 and OR with the register.
2707 [(match_scratch:SI 3 "r")
2708 (set (match_operand:SI 0 "arm_general_register_operand" "")
2709 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2710 (match_operand:SI 2 "const_int_operand" "")))]
2712 && !const_ok_for_arm (INTVAL (operands[2]))
2713 && const_ok_for_arm (~INTVAL (operands[2]))"
2714 [(set (match_dup 3) (match_dup 2))
2715 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; Flag-setting ORR (ORRS): first form keeps the OR result in operand 0,
;; second (_scratch) form discards it and only sets the condition codes.
;; NOTE(review): compare wrapper lines appear dropped — verify against
;; upstream arm.md.
2719 (define_insn "*iorsi3_compare0"
2720 [(set (reg:CC_NOOV CC_REGNUM)
2722 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2723 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2725 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2726 (ior:SI (match_dup 1) (match_dup 2)))]
2728 "orrs%?\\t%0, %1, %2"
2729 [(set_attr "conds" "set")
2730 (set_attr "arch" "*,t2,*")
2731 (set_attr "length" "4,2,4")
2732 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2735 (define_insn "*iorsi3_compare0_scratch"
2736 [(set (reg:CC_NOOV CC_REGNUM)
2738 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2739 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2741 (clobber (match_scratch:SI 0 "=r,l,r"))]
2743 "orrs%?\\t%0, %1, %2"
2744 [(set_attr "conds" "set")
2745 (set_attr "arch" "*,t2,*")
2746 (set_attr "length" "4,2,4")
2747 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; Expander for 32-bit exclusive OR; mirrors iorsi3 above: constants are
;; either forced into a register or synthesised via arm_split_constant;
;; Thumb-1 always forces the constant into a register.
;; NOTE(review): braces/DONE lines appear dropped by extraction — verify
;; against upstream arm.md.
2750 (define_expand "xorsi3"
2751 [(set (match_operand:SI 0 "s_register_operand")
2752 (xor:SI (match_operand:SI 1 "s_register_operand")
2753 (match_operand:SI 2 "reg_or_int_operand")))]
2755 "if (CONST_INT_P (operands[2]))
2759 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2760 operands[2] = force_reg (SImode, operands[2]);
2763 arm_split_constant (XOR, SImode, NULL_RTX,
2764 INTVAL (operands[2]), operands[0],
2766 optimize && can_create_pseudo_p ());
2770 else /* TARGET_THUMB1 */
2772 rtx tmp = force_reg (SImode, operands[2]);
2773 if (rtx_equal_p (operands[0], operands[1]))
2777 operands[2] = operands[1];
;; EOR insn with four alternatives (immediate, Thumb-2 short form,
;; register, arbitrary constant); the constant alternative is split via
;; arm_split_constant, mirroring *iorsi3_insn.
;; NOTE(review): template/condition lines appear dropped — verify against
;; upstream arm.md.
2784 (define_insn_and_split "*arm_xorsi3"
2785 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2786 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2787 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2795 && CONST_INT_P (operands[2])
2796 && !const_ok_for_arm (INTVAL (operands[2]))"
2797 [(clobber (const_int 0))]
2799 arm_split_constant (XOR, SImode, curr_insn,
2800 INTVAL (operands[2]), operands[0], operands[1], 0);
2803 [(set_attr "length" "4,4,4,16")
2804 (set_attr "predicable" "yes")
2805 (set_attr "predicable_short_it" "no,yes,no,no")
2806 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; Flag-setting EOR (EORS): first form keeps the XOR result in operand 0;
;; the second (_scratch) form is the TEQ-style compare that discards it.
2809 (define_insn "*xorsi3_compare0"
2810 [(set (reg:CC_NOOV CC_REGNUM)
2811 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2812 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2814 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2815 (xor:SI (match_dup 1) (match_dup 2)))]
2817 "eors%?\\t%0, %1, %2"
2818 [(set_attr "conds" "set")
2819 (set_attr "type" "logics_imm,logics_reg")]
2822 (define_insn "*xorsi3_compare0_scratch"
2823 [(set (reg:CC_NOOV CC_REGNUM)
2824 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2825 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2829 [(set_attr "conds" "set")
2830 (set_attr "type" "logics_imm,logics_reg")]
2833 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2834 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; Split implementing the De Morgan rewrite described in the comment above:
;; (IOR (AND (NOT A) (NOT B)) C) -> scratch = AND (IOR A B) (NOT C);
;; result = NOT scratch, hoping the final NOT merges into a later insn.
;; NOTE(review): the opening (define_split ...) line appears dropped by
;; extraction — verify against upstream arm.md.
2838 [(set (match_operand:SI 0 "s_register_operand" "")
2839 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2840 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2841 (match_operand:SI 3 "arm_rhs_operand" "")))
2842 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2844 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2845 (not:SI (match_dup 3))))
2846 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (AND (IOR A B) (NOT C)) as a two-insn ORR + BIC sequence, split after
;; reload; the NOT is folded into a constant when operand 3 is CONST_INT.
2850 (define_insn_and_split "*andsi_iorsi3_notsi"
2851 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2852 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2853 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2854 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2856 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2857 "&& reload_completed"
2858 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2859 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2861 /* If operands[3] is a constant make sure to fold the NOT into it
2862 to avoid creating a NOT of a CONST_INT. */
2863 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2864 if (CONST_INT_P (not_rtx))
2866 operands[4] = operands[0];
2867 operands[5] = not_rtx;
2871 operands[5] = operands[0];
2872 operands[4] = not_rtx;
2875 [(set_attr "length" "8")
2876 (set_attr "ce_count" "2")
2877 (set_attr "predicable" "yes")
2878 (set_attr "type" "multiple")]
2881 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2882 ; insns are available?
;; Four symmetric splits: a zero_extract/sign_extract combined (in either
;; operand order) with a matching logical operation on a right-shifted
;; register is rewritten as a left shift into the scratch (operand 8)
;; followed by the right shift, so both operands use the same shift form.
;; All four require the outer and inner operators to match and the field
;; width to equal 32 minus the shift count.
;; NOTE(review): the opening (define_split ...) lines and some interior
;; lines appear dropped by extraction — verify against upstream arm.md.
2884 [(set (match_operand:SI 0 "s_register_operand" "")
2885 (match_operator:SI 1 "logical_binary_operator"
2886 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2887 (match_operand:SI 3 "const_int_operand" "")
2888 (match_operand:SI 4 "const_int_operand" ""))
2889 (match_operator:SI 9 "logical_binary_operator"
2890 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2891 (match_operand:SI 6 "const_int_operand" ""))
2892 (match_operand:SI 7 "s_register_operand" "")])]))
2893 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2895 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2896 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2899 [(ashift:SI (match_dup 2) (match_dup 4))
2903 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2906 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Same as above with the operand order of the outer operator swapped.
2910 [(set (match_operand:SI 0 "s_register_operand" "")
2911 (match_operator:SI 1 "logical_binary_operator"
2912 [(match_operator:SI 9 "logical_binary_operator"
2913 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2914 (match_operand:SI 6 "const_int_operand" ""))
2915 (match_operand:SI 7 "s_register_operand" "")])
2916 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2917 (match_operand:SI 3 "const_int_operand" "")
2918 (match_operand:SI 4 "const_int_operand" ""))]))
2919 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2921 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2922 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2925 [(ashift:SI (match_dup 2) (match_dup 4))
2929 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2932 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed-field variant: sign_extract paired with an arithmetic right shift.
2936 [(set (match_operand:SI 0 "s_register_operand" "")
2937 (match_operator:SI 1 "logical_binary_operator"
2938 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2939 (match_operand:SI 3 "const_int_operand" "")
2940 (match_operand:SI 4 "const_int_operand" ""))
2941 (match_operator:SI 9 "logical_binary_operator"
2942 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2943 (match_operand:SI 6 "const_int_operand" ""))
2944 (match_operand:SI 7 "s_register_operand" "")])]))
2945 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2947 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2948 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2951 [(ashift:SI (match_dup 2) (match_dup 4))
2955 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2958 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed-field variant with swapped outer operand order.
2962 [(set (match_operand:SI 0 "s_register_operand" "")
2963 (match_operator:SI 1 "logical_binary_operator"
2964 [(match_operator:SI 9 "logical_binary_operator"
2965 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2966 (match_operand:SI 6 "const_int_operand" ""))
2967 (match_operand:SI 7 "s_register_operand" "")])
2968 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2969 (match_operand:SI 3 "const_int_operand" "")
2970 (match_operand:SI 4 "const_int_operand" ""))]))
2971 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2973 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2974 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2977 [(ashift:SI (match_dup 2) (match_dup 4))
2981 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2984 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2988 ;; Minimum and maximum insns
;; Signed max expander plus two special-case insns: smax with 0 is
;; BIC rd, rn, rn ASR #31 (clamp negatives to 0); smax with -1 is
;; ORR rd, rn, rn ASR #31.  For those constants the expander emits a SET
;; without the CC clobber.
;; NOTE(review): interior lines appear dropped by extraction — verify
;; against upstream arm.md.
2990 (define_expand "smaxsi3"
2992 (set (match_operand:SI 0 "s_register_operand")
2993 (smax:SI (match_operand:SI 1 "s_register_operand")
2994 (match_operand:SI 2 "arm_rhs_operand")))
2995 (clobber (reg:CC CC_REGNUM))])]
2998 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3000 /* No need for a clobber of the condition code register here. */
3001 emit_insn (gen_rtx_SET (operands[0],
3002 gen_rtx_SMAX (SImode, operands[1],
3008 (define_insn "*smax_0"
3009 [(set (match_operand:SI 0 "s_register_operand" "=r")
3010 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3013 "bic%?\\t%0, %1, %1, asr #31"
3014 [(set_attr "predicable" "yes")
3015 (set_attr "type" "logic_shift_reg")]
3018 (define_insn "*smax_m1"
3019 [(set (match_operand:SI 0 "s_register_operand" "=r")
3020 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3023 "orr%?\\t%0, %1, %1, asr #31"
3024 [(set_attr "predicable" "yes")
3025 (set_attr "type" "logic_shift_reg")]
;; General signed max: CMP followed by one or two conditional moves
;; (8 or 12 bytes depending on whether operand 0 ties with operand 1);
;; split into an explicit compare plus GE-conditional select.
;; NOTE(review): template/condition lines appear dropped by extraction —
;; verify against upstream arm.md.
3028 (define_insn_and_split "*arm_smax_insn"
3029 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3030 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3031 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3032 (clobber (reg:CC CC_REGNUM))]
3035 ; cmp\\t%1, %2\;movlt\\t%0, %2
3036 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3038 [(set (reg:CC CC_REGNUM)
3039 (compare:CC (match_dup 1) (match_dup 2)))
3041 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3045 [(set_attr "conds" "clob")
3046 (set_attr "length" "8,12")
3047 (set_attr "type" "multiple")]
;; Signed min expander plus special cases, mirroring smaxsi3 above:
;; smin with 0 is AND rd, rn, rn ASR #31; the general case is CMP plus
;; LT-conditional moves.
;; NOTE(review): interior lines appear dropped by extraction — verify
;; against upstream arm.md.
3050 (define_expand "sminsi3"
3052 (set (match_operand:SI 0 "s_register_operand")
3053 (smin:SI (match_operand:SI 1 "s_register_operand")
3054 (match_operand:SI 2 "arm_rhs_operand")))
3055 (clobber (reg:CC CC_REGNUM))])]
3058 if (operands[2] == const0_rtx)
3060 /* No need for a clobber of the condition code register here. */
3061 emit_insn (gen_rtx_SET (operands[0],
3062 gen_rtx_SMIN (SImode, operands[1],
3068 (define_insn "*smin_0"
3069 [(set (match_operand:SI 0 "s_register_operand" "=r")
3070 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3073 "and%?\\t%0, %1, %1, asr #31"
3074 [(set_attr "predicable" "yes")
3075 (set_attr "type" "logic_shift_reg")]
3078 (define_insn_and_split "*arm_smin_insn"
3079 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3080 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3081 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3082 (clobber (reg:CC CC_REGNUM))]
3085 ; cmp\\t%1, %2\;movge\\t%0, %2
3086 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3088 [(set (reg:CC CC_REGNUM)
3089 (compare:CC (match_dup 1) (match_dup 2)))
3091 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3095 [(set_attr "conds" "clob")
3096 (set_attr "length" "8,12")
3097 (set_attr "type" "multiple,multiple")]
;; Unsigned max expander: emits the parallel (umax + CC clobber) matched by
;; *arm_umaxsi3 below.
3100 (define_expand "umaxsi3"
3102 (set (match_operand:SI 0 "s_register_operand")
3103 (umax:SI (match_operand:SI 1 "s_register_operand")
3104 (match_operand:SI 2 "arm_rhs_operand")))
3105 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: CMP followed by CS/CC-conditional moves (8 or 12 bytes
;; depending on operand ties); split into an explicit compare plus a
;; GEU-conditional select.
;; NOTE(review): template/condition lines appear dropped by extraction —
;; verify against upstream arm.md.
3110 (define_insn_and_split "*arm_umaxsi3"
3111 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3112 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3113 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3114 (clobber (reg:CC CC_REGNUM))]
3117 ; cmp\\t%1, %2\;movcc\\t%0, %2
3118 ; cmp\\t%1, %2\;movcs\\t%0, %1
3119 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3121 [(set (reg:CC CC_REGNUM)
3122 (compare:CC (match_dup 1) (match_dup 2)))
3124 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3128 [(set_attr "conds" "clob")
3129 (set_attr "length" "8,8,12")
;; Was "store_4": this pattern emits cmp + conditional moves and performs
;; no memory store, so classify it like the signed min/max patterns above.
3130 (set_attr "type" "multiple")]
;; Unsigned min expander: emits the parallel (umin + CC clobber) matched by
;; *arm_uminsi3 below.
3133 (define_expand "uminsi3"
3135 (set (match_operand:SI 0 "s_register_operand")
3136 (umin:SI (match_operand:SI 1 "s_register_operand")
3137 (match_operand:SI 2 "arm_rhs_operand")))
3138 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: CMP followed by CS/CC-conditional moves, split into an
;; explicit compare plus an LTU-conditional select; mirror of
;; *arm_umaxsi3 above.
;; NOTE(review): template/condition lines appear dropped by extraction —
;; verify against upstream arm.md.
3143 (define_insn_and_split "*arm_uminsi3"
3144 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3145 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3146 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3147 (clobber (reg:CC CC_REGNUM))]
3150 ; cmp\\t%1, %2\;movcs\\t%0, %2
3151 ; cmp\\t%1, %2\;movcc\\t%0, %1
3152 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3154 [(set (reg:CC CC_REGNUM)
3155 (compare:CC (match_dup 1) (match_dup 2)))
3157 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3161 [(set_attr "conds" "clob")
3162 (set_attr "length" "8,8,12")
;; Was "store_4": this pattern emits cmp + conditional moves and performs
;; no memory store, so classify it like the signed min/max patterns above.
3163 (set_attr "type" "multiple")]
;; Store min/max of two registers to memory: CMP then two conditional STRs
;; (with an IT block on Thumb-2).  Only enabled when optimising for size
;; and not under -mrestrict-it.  Here "store_4" is correct — the pattern
;; really stores.
3166 (define_insn "*store_minmaxsi"
3167 [(set (match_operand:SI 0 "memory_operand" "=m")
3168 (match_operator:SI 3 "minmax_operator"
3169 [(match_operand:SI 1 "s_register_operand" "r")
3170 (match_operand:SI 2 "s_register_operand" "r")]))
3171 (clobber (reg:CC CC_REGNUM))]
3172 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3174 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3175 operands[1], operands[2]);
3176 output_asm_insn (\"cmp\\t%1, %2\", operands);
3178 output_asm_insn (\"ite\t%d3\", operands);
3179 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3180 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3183 [(set_attr "conds" "clob")
3184 (set (attr "length")
3185 (if_then_else (eq_attr "is_thumb" "yes")
3188 (set_attr "type" "store_4")]
3191 ; Reject the frame pointer in operand[1], since reloading this after
3192 ; it has been eliminated can cause carnage.
;; Min/max combined with a shiftable operator: CMP then two conditional
;; arithmetic insns selecting between the min/max operands.  Operand 1 must
;; not be an eliminable (frame) register — see the comment above.
;; NOTE(review): interior lines appear dropped by extraction — verify
;; against upstream arm.md.
3193 (define_insn "*minmax_arithsi"
3194 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3195 (match_operator:SI 4 "shiftable_operator"
3196 [(match_operator:SI 5 "minmax_operator"
3197 [(match_operand:SI 2 "s_register_operand" "r,r")
3198 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3199 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3200 (clobber (reg:CC CC_REGNUM))]
3201 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3204 enum rtx_code code = GET_CODE (operands[4]);
3207 if (which_alternative != 0 || operands[3] != const0_rtx
3208 || (code != PLUS && code != IOR && code != XOR))
3213 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3214 operands[2], operands[3]);
3215 output_asm_insn (\"cmp\\t%2, %3\", operands);
3219 output_asm_insn (\"ite\\t%d5\", operands);
3221 output_asm_insn (\"it\\t%d5\", operands);
3223 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3225 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3228 [(set_attr "conds" "clob")
3229 (set (attr "length")
3230 (if_then_else (eq_attr "is_thumb" "yes")
3233 (set_attr "type" "multiple")]
3236 ; Reject the frame pointer in operand[1], since reloading this after
3237 ; it has been eliminated can cause carnage.
;; Non-canonical form: operand 1 combined with a min/max as the second
;; operand.  Split after reload into a compare plus two cond_exec insns;
;; the preparation code builds the condition (operand 4), its reverse
;; (operand 5, via reverse_condition / reverse_condition_maybe_unordered
;; for FP compare modes) and the subtraction RTX (operand 6).
;; NOTE(review): interior lines appear dropped by extraction — verify
;; against upstream arm.md.
3238 (define_insn_and_split "*minmax_arithsi_non_canon"
3239 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3241 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3242 (match_operator:SI 4 "minmax_operator"
3243 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3244 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3245 (clobber (reg:CC CC_REGNUM))]
3246 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3247 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3249 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3250 [(set (reg:CC CC_REGNUM)
3251 (compare:CC (match_dup 2) (match_dup 3)))
3253 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3255 (minus:SI (match_dup 1)
3257 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3261 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3262 operands[2], operands[3]);
3263 enum rtx_code rc = minmax_code (operands[4]);
3264 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3265 operands[2], operands[3]);
3267 if (mode == CCFPmode || mode == CCFPEmode)
3268 rc = reverse_condition_maybe_unordered (rc);
3270 rc = reverse_condition (rc);
3271 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3272 if (CONST_INT_P (operands[3]))
3273 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3275 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3277 [(set_attr "conds" "clob")
3278 (set (attr "length")
3279 (if_then_else (eq_attr "is_thumb" "yes")
3282 (set_attr "type" "multiple")]
;; Saturation helpers: SAT iterates over smin/smax; SATrev names the
;; opposite code, and SATlo/SAThi give the operand index holding the
;; low/high saturation bound for each ordering of the nested min/max.
3285 (define_code_iterator SAT [smin smax])
3286 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3287 (define_code_attr SATlo [(smin "1") (smax "2")])
3288 (define_code_attr SAThi [(smin "2") (smax "1")])
;; Clamp a register to a signed/unsigned range with a single ssat/usat
;; (ARMv6+).  arm_sat_operator_match validates the two constant bounds and
;; reports the bit-width mask and signedness used in the output template.
3290 (define_insn "*satsi_<SAT:code>"
3291 [(set (match_operand:SI 0 "s_register_operand" "=r")
3292 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3293 (match_operand:SI 1 "const_int_operand" "i"))
3294 (match_operand:SI 2 "const_int_operand" "i")))]
3295 "TARGET_32BIT && arm_arch6
3296 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3300 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3301 &mask, &signed_sat))
3304 operands[1] = GEN_INT (mask);
3306 return "ssat%?\t%0, %1, %3";
3308 return "usat%?\t%0, %1, %3";
3310 [(set_attr "predicable" "yes")
3311 (set_attr "type" "alus_imm")]
;; As above, but the value being saturated is itself a shifted operand
;; (%S3 prints the shift applied to register operand 4).
3314 (define_insn "*satsi_<SAT:code>_shift"
3315 [(set (match_operand:SI 0 "s_register_operand" "=r")
3316 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3317 [(match_operand:SI 4 "s_register_operand" "r")
3318 (match_operand:SI 5 "const_int_operand" "i")])
3319 (match_operand:SI 1 "const_int_operand" "i"))
3320 (match_operand:SI 2 "const_int_operand" "i")))]
3321 "TARGET_32BIT && arm_arch6
3322 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3326 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3327 &mask, &signed_sat))
3330 operands[1] = GEN_INT (mask);
3332 return "ssat%?\t%0, %1, %4%S3";
3334 return "usat%?\t%0, %1, %4%S3";
3336 [(set_attr "predicable" "yes")
3337 (set_attr "shift" "3")
3338 (set_attr "type" "logic_shift_reg")])
3340 ;; Shift and rotation insns
;; 64-bit shifts are synthesized from core-register operations; two fresh
;; SImode scratch registers are handed to arm_emit_coreregs_64bit_shift.
3342 (define_expand "ashldi3"
3343 [(set (match_operand:DI 0 "s_register_operand")
3344 (ashift:DI (match_operand:DI 1 "s_register_operand")
3345 (match_operand:SI 2 "reg_or_int_operand")))]
3348 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3349 operands[2], gen_reg_rtx (SImode),
3350 gen_reg_rtx (SImode));
;; SImode left shift; a constant count > 31 is folded to a zero result.
3354 (define_expand "ashlsi3"
3355 [(set (match_operand:SI 0 "s_register_operand")
3356 (ashift:SI (match_operand:SI 1 "s_register_operand")
3357 (match_operand:SI 2 "arm_rhs_operand")))]
3360 if (CONST_INT_P (operands[2])
3361 && (UINTVAL (operands[2])) > 31)
3363 emit_insn (gen_movsi (operands[0], const0_rtx));
;; 64-bit arithmetic right shift, via arm_emit_coreregs_64bit_shift.
3369 (define_expand "ashrdi3"
3370 [(set (match_operand:DI 0 "s_register_operand")
3371 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3372 (match_operand:SI 2 "reg_or_int_operand")))]
3375 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3376 operands[2], gen_reg_rtx (SImode),
3377 gen_reg_rtx (SImode));
;; SImode arithmetic right shift; constant counts > 31 are clamped to 31.
3381 (define_expand "ashrsi3"
3382 [(set (match_operand:SI 0 "s_register_operand")
3383 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3384 (match_operand:SI 2 "arm_rhs_operand")))]
3387 if (CONST_INT_P (operands[2])
3388 && UINTVAL (operands[2]) > 31)
3389 operands[2] = GEN_INT (31);
;; 64-bit logical right shift, via arm_emit_coreregs_64bit_shift.
3393 (define_expand "lshrdi3"
3394 [(set (match_operand:DI 0 "s_register_operand")
3395 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3396 (match_operand:SI 2 "reg_or_int_operand")))]
3399 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3400 operands[2], gen_reg_rtx (SImode),
3401 gen_reg_rtx (SImode));
;; SImode logical right shift; a constant count > 31 is folded to zero.
3405 (define_expand "lshrsi3"
3406 [(set (match_operand:SI 0 "s_register_operand")
3407 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3408 (match_operand:SI 2 "arm_rhs_operand")))]
3411 if (CONST_INT_P (operands[2])
3412 && (UINTVAL (operands[2])) > 31)
3414 emit_insn (gen_movsi (operands[0], const0_rtx));
;; There is no rotate-left instruction: rewrite as rotate-right by
;; (32 - n) % 32 for constants, otherwise compute 32 - reg with subsi3.
3420 (define_expand "rotlsi3"
3421 [(set (match_operand:SI 0 "s_register_operand")
3422 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3423 (match_operand:SI 2 "reg_or_int_operand")))]
3426 if (CONST_INT_P (operands[2]))
3427 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3430 rtx reg = gen_reg_rtx (SImode);
3431 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant counts are reduced mod 32; on Thumb-1 a constant
;; count must be forced into a register (register-shift form only).
3437 (define_expand "rotrsi3"
3438 [(set (match_operand:SI 0 "s_register_operand")
3439 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3440 (match_operand:SI 2 "arm_rhs_operand")))]
3445 if (CONST_INT_P (operands[2])
3446 && UINTVAL (operands[2]) > 31)
3447 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3449 else /* TARGET_THUMB1 */
3451 if (CONST_INT_P (operands [2]))
3452 operands [2] = force_reg (SImode, operands[2]);
;; Generic SImode shift; the assembly text is produced by arm_output_shift.
;; Alternatives cover Thumb-2 short forms (l constraints) and full ARM forms.
3457 (define_insn "*arm_shiftsi3"
3458 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3459 (match_operator:SI 3 "shift_operator"
3460 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3461 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3463 "* return arm_output_shift(operands, 0);"
3464 [(set_attr "predicable" "yes")
3465 (set_attr "arch" "t2,t2,*,*")
3466 (set_attr "predicable_short_it" "yes,yes,no,no")
3467 (set_attr "length" "4")
3468 (set_attr "shift" "1")
3469 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
;; Flag-setting shift: performs the shift and sets CC (overflow ignored,
;; hence CC_NOOV), keeping the shifted value in operand 0.
3472 (define_insn "*shiftsi3_compare0"
3473 [(set (reg:CC_NOOV CC_REGNUM)
3474 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3475 [(match_operand:SI 1 "s_register_operand" "r,r")
3476 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3478 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3479 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3481 "* return arm_output_shift(operands, 1);"
3482 [(set_attr "conds" "set")
3483 (set_attr "shift" "1")
3484 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As above, but only the flags are wanted: the shifted value goes to a
;; scratch register.
3487 (define_insn "*shiftsi3_compare0_scratch"
3488 [(set (reg:CC_NOOV CC_REGNUM)
3489 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3490 [(match_operand:SI 1 "s_register_operand" "r,r")
3491 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3493 (clobber (match_scratch:SI 0 "=r,r"))]
3495 "* return arm_output_shift(operands, 1);"
3496 [(set_attr "conds" "set")
3497 (set_attr "shift" "1")
3498 (set_attr "type" "shift_imm,shift_reg")]
;; Bitwise NOT of a shifted operand, in one mvn instruction.
3501 (define_insn "*not_shiftsi"
3502 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3503 (not:SI (match_operator:SI 3 "shift_operator"
3504 [(match_operand:SI 1 "s_register_operand" "r,r")
3505 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3508 [(set_attr "predicable" "yes")
3509 (set_attr "shift" "1")
3510 (set_attr "arch" "32,a")
3511 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; mvns: NOT of a shifted operand that also sets the condition codes.
3513 (define_insn "*not_shiftsi_compare0"
3514 [(set (reg:CC_NOOV CC_REGNUM)
3516 (not:SI (match_operator:SI 3 "shift_operator"
3517 [(match_operand:SI 1 "s_register_operand" "r,r")
3518 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3520 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3521 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3523 "mvns%?\\t%0, %1%S3"
3524 [(set_attr "conds" "set")
3525 (set_attr "shift" "1")
3526 (set_attr "arch" "32,a")
3527 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; Flags-only variant of the above; the result register is a scratch.
3529 (define_insn "*not_shiftsi_compare0_scratch"
3530 [(set (reg:CC_NOOV CC_REGNUM)
3532 (not:SI (match_operator:SI 3 "shift_operator"
3533 [(match_operand:SI 1 "s_register_operand" "r,r")
3534 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3536 (clobber (match_scratch:SI 0 "=r,r"))]
3538 "mvns%?\\t%0, %1%S3"
3539 [(set_attr "conds" "set")
3540 (set_attr "shift" "1")
3541 (set_attr "arch" "32,a")
3542 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3544 ;; We don't really have extzv, but defining this using shifts helps
3545 ;; to reduce register pressure later on.
;; Zero-extract expander.  On Thumb-2: aligned 16/32-bit fields in memory
;; use the unaligned load patterns; register sources use extzv_t2 (ubfx).
;; On Thumb-1 the field is extracted with a left/right shift pair
;; (extzv_t1).  NOTE(review): interior lines of this expander are not
;; visible in this chunk; control flow between the branches is inferred.
3547 (define_expand "extzv"
3548 [(set (match_operand 0 "s_register_operand")
3549 (zero_extract (match_operand 1 "nonimmediate_operand")
3550 (match_operand 2 "const_int_operand")
3551 (match_operand 3 "const_int_operand")))]
3552 "TARGET_THUMB1 || arm_arch_thumb2"
3555 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3556 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3558 if (arm_arch_thumb2)
3560 HOST_WIDE_INT width = INTVAL (operands[2]);
3561 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3563 if (unaligned_access && MEM_P (operands[1])
3564 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3568 if (BYTES_BIG_ENDIAN)
3569 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3574 base_addr = adjust_address (operands[1], SImode,
3575 bitpos / BITS_PER_UNIT);
3576 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3580 rtx dest = operands[0];
3581 rtx tmp = gen_reg_rtx (SImode);
3583 /* We may get a paradoxical subreg here. Strip it off. */
3584 if (GET_CODE (dest) == SUBREG
3585 && GET_MODE (dest) == SImode
3586 && GET_MODE (SUBREG_REG (dest)) == HImode)
3587 dest = SUBREG_REG (dest);
3589 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3592 base_addr = adjust_address (operands[1], HImode,
3593 bitpos / BITS_PER_UNIT);
3594 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3595 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3599 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3601 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3609 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3612 operands[3] = GEN_INT (rshift);
3616 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3620 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3621 operands[3], gen_reg_rtx (SImode)));
3626 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Shift left to drop the high bits, then logical-shift right to place the
;; field at bit 0; operand 4 is the intermediate scratch.
3628 (define_expand "extzv_t1"
3629 [(set (match_operand:SI 4 "s_register_operand")
3630 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3631 (match_operand:SI 2 "const_int_operand")))
3632 (set (match_operand:SI 0 "s_register_operand")
3633 (lshiftrt:SI (match_dup 4)
3634 (match_operand:SI 3 "const_int_operand")))]
;; Sign-extract expander: analogue of extzv using the signed unaligned
;; load (ldrsh) for memory and extv_regsi (sbfx) for register sources.
3638 (define_expand "extv"
3639 [(set (match_operand 0 "s_register_operand")
3640 (sign_extract (match_operand 1 "nonimmediate_operand")
3641 (match_operand 2 "const_int_operand")
3642 (match_operand 3 "const_int_operand")))]
3645 HOST_WIDE_INT width = INTVAL (operands[2]);
3646 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3648 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3649 && (bitpos % BITS_PER_UNIT) == 0)
3653 if (BYTES_BIG_ENDIAN)
3654 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3658 base_addr = adjust_address (operands[1], SImode,
3659 bitpos / BITS_PER_UNIT);
3660 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3664 rtx dest = operands[0];
3665 rtx tmp = gen_reg_rtx (SImode);
3667 /* We may get a paradoxical subreg here. Strip it off. */
3668 if (GET_CODE (dest) == SUBREG
3669 && GET_MODE (dest) == SImode
3670 && GET_MODE (SUBREG_REG (dest)) == HImode)
3671 dest = SUBREG_REG (dest);
3673 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3676 base_addr = adjust_address (operands[1], HImode,
3677 bitpos / BITS_PER_UNIT);
3678 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3679 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3684 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3686 else if (GET_MODE (operands[0]) == SImode
3687 && GET_MODE (operands[1]) == SImode)
3689 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3697 ; Helper to expand register forms of extv with the proper modes.
3699 (define_expand "extv_regsi"
3700 [(set (match_operand:SI 0 "s_register_operand")
3701 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3702 (match_operand 2 "const_int_operand")
3703 (match_operand 3 "const_int_operand")))]
3708 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; 64-bit unaligned load: emitted as a register pair move by
;; output_move_double (8 bytes of code).
3710 (define_insn "unaligned_loaddi"
3711 [(set (match_operand:DI 0 "s_register_operand" "=r")
3712 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3713 UNSPEC_UNALIGNED_LOAD))]
3714 "TARGET_32BIT && TARGET_LDRD"
3716 return output_move_double (operands, true, NULL);
3718 [(set_attr "length" "8")
3719 (set_attr "type" "load_8")])
;; 32-bit unaligned load (ldr); alternatives for Thumb-1, Thumb-2 short
;; form, and the full 32-bit encoding.
3721 (define_insn "unaligned_loadsi"
3722 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3723 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3724 UNSPEC_UNALIGNED_LOAD))]
3727 ldr\t%0, %1\t@ unaligned
3728 ldr%?\t%0, %1\t@ unaligned
3729 ldr%?\t%0, %1\t@ unaligned"
3730 [(set_attr "arch" "t1,t2,32")
3731 (set_attr "length" "2,2,4")
3732 (set_attr "predicable" "no,yes,yes")
3733 (set_attr "predicable_short_it" "no,yes,no")
3734 (set_attr "type" "load_4")])
3736 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3737 ;; address (there's no immediate format). That's tricky to support
3738 ;; here and we don't really need this pattern for that case, so only
3739 ;; enable for 32-bit ISAs.
3740 (define_insn "unaligned_loadhis"
3741 [(set (match_operand:SI 0 "s_register_operand" "=r")
3743 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3744 UNSPEC_UNALIGNED_LOAD)))]
3745 "unaligned_access && TARGET_32BIT"
3746 "ldrsh%?\t%0, %1\t@ unaligned"
3747 [(set_attr "predicable" "yes")
3748 (set_attr "type" "load_byte")])
;; 16-bit unaligned zero-extending load (ldrh), with Thumb-1/Thumb-2/ARM
;; alternatives as for unaligned_loadsi.
3750 (define_insn "unaligned_loadhiu"
3751 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3753 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3754 UNSPEC_UNALIGNED_LOAD)))]
3757 ldrh\t%0, %1\t@ unaligned
3758 ldrh%?\t%0, %1\t@ unaligned
3759 ldrh%?\t%0, %1\t@ unaligned"
3760 [(set_attr "arch" "t1,t2,32")
3761 (set_attr "length" "2,2,4")
3762 (set_attr "predicable" "no,yes,yes")
3763 (set_attr "predicable_short_it" "no,yes,no")
3764 (set_attr "type" "load_byte")])
;; 64-bit unaligned store, mirroring unaligned_loaddi.
3766 (define_insn "unaligned_storedi"
3767 [(set (match_operand:DI 0 "memory_operand" "=m")
3768 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3769 UNSPEC_UNALIGNED_STORE))]
3770 "TARGET_32BIT && TARGET_LDRD"
3772 return output_move_double (operands, true, NULL);
3774 [(set_attr "length" "8")
3775 (set_attr "type" "store_8")])
;; 32-bit unaligned store (str).
3777 (define_insn "unaligned_storesi"
3778 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3779 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3780 UNSPEC_UNALIGNED_STORE))]
3783 str\t%1, %0\t@ unaligned
3784 str%?\t%1, %0\t@ unaligned
3785 str%?\t%1, %0\t@ unaligned"
3786 [(set_attr "arch" "t1,t2,32")
3787 (set_attr "length" "2,2,4")
3788 (set_attr "predicable" "no,yes,yes")
3789 (set_attr "predicable_short_it" "no,yes,no")
3790 (set_attr "type" "store_4")])
;; 16-bit unaligned store (strh).
3792 (define_insn "unaligned_storehi"
3793 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3794 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3795 UNSPEC_UNALIGNED_STORE))]
3798 strh\t%1, %0\t@ unaligned
3799 strh%?\t%1, %0\t@ unaligned
3800 strh%?\t%1, %0\t@ unaligned"
3801 [(set_attr "arch" "t1,t2,32")
3802 (set_attr "length" "2,2,4")
3803 (set_attr "predicable" "no,yes,yes")
3804 (set_attr "predicable_short_it" "no,yes,no")
3805 (set_attr "type" "store_4")])
;; Signed bit-field extract from a register: sbfx dest, src, lsb, width.
;; The condition checks lsb is 0..31 and width fits in the remaining bits.
3808 (define_insn "*extv_reg"
3809 [(set (match_operand:SI 0 "s_register_operand" "=r")
3810 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3811 (match_operand:SI 2 "const_int_operand" "n")
3812 (match_operand:SI 3 "const_int_operand" "n")))]
3814 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3815 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3816 "sbfx%?\t%0, %1, %3, %2"
3817 [(set_attr "length" "4")
3818 (set_attr "predicable" "yes")
3819 (set_attr "type" "bfm")]
;; Unsigned counterpart: ubfx, with the same lsb/width range checks.
3822 (define_insn "extzv_t2"
3823 [(set (match_operand:SI 0 "s_register_operand" "=r")
3824 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3825 (match_operand:SI 2 "const_int_operand" "n")
3826 (match_operand:SI 3 "const_int_operand" "n")))]
3828 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3829 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3830 "ubfx%?\t%0, %1, %3, %2"
3831 [(set_attr "length" "4")
3832 (set_attr "predicable" "yes")
3833 (set_attr "type" "bfm")]
3837 ;; Division instructions
;; Hardware signed divide; alternatives cover 32-bit ISAs and ARMv8-M
;; Baseline ("v8mb").
3838 (define_insn "divsi3"
3839 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3840 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3841 (match_operand:SI 2 "s_register_operand" "r,r")))]
3846 [(set_attr "arch" "32,v8mb")
3847 (set_attr "predicable" "yes")
3848 (set_attr "type" "sdiv")]
;; Hardware unsigned divide, same alternatives as divsi3.
3851 (define_insn "udivsi3"
3852 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3853 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3854 (match_operand:SI 2 "s_register_operand" "r,r")))]
3859 [(set_attr "arch" "32,v8mb")
3860 (set_attr "predicable" "yes")
3861 (set_attr "type" "udiv")]
3865 ;; Unary arithmetic insns
;; Overflow-checking negate: compute 0 - op1 with flags, then branch to
;; operand 2 (the overflow label) if V is set (NE on CC_Vmode).
3867 (define_expand "negvsi3"
3868 [(match_operand:SI 0 "register_operand")
3869 (match_operand:SI 1 "register_operand")
3870 (match_operand 2 "")]
3873 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3874 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; DImode overflow-checking negate, using negdi2_compare for the flags.
3879 (define_expand "negvdi3"
3880 [(match_operand:DI 0 "s_register_operand")
3881 (match_operand:DI 1 "s_register_operand")
3882 (match_operand 2 "")]
3885 emit_insn (gen_negdi2_compare (operands[0], operands[1]))
3886 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; Flag-setting 64-bit negate: rsbs on the low word then rscs (ARM) or an
;; sbcs trick (Thumb-2, which has no rsc) on the high word.
3892 (define_insn "negdi2_compare"
3893 [(set (reg:CC CC_REGNUM)
3896 (match_operand:DI 1 "register_operand" "r,r")))
3897 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3898 (minus:DI (const_int 0) (match_dup 1)))]
3901 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3902 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3903 [(set_attr "conds" "set")
3904 (set_attr "arch" "a,t2")
3905 (set_attr "length" "8")
3906 (set_attr "type" "multiple")]
;; Standard SImode negate expander.
3909 (define_expand "negsi2"
3910 [(set (match_operand:SI 0 "s_register_operand")
3911 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; Negate as reverse-subtract from zero (rsb).
3916 (define_insn "*arm_negsi2"
3917 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3918 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3920 "rsb%?\\t%0, %1, #0"
3921 [(set_attr "predicable" "yes")
3922 (set_attr "predicable_short_it" "yes,no")
3923 (set_attr "arch" "t2,*")
3924 (set_attr "length" "4")
3925 (set_attr "type" "alu_imm")]
3928 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3929 ;; rather than (0 cmp reg). This gives the same results for unsigned
3930 ;; and equality compares which is what we mostly need here.
3931 (define_insn "negsi2_0compare"
3932 [(set (reg:CC_RSB CC_REGNUM)
3933 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3935 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3936 (neg:SI (match_dup 1)))]
3941 [(set_attr "conds" "set")
3942 (set_attr "arch" "t2,*")
3943 (set_attr "length" "2,*")
3944 (set_attr "type" "alus_imm")]
;; Negate with an incoming borrow (used in multi-word negation): ARM rsc
;; form vs. the Thumb-2 sbc + lsl #1 equivalent.
3947 (define_insn "negsi2_carryin"
3948 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3949 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3950 (match_operand:SI 2 "arm_borrow_operation" "")))]
3954 sbc\\t%0, %1, %1, lsl #1"
3955 [(set_attr "conds" "use")
3956 (set_attr "arch" "a,t2")
3957 (set_attr "type" "adc_imm,adc_reg")]
;; Floating-point negate expanders, gated on hardware FP support.
3960 (define_expand "negsf2"
3961 [(set (match_operand:SF 0 "s_register_operand")
3962 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3963 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; DFmode negate additionally requires double-precision VFP.
3967 (define_expand "negdf2"
3968 [(set (match_operand:DF 0 "s_register_operand")
3969 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3970 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3973 ;; abssi2 doesn't really clobber the condition codes if a different register
3974 ;; is being set. To keep things simple, assume during rtl manipulations that
3975 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2 expander: operand 2 is either a pure scratch or the CC register
;; depending on the target (selection code partially elided in this chunk).
3978 (define_expand "abssi2"
3980 [(set (match_operand:SI 0 "s_register_operand")
3981 (abs:SI (match_operand:SI 1 "s_register_operand")))
3982 (clobber (match_dup 2))])]
3986 operands[2] = gen_rtx_SCRATCH (SImode);
3988 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs(x), split after reload.  Tied-register alternative emits
;; cmp/rsblt; the distinct-register alternative emits the branch-free
;; eor + sub with asr #31 sequence (see the inline comments below).
3991 (define_insn_and_split "*arm_abssi2"
3992 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3993 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3994 (clobber (reg:CC CC_REGNUM))]
3997 "&& reload_completed"
4000 /* if (which_alternative == 0) */
4001 if (REGNO(operands[0]) == REGNO(operands[1]))
4003 /* Emit the pattern:
4004 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4005 [(set (reg:CC CC_REGNUM)
4006 (compare:CC (match_dup 0) (const_int 0)))
4007 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4008 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4010 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4011 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4012 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4013 (gen_rtx_LT (SImode,
4014 gen_rtx_REG (CCmode, CC_REGNUM),
4016 (gen_rtx_SET (operands[0],
4017 (gen_rtx_MINUS (SImode,
4024 /* Emit the pattern:
4025 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4027 (xor:SI (match_dup 1)
4028 (ashiftrt:SI (match_dup 1) (const_int 31))))
4030 (minus:SI (match_dup 0)
4031 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4033 emit_insn (gen_rtx_SET (operands[0],
4034 gen_rtx_XOR (SImode,
4035 gen_rtx_ASHIFTRT (SImode,
4039 emit_insn (gen_rtx_SET (operands[0],
4040 gen_rtx_MINUS (SImode,
4042 gen_rtx_ASHIFTRT (SImode,
4048 [(set_attr "conds" "clob,*")
4049 (set_attr "shift" "1")
4050 (set_attr "predicable" "no, yes")
4051 (set_attr "length" "8")
4052 (set_attr "type" "multiple")]
;; -abs(x): mirror of *arm_abssi2 — cmp/rsbgt when the registers are
;; tied, otherwise eor + rsb with asr #31.
4055 (define_insn_and_split "*arm_neg_abssi2"
4056 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4057 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4058 (clobber (reg:CC CC_REGNUM))]
4061 "&& reload_completed"
4064 /* if (which_alternative == 0) */
4065 if (REGNO (operands[0]) == REGNO (operands[1]))
4067 /* Emit the pattern:
4068 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4070 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4071 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4072 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4074 gen_rtx_REG (CCmode, CC_REGNUM),
4076 gen_rtx_SET (operands[0],
4077 (gen_rtx_MINUS (SImode,
4083 /* Emit the pattern:
4084 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4086 emit_insn (gen_rtx_SET (operands[0],
4087 gen_rtx_XOR (SImode,
4088 gen_rtx_ASHIFTRT (SImode,
4092 emit_insn (gen_rtx_SET (operands[0],
4093 gen_rtx_MINUS (SImode,
4094 gen_rtx_ASHIFTRT (SImode,
4101 [(set_attr "conds" "clob,*")
4102 (set_attr "shift" "1")
4103 (set_attr "predicable" "no, yes")
4104 (set_attr "length" "8")
4105 (set_attr "type" "multiple")]
;; Floating-point abs/sqrt expanders, gated on hardware FP (and on
;; double-precision support for the DFmode variants).
4108 (define_expand "abssf2"
4109 [(set (match_operand:SF 0 "s_register_operand")
4110 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4111 "TARGET_32BIT && TARGET_HARD_FLOAT"
4114 (define_expand "absdf2"
4115 [(set (match_operand:DF 0 "s_register_operand")
4116 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4117 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4120 (define_expand "sqrtsf2"
4121 [(set (match_operand:SF 0 "s_register_operand")
4122 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4123 "TARGET_32BIT && TARGET_HARD_FLOAT"
4126 (define_expand "sqrtdf2"
4127 [(set (match_operand:DF 0 "s_register_operand")
4128 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4129 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; Bitwise NOT expander and its ARM implementation (mvn).
4132 (define_expand "one_cmplsi2"
4133 [(set (match_operand:SI 0 "s_register_operand")
4134 (not:SI (match_operand:SI 1 "s_register_operand")))]
4139 (define_insn "*arm_one_cmplsi2"
4140 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4141 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4144 [(set_attr "predicable" "yes")
4145 (set_attr "predicable_short_it" "yes,no")
4146 (set_attr "arch" "t2,*")
4147 (set_attr "length" "4")
4148 (set_attr "type" "mvn_reg")]
;; Flag-setting NOT (mvns), keeping the result.
4151 (define_insn "*notsi_compare0"
4152 [(set (reg:CC_NOOV CC_REGNUM)
4153 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4155 (set (match_operand:SI 0 "s_register_operand" "=r")
4156 (not:SI (match_dup 1)))]
4159 [(set_attr "conds" "set")
4160 (set_attr "type" "mvn_reg")]
;; Flags-only NOT; the result register is a scratch.
4163 (define_insn "*notsi_compare0_scratch"
4164 [(set (reg:CC_NOOV CC_REGNUM)
4165 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4167 (clobber (match_scratch:SI 0 "=r"))]
4170 [(set_attr "conds" "set")
4171 (set_attr "type" "mvn_reg")]
4174 ;; Fixed <--> Floating conversion insns
;; int -> HFmode has no direct path: convert to SFmode first, then narrow
;; to HFmode.
4176 (define_expand "floatsihf2"
4177 [(set (match_operand:HF 0 "general_operand")
4178 (float:HF (match_operand:SI 1 "general_operand")))]
4182 rtx op1 = gen_reg_rtx (SFmode);
4183 expand_float (op1, operands[1], 0);
4184 op1 = convert_to_mode (HFmode, op1, 0);
4185 emit_move_insn (operands[0], op1);
;; DImode -> HFmode, same two-step route through SFmode.
4190 (define_expand "floatdihf2"
4191 [(set (match_operand:HF 0 "general_operand")
4192 (float:HF (match_operand:DI 1 "general_operand")))]
4196 rtx op1 = gen_reg_rtx (SFmode);
4197 expand_float (op1, operands[1], 0);
4198 op1 = convert_to_mode (HFmode, op1, 0);
4199 emit_move_insn (operands[0], op1);
;; int -> float/double, hardware FP only.
4204 (define_expand "floatsisf2"
4205 [(set (match_operand:SF 0 "s_register_operand")
4206 (float:SF (match_operand:SI 1 "s_register_operand")))]
4207 "TARGET_32BIT && TARGET_HARD_FLOAT"
4211 (define_expand "floatsidf2"
4212 [(set (match_operand:DF 0 "s_register_operand")
4213 (float:DF (match_operand:SI 1 "s_register_operand")))]
4214 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HFmode -> int truncations widen to SFmode first, then use expand_fix.
4218 (define_expand "fix_trunchfsi2"
4219 [(set (match_operand:SI 0 "general_operand")
4220 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4224 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4225 expand_fix (operands[0], op1, 0);
4230 (define_expand "fix_trunchfdi2"
4231 [(set (match_operand:DI 0 "general_operand")
4232 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4236 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4237 expand_fix (operands[0], op1, 0);
;; float/double -> int truncations, hardware FP only.
4242 (define_expand "fix_truncsfsi2"
4243 [(set (match_operand:SI 0 "s_register_operand")
4244 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4245 "TARGET_32BIT && TARGET_HARD_FLOAT"
4249 (define_expand "fix_truncdfsi2"
4250 [(set (match_operand:SI 0 "s_register_operand")
4251 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4252 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; double -> float narrowing, requires double-precision hardware.
4258 (define_expand "truncdfsf2"
4259 [(set (match_operand:SF 0 "s_register_operand")
4261 (match_operand:DF 1 "s_register_operand")))]
4262 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4266 ;; DFmode to HFmode conversions on targets without a single-step hardware
4267 ;; instruction for it would have to go through SFmode. This is dangerous
4268 ;; as it introduces double rounding.
4270 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4271 ;; a single-step instruction.
4273 (define_expand "truncdfhf2"
4274 [(set (match_operand:HF 0 "s_register_operand")
4276 (match_operand:DF 1 "s_register_operand")))]
4277 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4278 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4280 /* We don't have a direct instruction for this, so we must be in
4281 an unsafe math mode, and going via SFmode. */
4283 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4286 op1 = convert_to_mode (SFmode, operands[1], 0);
4287 op1 = convert_to_mode (HFmode, op1, 0);
4288 emit_move_insn (operands[0], op1);
4291 /* Otherwise, we will pick this up as a single instruction with
4292 no intermediary rounding. */
4296 ;; Zero and sign extension instructions.
;; Zero extend QI/HI/SI to DImode: extend into the low word (skipped when
;; the source is already SImode) and clear the high word.  Pseudos are
;; used when available so the halves can be allocated independently.
4298 (define_expand "zero_extend<mode>di2"
4299 [(set (match_operand:DI 0 "s_register_operand" "")
4300 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4301 "TARGET_32BIT <qhs_zextenddi_cond>"
4303 rtx res_lo, res_hi, op0_lo, op0_hi;
4304 res_lo = gen_lowpart (SImode, operands[0]);
4305 res_hi = gen_highpart (SImode, operands[0]);
4306 if (can_create_pseudo_p ())
4308 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4309 op0_hi = gen_reg_rtx (SImode);
4313 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4316 if (<MODE>mode != SImode)
4317 emit_insn (gen_rtx_SET (op0_lo,
4318 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4319 emit_insn (gen_movsi (op0_hi, const0_rtx));
4320 if (res_lo != op0_lo)
4321 emit_move_insn (res_lo, op0_lo);
4322 if (res_hi != op0_hi)
4323 emit_move_insn (res_hi, op0_hi);
;; Sign extend to DImode: as above, but the high word is the low word
;; arithmetic-shifted right by 31 (sign replication).
4328 (define_expand "extend<mode>di2"
4329 [(set (match_operand:DI 0 "s_register_operand" "")
4330 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4331 "TARGET_32BIT <qhs_sextenddi_cond>"
4333 rtx res_lo, res_hi, op0_lo, op0_hi;
4334 res_lo = gen_lowpart (SImode, operands[0]);
4335 res_hi = gen_highpart (SImode, operands[0]);
4336 if (can_create_pseudo_p ())
4338 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4339 op0_hi = gen_reg_rtx (SImode);
4343 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4346 if (<MODE>mode != SImode)
4347 emit_insn (gen_rtx_SET (op0_lo,
4348 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4349 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4350 if (res_lo != op0_lo)
4351 emit_move_insn (res_lo, op0_lo);
4352 if (res_hi != op0_hi)
4353 emit_move_insn (res_hi, op0_hi);
4358 ;; Splits for all extensions to DImode
;; Split a DImode zero-extend into a low-word extend/move plus a clear of
;; the high word.
4360 [(set (match_operand:DI 0 "s_register_operand" "")
4361 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4363 [(set (match_dup 0) (match_dup 1))]
4365 rtx lo_part = gen_lowpart (SImode, operands[0]);
4366 machine_mode src_mode = GET_MODE (operands[1]);
4368 if (src_mode == SImode)
4369 emit_move_insn (lo_part, operands[1]);
4371 emit_insn (gen_rtx_SET (lo_part,
4372 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4373 operands[0] = gen_highpart (SImode, operands[0]);
4374 operands[1] = const0_rtx;
;; Split a DImode sign-extend: low word as above, high word produced by
;; asr #31 of the low word.
4378 [(set (match_operand:DI 0 "s_register_operand" "")
4379 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4381 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4383 rtx lo_part = gen_lowpart (SImode, operands[0]);
4384 machine_mode src_mode = GET_MODE (operands[1]);
4386 if (src_mode == SImode)
4387 emit_move_insn (lo_part, operands[1]);
4389 emit_insn (gen_rtx_SET (lo_part,
4390 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4391 operands[1] = lo_part;
4392 operands[0] = gen_highpart (SImode, operands[0]);
4395 (define_expand "zero_extendhisi2"
4396 [(set (match_operand:SI 0 "s_register_operand")
4397 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4400 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4402 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4405 if (!arm_arch6 && !MEM_P (operands[1]))
4407 rtx t = gen_lowpart (SImode, operands[1]);
4408 rtx tmp = gen_reg_rtx (SImode);
4409 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4410 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4416 [(set (match_operand:SI 0 "s_register_operand" "")
4417 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4418 "!TARGET_THUMB2 && !arm_arch6"
4419 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4420 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4422 operands[2] = gen_lowpart (SImode, operands[1]);
4425 (define_insn "*arm_zero_extendhisi2"
4426 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4427 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4428 "TARGET_ARM && arm_arch4 && !arm_arch6"
4432 [(set_attr "type" "alu_shift_reg,load_byte")
4433 (set_attr "predicable" "yes")]
4436 (define_insn "*arm_zero_extendhisi2_v6"
4437 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4438 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4439 "TARGET_ARM && arm_arch6"
4443 [(set_attr "predicable" "yes")
4444 (set_attr "type" "extend,load_byte")]
4447 (define_insn "*arm_zero_extendhisi2addsi"
4448 [(set (match_operand:SI 0 "s_register_operand" "=r")
4449 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4450 (match_operand:SI 2 "s_register_operand" "r")))]
4452 "uxtah%?\\t%0, %2, %1"
4453 [(set_attr "type" "alu_shift_reg")
4454 (set_attr "predicable" "yes")]
;; zero_extendqisi2: QI -> SI zero extension.  For a register source the
;; cheap form is AND with 255 (pre-v6 ARM); the shift-pair fallback
;; (lsl #24 / lsr #24) is the generic non-v6 path.
4457 (define_expand "zero_extendqisi2"
4458 [(set (match_operand:SI 0 "s_register_operand")
4459 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4462 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4464 emit_insn (gen_andsi3 (operands[0],
4465 gen_lowpart (SImode, operands[1]),
4469 if (!arm_arch6 && !MEM_P (operands[1]))
4471 rtx t = gen_lowpart (SImode, operands[1]);
4472 rtx tmp = gen_reg_rtx (SImode);
;; lsl #24 then lsr #24 clears the upper three bytes.
4473 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4474 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Splitter producing the shift-pair form for a register source.
4480 [(set (match_operand:SI 0 "s_register_operand" "")
4481 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4483 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4484 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4486 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; Alternative expansion path: mask with 255 instead of shifting.
4489 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; *arm_zero_extendqisi2: pre-v6; register alternative is a two-insn
;; sequence (length 8), memory alternative is a single LDRB (length 4).
4494 (define_insn "*arm_zero_extendqisi2"
4495 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4496 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4497 "TARGET_ARM && !arm_arch6"
4500 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4501 [(set_attr "length" "8,4")
4502 (set_attr "type" "alu_shift_reg,load_byte")
4503 (set_attr "predicable" "yes")]
;; ARMv6+ variant: UXTB for registers (template partly missing here),
;; LDRB for memory.
4506 (define_insn "*arm_zero_extendqisi2_v6"
4507 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4508 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4509 "TARGET_ARM && arm_arch6"
4512 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4513 [(set_attr "type" "extend,load_byte")
4514 (set_attr "predicable" "yes")]
;; Fused zero-extend-and-add using UXTAB: %0 = %2 + zero_extend(%1).
4517 (define_insn "*arm_zero_extendqisi2addsi"
4518 [(set (match_operand:SI 0 "s_register_operand" "=r")
4519 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4520 (match_operand:SI 2 "s_register_operand" "r")))]
4522 "uxtab%?\\t%0, %2, %1"
4523 [(set_attr "predicable" "yes")
4524 (set_attr "type" "alu_shift_reg")]
;; Split zero_extend of the low byte of an SImode value (subreg byte 0 on
;; little-endian, byte 3 on big-endian) into a move plus AND #255, using a
;; clobbered scratch register.
4528 [(set (match_operand:SI 0 "s_register_operand" "")
4529 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4530 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4531 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4532 [(set (match_dup 2) (match_dup 1))
4533 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Big-endian twin of the split above: the low byte is subreg offset 3.
4538 [(set (match_operand:SI 0 "s_register_operand" "")
4539 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4540 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4541 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4542 [(set (match_dup 2) (match_dup 1))
4543 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split (ior/xor (and (ashift r1 n) mask) (zero_extend lowpart-of r4))
;; when the mask equals the sub-mode's mask shifted into place, into the
;; ior/xor of the shift followed by a zero_extend of the result's lowpart.
4549 [(set (match_operand:SI 0 "s_register_operand" "")
4550 (IOR_XOR:SI (and:SI (ashift:SI
4551 (match_operand:SI 1 "s_register_operand" "")
4552 (match_operand:SI 2 "const_int_operand" ""))
4553 (match_operand:SI 3 "const_int_operand" ""))
4555 (match_operator 5 "subreg_lowpart_operator"
4556 [(match_operand:SI 4 "s_register_operand" "")]))))]
4558 && (UINTVAL (operands[3])
4559 == (GET_MODE_MASK (GET_MODE (operands[5]))
4560 & (GET_MODE_MASK (GET_MODE (operands[5]))
4561 << (INTVAL (operands[2])))))"
4562 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4564 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4565 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag
;; (CC_Z mode); template not visible in this fragment.
4568 (define_insn "*compareqi_eq0"
4569 [(set (reg:CC_Z CC_REGNUM)
4570 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4574 [(set_attr "conds" "set")
4575 (set_attr "predicable" "yes")
4576 (set_attr "type" "logic_imm")]
;; extendhisi2: HI -> SI sign extension.  Thumb-1 has its own expander;
;; pre-ARMv4 ARM has no LDRSH, so memory sources go through
;; extendhisi2_mem; without ARMv6 (no SXTH) a register source uses
;; lsl #16 / asr #16.
4579 (define_expand "extendhisi2"
4580 [(set (match_operand:SI 0 "s_register_operand")
4581 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4586 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4589 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4591 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4595 if (!arm_arch6 && !MEM_P (operands[1]))
4597 rtx t = gen_lowpart (SImode, operands[1]);
4598 rtx tmp = gen_reg_rtx (SImode);
;; lsl #16 then asr #16 replicates the sign of bit 15.
4599 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4600 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; Splitter (with scratch) producing the shift-pair form of HI -> SI
;; sign extension.
4607 [(set (match_operand:SI 0 "register_operand" "")
4608 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4609 (clobber (match_scratch:SI 2 ""))])]
4611 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4612 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4614 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; extendhisi2_mem: byte-at-a-time halfword sign-extending load for
;; targets without LDRSH.  Loads the two bytes separately, shifts the
;; byte holding the sign into bits 31..24, then combines with an
;; asr #16 and OR.  Byte roles swap with endianness (operands 4/5).
4617 ;; This pattern will only be used when ldsh is not available
4618 (define_expand "extendhisi2_mem"
4619 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4621 (zero_extend:SI (match_dup 7)))
4622 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4623 (set (match_operand:SI 0 "" "")
4624 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
;; Force the address into a register so adjacent byte addresses can be
;; formed without creating unrecognizable reg+large_const RTL.
4629 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4631 mem1 = change_address (operands[1], QImode, addr);
4632 mem2 = change_address (operands[1], QImode,
4633 plus_constant (Pmode, addr, 1));
4634 operands[0] = gen_lowpart (SImode, operands[0]);
4636 operands[2] = gen_reg_rtx (SImode);
4637 operands[3] = gen_reg_rtx (SImode);
4638 operands[6] = gen_reg_rtx (SImode);
;; On big-endian the first byte (lower address) is the sign byte.
4641 if (BYTES_BIG_ENDIAN)
4643 operands[4] = operands[2];
4644 operands[5] = operands[3];
4648 operands[4] = operands[3];
4649 operands[5] = operands[2];
;; Plain splitter (no scratch) for the same shift-pair sign extension.
4655 [(set (match_operand:SI 0 "register_operand" "")
4656 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4658 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4659 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4661 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARMv4..v5 insn: register alternative is a two-insn sequence
;; (length 8), memory alternative is a single sign-extending load.
4664 (define_insn "*arm_extendhisi2"
4665 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4666 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4667 "TARGET_ARM && arm_arch4 && !arm_arch6"
4671 [(set_attr "length" "8,4")
4672 (set_attr "type" "alu_shift_reg,load_byte")
4673 (set_attr "predicable" "yes")]
;; ARMv6+ variant (SXTH for registers; template not visible here).
4676 ;; ??? Check Thumb-2 pool range
4677 (define_insn "*arm_extendhisi2_v6"
4678 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4679 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4680 "TARGET_32BIT && arm_arch6"
4684 [(set_attr "type" "extend,load_byte")
4685 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add using SXTAH: %0 = %2 + sign_extend(%1).
4688 (define_insn "*arm_extendhisi2addsi"
4689 [(set (match_operand:SI 0 "s_register_operand" "=r")
4690 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4691 (match_operand:SI 2 "s_register_operand" "r")))]
4693 "sxtah%?\\t%0, %2, %1"
4694 [(set_attr "type" "alu_shift_reg")]
;; extendqihi2: QI -> HI sign extension, modelled as an SImode shift pair
;; (the shift amounts are on lines not visible in this fragment).  With
;; ARMv4 a memory source becomes a direct sign-extending load instead.
4697 (define_expand "extendqihi2"
4699 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4701 (set (match_operand:HI 0 "s_register_operand")
4702 (ashiftrt:SI (match_dup 2)
4707 if (arm_arch4 && MEM_P (operands[1]))
4709 emit_insn (gen_rtx_SET (operands[0],
4710 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
;; Otherwise force the source into a register and rewrite operands as
;; SImode so the shift-pair pattern applies.
4713 if (!s_register_operand (operands[1], QImode))
4714 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4715 operands[0] = gen_lowpart (SImode, operands[0]);
4716 operands[1] = gen_lowpart (SImode, operands[1]);
4717 operands[2] = gen_reg_rtx (SImode);
;; LDRSB-based QI -> HI sign-extending load (ARMv4+); Uq constrains the
;; address to those valid for LDRSB.
4721 (define_insn "*arm_extendqihi_insn"
4722 [(set (match_operand:HI 0 "s_register_operand" "=r")
4723 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4724 "TARGET_ARM && arm_arch4"
4726 [(set_attr "type" "load_byte")
4727 (set_attr "predicable" "yes")]
;; extendqisi2: QI -> SI sign extension.  Pre-ARMv4 cannot LDRSB, so a
;; memory source is first copied to a register; without ARMv6 (no SXTB)
;; a register source uses lsl #24 / asr #24.
4730 (define_expand "extendqisi2"
4731 [(set (match_operand:SI 0 "s_register_operand")
4732 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4735 if (!arm_arch4 && MEM_P (operands[1]))
4736 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4738 if (!arm_arch6 && !MEM_P (operands[1]))
4740 rtx t = gen_lowpart (SImode, operands[1]);
4741 rtx tmp = gen_reg_rtx (SImode);
;; lsl #24 then asr #24 replicates the sign of bit 7.
4742 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4743 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; Splitter producing the shift-pair form of QI -> SI sign extension.
4749 [(set (match_operand:SI 0 "register_operand" "")
4750 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4752 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4753 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4755 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARMv4..v5 insn: two-insn register sequence (length 8) or LDRSB.
4758 (define_insn "*arm_extendqisi"
4759 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4760 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4761 "TARGET_ARM && arm_arch4 && !arm_arch6"
4765 [(set_attr "length" "8,4")
4766 (set_attr "type" "alu_shift_reg,load_byte")
4767 (set_attr "predicable" "yes")]
;; ARMv6+ variant (SXTB for registers; template not visible here).
4770 (define_insn "*arm_extendqisi_v6"
4771 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4773 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4774 "TARGET_ARM && arm_arch6"
4778 [(set_attr "type" "extend,load_byte")
4779 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add using SXTAB: %0 = %2 + sign_extend(%1).
4782 (define_insn "*arm_extendqisi2addsi"
4783 [(set (match_operand:SI 0 "s_register_operand" "=r")
4784 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4785 (match_operand:SI 2 "s_register_operand" "r")))]
4787 "sxtab%?\\t%0, %2, %1"
4788 [(set_attr "type" "alu_shift_reg")
4789 (set_attr "predicable" "yes")]
;; SXTB16/UXTB16: sign/zero-extend two bytes to two halfwords in parallel.
;; <sup> is an iterator substitution selecting the s/u variant (iterator
;; definition not visible in this fragment).
4792 (define_insn "arm_<sup>xtb16"
4793 [(set (match_operand:SI 0 "s_register_operand" "=r")
4795 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4797 "<sup>xtb16%?\\t%0, %1"
4798 [(set_attr "predicable" "yes")
4799 (set_attr "type" "alu_dsp_reg")])
;; Generic two-operand SIMD32 DSP instructions that do not set the GE
;; flags; <simd32_op> is filled in from the SIMD32_NOGE_BINOP iterator.
4801 (define_insn "arm_<simd32_op>"
4802 [(set (match_operand:SI 0 "s_register_operand" "=r")
4804 [(match_operand:SI 1 "s_register_operand" "r")
4805 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4807 "<simd32_op>%?\\t%0, %1, %2"
4808 [(set_attr "predicable" "yes")
4809 (set_attr "type" "alu_dsp_reg")])
;; USADA8: sum of absolute differences of four byte pairs, accumulated
;; into operand 3.
4811 (define_insn "arm_usada8"
4812 [(set (match_operand:SI 0 "s_register_operand" "=r")
4814 [(match_operand:SI 1 "s_register_operand" "r")
4815 (match_operand:SI 2 "s_register_operand" "r")
4816 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4818 "usada8%?\\t%0, %1, %2, %3"
4819 [(set_attr "predicable" "yes")
4820 (set_attr "type" "alu_dsp_reg")])
;; SIMD32 ops producing a DImode result accumulated in-place (constraint
;; "0" ties operand 3 to the output); %Q0/%R0 name the low/high words.
4822 (define_insn "arm_<simd32_op>"
4823 [(set (match_operand:DI 0 "s_register_operand" "=r")
4825 [(match_operand:SI 1 "s_register_operand" "r")
4826 (match_operand:SI 2 "s_register_operand" "r")
4827 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4829 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4830 [(set_attr "predicable" "yes")
4831 (set_attr "type" "smlald")])
;; SF -> DF extension; only when double-precision VFP is available.
4833 (define_expand "extendsfdf2"
4834 [(set (match_operand:DF 0 "s_register_operand")
4835 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4836 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4840 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4841 ;; must go through SFmode.
4843 ;; This is always safe for an extend.
4845 (define_expand "extendhfdf2"
4846 [(set (match_operand:DF 0 "s_register_operand")
4847 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4850 /* We don't have a direct instruction for this, so go via SFmode. */
4851 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4854 op1 = convert_to_mode (SFmode, operands[1], 0);
4855 op1 = convert_to_mode (DFmode, op1, 0);
4856 emit_insn (gen_movdf (operands[0], op1));
4859 /* Otherwise, we're done producing RTL and will pick up the correct
4860 pattern to do this with one rounding-step in a single instruction. */
4864 ;; Move insns (including loads and stores)
4866 ;; XXX Just some ideas about movti.
4867 ;; I don't think these are a good idea on the arm, there just aren't enough
4869 ;;(define_expand "loadti"
4870 ;; [(set (match_operand:TI 0 "s_register_operand")
4871 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4874 ;;(define_expand "storeti"
4875 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4876 ;; (match_operand:TI 1 "s_register_operand"))]
4879 ;;(define_expand "movti"
4880 ;; [(set (match_operand:TI 0 "general_operand")
4881 ;; (match_operand:TI 1 "general_operand"))]
4887 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4888 ;; operands[1] = copy_to_reg (operands[1]);
4889 ;; if (MEM_P (operands[0]))
4890 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4891 ;; else if (MEM_P (operands[1]))
4892 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4896 ;; emit_insn (insn);
4900 ;; Recognize garbage generated above.
4903 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4904 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4908 ;; register mem = (which_alternative < 3);
4909 ;; register const char *template;
4911 ;; operands[mem] = XEXP (operands[mem], 0);
4912 ;; switch (which_alternative)
4914 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4915 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4916 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4917 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4918 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4919 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4921 ;; output_asm_insn (template, operands);
;; movdi: DImode moves.  The expander legalizes operands and works around
;; LDRD/STRD register-pair restrictions in ARM state by splitting into two
;; SImode moves (via a temporary for volatile memory so the access is not
;; torn into an illegal pair).
4925 (define_expand "movdi"
4926 [(set (match_operand:DI 0 "general_operand")
4927 (match_operand:DI 1 "general_operand"))]
4930 gcc_checking_assert (aligned_operand (operands[0], DImode));
4931 gcc_checking_assert (aligned_operand (operands[1], DImode));
4932 if (can_create_pseudo_p ())
4934 if (!REG_P (operands[0]))
4935 operands[1] = force_reg (DImode, operands[1]);
;; Destination is a hard register pair that cannot hold DImode
;; (e.g. odd-numbered pair in ARM state).
4937 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4938 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4940 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4941 when expanding function calls. */
4942 gcc_assert (can_create_pseudo_p ());
4943 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4945 /* Perform load into legal reg pair first, then move. */
4946 rtx reg = gen_reg_rtx (DImode);
4947 emit_insn (gen_movdi (reg, operands[1]));
;; Non-volatile case: move word-by-word directly.
4950 emit_move_insn (gen_lowpart (SImode, operands[0]),
4951 gen_lowpart (SImode, operands[1]));
4952 emit_move_insn (gen_highpart (SImode, operands[0]),
4953 gen_highpart (SImode, operands[1]));
;; Mirror case: the SOURCE is an illegal hard register pair.
4956 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4957 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4959 /* Avoid STRD's from an odd-numbered register pair in ARM state
4960 when expanding function prologue. */
4961 gcc_assert (can_create_pseudo_p ());
;; Volatile destination: build the value in a temporary first so the
;; final store is a single DImode move.
4962 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4963 ? gen_reg_rtx (DImode)
4965 emit_move_insn (gen_lowpart (SImode, split_dest),
4966 gen_lowpart (SImode, operands[1]));
4967 emit_move_insn (gen_highpart (SImode, split_dest),
4968 gen_highpart (SImode, operands[1]));
4969 if (split_dest != operands[0])
4970 emit_insn (gen_movdi (operands[0], split_dest));
;; *arm_movdi: soft-float DImode move insn; one operand must be a
;; register.  Constants that can't be loaded directly are split to
;; MOV/MOVT when the literal pool is disabled.
4976 (define_insn "*arm_movdi"
4977 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4978 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4980 && !(TARGET_HARD_FLOAT)
4982 && ( register_operand (operands[0], DImode)
4983 || register_operand (operands[1], DImode))"
4985 switch (which_alternative)
4992 /* Cannot load it directly, split to load it via MOV / MOVT. */
4993 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4997 return output_move_double (operands, true, NULL);
5000 [(set_attr "length" "8,12,16,8,8")
5001 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5002 (set_attr "arm_pool_range" "*,*,*,1020,*")
5003 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5004 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5005 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split a 64-bit (ANY64) immediate move into per-word constant synthesis
;; via arm_split_constant, when the literal pool is disabled or inlining
;; the constant is cheap enough.
5009 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5010 (match_operand:ANY64 1 "immediate_operand" ""))]
5013 && (arm_disable_literal_pool
5014 || (arm_const_double_inline_cost (operands[1])
5015 <= arm_max_const_double_inline_cost ()))"
5018 arm_split_constant (SET, SImode, curr_insn,
5019 INTVAL (gen_lowpart (SImode, operands[1])),
5020 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5021 arm_split_constant (SET, SImode, curr_insn,
5022 INTVAL (gen_highpart_mode (SImode,
5023 GET_MODE (operands[0]),
5025 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5030 ; If optimizing for size, or if we have load delay slots, then
5031 ; we want to split the constant into two separate operations.
5032 ; In both cases this may split a trivial part into a single data op
5033 ; leaving a single complex constant to load. We can also get longer
5034 ; offsets in a LDR which means we get better chances of sharing the pool
5035 ; entries. Finally, we can normally do a better job of scheduling
5036 ; LDR instructions than we can with LDM.
5037 ; This pattern will only match if the one above did not.
;; Fallback split: break a const_double move into two SImode moves
;; (low word, then high word).
5039 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5040 (match_operand:ANY64 1 "const_double_operand" ""))]
5041 "TARGET_ARM && reload_completed
5042 && arm_const_double_by_parts (operands[1])"
5043 [(set (match_dup 0) (match_dup 1))
5044 (set (match_dup 2) (match_dup 3))]
5046 operands[2] = gen_highpart (SImode, operands[0]);
5047 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5049 operands[0] = gen_lowpart (SImode, operands[0]);
5050 operands[1] = gen_lowpart (SImode, operands[1]);
;; Split a 64-bit register-to-register move into two SImode moves,
;; swapping the order when the first move would clobber the second's
;; source (partial overlap of the register pairs).
5055 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5056 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5057 "TARGET_EITHER && reload_completed"
5058 [(set (match_dup 0) (match_dup 1))
5059 (set (match_dup 2) (match_dup 3))]
5061 operands[2] = gen_highpart (SImode, operands[0]);
5062 operands[3] = gen_highpart (SImode, operands[1]);
5063 operands[0] = gen_lowpart (SImode, operands[0]);
5064 operands[1] = gen_lowpart (SImode, operands[1]);
5066 /* Handle a partial overlap. */
5067 if (rtx_equal_p (operands[0], operands[3]))
5069 rtx tmp0 = operands[0];
5070 rtx tmp1 = operands[1];
5072 operands[0] = operands[2];
5073 operands[1] = operands[3];
;; Base+index DImode loads can't be done when the destination overlaps
;; both address registers; split so the address is computed into the
;; destination's first register, then loaded through it.
5080 ;; We can't actually do base+index doubleword loads if the index and
5081 ;; destination overlap. Split here so that we at least have chance to
5084 [(set (match_operand:DI 0 "s_register_operand" "")
5085 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5086 (match_operand:SI 2 "s_register_operand" ""))))]
5088 && reg_overlap_mentioned_p (operands[0], operands[1])
5089 && reg_overlap_mentioned_p (operands[0], operands[2])"
5091 (plus:SI (match_dup 1)
5094 (mem:DI (match_dup 4)))]
5096 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
;; movsi: SImode moves.  Legalizes mem=mem and mem=const; synthesizes
;; awkward constants with arm_split_constant; handles TLS and PIC
;; references on the no-MOVT path.
5100 (define_expand "movsi"
5101 [(set (match_operand:SI 0 "general_operand")
5102 (match_operand:SI 1 "general_operand"))]
5106 rtx base, offset, tmp;
5108 gcc_checking_assert (aligned_operand (operands[0], SImode));
5109 gcc_checking_assert (aligned_operand (operands[1], SImode));
5110 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5112 /* Everything except mem = const or mem = mem can be done easily. */
5113 if (MEM_P (operands[0]))
5114 operands[1] = force_reg (SImode, operands[1]);
;; Constant that fits neither MOV nor MVN immediate encoding:
;; either keep it for a later split or synthesize it now.
5115 if (arm_general_register_operand (operands[0], SImode)
5116 && CONST_INT_P (operands[1])
5117 && !(const_ok_for_arm (INTVAL (operands[1]))
5118 || const_ok_for_arm (~INTVAL (operands[1]))))
5120 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5122 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5127 arm_split_constant (SET, SImode, NULL_RTX,
5128 INTVAL (operands[1]), operands[0], NULL_RTX,
5129 optimize && can_create_pseudo_p ());
5134 else /* Target doesn't have MOVT... */
5136 if (can_create_pseudo_p ())
5138 if (!REG_P (operands[0]))
5139 operands[1] = force_reg (SImode, operands[1]);
;; symbol+offset that cannot live in the constant pool: load the
;; base then add the offset separately.
5143 split_const (operands[1], &base, &offset);
5144 if (INTVAL (offset) != 0
5145 && targetm.cannot_force_const_mem (SImode, operands[1]))
5147 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5148 emit_move_insn (tmp, base);
5149 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5153 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5155 /* Recognize the case where operand[1] is a reference to thread-local
5156 data and load its address to a register. Offsets have been split off
5158 if (arm_tls_referenced_p (operands[1]))
5159 operands[1] = legitimize_tls_address (operands[1], tmp);
;; PIC: symbolic constants must go through the PIC address machinery.
5161 && (CONSTANT_P (operands[1])
5162 || symbol_mentioned_p (operands[1])
5163 || label_mentioned_p (operands[1])))
5165 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5170 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5171 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5172 ;; so this does not matter.
;; MOVT: writes the upper 16 bits of %0 from the symbolic operand,
;; keeping the low half ("0" ties operand 1 to the output).
5173 (define_insn "*arm_movt"
5174 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5175 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5176 (match_operand:SI 2 "general_operand" "i,i")))]
5177 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5179 movt%?\t%0, #:upper16:%c2
5180 movt\t%0, #:upper16:%c2"
5181 [(set_attr "arch" "32,v8mb")
5182 (set_attr "predicable" "yes")
5183 (set_attr "length" "4")
5184 (set_attr "type" "alu_sreg")]
;; Basic ARM-state SImode move insn: reg, MOV/MVN immediates, MOVW (j,
;; v6t2 only), load and store alternatives.
5187 (define_insn "*arm_movsi_insn"
5188 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5189 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5190 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5191 && ( register_operand (operands[0], SImode)
5192 || register_operand (operands[1], SImode))"
5200 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5201 (set_attr "predicable" "yes")
5202 (set_attr "arch" "*,*,*,v6t2,*,*")
5203 (set_attr "pool_range" "*,*,*,*,4096,*")
5204 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split a constant that fits neither MOV nor MVN into a synthesized
;; sequence via arm_split_constant.
5208 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5209 (match_operand:SI 1 "const_int_operand" ""))]
5210 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5211 && (!(const_ok_for_arm (INTVAL (operands[1]))
5212 || const_ok_for_arm (~INTVAL (operands[1]))))"
5213 [(clobber (const_int 0))]
5215 arm_split_constant (SET, SImode, NULL_RTX,
5216 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5221 ;; A normal way to do (symbol + offset) requires three instructions at least
5222 ;; (depends on how big the offset is) as below:
5223 ;; movw r0, #:lower16:g
5224 ;; movt r0, #:upper16:g
5227 ;; A better way would be:
5228 ;; movw r0, #:lower16:g+4
5229 ;; movt r0, #:upper16:g+4
5231 ;; The limitation of this way is that the length of offset should be a 16-bit
5232 ;; signed value, because current assembler only supports REL type relocation for
5233 ;; such case. If the more powerful RELA type is supported in future, we should
5234 ;; update this pattern to go with better way.
;; Split (const (plus symbol offset)): fold the offset into the
;; relocation when it fits in 16 bits, otherwise emit the movw/movt pair
;; for the bare symbol followed by an explicit add.
5236 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5237 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5238 (match_operand:SI 2 "const_int_operand" ""))))]
5241 && arm_disable_literal_pool
5243 && GET_CODE (operands[1]) == SYMBOL_REF"
5244 [(clobber (const_int 0))]
5246 int offset = INTVAL (operands[2]);
;; Offset outside the signed-16-bit relocation range.
5248 if (offset < -0x8000 || offset > 0x7fff)
5250 arm_emit_movpair (operands[0], operands[1]);
5251 emit_insn (gen_rtx_SET (operands[0],
5252 gen_rtx_PLUS (SImode, operands[0], operands[2])));
;; Offset fits: bake it into the movw/movt relocation expression.
5256 rtx op = gen_rtx_CONST (SImode,
5257 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5258 arm_emit_movpair (operands[0], op);
5263 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5264 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5265 ;; and lo_sum would be merged back into memory load at cprop. However,
5266 ;; if the default is to prefer movt/movw rather than a load from the constant
5267 ;; pool, the performance is better.
5269 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5270 (match_operand:SI 1 "general_operand" ""))]
5271 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5272 && !target_word_relocations
5273 && !arm_tls_referenced_p (operands[1])"
5274 [(clobber (const_int 0))]
5276 arm_emit_movpair (operands[0], operands[1]);
5280 ;; When generating pic, we need to load the symbol offset into a register.
5281 ;; So that the optimizer does not confuse this with a normal symbol load
5282 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5283 ;; since that is the only type of relocation we can use.
5285 ;; Wrap calculation of the whole PIC address in a single pattern for the
5286 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5287 ;; a PIC address involves two loads from memory, so we want to CSE it
5288 ;; as often as possible.
5289 ;; This pattern will be split into one of the pic_load_addr_* patterns
5290 ;; and a move after GCSE optimizations.
5292 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
;; calculate_pic_address: dest = *(pic_reg + unspec(symbol-offset)).
5293 (define_expand "calculate_pic_address"
5294 [(set (match_operand:SI 0 "register_operand")
5295 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5296 (unspec:SI [(match_operand:SI 2 "" "")]
5301 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5303 [(set (match_operand:SI 0 "register_operand" "")
5304 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5305 (unspec:SI [(match_operand:SI 2 "" "")]
5308 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5309 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
;; Reuse the destination as scratch when no pseudos can be created.
5310 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5313 ;; operand1 is the memory address to go into
5314 ;; pic_load_addr_32bit.
5315 ;; operand2 is the PIC label to be emitted
5316 ;; from pic_add_dot_plus_eight.
5317 ;; We do this to allow hoisting of the entire insn.
;; Combined PIC-address load, split after reload into a pool load plus
;; the pc-relative add (+4 in Thumb, +8 in ARM, matching the pipeline's
;; pc offset).
5318 (define_insn_and_split "pic_load_addr_unified"
5319 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5320 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5321 (match_operand:SI 2 "" "")]
5322 UNSPEC_PIC_UNIFIED))]
5325 "&& reload_completed"
5326 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5327 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5328 (match_dup 2)] UNSPEC_PIC_BASE))]
5329 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5330 [(set_attr "type" "load_4,load_4,load_4")
5331 (set_attr "pool_range" "4096,4094,1022")
5332 (set_attr "neg_pool_range" "4084,0,0")
5333 (set_attr "arch" "a,t2,t1")
5334 (set_attr "length" "8,6,4")]
5337 ;; The rather odd constraints on the following are to force reload to leave
5338 ;; the insn alone, and to force the minipool generation pass to then move
5339 ;; the GOT symbol to memory.
;; Load the GOT-symbol pool entry (32-bit cores).
5341 (define_insn "pic_load_addr_32bit"
5342 [(set (match_operand:SI 0 "s_register_operand" "=r")
5343 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5344 "TARGET_32BIT && flag_pic"
5346 [(set_attr "type" "load_4")
;; Pool range differs between ARM and Thumb encodings of the load.
5347 (set (attr "pool_range")
5348 (if_then_else (eq_attr "is_thumb" "no")
5351 (set (attr "neg_pool_range")
5352 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: low registers only, smaller pool range.
5357 (define_insn "pic_load_addr_thumb1"
5358 [(set (match_operand:SI 0 "s_register_operand" "=l")
5359 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5360 "TARGET_THUMB1 && flag_pic"
5362 [(set_attr "type" "load_4")
5363 (set (attr "pool_range") (const_int 1018))]
;; pic_add_dot_plus_four: Thumb form; emit the LPICn label (numbered by
;; operand 2) then add pc to finish the pc-relative PIC base.
5366 (define_insn "pic_add_dot_plus_four"
5367 [(set (match_operand:SI 0 "register_operand" "=r")
5368 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5370 (match_operand 2 "" "")]
5374 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5375 INTVAL (operands[2]));
5376 return \"add\\t%0, %|pc\";
5378 [(set_attr "length" "2")
5379 (set_attr "type" "alu_sreg")]
;; ARM form: pc reads as ".+8" in ARM state, hence "plus_eight".
5382 (define_insn "pic_add_dot_plus_eight"
5383 [(set (match_operand:SI 0 "register_operand" "=r")
5384 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5386 (match_operand 2 "" "")]
5390 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5391 INTVAL (operands[2]));
5392 return \"add%?\\t%0, %|pc, %1\";
5394 [(set_attr "predicable" "yes")
5395 (set_attr "type" "alu_sreg")]
;; Fused form: pc-relative add folded into the load itself
;; (ldr %0, [pc, %1]), produced by the peephole below.
5398 (define_insn "tls_load_dot_plus_eight"
5399 [(set (match_operand:SI 0 "register_operand" "=r")
5400 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5402 (match_operand 2 "" "")]
5406 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5407 INTVAL (operands[2]));
5408 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5410 [(set_attr "predicable" "yes")
5411 (set_attr "type" "load_4")]
5414 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5415 ;; followed by a load. These sequences can be crunched down to
5416 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole2: merge add+load when the intermediate register dies.
5419 [(set (match_operand:SI 0 "register_operand" "")
5420 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5422 (match_operand 1 "" "")]
5424 (set (match_operand:SI 2 "arm_general_register_operand" "")
5425 (mem:SI (match_dup 0)))]
5426 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5428 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load through the GOT at pic_reg + offset.
5435 (define_insn "pic_offset_arm"
5436 [(set (match_operand:SI 0 "register_operand" "=r")
5437 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5438 (unspec:SI [(match_operand:SI 2 "" "X")]
5439 UNSPEC_PIC_OFFSET))))]
5440 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5441 "ldr%?\\t%0, [%1,%2]"
5442 [(set_attr "type" "load_4")]
;; Reload the PIC register after a longjmp, using r3 (bit 3 in the mask)
;; as scratch since set/longjmp clobber it anyway.
5445 (define_expand "builtin_setjmp_receiver"
5446 [(label_ref (match_operand 0 "" ""))]
5450 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5452 if (arm_pic_register != INVALID_REGNUM)
5453 arm_load_pic_register (1UL << 3, NULL_RTX);
5457 ;; If copying one reg to another we can set the condition codes according to
5458 ;; its value. Such a move is common after a return from subroutine and the
5459 ;; result is being tested against zero.
;; Move-and-compare-with-zero: SUBS %0, %1, #0 sets flags from the value
;; while performing the move.
5461 (define_insn "*movsi_compare0"
5462 [(set (reg:CC CC_REGNUM)
5463 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5465 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5470 subs%?\\t%0, %1, #0"
5471 [(set_attr "conds" "set")
5472 (set_attr "type" "alus_imm,alus_imm")]
5475 ;; Subroutine to store a half word from a register into memory.
5476 ;; Operand 0 is the source register (HImode)
5477 ;; Operand 1 is the destination address in a register (SImode)
5479 ;; In both this routine and the next, we must be careful not to spill
5480 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5481 ;; can generate unrecognizable rtl.
;; storehi (little-endian order): store the low byte at the address, then
;; shift the value right 8 and store the high byte at address+1.
5483 (define_expand "storehi"
5484 [;; store the low byte
5485 (set (match_operand 1 "" "") (match_dup 3))
5486 ;; extract the high byte
5488 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5489 ;; store the high byte
5490 (set (match_dup 4) (match_dup 5))]
5494 rtx op1 = operands[1];
5495 rtx addr = XEXP (op1, 0);
5496 enum rtx_code code = GET_CODE (addr);
;; Force complex addresses (non-constant-offset PLUS etc.) into a
;; register so that addr+1 stays recognizable.
5498 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5500 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5502 operands[4] = adjust_address (op1, QImode, 1);
5503 operands[1] = adjust_address (operands[1], QImode, 0);
5504 operands[3] = gen_lowpart (QImode, operands[0]);
5505 operands[0] = gen_lowpart (SImode, operands[0]);
5506 operands[2] = gen_reg_rtx (SImode);
5507 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian twin: byte store order is swapped relative to storehi.
5511 (define_expand "storehi_bigend"
5512 [(set (match_dup 4) (match_dup 3))
5514 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5515 (set (match_operand 1 "" "") (match_dup 5))]
5519 rtx op1 = operands[1];
5520 rtx addr = XEXP (op1, 0);
5521 enum rtx_code code = GET_CODE (addr);
5523 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5525 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5527 operands[4] = adjust_address (op1, QImode, 1);
5528 operands[1] = adjust_address (operands[1], QImode, 0);
5529 operands[3] = gen_lowpart (QImode, operands[0]);
5530 operands[0] = gen_lowpart (SImode, operands[0]);
5531 operands[2] = gen_reg_rtx (SImode);
5532 operands[5] = gen_lowpart (QImode, operands[2]);
5536 ;; Subroutine to store a half word integer constant into memory.
5537 (define_expand "storeinthi"
5538 [(set (match_operand 0 "" "")
5539 (match_operand 1 "" ""))
5540 (set (match_dup 3) (match_dup 2))]
5544 HOST_WIDE_INT value = INTVAL (operands[1]);
5545 rtx addr = XEXP (operands[0], 0);
5546 rtx op0 = operands[0];
5547 enum rtx_code code = GET_CODE (addr);
5549 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5551 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5553 operands[1] = gen_reg_rtx (SImode);
5554 if (BYTES_BIG_ENDIAN)
5556 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5557 if ((value & 255) == ((value >> 8) & 255))
5558 operands[2] = operands[1];
5561 operands[2] = gen_reg_rtx (SImode);
5562 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5567 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5568 if ((value & 255) == ((value >> 8) & 255))
5569 operands[2] = operands[1];
5572 operands[2] = gen_reg_rtx (SImode);
5573 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5577 operands[3] = adjust_address (op0, QImode, 1);
5578 operands[0] = adjust_address (operands[0], QImode, 0);
5579 operands[2] = gen_lowpart (QImode, operands[2]);
5580 operands[1] = gen_lowpart (QImode, operands[1]);
5584 (define_expand "storehi_single_op"
5585 [(set (match_operand:HI 0 "memory_operand")
5586 (match_operand:HI 1 "general_operand"))]
5587 "TARGET_32BIT && arm_arch4"
5589 if (!s_register_operand (operands[1], HImode))
5590 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5594 (define_expand "movhi"
5595 [(set (match_operand:HI 0 "general_operand")
5596 (match_operand:HI 1 "general_operand"))]
5599 gcc_checking_assert (aligned_operand (operands[0], HImode));
5600 gcc_checking_assert (aligned_operand (operands[1], HImode));
5603 if (can_create_pseudo_p ())
5605 if (MEM_P (operands[0]))
5609 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5612 if (CONST_INT_P (operands[1]))
5613 emit_insn (gen_storeinthi (operands[0], operands[1]));
5616 if (MEM_P (operands[1]))
5617 operands[1] = force_reg (HImode, operands[1]);
5618 if (BYTES_BIG_ENDIAN)
5619 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5621 emit_insn (gen_storehi (operands[1], operands[0]));
5625 /* Sign extend a constant, and keep it in an SImode reg. */
5626 else if (CONST_INT_P (operands[1]))
5628 rtx reg = gen_reg_rtx (SImode);
5629 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5631 /* If the constant is already valid, leave it alone. */
5632 if (!const_ok_for_arm (val))
5634 /* If setting all the top bits will make the constant
5635 loadable in a single instruction, then set them.
5636 Otherwise, sign extend the number. */
5638 if (const_ok_for_arm (~(val | ~0xffff)))
5640 else if (val & 0x8000)
5644 emit_insn (gen_movsi (reg, GEN_INT (val)));
5645 operands[1] = gen_lowpart (HImode, reg);
5647 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5648 && MEM_P (operands[1]))
5650 rtx reg = gen_reg_rtx (SImode);
5652 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5653 operands[1] = gen_lowpart (HImode, reg);
5655 else if (!arm_arch4)
5657 if (MEM_P (operands[1]))
5660 rtx offset = const0_rtx;
5661 rtx reg = gen_reg_rtx (SImode);
5663 if ((REG_P (base = XEXP (operands[1], 0))
5664 || (GET_CODE (base) == PLUS
5665 && (CONST_INT_P (offset = XEXP (base, 1)))
5666 && ((INTVAL(offset) & 1) != 1)
5667 && REG_P (base = XEXP (base, 0))))
5668 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5672 new_rtx = widen_memory_access (operands[1], SImode,
5673 ((INTVAL (offset) & ~3)
5674 - INTVAL (offset)));
5675 emit_insn (gen_movsi (reg, new_rtx));
5676 if (((INTVAL (offset) & 2) != 0)
5677 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5679 rtx reg2 = gen_reg_rtx (SImode);
5681 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5686 emit_insn (gen_movhi_bytes (reg, operands[1]));
5688 operands[1] = gen_lowpart (HImode, reg);
5692 /* Handle loading a large integer during reload. */
5693 else if (CONST_INT_P (operands[1])
5694 && !const_ok_for_arm (INTVAL (operands[1]))
5695 && !const_ok_for_arm (~INTVAL (operands[1])))
5697 /* Writing a constant to memory needs a scratch, which should
5698 be handled with SECONDARY_RELOADs. */
5699 gcc_assert (REG_P (operands[0]));
5701 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5702 emit_insn (gen_movsi (operands[0], operands[1]));
5706 else if (TARGET_THUMB2)
5708 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5709 if (can_create_pseudo_p ())
5711 if (!REG_P (operands[0]))
5712 operands[1] = force_reg (HImode, operands[1]);
5713 /* Zero extend a constant, and keep it in an SImode reg. */
5714 else if (CONST_INT_P (operands[1]))
5716 rtx reg = gen_reg_rtx (SImode);
5717 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5719 emit_insn (gen_movsi (reg, GEN_INT (val)));
5720 operands[1] = gen_lowpart (HImode, reg);
5724 else /* TARGET_THUMB1 */
5726 if (can_create_pseudo_p ())
5728 if (CONST_INT_P (operands[1]))
5730 rtx reg = gen_reg_rtx (SImode);
5732 emit_insn (gen_movsi (reg, operands[1]));
5733 operands[1] = gen_lowpart (HImode, reg);
5736 /* ??? We shouldn't really get invalid addresses here, but this can
5737 happen if we are passed a SP (never OK for HImode/QImode) or
5738 virtual register (also rejected as illegitimate for HImode/QImode)
5739 relative address. */
5740 /* ??? This should perhaps be fixed elsewhere, for instance, in
5741 fixup_stack_1, by checking for other kinds of invalid addresses,
5742 e.g. a bare reference to a virtual register. This may confuse the
5743 alpha though, which must handle this case differently. */
5744 if (MEM_P (operands[0])
5745 && !memory_address_p (GET_MODE (operands[0]),
5746 XEXP (operands[0], 0)))
5748 = replace_equiv_address (operands[0],
5749 copy_to_reg (XEXP (operands[0], 0)));
5751 if (MEM_P (operands[1])
5752 && !memory_address_p (GET_MODE (operands[1]),
5753 XEXP (operands[1], 0)))
5755 = replace_equiv_address (operands[1],
5756 copy_to_reg (XEXP (operands[1], 0)));
5758 if (MEM_P (operands[1]) && optimize > 0)
5760 rtx reg = gen_reg_rtx (SImode);
5762 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5763 operands[1] = gen_lowpart (HImode, reg);
5766 if (MEM_P (operands[0]))
5767 operands[1] = force_reg (HImode, operands[1]);
5769 else if (CONST_INT_P (operands[1])
5770 && !satisfies_constraint_I (operands[1]))
5772 /* Handle loading a large integer during reload. */
5774 /* Writing a constant to memory needs a scratch, which should
5775 be handled with SECONDARY_RELOADs. */
5776 gcc_assert (REG_P (operands[0]));
5778 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5779 emit_insn (gen_movsi (operands[0], operands[1]));
5786 (define_expand "movhi_bytes"
5787 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5789 (zero_extend:SI (match_dup 6)))
5790 (set (match_operand:SI 0 "" "")
5791 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5796 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5798 mem1 = change_address (operands[1], QImode, addr);
5799 mem2 = change_address (operands[1], QImode,
5800 plus_constant (Pmode, addr, 1));
5801 operands[0] = gen_lowpart (SImode, operands[0]);
5803 operands[2] = gen_reg_rtx (SImode);
5804 operands[3] = gen_reg_rtx (SImode);
5807 if (BYTES_BIG_ENDIAN)
5809 operands[4] = operands[2];
5810 operands[5] = operands[3];
5814 operands[4] = operands[3];
5815 operands[5] = operands[2];
5820 (define_expand "movhi_bigend"
5822 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5825 (ashiftrt:SI (match_dup 2) (const_int 16)))
5826 (set (match_operand:HI 0 "s_register_operand")
5830 operands[2] = gen_reg_rtx (SImode);
5831 operands[3] = gen_reg_rtx (SImode);
5832 operands[4] = gen_lowpart (HImode, operands[3]);
5836 ;; Pattern to recognize insn generated default case above
5837 (define_insn "*movhi_insn_arch4"
5838 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5839 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5841 && arm_arch4 && !TARGET_HARD_FLOAT
5842 && (register_operand (operands[0], HImode)
5843 || register_operand (operands[1], HImode))"
5845 mov%?\\t%0, %1\\t%@ movhi
5846 mvn%?\\t%0, #%B1\\t%@ movhi
5847 movw%?\\t%0, %L1\\t%@ movhi
5848 strh%?\\t%1, %0\\t%@ movhi
5849 ldrh%?\\t%0, %1\\t%@ movhi"
5850 [(set_attr "predicable" "yes")
5851 (set_attr "pool_range" "*,*,*,*,256")
5852 (set_attr "neg_pool_range" "*,*,*,*,244")
5853 (set_attr "arch" "*,*,v6t2,*,*")
5854 (set_attr_alternative "type"
5855 [(if_then_else (match_operand 1 "const_int_operand" "")
5856 (const_string "mov_imm" )
5857 (const_string "mov_reg"))
5858 (const_string "mvn_imm")
5859 (const_string "mov_imm")
5860 (const_string "store_4")
5861 (const_string "load_4")])]
5864 (define_insn "*movhi_bytes"
5865 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5866 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5867 "TARGET_ARM && !TARGET_HARD_FLOAT"
5869 mov%?\\t%0, %1\\t%@ movhi
5870 mov%?\\t%0, %1\\t%@ movhi
5871 mvn%?\\t%0, #%B1\\t%@ movhi"
5872 [(set_attr "predicable" "yes")
5873 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5876 ;; We use a DImode scratch because we may occasionally need an additional
5877 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5878 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5879 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5880 ;; to be correctly handled in default_secondary_reload function.
5881 (define_expand "reload_outhi"
5882 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5883 (match_operand:HI 1 "s_register_operand" "r")
5884 (match_operand:DI 2 "s_register_operand" "=&l")])]
5887 arm_reload_out_hi (operands);
5889 thumb_reload_out_hi (operands);
5894 (define_expand "reload_inhi"
5895 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5896 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5897 (match_operand:DI 2 "s_register_operand" "=&r")])]
5901 arm_reload_in_hi (operands);
5903 thumb_reload_out_hi (operands);
5907 (define_expand "movqi"
5908 [(set (match_operand:QI 0 "general_operand")
5909 (match_operand:QI 1 "general_operand"))]
5912 /* Everything except mem = const or mem = mem can be done easily */
5914 if (can_create_pseudo_p ())
5916 if (CONST_INT_P (operands[1]))
5918 rtx reg = gen_reg_rtx (SImode);
5920 /* For thumb we want an unsigned immediate, then we are more likely
5921 to be able to use a movs insn. */
5923 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5925 emit_insn (gen_movsi (reg, operands[1]));
5926 operands[1] = gen_lowpart (QImode, reg);
5931 /* ??? We shouldn't really get invalid addresses here, but this can
5932 happen if we are passed a SP (never OK for HImode/QImode) or
5933 virtual register (also rejected as illegitimate for HImode/QImode)
5934 relative address. */
5935 /* ??? This should perhaps be fixed elsewhere, for instance, in
5936 fixup_stack_1, by checking for other kinds of invalid addresses,
5937 e.g. a bare reference to a virtual register. This may confuse the
5938 alpha though, which must handle this case differently. */
5939 if (MEM_P (operands[0])
5940 && !memory_address_p (GET_MODE (operands[0]),
5941 XEXP (operands[0], 0)))
5943 = replace_equiv_address (operands[0],
5944 copy_to_reg (XEXP (operands[0], 0)));
5945 if (MEM_P (operands[1])
5946 && !memory_address_p (GET_MODE (operands[1]),
5947 XEXP (operands[1], 0)))
5949 = replace_equiv_address (operands[1],
5950 copy_to_reg (XEXP (operands[1], 0)));
5953 if (MEM_P (operands[1]) && optimize > 0)
5955 rtx reg = gen_reg_rtx (SImode);
5957 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5958 operands[1] = gen_lowpart (QImode, reg);
5961 if (MEM_P (operands[0]))
5962 operands[1] = force_reg (QImode, operands[1]);
5964 else if (TARGET_THUMB
5965 && CONST_INT_P (operands[1])
5966 && !satisfies_constraint_I (operands[1]))
5968 /* Handle loading a large integer during reload. */
5970 /* Writing a constant to memory needs a scratch, which should
5971 be handled with SECONDARY_RELOADs. */
5972 gcc_assert (REG_P (operands[0]));
5974 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5975 emit_insn (gen_movsi (operands[0], operands[1]));
5981 (define_insn "*arm_movqi_insn"
5982 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5983 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5985 && ( register_operand (operands[0], QImode)
5986 || register_operand (operands[1], QImode))"
5997 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5998 (set_attr "predicable" "yes")
5999 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6000 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6001 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6005 (define_expand "movhf"
6006 [(set (match_operand:HF 0 "general_operand")
6007 (match_operand:HF 1 "general_operand"))]
6010 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6011 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6014 if (MEM_P (operands[0]))
6015 operands[1] = force_reg (HFmode, operands[1]);
6017 else /* TARGET_THUMB1 */
6019 if (can_create_pseudo_p ())
6021 if (!REG_P (operands[0]))
6022 operands[1] = force_reg (HFmode, operands[1]);
6028 (define_insn "*arm32_movhf"
6029 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6030 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6031 "TARGET_32BIT && !TARGET_HARD_FLOAT
6032 && ( s_register_operand (operands[0], HFmode)
6033 || s_register_operand (operands[1], HFmode))"
6035 switch (which_alternative)
6037 case 0: /* ARM register from memory */
6038 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6039 case 1: /* memory from ARM register */
6040 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6041 case 2: /* ARM register from ARM register */
6042 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6043 case 3: /* ARM register from constant */
6048 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6050 ops[0] = operands[0];
6051 ops[1] = GEN_INT (bits);
6052 ops[2] = GEN_INT (bits & 0xff00);
6053 ops[3] = GEN_INT (bits & 0x00ff);
6055 if (arm_arch_thumb2)
6056 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6058 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6065 [(set_attr "conds" "unconditional")
6066 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6067 (set_attr "length" "4,4,4,8")
6068 (set_attr "predicable" "yes")]
6071 (define_expand "movsf"
6072 [(set (match_operand:SF 0 "general_operand")
6073 (match_operand:SF 1 "general_operand"))]
6076 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6077 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6080 if (MEM_P (operands[0]))
6081 operands[1] = force_reg (SFmode, operands[1]);
6083 else /* TARGET_THUMB1 */
6085 if (can_create_pseudo_p ())
6087 if (!REG_P (operands[0]))
6088 operands[1] = force_reg (SFmode, operands[1]);
6092 /* Cannot load it directly, generate a load with clobber so that it can be
6093 loaded via GPR with MOV / MOVT. */
6094 if (arm_disable_literal_pool
6095 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6096 && CONST_DOUBLE_P (operands[1])
6097 && TARGET_HARD_FLOAT
6098 && !vfp3_const_double_rtx (operands[1]))
6100 rtx clobreg = gen_reg_rtx (SFmode);
6101 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6108 ;; Transform a floating-point move of a constant into a core register into
6109 ;; an SImode operation.
6111 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6112 (match_operand:SF 1 "immediate_operand" ""))]
6115 && CONST_DOUBLE_P (operands[1])"
6116 [(set (match_dup 2) (match_dup 3))]
6118 operands[2] = gen_lowpart (SImode, operands[0]);
6119 operands[3] = gen_lowpart (SImode, operands[1]);
6120 if (operands[2] == 0 || operands[3] == 0)
6125 (define_insn "*arm_movsf_soft_insn"
6126 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6127 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6129 && TARGET_SOFT_FLOAT
6130 && (!MEM_P (operands[0])
6131 || register_operand (operands[1], SFmode))"
6133 switch (which_alternative)
6135 case 0: return \"mov%?\\t%0, %1\";
6137 /* Cannot load it directly, split to load it via MOV / MOVT. */
6138 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6140 return \"ldr%?\\t%0, %1\\t%@ float\";
6141 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6142 default: gcc_unreachable ();
6145 [(set_attr "predicable" "yes")
6146 (set_attr "type" "mov_reg,load_4,store_4")
6147 (set_attr "arm_pool_range" "*,4096,*")
6148 (set_attr "thumb2_pool_range" "*,4094,*")
6149 (set_attr "arm_neg_pool_range" "*,4084,*")
6150 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6153 ;; Splitter for the above.
6155 [(set (match_operand:SF 0 "s_register_operand")
6156 (match_operand:SF 1 "const_double_operand"))]
6157 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6161 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6162 rtx cst = gen_int_mode (buf, SImode);
6163 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6168 (define_expand "movdf"
6169 [(set (match_operand:DF 0 "general_operand")
6170 (match_operand:DF 1 "general_operand"))]
6173 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6174 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6177 if (MEM_P (operands[0]))
6178 operands[1] = force_reg (DFmode, operands[1]);
6180 else /* TARGET_THUMB */
6182 if (can_create_pseudo_p ())
6184 if (!REG_P (operands[0]))
6185 operands[1] = force_reg (DFmode, operands[1]);
6189 /* Cannot load it directly, generate a load with clobber so that it can be
6190 loaded via GPR with MOV / MOVT. */
6191 if (arm_disable_literal_pool
6192 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6193 && CONSTANT_P (operands[1])
6194 && TARGET_HARD_FLOAT
6195 && !arm_const_double_rtx (operands[1])
6196 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6198 rtx clobreg = gen_reg_rtx (DFmode);
6199 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6206 ;; Reloading a df mode value stored in integer regs to memory can require a
6208 ;; Another reload_out<m> pattern that requires special constraints.
6209 (define_expand "reload_outdf"
6210 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6211 (match_operand:DF 1 "s_register_operand" "r")
6212 (match_operand:SI 2 "s_register_operand" "=&r")]
6216 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6219 operands[2] = XEXP (operands[0], 0);
6220 else if (code == POST_INC || code == PRE_DEC)
6222 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6223 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6224 emit_insn (gen_movdi (operands[0], operands[1]));
6227 else if (code == PRE_INC)
6229 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6231 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6234 else if (code == POST_DEC)
6235 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6237 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6238 XEXP (XEXP (operands[0], 0), 1)));
6240 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6243 if (code == POST_DEC)
6244 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6250 (define_insn "*movdf_soft_insn"
6251 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6252 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6253 "TARGET_32BIT && TARGET_SOFT_FLOAT
6254 && ( register_operand (operands[0], DFmode)
6255 || register_operand (operands[1], DFmode))"
6257 switch (which_alternative)
6264 /* Cannot load it directly, split to load it via MOV / MOVT. */
6265 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6269 return output_move_double (operands, true, NULL);
6272 [(set_attr "length" "8,12,16,8,8")
6273 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6274 (set_attr "arm_pool_range" "*,*,*,1020,*")
6275 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6276 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6277 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6280 ;; Splitter for the above.
6282 [(set (match_operand:DF 0 "s_register_operand")
6283 (match_operand:DF 1 "const_double_operand"))]
6284 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6288 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6289 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6290 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6291 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6292 rtx cst = gen_int_mode (ival, DImode);
6293 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6299 ;; load- and store-multiple insns
6300 ;; The arm can load/store any set of registers, provided that they are in
6301 ;; ascending order, but these expanders assume a contiguous set.
6303 (define_expand "load_multiple"
6304 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6305 (match_operand:SI 1 "" ""))
6306 (use (match_operand:SI 2 "" ""))])]
6309 HOST_WIDE_INT offset = 0;
6311 /* Support only fixed point registers. */
6312 if (!CONST_INT_P (operands[2])
6313 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6314 || INTVAL (operands[2]) < 2
6315 || !MEM_P (operands[1])
6316 || !REG_P (operands[0])
6317 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6318 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6322 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6323 INTVAL (operands[2]),
6324 force_reg (SImode, XEXP (operands[1], 0)),
6325 FALSE, operands[1], &offset);
6328 (define_expand "store_multiple"
6329 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6330 (match_operand:SI 1 "" ""))
6331 (use (match_operand:SI 2 "" ""))])]
6334 HOST_WIDE_INT offset = 0;
6336 /* Support only fixed point registers. */
6337 if (!CONST_INT_P (operands[2])
6338 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6339 || INTVAL (operands[2]) < 2
6340 || !REG_P (operands[1])
6341 || !MEM_P (operands[0])
6342 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6343 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6347 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6348 INTVAL (operands[2]),
6349 force_reg (SImode, XEXP (operands[0], 0)),
6350 FALSE, operands[0], &offset);
6354 (define_expand "setmemsi"
6355 [(match_operand:BLK 0 "general_operand")
6356 (match_operand:SI 1 "const_int_operand")
6357 (match_operand:SI 2 "const_int_operand")
6358 (match_operand:SI 3 "const_int_operand")]
6361 if (arm_gen_setmem (operands))
6368 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6369 ;; We could let this apply for blocks of less than this, but it clobbers so
6370 ;; many registers that there is then probably a better way.
6372 (define_expand "cpymemqi"
6373 [(match_operand:BLK 0 "general_operand")
6374 (match_operand:BLK 1 "general_operand")
6375 (match_operand:SI 2 "const_int_operand")
6376 (match_operand:SI 3 "const_int_operand")]
6381 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6382 && !optimize_function_for_size_p (cfun))
6384 if (gen_cpymem_ldrd_strd (operands))
6389 if (arm_gen_cpymemqi (operands))
6393 else /* TARGET_THUMB1 */
6395 if ( INTVAL (operands[3]) != 4
6396 || INTVAL (operands[2]) > 48)
6399 thumb_expand_cpymemqi (operands);
6406 ;; Compare & branch insns
6407 ;; The range calculations are based as follows:
6408 ;; For forward branches, the address calculation returns the address of
6409 ;; the next instruction. This is 2 beyond the branch instruction.
6410 ;; For backward branches, the address calculation returns the address of
6411 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6412 ;; instruction for the shortest sequence, and 4 before the branch instruction
6413 ;; if we have to jump around an unconditional branch.
6414 ;; To the basic branch range the PC offset must be added (this is +4).
6415 ;; So for forward branches we have
6416 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6417 ;; And for backward branches we have
6418 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6420 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6421 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6423 (define_expand "cbranchsi4"
6424 [(set (pc) (if_then_else
6425 (match_operator 0 "expandable_comparison_operator"
6426 [(match_operand:SI 1 "s_register_operand")
6427 (match_operand:SI 2 "nonmemory_operand")])
6428 (label_ref (match_operand 3 "" ""))
6434 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6436 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6440 if (thumb1_cmpneg_operand (operands[2], SImode))
6442 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6443 operands[3], operands[0]));
6446 if (!thumb1_cmp_operand (operands[2], SImode))
6447 operands[2] = force_reg (SImode, operands[2]);
6450 (define_expand "cbranchsf4"
6451 [(set (pc) (if_then_else
6452 (match_operator 0 "expandable_comparison_operator"
6453 [(match_operand:SF 1 "s_register_operand")
6454 (match_operand:SF 2 "vfp_compare_operand")])
6455 (label_ref (match_operand 3 "" ""))
6457 "TARGET_32BIT && TARGET_HARD_FLOAT"
6458 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6459 operands[3])); DONE;"
6462 (define_expand "cbranchdf4"
6463 [(set (pc) (if_then_else
6464 (match_operator 0 "expandable_comparison_operator"
6465 [(match_operand:DF 1 "s_register_operand")
6466 (match_operand:DF 2 "vfp_compare_operand")])
6467 (label_ref (match_operand 3 "" ""))
6469 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6470 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6471 operands[3])); DONE;"
6474 (define_expand "cbranchdi4"
6475 [(set (pc) (if_then_else
6476 (match_operator 0 "expandable_comparison_operator"
6477 [(match_operand:DI 1 "s_register_operand")
6478 (match_operand:DI 2 "reg_or_int_operand")])
6479 (label_ref (match_operand 3 "" ""))
6483 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6485 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6491 ;; Comparison and test insns
6493 (define_insn "*arm_cmpsi_insn"
6494 [(set (reg:CC CC_REGNUM)
6495 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6496 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6504 [(set_attr "conds" "set")
6505 (set_attr "arch" "t2,t2,any,any,any")
6506 (set_attr "length" "2,2,4,4,4")
6507 (set_attr "predicable" "yes")
6508 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6509 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6512 (define_insn "*cmpsi_shiftsi"
6513 [(set (reg:CC CC_REGNUM)
6514 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6515 (match_operator:SI 3 "shift_operator"
6516 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6517 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6520 [(set_attr "conds" "set")
6521 (set_attr "shift" "1")
6522 (set_attr "arch" "32,a,a")
6523 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6525 (define_insn "*cmpsi_shiftsi_swp"
6526 [(set (reg:CC_SWP CC_REGNUM)
6527 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6528 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6529 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6530 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6533 [(set_attr "conds" "set")
6534 (set_attr "shift" "1")
6535 (set_attr "arch" "32,a,a")
6536 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6538 (define_insn "*arm_cmpsi_negshiftsi_si"
6539 [(set (reg:CC_Z CC_REGNUM)
6541 (neg:SI (match_operator:SI 1 "shift_operator"
6542 [(match_operand:SI 2 "s_register_operand" "r")
6543 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6544 (match_operand:SI 0 "s_register_operand" "r")))]
6547 [(set_attr "conds" "set")
6548 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6549 (const_string "alus_shift_imm")
6550 (const_string "alus_shift_reg")))
6551 (set_attr "predicable" "yes")]
6554 ; This insn allows redundant compares to be removed by cse, nothing should
6555 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6556 ; is deleted later on. The match_dup will match the mode here, so that
6557 ; mode changes of the condition codes aren't lost by this even though we don't
6558 ; specify what they are.
6560 (define_insn "*deleted_compare"
6561 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6563 "\\t%@ deleted compare"
6564 [(set_attr "conds" "set")
6565 (set_attr "length" "0")
6566 (set_attr "type" "no_insn")]
6570 ;; Conditional branch insns
6572 (define_expand "cbranch_cc"
6574 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6575 (match_operand 2 "" "")])
6576 (label_ref (match_operand 3 "" ""))
6579 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6580 operands[1], operands[2], NULL_RTX);
6581 operands[2] = const0_rtx;"
6585 ;; Patterns to match conditional branch insns.
6588 (define_insn "arm_cond_branch"
6590 (if_then_else (match_operator 1 "arm_comparison_operator"
6591 [(match_operand 2 "cc_register" "") (const_int 0)])
6592 (label_ref (match_operand 0 "" ""))
6596 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6598 arm_ccfsm_state += 2;
6601 return \"b%d1\\t%l0\";
6603 [(set_attr "conds" "use")
6604 (set_attr "type" "branch")
6605 (set (attr "length")
6607 (and (match_test "TARGET_THUMB2")
6608 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6609 (le (minus (match_dup 0) (pc)) (const_int 256))))
6614 (define_insn "*arm_cond_branch_reversed"
6616 (if_then_else (match_operator 1 "arm_comparison_operator"
6617 [(match_operand 2 "cc_register" "") (const_int 0)])
6619 (label_ref (match_operand 0 "" ""))))]
6622 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6624 arm_ccfsm_state += 2;
6627 return \"b%D1\\t%l0\";
6629 [(set_attr "conds" "use")
6630 (set_attr "type" "branch")
6631 (set (attr "length")
6633 (and (match_test "TARGET_THUMB2")
6634 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6635 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Store-condition-code helper expander: rewrites a general comparison
;; (op2 <op1> op3) into a comparison of a freshly generated CC register
;; against zero, via arm_gen_compare_reg.
6644 (define_expand "cstore_cc"
6645 [(set (match_operand:SI 0 "s_register_operand")
6646 (match_operator:SI 1 "" [(match_operand 2 "" "")
6647 (match_operand 3 "" "")]))]
6649 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6650 operands[2], operands[3], NULL_RTX);
6651 operands[3] = const0_rtx;"

;; Set %0 to 0/1 from the CC register.  Emitted as "#" and split after
;; reload into a conditional-move sequence (commented asm shows the
;; intended pair: mov%D1 #0 ; mov%d1 #1).
6654 (define_insn_and_split "*mov_scc"
6655 [(set (match_operand:SI 0 "s_register_operand" "=r")
6656 (match_operator:SI 1 "arm_comparison_operator_mode"
6657 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6659 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6662 (if_then_else:SI (match_dup 1)
6666 [(set_attr "conds" "use")
6667 (set_attr "length" "8")
6668 (set_attr "type" "multiple")]

;; Negate a borrow-flag operation directly; single 4-byte adc-class
;; instruction, so no split is needed.
6671 (define_insn "*negscc_borrow"
6672 [(set (match_operand:SI 0 "s_register_operand" "=r")
6673 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6676 [(set_attr "conds" "use")
6677 (set_attr "length" "4")
6678 (set_attr "type" "adc_reg")]

;; Set %0 to 0 / -1 from the CC register (negated scc).  Guarded so the
;; cheaper *negscc_borrow pattern above gets first refusal.
6681 (define_insn_and_split "*mov_negscc"
6682 [(set (match_operand:SI 0 "s_register_operand" "=r")
6683 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6684 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6685 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6686 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6689 (if_then_else:SI (match_dup 1)
6693 operands[3] = GEN_INT (~0);
6695 [(set_attr "conds" "use")
6696 (set_attr "length" "8")
6697 (set_attr "type" "multiple")]

;; Set %0 to ~0 / ~1 from the CC register (bitwise-not of scc).
6700 (define_insn_and_split "*mov_notscc"
6701 [(set (match_operand:SI 0 "s_register_operand" "=r")
6702 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6703 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6705 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6708 (if_then_else:SI (match_dup 1)
6712 operands[3] = GEN_INT (~1);
6713 operands[4] = GEN_INT (~0);
6715 [(set_attr "conds" "use")
6716 (set_attr "length" "8")
6717 (set_attr "type" "multiple")]
;; cstoresi4: store the result of an SImode comparison into %0.
;; Visible structure: the 32-bit path funnels through cstore_cc; the
;; Thumb-1 path open-codes each comparison with shift/add tricks since
;; Thumb-1 has no conditional-set instruction.  Note(review): several
;; original lines are missing from this extract (the case labels of the
;; switches, some closing braces), so per-case commentary below is
;; inferred from the surviving emit sequences and should be confirmed
;; against the full file.
6720 (define_expand "cstoresi4"
6721 [(set (match_operand:SI 0 "s_register_operand")
6722 (match_operator:SI 1 "expandable_comparison_operator"
6723 [(match_operand:SI 2 "s_register_operand")
6724 (match_operand:SI 3 "reg_or_int_operand")]))]
6725 "TARGET_32BIT || TARGET_THUMB1"
6727 rtx op3, scratch, scratch2;
/* 32-bit (ARM/Thumb-2) path: legitimise operand 3 then defer to the
   generic CC-register store pattern.  */
6731 if (!arm_add_operand (operands[3], SImode))
6732 operands[3] = force_reg (SImode, operands[3]);
6733 emit_insn (gen_cstore_cc (operands[0], operands[1],
6734 operands[2], operands[3]));
/* Thumb-1, comparison against zero: dedicated eq0/ne0 patterns, plus
   shift-based sequences for the ordered comparisons.  */
6738 if (operands[3] == const0_rtx)
6740 switch (GET_CODE (operands[1]))
6743 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6747 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
/* (x | (x - 1)) >> 31 — sign bit set iff the relation holds.  */
6751 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6752 NULL_RTX, 0, OPTAB_WIDEN);
6753 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6754 NULL_RTX, 0, OPTAB_WIDEN);
6755 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6756 operands[0], 1, OPTAB_WIDEN);
6760 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6762 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6763 NULL_RTX, 1, OPTAB_WIDEN);
6767 scratch = expand_binop (SImode, ashr_optab, operands[2],
6768 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6769 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6770 NULL_RTX, 0, OPTAB_WIDEN);
6771 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6775 /* LT is handled by generic code. No good sequences for unsigned with 0. */
/* Thumb-1, general (non-zero) right-hand side.  */
6782 switch (GET_CODE (operands[1]))
6785 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6786 NULL_RTX, 0, OPTAB_WIDEN);
6787 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6791 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6792 NULL_RTX, 0, OPTAB_WIDEN);
6793 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
/* Signed/unsigned ordered cases: combine the operands' shifted sign
   bits through the thumb1_addsi3_addgeu helper.  */
6797 op3 = force_reg (SImode, operands[3]);
6799 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6800 NULL_RTX, 1, OPTAB_WIDEN);
6801 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6802 NULL_RTX, 0, OPTAB_WIDEN);
6803 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6809 if (!thumb1_cmp_operand (op3, SImode))
6810 op3 = force_reg (SImode, op3);
6811 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6812 NULL_RTX, 0, OPTAB_WIDEN);
6813 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6814 NULL_RTX, 1, OPTAB_WIDEN);
6815 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6820 op3 = force_reg (SImode, operands[3]);
6821 scratch = force_reg (SImode, const0_rtx);
6822 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6828 if (!thumb1_cmp_operand (op3, SImode))
6829 op3 = force_reg (SImode, op3);
6830 scratch = force_reg (SImode, const0_rtx);
6831 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
/* LTU/GTU via the dedicated ltu pattern (GTU swaps the operands).  */
6837 if (!thumb1_cmp_operand (op3, SImode))
6838 op3 = force_reg (SImode, op3);
6839 scratch = gen_reg_rtx (SImode);
6840 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6844 op3 = force_reg (SImode, operands[3]);
6845 scratch = gen_reg_rtx (SImode);
6846 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6849 /* No good sequences for GT, LT. */
;; Floating-point and DImode cstore expanders.  All four funnel the
;; comparison into cstore_cc; the HF and DI variants first validate the
;; comparison with arm_validize_comparison.

;; HFmode compare-and-store; requires the FP16 scalar instructions.
6856 (define_expand "cstorehf4"
6857 [(set (match_operand:SI 0 "s_register_operand")
6858 (match_operator:SI 1 "expandable_comparison_operator"
6859 [(match_operand:HF 2 "s_register_operand")
6860 (match_operand:HF 3 "vfp_compare_operand")]))]
6861 "TARGET_VFP_FP16INST"
6863 if (!arm_validize_comparison (&operands[1],
6868 emit_insn (gen_cstore_cc (operands[0], operands[1],
6869 operands[2], operands[3]))

;; SFmode compare-and-store (hard-float only).
6874 (define_expand "cstoresf4"
6875 [(set (match_operand:SI 0 "s_register_operand")
6876 (match_operator:SI 1 "expandable_comparison_operator"
6877 [(match_operand:SF 2 "s_register_operand")
6878 (match_operand:SF 3 "vfp_compare_operand")]))]
6879 "TARGET_32BIT && TARGET_HARD_FLOAT"
6880 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6881 operands[2], operands[3])); DONE;"

;; DFmode compare-and-store; excluded on single-precision-only VFP.
6884 (define_expand "cstoredf4"
6885 [(set (match_operand:SI 0 "s_register_operand")
6886 (match_operator:SI 1 "expandable_comparison_operator"
6887 [(match_operand:DF 2 "s_register_operand")
6888 (match_operand:DF 3 "vfp_compare_operand")]))]
6889 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6890 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6891 operands[2], operands[3])); DONE;"

;; DImode compare-and-store.
6894 (define_expand "cstoredi4"
6895 [(set (match_operand:SI 0 "s_register_operand")
6896 (match_operator:SI 1 "expandable_comparison_operator"
6897 [(match_operand:DI 2 "s_register_operand")
6898 (match_operand:DI 3 "reg_or_int_operand")]))]
6901 if (!arm_validize_comparison (&operands[1],
6905 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6912 ;; Conditional move insns

;; Each mov<m>cc expander below follows the same recipe: validate the
;; comparison, materialise it into a CC register with
;; arm_gen_compare_reg, then rewrite operand 1 as (code CC 0) so a
;; conditional-move insn pattern can match.

;; SImode conditional move.
6914 (define_expand "movsicc"
6915 [(set (match_operand:SI 0 "s_register_operand")
6916 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6917 (match_operand:SI 2 "arm_not_operand")
6918 (match_operand:SI 3 "arm_not_operand")))]
6925 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6926 &XEXP (operands[1], 1)))
6929 code = GET_CODE (operands[1]);
6930 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6931 XEXP (operands[1], 1), NULL_RTX);
6932 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; HFmode conditional move (FP16 scalar instructions).
6936 (define_expand "movhfcc"
6937 [(set (match_operand:HF 0 "s_register_operand")
6938 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6939 (match_operand:HF 2 "s_register_operand")
6940 (match_operand:HF 3 "s_register_operand")))]
6941 "TARGET_VFP_FP16INST"
6944 enum rtx_code code = GET_CODE (operands[1]);
6947 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6948 &XEXP (operands[1], 1)))
6951 code = GET_CODE (operands[1]);
6952 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6953 XEXP (operands[1], 1), NULL_RTX);
6954 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; SFmode conditional move (hard-float).
6958 (define_expand "movsfcc"
6959 [(set (match_operand:SF 0 "s_register_operand")
6960 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6961 (match_operand:SF 2 "s_register_operand")
6962 (match_operand:SF 3 "s_register_operand")))]
6963 "TARGET_32BIT && TARGET_HARD_FLOAT"
6966 enum rtx_code code = GET_CODE (operands[1]);
6969 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6970 &XEXP (operands[1], 1)))
6973 code = GET_CODE (operands[1]);
6974 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6975 XEXP (operands[1], 1), NULL_RTX);
6976 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; DFmode conditional move (double-precision VFP required).
6980 (define_expand "movdfcc"
6981 [(set (match_operand:DF 0 "s_register_operand")
6982 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6983 (match_operand:DF 2 "s_register_operand")
6984 (match_operand:DF 3 "s_register_operand")))]
6985 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6988 enum rtx_code code = GET_CODE (operands[1]);
6991 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6992 &XEXP (operands[1], 1)))
6994 code = GET_CODE (operands[1]);
6995 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6996 XEXP (operands[1], 1), NULL_RTX);
6997 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; VFP conditional select (VSEL) for SF/DF, ARMv8-A FP (TARGET_VFP5).
;; Emits vsel%d1 when the condition maps directly to an ARM condition
;; code, otherwise vsel%D1 with the arms swapped.
7001 (define_insn "*cmov<mode>"
7002 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7003 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7004 [(match_operand 2 "cc_register" "") (const_int 0)])
7005 (match_operand:SDF 3 "s_register_operand"
7007 (match_operand:SDF 4 "s_register_operand"
7008 "<F_constraint>")))]
7009 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7012 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7019 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7024 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7030 [(set_attr "conds" "use")
7031 (set_attr "type" "fcsel")]

;; HFmode variant of the VSEL conditional select.
7034 (define_insn "*cmovhf"
7035 [(set (match_operand:HF 0 "s_register_operand" "=t")
7036 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7037 [(match_operand 2 "cc_register" "") (const_int 0)])
7038 (match_operand:HF 3 "s_register_operand" "t")
7039 (match_operand:HF 4 "s_register_operand" "t")))]
7040 "TARGET_VFP_FP16INST"
7043 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7050 return \"vsel%d1.f16\\t%0, %3, %4\";
7055 return \"vsel%D1.f16\\t%0, %4, %3\";
7061 [(set_attr "conds" "use")
7062 (set_attr "type" "fcsel")]

;; SImode conditional move.  Emitted as "#"; the commented alternatives
;; show the intended mov/mvn pairs.  After reload the insn is split into
;; up to two COND_EXEC sets: one under the original condition, one under
;; its reverse (reverse_condition_maybe_unordered for FP CC modes).
7065 (define_insn_and_split "*movsicc_insn"
7066 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7068 (match_operator 3 "arm_comparison_operator"
7069 [(match_operand 4 "cc_register" "") (const_int 0)])
7070 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7071 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7082 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7083 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7084 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7085 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7086 "&& reload_completed"
7089 enum rtx_code rev_code;
7093 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7095 gen_rtx_SET (operands[0], operands[1])));
7097 rev_code = GET_CODE (operands[3]);
7098 mode = GET_MODE (operands[4]);
7099 if (mode == CCFPmode || mode == CCFPEmode)
7100 rev_code = reverse_condition_maybe_unordered (rev_code);
7102 rev_code = reverse_condition (rev_code);
7104 rev_cond = gen_rtx_fmt_ee (rev_code,
7108 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7110 gen_rtx_SET (operands[0], operands[2])));
;; Single-insn alternatives (0-3) are 4 bytes; two-insn ones 8 bytes.
7113 [(set_attr "length" "4,4,4,4,8,8,8,8")
7114 (set_attr "conds" "use")
7115 (set_attr_alternative "type"
7116 [(if_then_else (match_operand 2 "const_int_operand" "")
7117 (const_string "mov_imm")
7118 (const_string "mov_reg"))
7119 (const_string "mvn_imm")
7120 (if_then_else (match_operand 1 "const_int_operand" "")
7121 (const_string "mov_imm")
7122 (const_string "mov_reg"))
7123 (const_string "mvn_imm")
7124 (const_string "multiple")
7125 (const_string "multiple")
7126 (const_string "multiple")
7127 (const_string "multiple")])]

;; SFmode conditional move for soft-float targets: the value lives in
;; core registers, so a plain conditional mov suffices.
7130 (define_insn "*movsfcc_soft_insn"
7131 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7132 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7133 [(match_operand 4 "cc_register" "") (const_int 0)])
7134 (match_operand:SF 1 "s_register_operand" "0,r")
7135 (match_operand:SF 2 "s_register_operand" "r,0")))]
7136 "TARGET_ARM && TARGET_SOFT_FLOAT"
7140 [(set_attr "conds" "use")
7141 (set_attr "type" "mov_reg")]
7145 ;; Jump and linkage insns

;; Generic unconditional jump expander.
7147 (define_expand "jump"
7149 (label_ref (match_operand 0 "" "")))]

;; ARM/Thumb-2 unconditional branch "b%? <label>"; cooperates with the
;; ccfsm conditionalisation state machine like the conditional branches.
7154 (define_insn "*arm_jump"
7156 (label_ref (match_operand 0 "" "")))]
7160 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7162 arm_ccfsm_state += 2;
7165 return \"b%?\\t%l0\";
;; Short-form branch reachable within roughly [-2044, 2048) on Thumb-2.
7168 [(set_attr "predicable" "yes")
7169 (set (attr "length")
7171 (and (match_test "TARGET_THUMB2")
7172 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7173 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7176 (set_attr "type" "branch")]
;; "call" expander.  Visible duties: default a NULL operand 2 to 0,
;; force long calls through a register, load the FDPIC function
;; descriptor for indirect FDPIC calls, route CMSE non-secure calls to
;; the dedicated pattern, and restore the FDPIC register (r9) afterwards.
7179 (define_expand "call"
7180 [(parallel [(call (match_operand 0 "memory_operand")
7181 (match_operand 1 "general_operand"))
7182 (use (match_operand 2 "" ""))
7183 (clobber (reg:SI LR_REGNUM))])]
7188 tree addr = MEM_EXPR (operands[0]);
7190 /* In an untyped call, we can get NULL for operand 2.  */
7191 if (operands[2] == NULL_RTX)
7192 operands[2] = const0_rtx;
7194 /* Decide if we should generate indirect calls by loading the
7195 32-bit address of the callee into a register before performing the
7197 callee = XEXP (operands[0], 0);
7198 if (GET_CODE (callee) == SYMBOL_REF
7199 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7201 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7203 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7204 /* Indirect call: set r9 with FDPIC value of callee.  */
7205 XEXP (operands[0], 0)
7206 = arm_load_function_descriptor (XEXP (operands[0], 0));
7208 if (detect_cmse_nonsecure_call (addr))
7210 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7212 emit_call_insn (pat);
7216 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7217 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7220 /* Restore FDPIC register (r9) after call.  */
7223 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7224 rtx initial_fdpic_reg
7225 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7227 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7228 initial_fdpic_reg));

;; Reload the PIC/FDPIC register from its entry value (register or
;; stack slot) after a call; wrapped in UNSPEC_PIC_RESTORE.
7235 (define_insn "restore_pic_register_after_call"
7236 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7237 (unspec:SI [(match_dup 0)
7238 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7239 UNSPEC_PIC_RESTORE))]

;; Plain internal call parallel (matched by the call insns below).
7246 (define_expand "call_internal"
7247 [(parallel [(call (match_operand 0 "memory_operand")
7248 (match_operand 1 "general_operand"))
7249 (use (match_operand 2 "" ""))
7250 (clobber (reg:SI LR_REGNUM))])])

;; CMSE non-secure call: the target address is forced into r4 so the
;; security-state transition sequence can scrub the other registers.
7252 (define_expand "nonsecure_call_internal"
7253 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7254 UNSPEC_NONSECURE_MEM)
7255 (match_operand 1 "general_operand"))
7256 (use (match_operand 2 "" ""))
7257 (clobber (reg:SI LR_REGNUM))])]
7262 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7263 gen_rtx_REG (SImode, R4_REGNUM),
7266 operands[0] = replace_equiv_address (operands[0], tmp);

;; Indirect call via register, ARMv5T+ (BLX available).
7269 (define_insn "*call_reg_armv5"
7270 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7271 (match_operand 1 "" ""))
7272 (use (match_operand 2 "" ""))
7273 (clobber (reg:SI LR_REGNUM))]
7274 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7276 [(set_attr "type" "call")]

;; Indirect call via register, pre-ARMv5T: sequence produced by
;; output_call (worst case 12 bytes, normally two instructions).
7279 (define_insn "*call_reg_arm"
7280 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7281 (match_operand 1 "" ""))
7282 (use (match_operand 2 "" ""))
7283 (clobber (reg:SI LR_REGNUM))]
7284 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7286 return output_call (operands);
7288 ;; length is worst case, normally it is only two
7289 [(set_attr "length" "12")
7290 (set_attr "type" "call")]
;; "call_value" expander — mirror of "call" with the return value in
;; operand 0; same long-call, FDPIC and CMSE handling.
7294 (define_expand "call_value"
7295 [(parallel [(set (match_operand 0 "" "")
7296 (call (match_operand 1 "memory_operand")
7297 (match_operand 2 "general_operand")))
7298 (use (match_operand 3 "" ""))
7299 (clobber (reg:SI LR_REGNUM))])]
7304 tree addr = MEM_EXPR (operands[1]);
7306 /* In an untyped call, we can get NULL for operand 2.  */
7307 if (operands[3] == 0)
7308 operands[3] = const0_rtx;
7310 /* Decide if we should generate indirect calls by loading the
7311 32-bit address of the callee into a register before performing the
7313 callee = XEXP (operands[1], 0);
7314 if (GET_CODE (callee) == SYMBOL_REF
7315 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7317 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7319 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7320 /* Indirect call: set r9 with FDPIC value of callee.  */
7321 XEXP (operands[1], 0)
7322 = arm_load_function_descriptor (XEXP (operands[1], 0));
7324 if (detect_cmse_nonsecure_call (addr))
7326 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7327 operands[2], operands[3]);
7328 emit_call_insn (pat);
7332 pat = gen_call_value_internal (operands[0], operands[1],
7333 operands[2], operands[3]);
7334 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7337 /* Restore FDPIC register (r9) after call.  */
7340 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7341 rtx initial_fdpic_reg
7342 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7344 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7345 initial_fdpic_reg));

;; Plain internal call-with-value parallel.
7352 (define_expand "call_value_internal"
7353 [(parallel [(set (match_operand 0 "" "")
7354 (call (match_operand 1 "memory_operand")
7355 (match_operand 2 "general_operand")))
7356 (use (match_operand 3 "" ""))
7357 (clobber (reg:SI LR_REGNUM))])])

;; CMSE non-secure call with return value; callee address forced to r4
;; as in nonsecure_call_internal.
7359 (define_expand "nonsecure_call_value_internal"
7360 [(parallel [(set (match_operand 0 "" "")
7361 (call (unspec:SI [(match_operand 1 "memory_operand")]
7362 UNSPEC_NONSECURE_MEM)
7363 (match_operand 2 "general_operand")))
7364 (use (match_operand 3 "" ""))
7365 (clobber (reg:SI LR_REGNUM))])]
7370 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7371 gen_rtx_REG (SImode, R4_REGNUM),
7374 operands[1] = replace_equiv_address (operands[1], tmp);

;; Indirect call-with-value via register, ARMv5T+ (BLX).
7377 (define_insn "*call_value_reg_armv5"
7378 [(set (match_operand 0 "" "")
7379 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7380 (match_operand 2 "" "")))
7381 (use (match_operand 3 "" ""))
7382 (clobber (reg:SI LR_REGNUM))]
7383 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7385 [(set_attr "type" "call")]

;; Indirect call-with-value via register, pre-ARMv5T; note the sequence
;; is generated from &operands[1] (the callee operand onwards).
7388 (define_insn "*call_value_reg_arm"
7389 [(set (match_operand 0 "" "")
7390 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7391 (match_operand 2 "" "")))
7392 (use (match_operand 3 "" ""))
7393 (clobber (reg:SI LR_REGNUM))]
7394 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7396 return output_call (&operands[1]);
7398 [(set_attr "length" "12")
7399 (set_attr "type" "call")]
7402 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7403 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.

;; Direct call to a symbol (not a long call).  Uses BLX when the callee
;; is local, ARMv5T+ is available and an ARM/Thumb mode change is
;; possible; otherwise plain BL, with (PLT) when NEED_PLT_RELOC.
7405 (define_insn "*call_symbol"
7406 [(call (mem:SI (match_operand:SI 0 "" ""))
7407 (match_operand 1 "" ""))
7408 (use (match_operand 2 "" ""))
7409 (clobber (reg:SI LR_REGNUM))]
7411 && !SIBLING_CALL_P (insn)
7412 && (GET_CODE (operands[0]) == SYMBOL_REF)
7413 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7416 rtx op = operands[0];
7418 /* Switch mode now when possible.  */
7419 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7420 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7421 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7423 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7425 [(set_attr "type" "call")]

;; Direct call to a symbol with a return value; same logic as
;; *call_symbol with operand indices shifted by one.
7428 (define_insn "*call_value_symbol"
7429 [(set (match_operand 0 "" "")
7430 (call (mem:SI (match_operand:SI 1 "" ""))
7431 (match_operand:SI 2 "" "")))
7432 (use (match_operand 3 "" ""))
7433 (clobber (reg:SI LR_REGNUM))]
7435 && !SIBLING_CALL_P (insn)
7436 && (GET_CODE (operands[1]) == SYMBOL_REF)
7437 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7440 rtx op = operands[1];
7442 /* Switch mode now when possible.  */
7443 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7444 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7445 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7447 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7449 [(set_attr "type" "call")]
;; Internal sibling-call parallel matched by *sibcall_insn.
7452 (define_expand "sibcall_internal"
7453 [(parallel [(call (match_operand 0 "memory_operand")
7454 (match_operand 1 "general_operand"))
7456 (use (match_operand 2 "" ""))])])

7458 ;; We may also be able to do sibcalls for Thumb, but it's much harder...

;; Sibling (tail) call expander: force non-register, non-symbol callees
;; (and long-call symbols) into a register, default operand 2 to 0,
;; then emit via sibcall_internal.
7459 (define_expand "sibcall"
7460 [(parallel [(call (match_operand 0 "memory_operand")
7461 (match_operand 1 "general_operand"))
7463 (use (match_operand 2 "" ""))])]
7469 if ((!REG_P (XEXP (operands[0], 0))
7470 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7471 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7472 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7473 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7475 if (operands[2] == NULL_RTX)
7476 operands[2] = const0_rtx;
7478 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7479 arm_emit_call_insn (pat, operands[0], true);

;; Internal sibling-call-with-value parallel.
7484 (define_expand "sibcall_value_internal"
7485 [(parallel [(set (match_operand 0 "" "")
7486 (call (match_operand 1 "memory_operand")
7487 (match_operand 2 "general_operand")))
7489 (use (match_operand 3 "" ""))])])

;; Sibling call with a return value; same callee legitimisation as
;; "sibcall" with indices shifted by one.
7491 (define_expand "sibcall_value"
7492 [(parallel [(set (match_operand 0 "" "")
7493 (call (match_operand 1 "memory_operand")
7494 (match_operand 2 "general_operand")))
7496 (use (match_operand 3 "" ""))])]
7502 if ((!REG_P (XEXP (operands[1], 0))
7503 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7504 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7505 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7506 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7508 if (operands[3] == NULL_RTX)
7509 operands[3] = const0_rtx;
7511 pat = gen_sibcall_value_internal (operands[0], operands[1],
7512 operands[2], operands[3]);
7513 arm_emit_call_insn (pat, operands[1], true);

;; Sibling-call insn: alt 1 is a direct branch to a symbol (with (PLT)
;; when needed); alt 0 is indirect — BX on ARMv4T+/ARMv5T+, otherwise a
;; mov to pc.
7518 (define_insn "*sibcall_insn"
7519 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7520 (match_operand 1 "" ""))
7522 (use (match_operand 2 "" ""))]
7523 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7525 if (which_alternative == 1)
7526 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7529 if (arm_arch5t || arm_arch4t)
7530 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7532 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7535 [(set_attr "type" "call")]

;; Sibling-call-with-value insn; mirror of *sibcall_insn.
7538 (define_insn "*sibcall_value_insn"
7539 [(set (match_operand 0 "" "")
7540 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7541 (match_operand 2 "" "")))
7543 (use (match_operand 3 "" ""))]
7544 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7546 if (which_alternative == 1)
7547 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7550 if (arm_arch5t || arm_arch4t)
7551 return \"bx%?\\t%1\";
7553 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7556 [(set_attr "type" "call")]
;; Return expander (iterated over plain/simple return via <return_str>).
;; On Thumb-2 the epilogue is expanded by thumb2_expand_return; the
;; condition excludes stack-realigning and non-normal function types.
7559 (define_expand "<return_str>return"
7561 "(TARGET_ARM || (TARGET_THUMB2
7562 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7563 && !IS_STACKALIGN (arm_current_func_type ())))
7564 <return_cond_false>"
7569 thumb2_expand_return (<return_simple_p>);

7576 ;; Often the return insn will be the same as loading from memory, so set attr

;; ARM-state return; the actual instruction text comes from
;; output_return_instruction.  ccfsm state 2 means the return is being
;; conditionalised away by final.
7577 (define_insn "*arm_return"
7579 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7582 if (arm_ccfsm_state == 2)
7584 arm_ccfsm_state += 2;
7587 return output_return_instruction (const_true_rtx, true, false, false);
7589 [(set_attr "type" "load_4")
7590 (set_attr "length" "12")
7591 (set_attr "predicable" "yes")]

;; Conditional return when the condition in the CC register holds.
7594 (define_insn "*cond_<return_str>return"
7596 (if_then_else (match_operator 0 "arm_comparison_operator"
7597 [(match_operand 1 "cc_register" "") (const_int 0)])
7600 "TARGET_ARM <return_cond_true>"
7603 if (arm_ccfsm_state == 2)
7605 arm_ccfsm_state += 2;
7608 return output_return_instruction (operands[0], true, false,
7611 [(set_attr "conds" "use")
7612 (set_attr "length" "12")
7613 (set_attr "type" "load_4")]

;; As above but returning on the inverse of the condition (third
;; argument of output_return_instruction is 'reversed').
7616 (define_insn "*cond_<return_str>return_inverted"
7618 (if_then_else (match_operator 0 "arm_comparison_operator"
7619 [(match_operand 1 "cc_register" "") (const_int 0)])
7622 "TARGET_ARM <return_cond_true>"
7625 if (arm_ccfsm_state == 2)
7627 arm_ccfsm_state += 2;
7630 return output_return_instruction (operands[0], true, true,
7633 [(set_attr "conds" "use")
7634 (set_attr "length" "12")
7635 (set_attr "type" "load_4")]

;; Simple return (no epilogue work); last flag of
;; output_return_instruction selects the 'simple' form.
7638 (define_insn "*arm_simple_return"
7643 if (arm_ccfsm_state == 2)
7645 arm_ccfsm_state += 2;
7648 return output_return_instruction (const_true_rtx, true, false, true);
7650 [(set_attr "type" "branch")
7651 (set_attr "length" "4")
7652 (set_attr "predicable" "yes")]

7655 ;; Generate a sequence of instructions to determine if the processor is
7656 ;; in 26-bit or 32-bit mode, and return the appropriate return address

;; Sets %0 to ~0 or 0x03fffffc depending on the UNSPEC_CHECK_ARCH
;; result, for masking return addresses on 26-bit-capable processors.
7659 (define_expand "return_addr_mask"
7661 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7663 (set (match_operand:SI 0 "s_register_operand")
7664 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7666 (const_int 67108860)))] ; 0x03fffffc
7669 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);

;; Architecture probe: teq r0,r0 sets Z; teq pc,pc distinguishes
;; 26-bit from 32-bit program-counter behaviour.
7672 (define_insn "*check_arch2"
7673 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7674 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7677 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7678 [(set_attr "length" "8")
7679 (set_attr "conds" "set")
7680 (set_attr "type" "multiple")]
7683 ;; Call subroutine returning any type.

;; untyped_call: call %0 and store every possible return register into
;; the result block %1, as described by the parallel in %2.  R0 is
;; widened to TImode / stored with a store-multiple because up to four
;; core registers can carry the return value.
7685 (define_expand "untyped_call"
7686 [(parallel [(call (match_operand 0 "" "")
7688 (match_operand 1 "" "")
7689 (match_operand 2 "" "")])]
7690 "TARGET_EITHER && !TARGET_FDPIC"
7694 rtx par = gen_rtx_PARALLEL (VOIDmode,
7695 rtvec_alloc (XVECLEN (operands[2], 0)));
7696 rtx addr = gen_reg_rtx (Pmode);
7700 emit_move_insn (addr, XEXP (operands[1], 0));
7701 mem = change_address (operands[1], BLKmode, addr);
7703 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7705 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7707 /* Default code only uses r0 as a return value, but we could
7708 be using anything up to 4 registers.  */
7709 if (REGNO (src) == R0_REGNUM)
7710 src = gen_rtx_REG (TImode, R0_REGNUM);
7712 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7714 size += GET_MODE_SIZE (GET_MODE (src));
7717 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7721 for (i = 0; i < XVECLEN (par, 0); i++)
7723 HOST_WIDE_INT offset = 0;
7724 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7727 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7729 mem = change_address (mem, GET_MODE (reg), NULL);
7730 if (REGNO (reg) == R0_REGNUM)
7732 /* On thumb we have to use a write-back instruction.  */
7733 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7734 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7735 size = TARGET_ARM ? 16 : 0;
7739 emit_move_insn (mem, reg);
7740 size = GET_MODE_SIZE (GET_MODE (reg));
7744 /* The optimizer does not know that the call sets the function value
7745 registers we stored in the result block.  We avoid problems by
7746 claiming that all hard registers are used and clobbered at this
7748 emit_insn (gen_blockage ());

;; untyped_return: reload the return registers described by %1 from the
;; result block %0, emit USEs so they stay live, then a naked return.
7754 (define_expand "untyped_return"
7755 [(match_operand:BLK 0 "memory_operand")
7756 (match_operand 1 "" "")]
7757 "TARGET_EITHER && !TARGET_FDPIC"
7761 rtx addr = gen_reg_rtx (Pmode);
7765 emit_move_insn (addr, XEXP (operands[0], 0));
7766 mem = change_address (operands[0], BLKmode, addr);
7768 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7770 HOST_WIDE_INT offset = 0;
7771 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7774 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7776 mem = change_address (mem, GET_MODE (reg), NULL);
7777 if (REGNO (reg) == R0_REGNUM)
7779 /* On thumb we have to use a write-back instruction.  */
7780 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7781 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7782 size = TARGET_ARM ? 16 : 0;
7786 emit_move_insn (reg, mem);
7787 size = GET_MODE_SIZE (GET_MODE (reg));
7791 /* Emit USE insns before the return.  */
7792 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7793 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7795 /* Construct the return.  */
7796 expand_naked_return ();
7802 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7803 ;; all of memory.  This blocks insns from being moved across this point.

;; Zero-length scheduling barrier.
7805 (define_insn "blockage"
7806 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7809 [(set_attr "length" "0")
7810 (set_attr "type" "block")]

7813 ;; Since we hard code r0 here use the 'o' constraint to prevent
7814 ;; provoking undefined behaviour in the hardware with putting out
7815 ;; auto-increment operations with potentially r0 as the base register.

;; Single stack-probe store.
7816 (define_insn "probe_stack"
7817 [(set (match_operand:SI 0 "memory_operand" "=o")
7818 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7821 [(set_attr "type" "store_4")
7822 (set_attr "predicable" "yes")]

;; Probe a range of stack pages; the loop is emitted by
;; output_probe_stack_range.
7825 (define_insn "probe_stack_range"
7826 [(set (match_operand:SI 0 "register_operand" "=r")
7827 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7828 (match_operand:SI 2 "register_operand" "r")]
7829 VUNSPEC_PROBE_STACK_RANGE))]
7832 return output_probe_stack_range (operands[0], operands[2]);
7834 [(set_attr "type" "multiple")
7835 (set_attr "conds" "clob")]
7838 ;; Named patterns for stack smashing protection.

;; Combined guard-set expander: copy the stack-protector canary from the
;; guard location %1 into the frame slot %0, with two scratch registers.
7839 (define_expand "stack_protect_combined_set"
7841 [(set (match_operand:SI 0 "memory_operand")
7842 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7844 (clobber (match_scratch:SI 2 ""))
7845 (clobber (match_scratch:SI 3 ""))])]

7850 ;; Use a separate insn from the above expand to be able to have the mem outside
7851 ;; the operand #1 when register allocation comes.  This is needed to avoid LRA
7852 ;; try to reload the guard since we need to control how PIC access is done in
7853 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7854 ;; legitimize_pic_address ()).

;; Split of the combined set: legitimise the guard address (recomputing
;; the GOT base for PIC/FDPIC), then hand off to *stack_protect_set_insn.
7855 (define_insn_and_split "*stack_protect_combined_set_insn"
7856 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7857 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7859 (clobber (match_scratch:SI 2 "=&l,&r"))
7860 (clobber (match_scratch:SI 3 "=&l,&r"))]
7864 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7866 (clobber (match_dup 2))])]
7874 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7876 pic_reg = operands[3];
7878 /* Forces recomputing of GOT base now.  */
7879 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7880 true /*compute_now*/);
7884 if (address_operand (operands[1], SImode))
7885 operands[2] = operands[1];
7888 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7889 emit_move_insn (operands[2], mem);
7893 [(set_attr "arch" "t1,32")]

7896 ;; DO NOT SPLIT THIS INSN.  It's important for security reasons that the
7897 ;; canary value does not live beyond the life of this sequence.

;; Load canary via %1, store it to %0, then immediately zero %1 so the
;; canary value does not survive in a register.
7898 (define_insn "*stack_protect_set_insn"
7899 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7900 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7902 (clobber (match_dup 1))]
7905 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7906 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7907 [(set_attr "length" "8,12")
7908 (set_attr "conds" "clob,nocond")
7909 (set_attr "type" "multiple")
7910 (set_attr "arch" "t1,32")]
;; Combined guard-test expander: compare the saved canary in %0 with the
;; guard %1 and branch to %2 on equality (i.e. no smash detected).
7913 (define_expand "stack_protect_combined_test"
7917 (eq (match_operand:SI 0 "memory_operand")
7918 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7920 (label_ref (match_operand 2))
7922 (clobber (match_scratch:SI 3 ""))
7923 (clobber (match_scratch:SI 4 ""))
7924 (clobber (reg:CC CC_REGNUM))])]

7929 ;; Use a separate insn from the above expand to be able to have the mem outside
7930 ;; the operand #1 when register allocation comes.  This is needed to avoid LRA
7931 ;; try to reload the guard since we need to control how PIC access is done in
7932 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7933 ;; legitimize_pic_address ()).

;; Split of the combined test: legitimise the guard address (PIC/FDPIC
;; aware), then emit either the 32-bit CC-setting test + conditional
;; branch, or the Thumb-1 register test + cbranchsi4.
7934 (define_insn_and_split "*stack_protect_combined_test_insn"
7937 (eq (match_operand:SI 0 "memory_operand" "m,m")
7938 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7940 (label_ref (match_operand 2))
7942 (clobber (match_scratch:SI 3 "=&l,&r"))
7943 (clobber (match_scratch:SI 4 "=&l,&r"))
7944 (clobber (reg:CC CC_REGNUM))]
7957 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7959 pic_reg = operands[4];
7961 /* Forces recomputing of GOT base now.  */
7962 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7963 true /*compute_now*/);
7967 if (address_operand (operands[1], SImode))
7968 operands[3] = operands[1];
7971 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7972 emit_move_insn (operands[3], mem);
7977 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7979 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7980 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7981 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7985 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7987 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7988 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7993 [(set_attr "arch" "t1,32")]

;; Load guard via %2 and saved canary from %1, XOR them into %0 setting
;; Z; %2 is clobbered so the canary does not linger in a register.
7996 (define_insn "arm_stack_protect_test_insn"
7997 [(set (reg:CC_Z CC_REGNUM)
7998 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7999 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8002 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8003 (clobber (match_dup 2))]
8005 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8006 [(set_attr "length" "8,12")
8007 (set_attr "conds" "set")
8008 (set_attr "type" "multiple")
8009 (set_attr "arch" "t,32")]
;; Switch/case dispatch.  Normalises the index by subtracting the lower bound
;; (when non-zero), then picks the target-specific casesi pattern; the C code
;; selects between ARM / Thumb-1 PIC / Thumb-2 variants via CODE_FOR_*.
;; NOTE(review): interior lines are missing in this copy (non-contiguous
;; original numbering), e.g. the bounds-check branches between 8027 and 8033.
8012 (define_expand "casesi"
8013 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8014 (match_operand:SI 1 "const_int_operand") ; lower bound
8015 (match_operand:SI 2 "const_int_operand") ; total range
8016 (match_operand:SI 3 "" "") ; table label
8017 (match_operand:SI 4 "" "")] ; Out of range label
8018 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8021 enum insn_code code;
8022 if (operands[1] != const0_rtx)
8024 rtx reg = gen_reg_rtx (SImode);
8026 emit_insn (gen_addsi3 (reg, operands[0],
8027 gen_int_mode (-INTVAL (operands[1]),
8033 code = CODE_FOR_arm_casesi_internal;
8034 else if (TARGET_THUMB1)
8035 code = CODE_FOR_thumb1_casesi_internal_pic;
8037 code = CODE_FOR_thumb2_casesi_internal_pic;
8039 code = CODE_FOR_thumb2_casesi_internal;
8041 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8042 operands[2] = force_reg (SImode, operands[2]);
8044 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8045 operands[3], operands[4]));
8050 ;; The USE in this pattern is needed to tell flow analysis that this is
8051 ;; a CASESI insn. It has no other purpose.
;; Builds the table-load MEM (table base + index*4) in operands[4] and marks
;; it read-only/no-trap before handing off to the *arm_casesi_internal insn.
8052 (define_expand "arm_casesi_internal"
8053 [(parallel [(set (pc)
8055 (leu (match_operand:SI 0 "s_register_operand")
8056 (match_operand:SI 1 "arm_rhs_operand"))
8058 (label_ref:SI (match_operand 3 ""))))
8059 (clobber (reg:CC CC_REGNUM))
8060 (use (label_ref:SI (match_operand 2 "")))])]
8063 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8064 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8065 gen_rtx_LABEL_REF (SImode, operands[2]));
8066 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8067 MEM_READONLY_P (operands[4]) = 1;
8068 MEM_NOTRAP_P (operands[4]) = 1;
;; ARM-mode table dispatch: cmp index against range, then either
;; addls pc,pc,index,asl#2 (branch-table form) or ldrls pc,[pc,index,asl#2]
;; (address-table form), falling through to b %l3 when out of range.
8071 (define_insn "*arm_casesi_internal"
8072 [(parallel [(set (pc)
8074 (leu (match_operand:SI 0 "s_register_operand" "r")
8075 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8076 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8077 (label_ref:SI (match_operand 2 "" ""))))
8078 (label_ref:SI (match_operand 3 "" ""))))
8079 (clobber (reg:CC CC_REGNUM))
8080 (use (label_ref:SI (match_dup 2)))])]
8084 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8085 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8087 [(set_attr "conds" "clob")
8088 (set_attr "length" "12")
8089 (set_attr "type" "multiple")]
;; Computed jump to a register.  On Thumb-2 the low bit of the address is set
;; with an OR (see inline comment) so the jump can use bx and stay in Thumb
;; state.  NOTE(review): the TARGET test and the bx emission lines are missing
;; from this copy (original lines 8095-8101 and 8104+ were dropped).
8092 (define_expand "indirect_jump"
8094 (match_operand:SI 0 "s_register_operand"))]
8097 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8098 address and use bx. */
8102 tmp = gen_reg_rtx (SImode);
8103 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8109 ;; NB Never uses BX.
;; ARM-mode register-indirect jump: a plain (predicable) mov pc, reg.
8110 (define_insn "*arm_indirect_jump"
8112 (match_operand:SI 0 "s_register_operand" "r"))]
8114 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8115 [(set_attr "predicable" "yes")
8116 (set_attr "type" "branch")]
;; Memory-indirect jump: loads the target straight into pc with ldr.
8119 (define_insn "*load_indirect_jump"
8121 (match_operand:SI 0 "memory_operand" "m"))]
8123 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8124 [(set_attr "type" "load_4")
8125 (set_attr "pool_range" "4096")
8126 (set_attr "neg_pool_range" "4084")
8127 (set_attr "predicable" "yes")]
;; NOTE(review): the three lines below are an orphaned attribute fragment of a
;; pattern whose header (original lines 8128-8136) was lost in extraction.
8137 [(set (attr "length")
8138 (if_then_else (eq_attr "is_thumb" "yes")
8141 (set_attr "type" "mov_reg")]
;; Unconditional trap: emits a permanently-undefined instruction encoding,
;; .inst 0xe7f000f0 for ARM state and .inst 0xdeff for Thumb state.
8145 [(trap_if (const_int 1) (const_int 0))]
8149 return \".inst\\t0xe7f000f0\";
8151 return \".inst\\t0xdeff\";
8153 [(set (attr "length")
8154 (if_then_else (eq_attr "is_thumb" "yes")
8157 (set_attr "type" "trap")
8158 (set_attr "conds" "unconditional")]
8162 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; op Rd, Rn, Rm, lsl #k — a multiply by a power of two folded into the
;; shifter operand of an <arith_shift_insn> (see %b3 in the template).
8164 (define_insn "*<arith_shift_insn>_multsi"
8165 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8167 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8168 (match_operand:SI 3 "power_of_two_operand" ""))
8169 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8171 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8172 [(set_attr "predicable" "yes")
8173 (set_attr "shift" "2")
8174 (set_attr "arch" "a,t2")
8175 (set_attr "type" "alu_shift_imm")])
;; Same idea for a general (non-MULT) shift operator folded into the second
;; operand; the MULT case is excluded by the condition and handled above.
8177 (define_insn "*<arith_shift_insn>_shiftsi"
8178 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8180 (match_operator:SI 2 "shift_nomul_operator"
8181 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8182 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8183 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8184 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8185 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8186 [(set_attr "predicable" "yes")
8187 (set_attr "shift" "3")
8188 (set_attr "arch" "a,t2,a")
8189 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
;; NOTE(review): headerless fragment — the "(define_split" line (original
;; 8190-8191) was lost.  It reassociates a nested shiftable-op expression
;; through the clobbered scratch operand 8 so each half fits one insn.
8192 [(set (match_operand:SI 0 "s_register_operand" "")
8193 (match_operator:SI 1 "shiftable_operator"
8194 [(match_operator:SI 2 "shiftable_operator"
8195 [(match_operator:SI 3 "shift_operator"
8196 [(match_operand:SI 4 "s_register_operand" "")
8197 (match_operand:SI 5 "reg_or_int_operand" "")])
8198 (match_operand:SI 6 "s_register_operand" "")])
8199 (match_operand:SI 7 "arm_rhs_operand" "")]))
8200 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8203 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8206 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variant: <op>s Rd, Rn, Rm <shift>, comparing the result
;; against zero in CC_NOOV mode while also writing the destination.
8209 (define_insn "*arith_shiftsi_compare0"
8210 [(set (reg:CC_NOOV CC_REGNUM)
8212 (match_operator:SI 1 "shiftable_operator"
8213 [(match_operator:SI 3 "shift_operator"
8214 [(match_operand:SI 4 "s_register_operand" "r,r")
8215 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8216 (match_operand:SI 2 "s_register_operand" "r,r")])
8218 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8219 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8222 "%i1s%?\\t%0, %2, %4%S3"
8223 [(set_attr "conds" "set")
8224 (set_attr "shift" "4")
8225 (set_attr "arch" "32,a")
8226 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As above but only the flags are wanted; the result goes to a scratch.
8228 (define_insn "*arith_shiftsi_compare0_scratch"
8229 [(set (reg:CC_NOOV CC_REGNUM)
8231 (match_operator:SI 1 "shiftable_operator"
8232 [(match_operator:SI 3 "shift_operator"
8233 [(match_operand:SI 4 "s_register_operand" "r,r")
8234 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8235 (match_operand:SI 2 "s_register_operand" "r,r")])
8237 (clobber (match_scratch:SI 0 "=r,r"))]
8239 "%i1s%?\\t%0, %2, %4%S3"
8240 [(set_attr "conds" "set")
8241 (set_attr "shift" "4")
8242 (set_attr "arch" "32,a")
8243 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; sub Rd, Rn, Rm <shift> — subtraction with a shifted second operand.
8245 (define_insn "*sub_shiftsi"
8246 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8247 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8248 (match_operator:SI 2 "shift_operator"
8249 [(match_operand:SI 3 "s_register_operand" "r,r")
8250 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8252 "sub%?\\t%0, %1, %3%S2"
8253 [(set_attr "predicable" "yes")
8254 (set_attr "predicable_short_it" "no")
8255 (set_attr "shift" "3")
8256 (set_attr "arch" "32,a")
8257 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting subs with shifted operand, writing both CC and Rd.
8259 (define_insn "*sub_shiftsi_compare0"
8260 [(set (reg:CC_NOOV CC_REGNUM)
8262 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8263 (match_operator:SI 2 "shift_operator"
8264 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8265 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8267 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8268 (minus:SI (match_dup 1)
8269 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8271 "subs%?\\t%0, %1, %3%S2"
8272 [(set_attr "conds" "set")
8273 (set_attr "shift" "3")
8274 (set_attr "arch" "32,a,a")
8275 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; As above but the arithmetic result is discarded into a scratch.
8277 (define_insn "*sub_shiftsi_compare0_scratch"
8278 [(set (reg:CC_NOOV CC_REGNUM)
8280 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8281 (match_operator:SI 2 "shift_operator"
8282 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8283 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8285 (clobber (match_scratch:SI 0 "=r,r,r"))]
8287 "subs%?\\t%0, %1, %3%S2"
8288 [(set_attr "conds" "set")
8289 (set_attr "shift" "3")
8290 (set_attr "arch" "32,a,a")
8291 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Rd = (cond ? Rn & 1 : 0): AND of a store-flag value with a register.
;; Splits after reload into two cond_execs — operand 4 is the original
;; condition (VOIDmode copy of operands[1]), operand 5 its reverse, using
;; reverse_condition_maybe_unordered for FP compare modes.
8294 (define_insn_and_split "*and_scc"
8295 [(set (match_operand:SI 0 "s_register_operand" "=r")
8296 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8297 [(match_operand 2 "cc_register" "") (const_int 0)])
8298 (match_operand:SI 3 "s_register_operand" "r")))]
8300 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8301 "&& reload_completed"
8302 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8303 (cond_exec (match_dup 4) (set (match_dup 0)
8304 (and:SI (match_dup 3) (const_int 1))))]
8306 machine_mode mode = GET_MODE (operands[2]);
8307 enum rtx_code rc = GET_CODE (operands[1]);
8309 /* Note that operands[4] is the same as operands[1],
8310 but with VOIDmode as the result. */
8311 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8312 if (mode == CCFPmode || mode == CCFPEmode)
8313 rc = reverse_condition_maybe_unordered (rc);
8315 rc = reverse_condition (rc);
8316 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8318 [(set_attr "conds" "use")
8319 (set_attr "type" "multiple")
8320 (set_attr "length" "8")]
;; Rd = (cond ? Rn | 1 : Rn): IOR of a store-flag value with a register.
;; Only needs the conditional move when Rd and Rn differ (second alternative);
;; the split condition checks the register numbers accordingly.
8323 (define_insn_and_split "*ior_scc"
8324 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8325 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8326 [(match_operand 2 "cc_register" "") (const_int 0)])
8327 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8332 "&& reload_completed
8333 && REGNO (operands [0]) != REGNO (operands[3])"
8334 ;; && which_alternative == 1
8335 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8336 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8337 (cond_exec (match_dup 4) (set (match_dup 0)
8338 (ior:SI (match_dup 3) (const_int 1))))]
8340 machine_mode mode = GET_MODE (operands[2]);
8341 enum rtx_code rc = GET_CODE (operands[1]);
8343 /* Note that operands[4] is the same as operands[1],
8344 but with VOIDmode as the result. */
8345 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8346 if (mode == CCFPmode || mode == CCFPEmode)
8347 rc = reverse_condition_maybe_unordered (rc);
8349 rc = reverse_condition (rc);
8350 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8352 [(set_attr "conds" "use")
8353 (set_attr "length" "4,8")
8354 (set_attr "type" "logic_imm,multiple")]
8357 ; A series of splitters for the compare_scc pattern below. Note that
8358 ; order is important.
;; Rd = (Rn < 0): just the sign bit, via a logical shift right by 31.
8360 [(set (match_operand:SI 0 "s_register_operand" "")
8361 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8363 (clobber (reg:CC CC_REGNUM))]
8364 "TARGET_32BIT && reload_completed"
8365 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; Rd = (Rn >= 0): complement of the sign bit (mvn then lsr #31).
8368 [(set (match_operand:SI 0 "s_register_operand" "")
8369 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8371 (clobber (reg:CC CC_REGNUM))]
8372 "TARGET_32BIT && reload_completed"
8373 [(set (match_dup 0) (not:SI (match_dup 1)))
8374 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; Rd = (Rn == 0) on ARMv5T+: clz yields 32 only for zero, so clz;lsr #5
;; produces exactly the 0/1 result without touching the flags.
8377 [(set (match_operand:SI 0 "s_register_operand" "")
8378 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8380 (clobber (reg:CC CC_REGNUM))]
8381 "arm_arch5t && TARGET_32BIT"
8382 [(set (match_dup 0) (clz:SI (match_dup 1)))
8383 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Rd = (Rn == 0) without clz: rsbs 1-Rn then conditionally zero on carry.
8387 [(set (match_operand:SI 0 "s_register_operand" "")
8388 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8390 (clobber (reg:CC CC_REGNUM))]
8391 "TARGET_32BIT && reload_completed"
8393 [(set (reg:CC CC_REGNUM)
8394 (compare:CC (const_int 1) (match_dup 1)))
8396 (minus:SI (const_int 1) (match_dup 1)))])
8397 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8398 (set (match_dup 0) (const_int 0)))])
;; Rd = (Rn != imm): add the negated constant flag-settingly, then force
;; the result to 1 when the flags say not-equal (operand 3 = -imm).
8401 [(set (match_operand:SI 0 "s_register_operand" "")
8402 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8403 (match_operand:SI 2 "const_int_operand" "")))
8404 (clobber (reg:CC CC_REGNUM))]
8405 "TARGET_32BIT && reload_completed"
8407 [(set (reg:CC CC_REGNUM)
8408 (compare:CC (match_dup 1) (match_dup 2)))
8409 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8410 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8411 (set (match_dup 0) (const_int 1)))]
8413 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
;; Rd = (Rn != rhs): flag-setting subtract, then conditional set to 1.
8417 [(set (match_operand:SI 0 "s_register_operand" "")
8418 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8419 (match_operand:SI 2 "arm_add_operand" "")))
8420 (clobber (reg:CC CC_REGNUM))]
8421 "TARGET_32BIT && reload_completed"
8423 [(set (reg:CC_NOOV CC_REGNUM)
8424 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8426 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8427 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8428 (set (match_dup 0) (const_int 1)))])
;; General store-flag: Rd = (Rn <cond> rhs).  Splits after reload into a
;; compare plus two cond_execs (operand 5 = condition, 4 = its reverse);
;; the splitters above catch the cheaper special cases first.
8430 (define_insn_and_split "*compare_scc"
8431 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8432 (match_operator:SI 1 "arm_comparison_operator"
8433 [(match_operand:SI 2 "s_register_operand" "r,r")
8434 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8435 (clobber (reg:CC CC_REGNUM))]
8438 "&& reload_completed"
8439 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8440 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8441 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8444 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8445 operands[2], operands[3]);
8446 enum rtx_code rc = GET_CODE (operands[1]);
8448 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8450 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8451 if (mode == CCFPmode || mode == CCFPEmode)
8452 rc = reverse_condition_maybe_unordered (rc);
8454 rc = reverse_condition (rc);
8455 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8457 [(set_attr "type" "multiple")]
8460 ;; Attempt to improve the sequence generated by the compare_scc splitters
8461 ;; not to use conditional execution.
8463 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
;; peephole2: compare-with-0 + two cond_exec sets -> clz; lsr #5, valid when
;; the flags die within the window (peep2_regno_dead_p on CC_REGNUM).
8467 [(set (reg:CC CC_REGNUM)
8468 (compare:CC (match_operand:SI 1 "register_operand" "")
8470 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8471 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8472 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8473 (set (match_dup 0) (const_int 1)))]
8474 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8475 [(set (match_dup 0) (clz:SI (match_dup 1)))
8476 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8479 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
;; Pre-clz fallback using a scratch: rsbs + adc-style carry sequence.
8483 [(set (reg:CC CC_REGNUM)
8484 (compare:CC (match_operand:SI 1 "register_operand" "")
8486 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8487 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8488 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8489 (set (match_dup 0) (const_int 1)))
8490 (match_scratch:SI 2 "r")]
8491 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8493 [(set (reg:CC CC_REGNUM)
8494 (compare:CC (const_int 0) (match_dup 1)))
8495 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8497 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8498 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8501 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8502 ;; sub Rd, Reg1, reg2
;; General-rhs equality on ARMv5T+ when not optimising Thumb-2 for size:
;; sub; clz; lsr #5 — three flag-free instructions.
8506 [(set (reg:CC CC_REGNUM)
8507 (compare:CC (match_operand:SI 1 "register_operand" "")
8508 (match_operand:SI 2 "arm_rhs_operand" "")))
8509 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8510 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8511 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8512 (set (match_dup 0) (const_int 1)))]
8513 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8514 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8515 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8516 (set (match_dup 0) (clz:SI (match_dup 0)))
8517 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8521 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8522 ;; sub T1, Reg1, reg2
;; Fallback for the same case: the C block below builds Reg1-reg2 (or the
;; plus-constant form for an immediate rhs) into operand 4 for the scratch.
8526 [(set (reg:CC CC_REGNUM)
8527 (compare:CC (match_operand:SI 1 "register_operand" "")
8528 (match_operand:SI 2 "arm_rhs_operand" "")))
8529 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8530 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8531 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8532 (set (match_dup 0) (const_int 1)))
8533 (match_scratch:SI 3 "r")]
8534 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8535 [(set (match_dup 3) (match_dup 4))
8537 [(set (reg:CC CC_REGNUM)
8538 (compare:CC (const_int 0) (match_dup 3)))
8539 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8541 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8542 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8544 if (CONST_INT_P (operands[2]))
8545 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8547 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move on an existing CC value: emits mov%d4 / mov%D4 pairs,
;; skipping whichever side already holds the right value (alternatives with
;; "0" tie the destination to one source).  The NE case swaps the roles.
8550 (define_insn "*cond_move"
8551 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8552 (if_then_else:SI (match_operator 3 "equality_operator"
8553 [(match_operator 4 "arm_comparison_operator"
8554 [(match_operand 5 "cc_register" "") (const_int 0)])
8556 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8557 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8560 if (GET_CODE (operands[3]) == NE)
8562 if (which_alternative != 1)
8563 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8564 if (which_alternative != 0)
8565 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8568 if (which_alternative != 0)
8569 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8570 if (which_alternative != 1)
8571 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8574 [(set_attr "conds" "use")
8575 (set_attr_alternative "type"
8576 [(if_then_else (match_operand 2 "const_int_operand" "")
8577 (const_string "mov_imm")
8578 (const_string "mov_reg"))
8579 (if_then_else (match_operand 1 "const_int_operand" "")
8580 (const_string "mov_imm")
8581 (const_string "mov_reg"))
8582 (const_string "multiple")])
8583 (set_attr "length" "4,4,8")]
;; Rd = Rn <op> (Rx <cond> Ry): compare, then apply the shiftable operator
;; to the 0/1 store-flag value.  Special-cases LT-against-0 as a single
;; "op Rd, Rn, Rx, lsr #31", plus AND/MINUS shortcuts in the C body.
8586 (define_insn "*cond_arith"
8587 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8588 (match_operator:SI 5 "shiftable_operator"
8589 [(match_operator:SI 4 "arm_comparison_operator"
8590 [(match_operand:SI 2 "s_register_operand" "r,r")
8591 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8592 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8593 (clobber (reg:CC CC_REGNUM))]
8596 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8597 return \"%i5\\t%0, %1, %2, lsr #31\";
8599 output_asm_insn (\"cmp\\t%2, %3\", operands);
8600 if (GET_CODE (operands[5]) == AND)
8601 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8602 else if (GET_CODE (operands[5]) == MINUS)
8603 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8604 else if (which_alternative != 0)
8605 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8606 return \"%i5%d4\\t%0, %1, #1\";
8608 [(set_attr "conds" "clob")
8609 (set_attr "length" "12")
8610 (set_attr "type" "multiple")]
;; Rd = Rn - (Rx <cond> Ry): compare, then conditionally subtract 1.
8613 (define_insn "*cond_sub"
8614 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8615 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8616 (match_operator:SI 4 "arm_comparison_operator"
8617 [(match_operand:SI 2 "s_register_operand" "r,r")
8618 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8619 (clobber (reg:CC CC_REGNUM))]
8622 output_asm_insn (\"cmp\\t%2, %3\", operands);
8623 if (which_alternative != 0)
8624 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8625 return \"sub%d4\\t%0, %1, #1\";
8627 [(set_attr "conds" "clob")
8628 (set_attr "length" "8,12")
8629 (set_attr "type" "multiple")]
;; Combined conditional compare: sets a dominance CC register from two
;; comparisons.  Emits cmp/cmn pairs chosen by the cmp_idx table (CMP vs CMN
;; depends on whether each rhs is a negatable immediate, constraint "L"),
;; with the second compare conditionalised; on Thumb-2 an it-block is emitted
;; between the two (the ite[] template).  "swap" picks operand order based on
;; comparison_dominates_p.
8632 (define_insn "*cmp_ite0"
8633 [(set (match_operand 6 "dominant_cc_register" "")
8636 (match_operator 4 "arm_comparison_operator"
8637 [(match_operand:SI 0 "s_register_operand"
8638 "l,l,l,r,r,r,r,r,r")
8639 (match_operand:SI 1 "arm_add_operand"
8640 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8641 (match_operator:SI 5 "arm_comparison_operator"
8642 [(match_operand:SI 2 "s_register_operand"
8643 "l,r,r,l,l,r,r,r,r")
8644 (match_operand:SI 3 "arm_add_operand"
8645 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8651 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8653 {\"cmp%d5\\t%0, %1\",
8654 \"cmp%d4\\t%2, %3\"},
8655 {\"cmn%d5\\t%0, #%n1\",
8656 \"cmp%d4\\t%2, %3\"},
8657 {\"cmp%d5\\t%0, %1\",
8658 \"cmn%d4\\t%2, #%n3\"},
8659 {\"cmn%d5\\t%0, #%n1\",
8660 \"cmn%d4\\t%2, #%n3\"}
8662 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8667 \"cmn\\t%0, #%n1\"},
8668 {\"cmn\\t%2, #%n3\",
8670 {\"cmn\\t%2, #%n3\",
8673 static const char * const ite[2] =
8678 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8679 CMP_CMP, CMN_CMP, CMP_CMP,
8680 CMN_CMP, CMP_CMN, CMN_CMN};
8682 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8684 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8685 if (TARGET_THUMB2) {
8686 output_asm_insn (ite[swap], operands);
8688 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8691 [(set_attr "conds" "set")
8692 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8693 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8694 (set_attr "type" "multiple")
8695 (set_attr_alternative "length"
8701 (if_then_else (eq_attr "is_thumb" "no")
8704 (if_then_else (eq_attr "is_thumb" "no")
8707 (if_then_else (eq_attr "is_thumb" "no")
8710 (if_then_else (eq_attr "is_thumb" "no")
;; Variant of *cmp_ite0 where the first comparison's condition is reversed:
;; note the unconditional cmp1 table here and the %D5-conditionalised cmp2,
;; and comparison_dominates_p applied to reverse_condition(operands[4]).
8715 (define_insn "*cmp_ite1"
8716 [(set (match_operand 6 "dominant_cc_register" "")
8719 (match_operator 4 "arm_comparison_operator"
8720 [(match_operand:SI 0 "s_register_operand"
8721 "l,l,l,r,r,r,r,r,r")
8722 (match_operand:SI 1 "arm_add_operand"
8723 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8724 (match_operator:SI 5 "arm_comparison_operator"
8725 [(match_operand:SI 2 "s_register_operand"
8726 "l,r,r,l,l,r,r,r,r")
8727 (match_operand:SI 3 "arm_add_operand"
8728 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8734 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8738 {\"cmn\\t%0, #%n1\",
8741 \"cmn\\t%2, #%n3\"},
8742 {\"cmn\\t%0, #%n1\",
8745 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8747 {\"cmp%d4\\t%2, %3\",
8748 \"cmp%D5\\t%0, %1\"},
8749 {\"cmp%d4\\t%2, %3\",
8750 \"cmn%D5\\t%0, #%n1\"},
8751 {\"cmn%d4\\t%2, #%n3\",
8752 \"cmp%D5\\t%0, %1\"},
8753 {\"cmn%d4\\t%2, #%n3\",
8754 \"cmn%D5\\t%0, #%n1\"}
8756 static const char * const ite[2] =
8761 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8762 CMP_CMP, CMN_CMP, CMP_CMP,
8763 CMN_CMP, CMP_CMN, CMN_CMN};
8765 comparison_dominates_p (GET_CODE (operands[5]),
8766 reverse_condition (GET_CODE (operands[4])));
8768 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8769 if (TARGET_THUMB2) {
8770 output_asm_insn (ite[swap], operands);
8772 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8775 [(set_attr "conds" "set")
8776 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8777 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8778 (set_attr_alternative "length"
8784 (if_then_else (eq_attr "is_thumb" "no")
8787 (if_then_else (eq_attr "is_thumb" "no")
8790 (if_then_else (eq_attr "is_thumb" "no")
8793 (if_then_else (eq_attr "is_thumb" "no")
8796 (set_attr "type" "multiple")]
;; AND of two comparisons into a dominance CC register: first compare
;; unconditionally (cmp2 table), then the second compare predicated on the
;; first holding (cmp1 with %d4/%d5), with a Thumb-2 it-block in between.
;; Ten alternatives: the extra "r/r" column (vs *cmp_ite0) covers the
;; register-register short-it case.
8799 (define_insn "*cmp_and"
8800 [(set (match_operand 6 "dominant_cc_register" "")
8803 (match_operator 4 "arm_comparison_operator"
8804 [(match_operand:SI 0 "s_register_operand"
8805 "l,l,l,r,r,r,r,r,r,r")
8806 (match_operand:SI 1 "arm_add_operand"
8807 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8808 (match_operator:SI 5 "arm_comparison_operator"
8809 [(match_operand:SI 2 "s_register_operand"
8810 "l,r,r,l,l,r,r,r,r,r")
8811 (match_operand:SI 3 "arm_add_operand"
8812 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8817 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8819 {\"cmp%d5\\t%0, %1\",
8820 \"cmp%d4\\t%2, %3\"},
8821 {\"cmn%d5\\t%0, #%n1\",
8822 \"cmp%d4\\t%2, %3\"},
8823 {\"cmp%d5\\t%0, %1\",
8824 \"cmn%d4\\t%2, #%n3\"},
8825 {\"cmn%d5\\t%0, #%n1\",
8826 \"cmn%d4\\t%2, #%n3\"}
8828 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8833 \"cmn\\t%0, #%n1\"},
8834 {\"cmn\\t%2, #%n3\",
8836 {\"cmn\\t%2, #%n3\",
8839 static const char *const ite[2] =
8844 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8845 CMP_CMP, CMN_CMP, CMP_CMP,
8846 CMP_CMP, CMN_CMP, CMP_CMN,
8849 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8851 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8852 if (TARGET_THUMB2) {
8853 output_asm_insn (ite[swap], operands);
8855 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8858 [(set_attr "conds" "set")
8859 (set_attr "predicable" "no")
8860 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8861 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8862 (set_attr_alternative "length"
8869 (if_then_else (eq_attr "is_thumb" "no")
8872 (if_then_else (eq_attr "is_thumb" "no")
8875 (if_then_else (eq_attr "is_thumb" "no")
8878 (if_then_else (eq_attr "is_thumb" "no")
8881 (set_attr "type" "multiple")]
;; IOR counterpart of *cmp_and: the first compare is predicated on the
;; REVERSE of the dominating condition (%D4/%D5 in the cmp2 table), so the
;; second compare only runs when the first comparison failed.
8884 (define_insn "*cmp_ior"
8885 [(set (match_operand 6 "dominant_cc_register" "")
8888 (match_operator 4 "arm_comparison_operator"
8889 [(match_operand:SI 0 "s_register_operand"
8890 "l,l,l,r,r,r,r,r,r,r")
8891 (match_operand:SI 1 "arm_add_operand"
8892 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8893 (match_operator:SI 5 "arm_comparison_operator"
8894 [(match_operand:SI 2 "s_register_operand"
8895 "l,r,r,l,l,r,r,r,r,r")
8896 (match_operand:SI 3 "arm_add_operand"
8897 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8902 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8906 {\"cmn\\t%0, #%n1\",
8909 \"cmn\\t%2, #%n3\"},
8910 {\"cmn\\t%0, #%n1\",
8913 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8915 {\"cmp%D4\\t%2, %3\",
8916 \"cmp%D5\\t%0, %1\"},
8917 {\"cmp%D4\\t%2, %3\",
8918 \"cmn%D5\\t%0, #%n1\"},
8919 {\"cmn%D4\\t%2, #%n3\",
8920 \"cmp%D5\\t%0, %1\"},
8921 {\"cmn%D4\\t%2, #%n3\",
8922 \"cmn%D5\\t%0, #%n1\"}
8924 static const char *const ite[2] =
8929 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8930 CMP_CMP, CMN_CMP, CMP_CMP,
8931 CMP_CMP, CMN_CMP, CMP_CMN,
8934 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8936 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8937 if (TARGET_THUMB2) {
8938 output_asm_insn (ite[swap], operands);
8940 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8944 [(set_attr "conds" "set")
8945 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8946 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8947 (set_attr_alternative "length"
8954 (if_then_else (eq_attr "is_thumb" "no")
8957 (if_then_else (eq_attr "is_thumb" "no")
8960 (if_then_else (eq_attr "is_thumb" "no")
8963 (if_then_else (eq_attr "is_thumb" "no")
8966 (set_attr "type" "multiple")]
;; Rd = (a <c1> b) | (c <c2> d), valid only when the two comparisons have a
;; dominance CC mode (arm_select_dominance_cc_mode with DOM_CC_X_OR_Y).
;; Splits after reload into the *cmp_ior pattern (IOR-compare into the
;; dominance register, operand 7) followed by an NE store-flag.
8969 (define_insn_and_split "*ior_scc_scc"
8970 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8971 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8972 [(match_operand:SI 1 "s_register_operand" "l,r")
8973 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8974 (match_operator:SI 6 "arm_comparison_operator"
8975 [(match_operand:SI 4 "s_register_operand" "l,r")
8976 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8977 (clobber (reg:CC CC_REGNUM))]
8979 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8982 "TARGET_32BIT && reload_completed"
8986 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8987 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8989 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8991 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8994 [(set_attr "conds" "clob")
8995 (set_attr "enabled_for_short_it" "yes,no")
8996 (set_attr "length" "16")
8997 (set_attr "type" "multiple")]
9000 ; If the above pattern is followed by a CMP insn, then the compare is
9001 ; redundant, since we can rework the conditional instruction that follows.
;; Same as *ior_scc_scc but the result is also compared: keeps the dominance
;; CC register live (operand 0) for the following conditional instruction.
9002 (define_insn_and_split "*ior_scc_scc_cmp"
9003 [(set (match_operand 0 "dominant_cc_register" "")
9004 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9005 [(match_operand:SI 1 "s_register_operand" "l,r")
9006 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9007 (match_operator:SI 6 "arm_comparison_operator"
9008 [(match_operand:SI 4 "s_register_operand" "l,r")
9009 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9011 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9012 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9013 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9016 "TARGET_32BIT && reload_completed"
9020 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9021 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9023 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9025 [(set_attr "conds" "set")
9026 (set_attr "enabled_for_short_it" "yes,no")
9027 (set_attr "length" "16")
9028 (set_attr "type" "multiple")]
;; AND counterpart of *ior_scc_scc, using DOM_CC_X_AND_Y dominance and
;; splitting into the *cmp_and pattern plus an NE store-flag.
9031 (define_insn_and_split "*and_scc_scc"
9032 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9033 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9034 [(match_operand:SI 1 "s_register_operand" "l,r")
9035 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9036 (match_operator:SI 6 "arm_comparison_operator"
9037 [(match_operand:SI 4 "s_register_operand" "l,r")
9038 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9039 (clobber (reg:CC CC_REGNUM))]
9041 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9044 "TARGET_32BIT && reload_completed
9045 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9050 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9051 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9053 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9055 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9058 [(set_attr "conds" "clob")
9059 (set_attr "enabled_for_short_it" "yes,no")
9060 (set_attr "length" "16")
9061 (set_attr "type" "multiple")]
9064 ; If the above pattern is followed by a CMP insn, then the compare is
9065 ; redundant, since we can rework the conditional instruction that follows.
;; AND-with-compare variant, mirroring *ior_scc_scc_cmp.
9066 (define_insn_and_split "*and_scc_scc_cmp"
9067 [(set (match_operand 0 "dominant_cc_register" "")
9068 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9069 [(match_operand:SI 1 "s_register_operand" "l,r")
9070 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9071 (match_operator:SI 6 "arm_comparison_operator"
9072 [(match_operand:SI 4 "s_register_operand" "l,r")
9073 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9075 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9076 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9077 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9080 "TARGET_32BIT && reload_completed"
9084 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9085 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9087 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9089 [(set_attr "conds" "set")
9090 (set_attr "enabled_for_short_it" "yes,no")
9091 (set_attr "length" "16")
9092 (set_attr "type" "multiple")]
9095 ;; If there is no dominance in the comparison, then we can still save an
9096 ;; instruction in the AND case, since we can know that the second compare
9097 ;; need only zero the value if false (if true, then the value is already
;; *and_scc_scc_nodom: AND of two comparisons with no dominance relation.
;; Split after reload into: an SCC of the first comparison (clobbering CC),
;; a fresh compare for the second, and a conditional move keyed on that
;; second comparison.  Operand 7 is the CC register for the second compare
;; and operand 8 its COMPARE expression, both created in the split code.
;; NOTE(review): some original lines are missing from this excerpt
;; (numbering jumps around 9108-9111 and 9117-9125).
9099 (define_insn_and_split "*and_scc_scc_nodom"
9100 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9101 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9102 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9103 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9104 (match_operator:SI 6 "arm_comparison_operator"
9105 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9106 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9107 (clobber (reg:CC CC_REGNUM))]
9109 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9112 "TARGET_32BIT && reload_completed"
9113 [(parallel [(set (match_dup 0)
9114 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9115 (clobber (reg:CC CC_REGNUM))])
9116 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9118 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9121 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9122 operands[4], operands[5]),
9124 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9126 [(set_attr "conds" "clob")
9127 (set_attr "length" "20")
9128 (set_attr "type" "multiple")]
;; Two splits for a CC_NOOV compare of (ior (and reg ...) (comparison))
;; against zero: compute the IOR into the scratch register (operand 4),
;; then test bit 0 of the scratch.  The second form is the mirror image
;; with the AND as the second arm of the IOR.
;; NOTE(review): the opening (define_split) lines and several interior
;; lines are missing from this excerpt (numbering jumps at 9131, 9135,
;; 9139-9142, 9144, 9147-9150 and the corresponding lines of the second
;; split), so the full patterns are not visible here.
9132 [(set (reg:CC_NOOV CC_REGNUM)
9133 (compare:CC_NOOV (ior:SI
9134 (and:SI (match_operand:SI 0 "s_register_operand" "")
9136 (match_operator:SI 1 "arm_comparison_operator"
9137 [(match_operand:SI 2 "s_register_operand" "")
9138 (match_operand:SI 3 "arm_add_operand" "")]))
9140 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9143 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9145 (set (reg:CC_NOOV CC_REGNUM)
9146 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9151 [(set (reg:CC_NOOV CC_REGNUM)
9152 (compare:CC_NOOV (ior:SI
9153 (match_operator:SI 1 "arm_comparison_operator"
9154 [(match_operand:SI 2 "s_register_operand" "")
9155 (match_operand:SI 3 "arm_add_operand" "")])
9156 (and:SI (match_operand:SI 0 "s_register_operand" "")
9159 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9162 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9164 (set (reg:CC_NOOV CC_REGNUM)
9165 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9168 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; *negscc: operand 0 = -(comparison), i.e. all-ones when the comparison
;; holds, zero otherwise.  Split after reload into one of three sequences
;; chosen in the C code below: an arithmetic shift right by 31 for LT
;; against zero, subs + conditional mvn for NE, or a generic compare
;; followed by a conditional mov/mvn pair.  Clobbers the CC register.
;; NOTE(review): several original lines are missing from this excerpt
;; (e.g. 9188-9191, 9198-9199, 9203-9211, 9219-9222, 9226-9235).
9170 (define_insn_and_split "*negscc"
9171 [(set (match_operand:SI 0 "s_register_operand" "=r")
9172 (neg:SI (match_operator 3 "arm_comparison_operator"
9173 [(match_operand:SI 1 "s_register_operand" "r")
9174 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9175 (clobber (reg:CC CC_REGNUM))]
9178 "&& reload_completed"
9181 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9183 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9185 /* Emit mov\\t%0, %1, asr #31 */
9186 emit_insn (gen_rtx_SET (operands[0],
9187 gen_rtx_ASHIFTRT (SImode,
9192 else if (GET_CODE (operands[3]) == NE)
9194 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9195 if (CONST_INT_P (operands[2]))
9196 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9197 gen_int_mode (-INTVAL (operands[2]),
9200 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9202 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9206 gen_rtx_SET (operands[0],
9212 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9213 emit_insn (gen_rtx_SET (cc_reg,
9214 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9215 enum rtx_code rc = GET_CODE (operands[3]);
9217 rc = reverse_condition (rc);
9218 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9223 gen_rtx_SET (operands[0], const0_rtx)));
9224 rc = GET_CODE (operands[3]);
9225 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9230 gen_rtx_SET (operands[0],
9236 [(set_attr "conds" "clob")
9237 (set_attr "length" "12")
9238 (set_attr "type" "multiple")]
;; movcond_addsi: conditional move whose condition compares (reg + addend)
;; against zero.  Split after reload into a CC_NOOV compare of the
;; addition followed by an unconditional and a conditionally-executed
;; move.  The split code reverses the condition and swaps operands 1/2
;; when operand 2 is not already the destination register, so only one
;; cond_exec is needed.  FP condition modes are asserted impossible.
;; NOTE(review): lines 9243, 9247, 9251-9252, 9255, 9257-9258, 9262-9263,
;; 9271, 9273 and 9275-9276 are missing from this excerpt.
9241 (define_insn_and_split "movcond_addsi"
9242 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9244 (match_operator 5 "comparison_operator"
9245 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9246 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9248 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9249 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9250 (clobber (reg:CC CC_REGNUM))]
9253 "&& reload_completed"
9254 [(set (reg:CC_NOOV CC_REGNUM)
9256 (plus:SI (match_dup 3)
9259 (set (match_dup 0) (match_dup 1))
9260 (cond_exec (match_dup 6)
9261 (set (match_dup 0) (match_dup 2)))]
9264 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9265 operands[3], operands[4]);
9266 enum rtx_code rc = GET_CODE (operands[5]);
9267 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9268 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9269 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9270 rc = reverse_condition (rc);
9272 std::swap (operands[1], operands[2]);
9274 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9277 [(set_attr "conds" "clob")
9278 (set_attr "enabled_for_short_it" "no,yes,yes")
9279 (set_attr "type" "multiple")]
9282 (define_insn "movcond"
9283 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9285 (match_operator 5 "arm_comparison_operator"
9286 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9287 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9288 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9289 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9290 (clobber (reg:CC CC_REGNUM))]
9293 if (GET_CODE (operands[5]) == LT
9294 && (operands[4] == const0_rtx))
9296 if (which_alternative != 1 && REG_P (operands[1]))
9298 if (operands[2] == const0_rtx)
9299 return \"and\\t%0, %1, %3, asr #31\";
9300 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9302 else if (which_alternative != 0 && REG_P (operands[2]))
9304 if (operands[1] == const0_rtx)
9305 return \"bic\\t%0, %2, %3, asr #31\";
9306 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9308 /* The only case that falls through to here is when both ops 1 & 2
9312 if (GET_CODE (operands[5]) == GE
9313 && (operands[4] == const0_rtx))
9315 if (which_alternative != 1 && REG_P (operands[1]))
9317 if (operands[2] == const0_rtx)
9318 return \"bic\\t%0, %1, %3, asr #31\";
9319 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9321 else if (which_alternative != 0 && REG_P (operands[2]))
9323 if (operands[1] == const0_rtx)
9324 return \"and\\t%0, %2, %3, asr #31\";
9325 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9327 /* The only case that falls through to here is when both ops 1 & 2
9330 if (CONST_INT_P (operands[4])
9331 && !const_ok_for_arm (INTVAL (operands[4])))
9332 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9334 output_asm_insn (\"cmp\\t%3, %4\", operands);
9335 if (which_alternative != 0)
9336 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9337 if (which_alternative != 1)
9338 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9341 [(set_attr "conds" "clob")
9342 (set_attr "length" "8,8,12")
9343 (set_attr "type" "multiple")]
9346 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; Conditional-execution family: the *ifcompare_* patterns perform their
;; own comparison (and clobber CC, "conds" "clob"); the matching *if_*
;; patterns rely on flags set by an earlier instruction (operand is a
;; cc_register, "conds" "use").
;; *ifcompare_plus_move: if (cmp) then (reg + addend) else operand 1.
;; NOTE(review): lines 9353 and 9358-9359 (output template) are missing.
9348 (define_insn "*ifcompare_plus_move"
9349 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9350 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9351 [(match_operand:SI 4 "s_register_operand" "r,r")
9352 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9354 (match_operand:SI 2 "s_register_operand" "r,r")
9355 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9356 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9357 (clobber (reg:CC CC_REGNUM))]
9360 [(set_attr "conds" "clob")
9361 (set_attr "length" "8,12")
9362 (set_attr "type" "multiple")]
;; *if_plus_move: flags-already-set variant of the above; conditional
;; add (or sub for negatable constants), plus a conditional mov for the
;; alternatives where operand 1 is not tied to the destination.
9365 (define_insn "*if_plus_move"
9366 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9368 (match_operator 4 "arm_comparison_operator"
9369 [(match_operand 5 "cc_register" "") (const_int 0)])
9371 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9372 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9373 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9377 sub%d4\\t%0, %2, #%n3
9378 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9379 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9380 [(set_attr "conds" "use")
9381 (set_attr "length" "4,4,8,8")
9382 (set_attr_alternative "type"
9383 [(if_then_else (match_operand 3 "const_int_operand" "")
9384 (const_string "alu_imm" )
9385 (const_string "alu_sreg"))
9386 (const_string "alu_imm")
9387 (const_string "multiple")
9388 (const_string "multiple")])]
;; *ifcompare_move_plus: mirror of *ifcompare_plus_move with the addition
;; on the else-arm of the if_then_else.
9391 (define_insn "*ifcompare_move_plus"
9392 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9393 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9394 [(match_operand:SI 4 "s_register_operand" "r,r")
9395 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9396 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9398 (match_operand:SI 2 "s_register_operand" "r,r")
9399 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9400 (clobber (reg:CC CC_REGNUM))]
9403 [(set_attr "conds" "clob")
9404 (set_attr "length" "8,12")
9405 (set_attr "type" "multiple")]
;; *if_move_plus: flags-already-set mirror; note the %D4/%d4 condition
;; suffixes are swapped relative to *if_plus_move because the arms are.
9408 (define_insn "*if_move_plus"
9409 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9411 (match_operator 4 "arm_comparison_operator"
9412 [(match_operand 5 "cc_register" "") (const_int 0)])
9413 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9415 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9416 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9420 sub%D4\\t%0, %2, #%n3
9421 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9422 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9423 [(set_attr "conds" "use")
9424 (set_attr "length" "4,4,8,8")
9425 (set_attr_alternative "type"
9426 [(if_then_else (match_operand 3 "const_int_operand" "")
9427 (const_string "alu_imm" )
9428 (const_string "alu_sreg"))
9429 (const_string "alu_imm")
9430 (const_string "multiple")
9431 (const_string "multiple")])]
9434 (define_insn "*ifcompare_arith_arith"
9435 [(set (match_operand:SI 0 "s_register_operand" "=r")
9436 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9437 [(match_operand:SI 5 "s_register_operand" "r")
9438 (match_operand:SI 6 "arm_add_operand" "rIL")])
9439 (match_operator:SI 8 "shiftable_operator"
9440 [(match_operand:SI 1 "s_register_operand" "r")
9441 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9442 (match_operator:SI 7 "shiftable_operator"
9443 [(match_operand:SI 3 "s_register_operand" "r")
9444 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9445 (clobber (reg:CC CC_REGNUM))]
9448 [(set_attr "conds" "clob")
9449 (set_attr "length" "12")
9450 (set_attr "type" "multiple")]
9453 (define_insn "*if_arith_arith"
9454 [(set (match_operand:SI 0 "s_register_operand" "=r")
9455 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9456 [(match_operand 8 "cc_register" "") (const_int 0)])
9457 (match_operator:SI 6 "shiftable_operator"
9458 [(match_operand:SI 1 "s_register_operand" "r")
9459 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9460 (match_operator:SI 7 "shiftable_operator"
9461 [(match_operand:SI 3 "s_register_operand" "r")
9462 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9464 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9465 [(set_attr "conds" "use")
9466 (set_attr "length" "8")
9467 (set_attr "type" "multiple")]
9470 (define_insn "*ifcompare_arith_move"
9471 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9472 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9473 [(match_operand:SI 2 "s_register_operand" "r,r")
9474 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9475 (match_operator:SI 7 "shiftable_operator"
9476 [(match_operand:SI 4 "s_register_operand" "r,r")
9477 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9478 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9479 (clobber (reg:CC CC_REGNUM))]
9482 /* If we have an operation where (op x 0) is the identity operation and
9483 the conditional operator is LT or GE and we are comparing against zero and
9484 everything is in registers then we can do this in two instructions. */
9485 if (operands[3] == const0_rtx
9486 && GET_CODE (operands[7]) != AND
9487 && REG_P (operands[5])
9488 && REG_P (operands[1])
9489 && REGNO (operands[1]) == REGNO (operands[4])
9490 && REGNO (operands[4]) != REGNO (operands[0]))
9492 if (GET_CODE (operands[6]) == LT)
9493 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9494 else if (GET_CODE (operands[6]) == GE)
9495 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9497 if (CONST_INT_P (operands[3])
9498 && !const_ok_for_arm (INTVAL (operands[3])))
9499 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9501 output_asm_insn (\"cmp\\t%2, %3\", operands);
9502 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9503 if (which_alternative != 0)
9504 return \"mov%D6\\t%0, %1\";
9507 [(set_attr "conds" "clob")
9508 (set_attr "length" "8,12")
9509 (set_attr "type" "multiple")]
9512 (define_insn "*if_arith_move"
9513 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9514 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9515 [(match_operand 6 "cc_register" "") (const_int 0)])
9516 (match_operator:SI 5 "shiftable_operator"
9517 [(match_operand:SI 2 "s_register_operand" "r,r")
9518 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9519 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9523 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9524 [(set_attr "conds" "use")
9525 (set_attr "length" "4,8")
9526 (set_attr_alternative "type"
9527 [(if_then_else (match_operand 3 "const_int_operand" "")
9528 (const_string "alu_shift_imm" )
9529 (const_string "alu_shift_reg"))
9530 (const_string "multiple")])]
9533 (define_insn "*ifcompare_move_arith"
9534 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9535 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9536 [(match_operand:SI 4 "s_register_operand" "r,r")
9537 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9538 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9539 (match_operator:SI 7 "shiftable_operator"
9540 [(match_operand:SI 2 "s_register_operand" "r,r")
9541 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9542 (clobber (reg:CC CC_REGNUM))]
9545 /* If we have an operation where (op x 0) is the identity operation and
9546 the conditional operator is LT or GE and we are comparing against zero and
9547 everything is in registers then we can do this in two instructions */
9548 if (operands[5] == const0_rtx
9549 && GET_CODE (operands[7]) != AND
9550 && REG_P (operands[3])
9551 && REG_P (operands[1])
9552 && REGNO (operands[1]) == REGNO (operands[2])
9553 && REGNO (operands[2]) != REGNO (operands[0]))
9555 if (GET_CODE (operands[6]) == GE)
9556 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9557 else if (GET_CODE (operands[6]) == LT)
9558 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9561 if (CONST_INT_P (operands[5])
9562 && !const_ok_for_arm (INTVAL (operands[5])))
9563 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9565 output_asm_insn (\"cmp\\t%4, %5\", operands);
9567 if (which_alternative != 0)
9568 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9569 return \"%I7%D6\\t%0, %2, %3\";
9571 [(set_attr "conds" "clob")
9572 (set_attr "length" "8,12")
9573 (set_attr "type" "multiple")]
9576 (define_insn "*if_move_arith"
9577 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9579 (match_operator 4 "arm_comparison_operator"
9580 [(match_operand 6 "cc_register" "") (const_int 0)])
9581 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9582 (match_operator:SI 5 "shiftable_operator"
9583 [(match_operand:SI 2 "s_register_operand" "r,r")
9584 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9588 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9589 [(set_attr "conds" "use")
9590 (set_attr "length" "4,8")
9591 (set_attr_alternative "type"
9592 [(if_then_else (match_operand 3 "const_int_operand" "")
9593 (const_string "alu_shift_imm" )
9594 (const_string "alu_shift_reg"))
9595 (const_string "multiple")])]
9598 (define_insn "*ifcompare_move_not"
9599 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9601 (match_operator 5 "arm_comparison_operator"
9602 [(match_operand:SI 3 "s_register_operand" "r,r")
9603 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9604 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9606 (match_operand:SI 2 "s_register_operand" "r,r"))))
9607 (clobber (reg:CC CC_REGNUM))]
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,12")
9612 (set_attr "type" "multiple")]
9615 (define_insn "*if_move_not"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9618 (match_operator 4 "arm_comparison_operator"
9619 [(match_operand 3 "cc_register" "") (const_int 0)])
9620 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9621 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9625 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9626 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9627 [(set_attr "conds" "use")
9628 (set_attr "type" "mvn_reg")
9629 (set_attr "length" "4,8,8")
9630 (set_attr "type" "mvn_reg,multiple,multiple")]
9633 (define_insn "*ifcompare_not_move"
9634 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9636 (match_operator 5 "arm_comparison_operator"
9637 [(match_operand:SI 3 "s_register_operand" "r,r")
9638 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9640 (match_operand:SI 2 "s_register_operand" "r,r"))
9641 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9642 (clobber (reg:CC CC_REGNUM))]
9645 [(set_attr "conds" "clob")
9646 (set_attr "length" "8,12")
9647 (set_attr "type" "multiple")]
9650 (define_insn "*if_not_move"
9651 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9653 (match_operator 4 "arm_comparison_operator"
9654 [(match_operand 3 "cc_register" "") (const_int 0)])
9655 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9656 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9660 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9661 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9662 [(set_attr "conds" "use")
9663 (set_attr "type" "mvn_reg,multiple,multiple")
9664 (set_attr "length" "4,8,8")]
9667 (define_insn "*ifcompare_shift_move"
9668 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9670 (match_operator 6 "arm_comparison_operator"
9671 [(match_operand:SI 4 "s_register_operand" "r,r")
9672 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9673 (match_operator:SI 7 "shift_operator"
9674 [(match_operand:SI 2 "s_register_operand" "r,r")
9675 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9676 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9677 (clobber (reg:CC CC_REGNUM))]
9680 [(set_attr "conds" "clob")
9681 (set_attr "length" "8,12")
9682 (set_attr "type" "multiple")]
9685 (define_insn "*if_shift_move"
9686 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9688 (match_operator 5 "arm_comparison_operator"
9689 [(match_operand 6 "cc_register" "") (const_int 0)])
9690 (match_operator:SI 4 "shift_operator"
9691 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9692 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9693 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9697 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9698 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9699 [(set_attr "conds" "use")
9700 (set_attr "shift" "2")
9701 (set_attr "length" "4,8,8")
9702 (set_attr_alternative "type"
9703 [(if_then_else (match_operand 3 "const_int_operand" "")
9704 (const_string "mov_shift" )
9705 (const_string "mov_shift_reg"))
9706 (const_string "multiple")
9707 (const_string "multiple")])]
9710 (define_insn "*ifcompare_move_shift"
9711 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9713 (match_operator 6 "arm_comparison_operator"
9714 [(match_operand:SI 4 "s_register_operand" "r,r")
9715 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9716 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9717 (match_operator:SI 7 "shift_operator"
9718 [(match_operand:SI 2 "s_register_operand" "r,r")
9719 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9720 (clobber (reg:CC CC_REGNUM))]
9723 [(set_attr "conds" "clob")
9724 (set_attr "length" "8,12")
9725 (set_attr "type" "multiple")]
9728 (define_insn "*if_move_shift"
9729 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9731 (match_operator 5 "arm_comparison_operator"
9732 [(match_operand 6 "cc_register" "") (const_int 0)])
9733 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9734 (match_operator:SI 4 "shift_operator"
9735 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9736 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9740 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9741 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9742 [(set_attr "conds" "use")
9743 (set_attr "shift" "2")
9744 (set_attr "length" "4,8,8")
9745 (set_attr_alternative "type"
9746 [(if_then_else (match_operand 3 "const_int_operand" "")
9747 (const_string "mov_shift" )
9748 (const_string "mov_shift_reg"))
9749 (const_string "multiple")
9750 (const_string "multiple")])]
9753 (define_insn "*ifcompare_shift_shift"
9754 [(set (match_operand:SI 0 "s_register_operand" "=r")
9756 (match_operator 7 "arm_comparison_operator"
9757 [(match_operand:SI 5 "s_register_operand" "r")
9758 (match_operand:SI 6 "arm_add_operand" "rIL")])
9759 (match_operator:SI 8 "shift_operator"
9760 [(match_operand:SI 1 "s_register_operand" "r")
9761 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9762 (match_operator:SI 9 "shift_operator"
9763 [(match_operand:SI 3 "s_register_operand" "r")
9764 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9765 (clobber (reg:CC CC_REGNUM))]
9768 [(set_attr "conds" "clob")
9769 (set_attr "length" "12")
9770 (set_attr "type" "multiple")]
9773 (define_insn "*if_shift_shift"
9774 [(set (match_operand:SI 0 "s_register_operand" "=r")
9776 (match_operator 5 "arm_comparison_operator"
9777 [(match_operand 8 "cc_register" "") (const_int 0)])
9778 (match_operator:SI 6 "shift_operator"
9779 [(match_operand:SI 1 "s_register_operand" "r")
9780 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9781 (match_operator:SI 7 "shift_operator"
9782 [(match_operand:SI 3 "s_register_operand" "r")
9783 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9785 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9786 [(set_attr "conds" "use")
9787 (set_attr "shift" "1")
9788 (set_attr "length" "8")
9789 (set (attr "type") (if_then_else
9790 (and (match_operand 2 "const_int_operand" "")
9791 (match_operand 4 "const_int_operand" ""))
9792 (const_string "mov_shift")
9793 (const_string "mov_shift_reg")))]
9796 (define_insn "*ifcompare_not_arith"
9797 [(set (match_operand:SI 0 "s_register_operand" "=r")
9799 (match_operator 6 "arm_comparison_operator"
9800 [(match_operand:SI 4 "s_register_operand" "r")
9801 (match_operand:SI 5 "arm_add_operand" "rIL")])
9802 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9803 (match_operator:SI 7 "shiftable_operator"
9804 [(match_operand:SI 2 "s_register_operand" "r")
9805 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9806 (clobber (reg:CC CC_REGNUM))]
9809 [(set_attr "conds" "clob")
9810 (set_attr "length" "12")
9811 (set_attr "type" "multiple")]
9814 (define_insn "*if_not_arith"
9815 [(set (match_operand:SI 0 "s_register_operand" "=r")
9817 (match_operator 5 "arm_comparison_operator"
9818 [(match_operand 4 "cc_register" "") (const_int 0)])
9819 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9820 (match_operator:SI 6 "shiftable_operator"
9821 [(match_operand:SI 2 "s_register_operand" "r")
9822 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9824 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9825 [(set_attr "conds" "use")
9826 (set_attr "type" "mvn_reg")
9827 (set_attr "length" "8")]
9830 (define_insn "*ifcompare_arith_not"
9831 [(set (match_operand:SI 0 "s_register_operand" "=r")
9833 (match_operator 6 "arm_comparison_operator"
9834 [(match_operand:SI 4 "s_register_operand" "r")
9835 (match_operand:SI 5 "arm_add_operand" "rIL")])
9836 (match_operator:SI 7 "shiftable_operator"
9837 [(match_operand:SI 2 "s_register_operand" "r")
9838 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9839 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9840 (clobber (reg:CC CC_REGNUM))]
9843 [(set_attr "conds" "clob")
9844 (set_attr "length" "12")
9845 (set_attr "type" "multiple")]
9848 (define_insn "*if_arith_not"
9849 [(set (match_operand:SI 0 "s_register_operand" "=r")
9851 (match_operator 5 "arm_comparison_operator"
9852 [(match_operand 4 "cc_register" "") (const_int 0)])
9853 (match_operator:SI 6 "shiftable_operator"
9854 [(match_operand:SI 2 "s_register_operand" "r")
9855 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9856 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9858 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9859 [(set_attr "conds" "use")
9860 (set_attr "type" "multiple")
9861 (set_attr "length" "8")]
9864 (define_insn "*ifcompare_neg_move"
9865 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9867 (match_operator 5 "arm_comparison_operator"
9868 [(match_operand:SI 3 "s_register_operand" "r,r")
9869 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9870 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9871 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9872 (clobber (reg:CC CC_REGNUM))]
9875 [(set_attr "conds" "clob")
9876 (set_attr "length" "8,12")
9877 (set_attr "type" "multiple")]
9880 (define_insn_and_split "*if_neg_move"
9881 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9883 (match_operator 4 "arm_comparison_operator"
9884 [(match_operand 3 "cc_register" "") (const_int 0)])
9885 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9886 (match_operand:SI 1 "s_register_operand" "0,0")))]
9889 "&& reload_completed"
9890 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9891 (set (match_dup 0) (neg:SI (match_dup 2))))]
9893 [(set_attr "conds" "use")
9894 (set_attr "length" "4")
9895 (set_attr "arch" "t2,32")
9896 (set_attr "enabled_for_short_it" "yes,no")
9897 (set_attr "type" "logic_shift_imm")]
9900 (define_insn "*ifcompare_move_neg"
9901 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9903 (match_operator 5 "arm_comparison_operator"
9904 [(match_operand:SI 3 "s_register_operand" "r,r")
9905 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9906 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9907 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9908 (clobber (reg:CC CC_REGNUM))]
9911 [(set_attr "conds" "clob")
9912 (set_attr "length" "8,12")
9913 (set_attr "type" "multiple")]
9916 (define_insn_and_split "*if_move_neg"
9917 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9919 (match_operator 4 "arm_comparison_operator"
9920 [(match_operand 3 "cc_register" "") (const_int 0)])
9921 (match_operand:SI 1 "s_register_operand" "0,0")
9922 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9925 "&& reload_completed"
9926 [(cond_exec (match_dup 5)
9927 (set (match_dup 0) (neg:SI (match_dup 2))))]
9929 machine_mode mode = GET_MODE (operands[3]);
9930 rtx_code rc = GET_CODE (operands[4]);
9932 if (mode == CCFPmode || mode == CCFPEmode)
9933 rc = reverse_condition_maybe_unordered (rc);
9935 rc = reverse_condition (rc);
9937 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9939 [(set_attr "conds" "use")
9940 (set_attr "length" "4")
9941 (set_attr "arch" "t2,32")
9942 (set_attr "enabled_for_short_it" "yes,no")
9943 (set_attr "type" "logic_shift_imm")]
9946 (define_insn "*arith_adjacentmem"
9947 [(set (match_operand:SI 0 "s_register_operand" "=r")
9948 (match_operator:SI 1 "shiftable_operator"
9949 [(match_operand:SI 2 "memory_operand" "m")
9950 (match_operand:SI 3 "memory_operand" "m")]))
9951 (clobber (match_scratch:SI 4 "=r"))]
9952 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9958 HOST_WIDE_INT val1 = 0, val2 = 0;
9960 if (REGNO (operands[0]) > REGNO (operands[4]))
9962 ldm[1] = operands[4];
9963 ldm[2] = operands[0];
9967 ldm[1] = operands[0];
9968 ldm[2] = operands[4];
9971 base_reg = XEXP (operands[2], 0);
9973 if (!REG_P (base_reg))
9975 val1 = INTVAL (XEXP (base_reg, 1));
9976 base_reg = XEXP (base_reg, 0);
9979 if (!REG_P (XEXP (operands[3], 0)))
9980 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9982 arith[0] = operands[0];
9983 arith[3] = operands[1];
9997 if (val1 !=0 && val2 != 0)
10001 if (val1 == 4 || val2 == 4)
10002 /* Other val must be 8, since we know they are adjacent and neither
10004 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10005 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10007 ldm[0] = ops[0] = operands[4];
10009 ops[2] = GEN_INT (val1);
10010 output_add_immediate (ops);
10012 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10014 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10018 /* Offset is out of range for a single add, so use two ldr. */
10021 ops[2] = GEN_INT (val1);
10022 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10024 ops[2] = GEN_INT (val2);
10025 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10028 else if (val1 != 0)
10031 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10033 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10038 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10040 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10042 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10045 [(set_attr "length" "12")
10046 (set_attr "predicable" "yes")
10047 (set_attr "type" "load_4")]
10050 ; This pattern is never tried by combine, so do it as a peephole
;; Peephole: fuse a register copy followed by a compare of the source
;; against zero into a single parallel (compare + move), so both are done
;; by one flag-setting instruction.
;; NOTE(review): the (define_peephole2) header line (10052) and its
;; condition are missing from this excerpt.
10053 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10054 (match_operand:SI 1 "arm_general_register_operand" ""))
10055 (set (reg:CC CC_REGNUM)
10056 (compare:CC (match_dup 1) (const_int 0)))]
10058 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10059 (set (match_dup 0) (match_dup 1))])]
;; Split for (and (ge x 0) (neg (comparison))): rewrite the GE-against-
;; zero term as the inverted sign mask (~(x >> 31)) in the scratch
;; (operand 5), then AND it with the comparison.
;; NOTE(review): the (define_split) header and some interior lines
;; (10063, 10066, 10071, 10074-10077) are missing from this excerpt.
10064 [(set (match_operand:SI 0 "s_register_operand" "")
10065 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10067 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10068 [(match_operand:SI 3 "s_register_operand" "")
10069 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10070 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10072 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10073 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10078 ;; This split can be used because CC_Z mode implies that the following
10079 ;; branch will be an equality, or an unsigned inequality, so the sign
10080 ;; extension is not needed.
;; Split a CC_Z compare of a shifted QImode memory byte against a
;; constant whose low 24 bits are zero into a zero-extending load plus a
;; plain compare, shifting the constant down by 24 to match.
;; NOTE(review): the (define_split) header (around 10082) and lines
;; 10084, 10086, 10089 and 10094/10096 are missing from this excerpt.
10083 [(set (reg:CC_Z CC_REGNUM)
10085 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10087 (match_operand 1 "const_int_operand" "")))
10088 (clobber (match_scratch:SI 2 ""))]
10090 && ((UINTVAL (operands[1]))
10091 == ((UINTVAL (operands[1])) >> 24) << 24)"
10092 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10093 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10095 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10098 ;; ??? Check the patterns above for Thumb-2 usefulness
10098 ;; ??? Check the patterns above for Thumb-2 usefulness
;; NOTE(review): fragmentary extract -- lines are missing between the
;; embedded original line numbers (e.g. the TARGET_32BIT/TARGET_THUMB1
;; dispatch conditions around the arm_/thumb1_ calls are not visible here).
;; Standard "prologue" expander: defers all work to the C expanders
;; (arm_expand_prologue / thumb1_expand_prologue per the visible calls).
10100 (define_expand "prologue"
10101 [(clobber (const_int 0))]
10104 arm_expand_prologue ();
10106 thumb1_expand_prologue ();
;; "epilogue" expander: for eh_return frames it first forces r2 live
;; (force_register_use), then emits either a Thumb-1 volatile epilogue
;; unspec, a bare return (when HAVE_return), or arm_expand_epilogue for
;; 32-bit targets.
10111 (define_expand "epilogue"
10112 [(clobber (const_int 0))]
10115 if (crtl->calls_eh_return)
10116 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10119 thumb1_expand_epilogue ();
10120 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10121 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10123 else if (HAVE_return)
10125 /* HAVE_return is testing for USE_RETURN_INSN (FALSE).  Hence,
10126 no need for explicit testing again.  */
10127 emit_jump_insn (gen_return ());
10129 else if (TARGET_32BIT)
10131 arm_expand_epilogue (true);
10137 ;; Note - although unspec_volatile's USE all hard registers,
10138 ;; USEs are ignored after reload has completed.  Thus we need
10139 ;; to add an unspec of the link register to ensure that flow
10140 ;; does not think that it is unused by the sibcall branch that
10141 ;; will replace the standard function epilogue.
10142 (define_expand "sibcall_epilogue"
10143 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10144 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10147 arm_expand_epilogue (false);
;; "eh_epilogue": records the stack adjustment (operand 1) in the per-
;; function machine state and copies the handler address into r2 when it
;; is not already there; func_type is reset because the EH path may have
;; fixed the function type prematurely (see the comment below).
10152 (define_expand "eh_epilogue"
10153 [(use (match_operand:SI 0 "register_operand"))
10154 (use (match_operand:SI 1 "register_operand"))
10155 (use (match_operand:SI 2 "register_operand"))]
10159 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10160 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10162 rtx ra = gen_rtx_REG (Pmode, 2);
10164 emit_move_insn (ra, operands[2]);
10167 /* This is a hack -- we may have crystalized the function type too
10169 cfun->machine->func_type = 0;
10173 ;; This split is only used during output to reduce the number of patterns
10174 ;; that need assembler instructions adding to them.  We allowed the setting
10175 ;; of the conditions to be implicit during rtl generation so that
10176 ;; the conditional compare patterns would work.  However this conflicts to
10177 ;; some extent with the conditional data operations, so we have to split them
10180 ;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
10181 ;; conditional execution sufficient?
;; NOTE(review): fragmentary extract; the "(define_split" headers and some
;; arms of the if_then_else templates are missing between the embedded line
;; numbers.  All four splitters below share the same shape: emit an explicit
;; compare into CC_REGNUM (via SELECT_CC_MODE), then cond_exec moves for the
;; arm(s); FP compare modes reverse the condition with
;; reverse_condition_maybe_unordered, others with reverse_condition.
;; Splitter 1: conditional move needing the REVERSED condition (operand 7
;; is built from the reversed code rc).
10184 [(set (match_operand:SI 0 "s_register_operand" "")
10185 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10186 [(match_operand 2 "" "") (match_operand 3 "" "")])
10188 (match_operand 4 "" "")))
10189 (clobber (reg:CC CC_REGNUM))]
10190 "TARGET_ARM && reload_completed"
10191 [(set (match_dup 5) (match_dup 6))
10192 (cond_exec (match_dup 7)
10193 (set (match_dup 0) (match_dup 4)))]
10196 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10197 operands[2], operands[3]);
10198 enum rtx_code rc = GET_CODE (operands[1]);
10200 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10201 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10202 if (mode == CCFPmode || mode == CCFPEmode)
10203 rc = reverse_condition_maybe_unordered (rc);
10205 rc = reverse_condition (rc);
10207 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; Splitter 2: conditional move using the ORIGINAL condition directly
;; (match_op_dup 1 on the CC register), no reversal needed.
10212 [(set (match_operand:SI 0 "s_register_operand" "")
10213 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10214 [(match_operand 2 "" "") (match_operand 3 "" "")])
10215 (match_operand 4 "" "")
10217 (clobber (reg:CC CC_REGNUM))]
10218 "TARGET_ARM && reload_completed"
10219 [(set (match_dup 5) (match_dup 6))
10220 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10221 (set (match_dup 0) (match_dup 4)))]
10224 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10225 operands[2], operands[3]);
10227 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10228 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Splitter 3: two-armed conditional move -- operand 4 under the original
;; condition, operand 5 under the reversed condition (operand 8).
10233 [(set (match_operand:SI 0 "s_register_operand" "")
10234 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10235 [(match_operand 2 "" "") (match_operand 3 "" "")])
10236 (match_operand 4 "" "")
10237 (match_operand 5 "" "")))
10238 (clobber (reg:CC CC_REGNUM))]
10239 "TARGET_ARM && reload_completed"
10240 [(set (match_dup 6) (match_dup 7))
10241 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10242 (set (match_dup 0) (match_dup 4)))
10243 (cond_exec (match_dup 8)
10244 (set (match_dup 0) (match_dup 5)))]
10247 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10248 operands[2], operands[3]);
10249 enum rtx_code rc = GET_CODE (operands[1]);
10251 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10252 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10253 if (mode == CCFPmode || mode == CCFPEmode)
10254 rc = reverse_condition_maybe_unordered (rc);
10256 rc = reverse_condition (rc);
10258 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Splitter 4: like splitter 3 but the else-arm is the bitwise NOT of
;; operand 5 (visible in the cond_exec below).
10263 [(set (match_operand:SI 0 "s_register_operand" "")
10264 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10265 [(match_operand:SI 2 "s_register_operand" "")
10266 (match_operand:SI 3 "arm_add_operand" "")])
10267 (match_operand:SI 4 "arm_rhs_operand" "")
10269 (match_operand:SI 5 "s_register_operand" ""))))
10270 (clobber (reg:CC CC_REGNUM))]
10271 "TARGET_ARM && reload_completed"
10272 [(set (match_dup 6) (match_dup 7))
10273 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10274 (set (match_dup 0) (match_dup 4)))
10275 (cond_exec (match_dup 8)
10276 (set (match_dup 0) (not:SI (match_dup 5))))]
10279 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10280 operands[2], operands[3]);
10281 enum rtx_code rc = GET_CODE (operands[1]);
10283 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10284 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10285 if (mode == CCFPmode || mode == CCFPEmode)
10286 rc = reverse_condition_maybe_unordered (rc);
10288 rc = reverse_condition (rc);
10290 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; NOTE(review): fragmentary extract; condition strings and some template
;; lines of these insns are missing between the embedded line numbers.
;; Conditional move-with-NOT: else-arm is mvn of operand 2 under the
;; reversed condition %D4 (second alternative also needs a mov%d4 first).
10294 (define_insn "*cond_move_not"
10295 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10296 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10297 [(match_operand 3 "cc_register" "") (const_int 0)])
10298 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10300 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10304 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10305 [(set_attr "conds" "use")
10306 (set_attr "type" "mvn_reg,multiple")
10307 (set_attr "length" "4,8")]
10310 ;; The next two patterns occur when an AND operation is followed by a
10311 ;; scc insn sequence
;; Single-bit sign extract: the output routine tests the selected bit with
;; ANDS against (1 << operand2) and materializes -1 via mvnne when set.
10313 (define_insn "*sign_extract_onebit"
10314 [(set (match_operand:SI 0 "s_register_operand" "=r")
10315 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10317 (match_operand:SI 2 "const_int_operand" "n")))
10318 (clobber (reg:CC CC_REGNUM))]
10321 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10322 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10323 return \"mvnne\\t%0, #0\";
10325 [(set_attr "conds" "clob")
10326 (set_attr "length" "8")
10327 (set_attr "type" "multiple")]
;; Negated single-bit sign extract: tst the bit, then mvneq/movne to
;; produce the complement of the extract (3 instructions, length 12).
10330 (define_insn "*not_signextract_onebit"
10331 [(set (match_operand:SI 0 "s_register_operand" "=r")
10333 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10335 (match_operand:SI 2 "const_int_operand" "n"))))
10336 (clobber (reg:CC CC_REGNUM))]
10339 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10340 output_asm_insn (\"tst\\t%1, %2\", operands);
10341 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10342 return \"movne\\t%0, #0\";
10344 [(set_attr "conds" "clob")
10345 (set_attr "length" "12")
10346 (set_attr "type" "multiple")]
10348 ;; ??? The above patterns need auditing for Thumb-2
10350 ;; Push multiple registers to the stack.  Registers are in parallel (use ...)
10351 ;; expressions.  For simplicity, the first register is also in the unspec
10353 ;; To avoid the usage of GNU extension, the length attribute is computed
10354 ;; in a C function arm_attr_length_push_multi.
;; NOTE(review): fragmentary extract; the single-register branch and the
;; buffer declaration of the output routine are missing between the
;; embedded line numbers.  Visible logic: a lone register on ARM uses
;; "str ... [sp, #-4]!"; otherwise a push pattern string is built by
;; appending each register name from the parallel.
10355 (define_insn "*push_multi"
10356 [(match_parallel 2 "multi_register_push"
10357 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10358 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10359 UNSPEC_PUSH_MULT))])]
10363 int num_saves = XVECLEN (operands[2], 0);
10365 /* For the StrongARM at least it is faster to
10366 use STR to store only a single register.
10367 In Thumb mode always use push, and the assembler will pick
10368 something appropriate.  */
10369 if (num_saves == 1 && TARGET_ARM)
10370 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10377 strcpy (pattern, \"push%?\\t{%1\");
10379 strcpy (pattern, \"push\\t{%1\");
10381 for (i = 1; i < num_saves; i++)
10383 strcat (pattern, \", %|\");
10385 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10388 strcat (pattern, \"}\");
10389 output_asm_insn (pattern, operands);
10394 [(set_attr "type" "store_16")
10395 (set (attr "length")
10396 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Zero-length barrier insn: ties two registers through a BLK unspec so
;; the scheduler keeps stack accesses ordered (emits no code, length 0).
10399 (define_insn "stack_tie"
10400 [(set (mem:BLK (scratch))
10401 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10402 (match_operand:SI 1 "s_register_operand" "rk")]
10406 [(set_attr "length" "0")
10407 (set_attr "type" "block")]
10410 ;; Pop (as used in epilogue RTL)
;; NOTE(review): fragmentary extract; closing brackets, some parallel
;; members and condition strings of the patterns below are missing between
;; the embedded line numbers.  All the pop/load-multiple variants defer
;; assembly output to arm_output_multireg_pop and compute their length via
;; arm_attr_length_pop_multi, differing only in return_pc/write_back_p.
;; Load-multiple with base writeback (no return): return_pc=false,
;; write_back_p=true.
10412 (define_insn "*load_multiple_with_writeback"
10413 [(match_parallel 0 "load_multiple_operation"
10414 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10415 (plus:SI (match_dup 1)
10416 (match_operand:SI 2 "const_int_I_operand" "I")))
10417 (set (match_operand:SI 3 "s_register_operand" "=rk")
10418 (mem:SI (match_dup 1)))
10420 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10423 arm_output_multireg_pop (operands, /*return_pc=*/false,
10424 /*cond=*/const_true_rtx,
10430 [(set_attr "type" "load_16")
10431 (set_attr "predicable" "yes")
10432 (set (attr "length")
10433 (symbol_ref "arm_attr_length_pop_multi (operands,
10434 /*return_pc=*/false,
10435 /*write_back_p=*/true)"))]
10438 ;; Pop with return (as used in epilogue RTL)
10440 ;; This instruction is generated when the registers are popped at the end of
10441 ;; epilogue.  Here, instead of popping the value into LR and then generating
10442 ;; jump to LR, value is popped into PC directly.  Hence, the pattern is combined
;; Pop with writeback AND return: return_pc=true, write_back_p=true.
10444 (define_insn "*pop_multiple_with_writeback_and_return"
10445 [(match_parallel 0 "pop_multiple_return"
10447 (set (match_operand:SI 1 "s_register_operand" "+rk")
10448 (plus:SI (match_dup 1)
10449 (match_operand:SI 2 "const_int_I_operand" "I")))
10450 (set (match_operand:SI 3 "s_register_operand" "=rk")
10451 (mem:SI (match_dup 1)))
10453 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10456 arm_output_multireg_pop (operands, /*return_pc=*/true,
10457 /*cond=*/const_true_rtx,
10463 [(set_attr "type" "load_16")
10464 (set_attr "predicable" "yes")
10465 (set (attr "length")
10466 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10467 /*write_back_p=*/true)"))]
;; Pop with return but no base writeback: return_pc=true,
;; write_back_p=false.
10470 (define_insn "*pop_multiple_with_return"
10471 [(match_parallel 0 "pop_multiple_return"
10473 (set (match_operand:SI 2 "s_register_operand" "=rk")
10474 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10476 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10479 arm_output_multireg_pop (operands, /*return_pc=*/true,
10480 /*cond=*/const_true_rtx,
10486 [(set_attr "type" "load_16")
10487 (set_attr "predicable" "yes")
10488 (set (attr "length")
10489 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10490 /*write_back_p=*/false)"))]
10493 ;; Load into PC and return
;; Single-word return: post-incrementing load of PC, "ldr pc, [rN], #4".
10494 (define_insn "*ldr_with_return"
10496 (set (reg:SI PC_REGNUM)
10497 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10498 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10499 "ldr%?\t%|pc, [%0], #4"
10500 [(set_attr "type" "load_4")
10501 (set_attr "predicable" "yes")]
10503 ;; Pop for floating point registers (as used in epilogue RTL)
;; VFP register pop: builds "vldm <base>!, {%P0[-%P1]}" by hand from the
;; first and last DF registers of the parallel; unconditional (VLDM of
;; D-registers is not predicated here -- see "predicable" "no" below).
10504 (define_insn "*vfp_pop_multiple_with_writeback"
10505 [(match_parallel 0 "pop_multiple_fp"
10506 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10507 (plus:SI (match_dup 1)
10508 (match_operand:SI 2 "const_int_I_operand" "I")))
10509 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10510 (mem:DF (match_dup 1)))])]
10511 "TARGET_32BIT && TARGET_HARD_FLOAT"
10514 int num_regs = XVECLEN (operands[0], 0);
10517 strcpy (pattern, \"vldm\\t\");
10518 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10519 strcat (pattern, \"!, {\");
10520 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10521 strcat (pattern, \"%P0\");
10522 if ((num_regs - 1) > 1)
10524 strcat (pattern, \"-%P1\");
10525 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10528 strcat (pattern, \"}\");
10529 output_asm_insn (pattern, op_list);
10533 [(set_attr "type" "load_16")
10534 (set_attr "conds" "unconditional")
10535 (set_attr "predicable" "no")]
10538 ;; Special patterns for dealing with the constant pool
;; NOTE(review): fragmentary extract; output-template braces and some
;; condition strings are missing between the embedded line numbers.
;; align_4 / align_8: emit assembler alignment to 32/64 bits; no insn.
10540 (define_insn "align_4"
10541 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10544 assemble_align (32);
10547 [(set_attr "type" "no_insn")]
10550 (define_insn "align_8"
10551 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10554 assemble_align (64);
10557 [(set_attr "type" "no_insn")]
;; consttable_end: marks the end of a literal pool by clearing the
;; making_const_table flag.
10560 (define_insn "consttable_end"
10561 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10564 making_const_table = FALSE;
10567 [(set_attr "type" "no_insn")]
;; consttable_1: one pool byte padded with three zero bytes (length 4).
10570 (define_insn "consttable_1"
10571 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10574 making_const_table = TRUE;
10575 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10576 assemble_zeros (3);
10579 [(set_attr "length" "4")
10580 (set_attr "type" "no_insn")]
;; consttable_2: two pool bytes padded to 4; FP16 constants go through
;; arm_emit_fp16_const (the switch on GET_MODE_CLASS is partly elided).
10583 (define_insn "consttable_2"
10584 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10588 rtx x = operands[0];
10589 making_const_table = TRUE;
10590 switch (GET_MODE_CLASS (GET_MODE (x)))
10593 arm_emit_fp16_const (x);
10596 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10597 assemble_zeros (2);
10602 [(set_attr "length" "4")
10603 (set_attr "type" "no_insn")]
;; consttable_4: 4-byte pool entry; floats via assemble_real, and a HIGH
;; rtx is stripped (see the comment below) before assemble_integer.
10606 (define_insn "consttable_4"
10607 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10611 rtx x = operands[0];
10612 making_const_table = TRUE;
10613 scalar_float_mode float_mode;
10614 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10615 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10618 /* XXX: Sometimes gcc does something really dumb and ends up with
10619 a HIGH in a constant pool entry, usually because it's trying to
10620 load into a VFP register.  We know this will always be used in
10621 combination with a LO_SUM which ignores the high bits, so just
10622 strip off the HIGH.  */
10623 if (GET_CODE (x) == HIGH)
10625 assemble_integer (x, 4, BITS_PER_WORD, 1);
10626 mark_symbol_refs_as_used (x);
10630 [(set_attr "length" "4")
10631 (set_attr "type" "no_insn")]
;; consttable_8 / consttable_16: 8- and 16-byte pool entries, same
;; float-vs-integer dispatch as consttable_4.
10634 (define_insn "consttable_8"
10635 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10639 making_const_table = TRUE;
10640 scalar_float_mode float_mode;
10641 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10642 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10643 float_mode, BITS_PER_WORD);
10645 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10648 [(set_attr "length" "8")
10649 (set_attr "type" "no_insn")]
10652 (define_insn "consttable_16"
10653 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10657 making_const_table = TRUE;
10658 scalar_float_mode float_mode;
10659 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10660 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10661 float_mode, BITS_PER_WORD);
10663 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10666 [(set_attr "length" "16")
10667 (set_attr "type" "no_insn")]
10670 ;; V5 Instructions,
;; NOTE(review): fragmentary extract; the output templates (e.g. the
;; "clz..."/"rbit..." strings) are missing between the embedded numbers.
;; clzsi2: count leading zeros, available from ARMv5T (arm_arch5t).
10672 (define_insn "clzsi2"
10673 [(set (match_operand:SI 0 "s_register_operand" "=r")
10674 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10675 "TARGET_32BIT && arm_arch5t"
10677 [(set_attr "predicable" "yes")
10678 (set_attr "type" "clz")])
;; rbitsi2: bit reversal as an UNSPEC, Thumb-2-capable cores only.
10680 (define_insn "rbitsi2"
10681 [(set (match_operand:SI 0 "s_register_operand" "=r")
10682 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10683 "TARGET_32BIT && arm_arch_thumb2"
10685 [(set_attr "predicable" "yes")
10686 (set_attr "type" "clz")])
10688 ;; Keep this as a CTZ expression until after reload and then split
10689 ;; into RBIT + CLZ.  Since RBIT is represented as an UNSPEC it is unlikely
10690 ;; to fold with any other expression.
10692 (define_insn_and_split "ctzsi2"
10693 [(set (match_operand:SI 0 "s_register_operand" "=r")
10694 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10695 "TARGET_32BIT && arm_arch_thumb2"
10697 "&& reload_completed"
10700 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10701 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10705 ;; V5E instructions.
;; prefetch: maps the standard named pattern onto PLD (ARMv5TE up);
;; operands 1 and 2 are the rw/locality hints of the generic pattern.
10707 (define_insn "prefetch"
10708 [(prefetch (match_operand:SI 0 "address_operand" "p")
10709 (match_operand:SI 1 "" "")
10710 (match_operand:SI 2 "" ""))]
10711 "TARGET_32BIT && arm_arch5te"
10713 [(set_attr "type" "load_4")]
10716 ;; General predication pattern
;; NOTE(review): fragmentary extract; this cond_exec predication template
;; lacks its "(define_cond_exec" header line in this view.  The visible
;; condition excludes volatile-referencing insns when TARGET_NO_VOLATILE_CE.
10719 [(match_operator 0 "arm_comparison_operator"
10720 [(match_operand 1 "cc_register" "")
10723 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10725 [(set_attr "predicated" "yes")]
;; force_register_use: zero-length marker keeping a register live
;; (referenced by the epilogue expander for eh_return frames).
10728 (define_insn "force_register_use"
10729 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10732 [(set_attr "length" "0")
10733 (set_attr "type" "no_insn")]
10737 ;; Patterns for exception handling
;; eh_return: dispatches to the ARM or Thumb variant (the TARGET_ test
;; between the two emit_insn calls is elided in this extract).
10739 (define_expand "eh_return"
10740 [(use (match_operand 0 "general_operand"))]
10745 emit_insn (gen_arm_eh_return (operands[0]));
10747 emit_insn (gen_thumb_eh_return (operands[0]));
10752 ;; We can't expand this before we know where the link register is stored.
;; arm_eh_return: split after reload into a call to arm_set_return_address
;; using the scratch in operand 1.
10753 (define_insn_and_split "arm_eh_return"
10754 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10756 (clobber (match_scratch:SI 1 "=&r"))]
10759 "&& reload_completed"
10763 arm_set_return_address (operands[0], operands[1]);
;; TLS register access.  load_tp_hard reads the thread pointer from
;; CP15 c13 (TPIDRURO); the soft variants call __aeabi_read_tp.
10771 (define_insn "load_tp_hard"
10772 [(set (match_operand:SI 0 "register_operand" "=r")
10773 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10775 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10776 [(set_attr "predicable" "yes")
10777 (set_attr "type" "mrs")]
10780 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
10781 (define_insn "load_tp_soft_fdpic"
10782 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10783 (clobber (reg:SI FDPIC_REGNUM))
10784 (clobber (reg:SI LR_REGNUM))
10785 (clobber (reg:SI IP_REGNUM))
10786 (clobber (reg:CC CC_REGNUM))]
10787 "TARGET_SOFT_TP && TARGET_FDPIC"
10788 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10789 [(set_attr "conds" "clob")
10790 (set_attr "type" "branch")]
10793 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
10794 (define_insn "load_tp_soft"
10795 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10796 (clobber (reg:SI LR_REGNUM))
10797 (clobber (reg:SI IP_REGNUM))
10798 (clobber (reg:CC CC_REGNUM))]
10799 "TARGET_SOFT_TP && !TARGET_FDPIC"
10800 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10801 [(set_attr "conds" "clob")
10802 (set_attr "type" "branch")]
10805 ;; tls descriptor call
;; tlscall: emits the LPICn label for operand 1 then "bl %c0(tlscall)";
;; result in r0, clobbers r1/lr/flags.
10806 (define_insn "tlscall"
10807 [(set (reg:SI R0_REGNUM)
10808 (unspec:SI [(reg:SI R0_REGNUM)
10809 (match_operand:SI 0 "" "X")
10810 (match_operand 1 "" "")] UNSPEC_TLS))
10811 (clobber (reg:SI R1_REGNUM))
10812 (clobber (reg:SI LR_REGNUM))
10813 (clobber (reg:SI CC_REGNUM))]
10816 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10817 INTVAL (operands[1]));
10818 return "bl\\t%c0(tlscall)";
10820 [(set_attr "conds" "clob")
10821 (set_attr "length" "4")
10822 (set_attr "type" "branch")]
10825 ;; For thread pointer builtin
10826 (define_expand "get_thread_pointersi"
10827 [(match_operand:SI 0 "s_register_operand")]
10831 arm_load_tp (operands[0]);
10837 ;; We only care about the lower 16 bits of the constant
10838 ;; being inserted into the upper 16 bits of the register.
;; NOTE(review): fragmentary extract; the zero_extract width/position
;; operands and the MOVT output templates are elided between the embedded
;; line numbers.
10839 (define_insn "*arm_movtas_ze"
10840 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10843 (match_operand:SI 1 "const_int_operand" ""))]
10848 [(set_attr "arch" "32,v8mb")
10849 (set_attr "predicable" "yes")
10850 (set_attr "length" "4")
10851 (set_attr "type" "alu_sreg")]
;; Byte-reverse (REV) insn, three alternatives: Thumb-1, Thumb-2, ARM.
10854 (define_insn "*arm_rev"
10855 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10856 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10862 [(set_attr "arch" "t1,t2,32")
10863 (set_attr "length" "2,2,4")
10864 (set_attr "predicable" "no,yes,yes")
10865 (set_attr "type" "rev")]
;; arm_legacy_rev: open-coded byte swap (xor/rotate sequence) for cores
;; without the REV instruction; several shift-count operands are elided.
10868 (define_expand "arm_legacy_rev"
10869 [(set (match_operand:SI 2 "s_register_operand")
10870 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10874 (lshiftrt:SI (match_dup 2)
10876 (set (match_operand:SI 3 "s_register_operand")
10877 (rotatert:SI (match_dup 1)
10880 (and:SI (match_dup 2)
10881 (const_int -65281)))
10882 (set (match_operand:SI 0 "s_register_operand")
10883 (xor:SI (match_dup 3)
10889 ;; Reuse temporaries to keep register pressure down.
;; thumb_legacy_rev: Thumb-1 open-coded byte swap using shift/ior over
;; temporaries 2-5 (many interior steps elided in this extract).
10890 (define_expand "thumb_legacy_rev"
10891 [(set (match_operand:SI 2 "s_register_operand")
10892 (ashift:SI (match_operand:SI 1 "s_register_operand")
10894 (set (match_operand:SI 3 "s_register_operand")
10895 (lshiftrt:SI (match_dup 1)
10898 (ior:SI (match_dup 3)
10900 (set (match_operand:SI 4 "s_register_operand")
10902 (set (match_operand:SI 5 "s_register_operand")
10903 (rotatert:SI (match_dup 1)
10906 (ashift:SI (match_dup 5)
10909 (lshiftrt:SI (match_dup 5)
10912 (ior:SI (match_dup 5)
10915 (rotatert:SI (match_dup 5)
10917 (set (match_operand:SI 0 "s_register_operand")
10918 (ior:SI (match_dup 5)
10924 ;; ARM-specific expansion of signed mod by power of 2
10925 ;; using conditional negate.
10926 ;; For r0 % n where n is a power of 2 produce:
10928 ;; and r0, r0, #(n - 1)
10929 ;; and r1, r1, #(n - 1)
10930 ;; rsbpl r0, r1, #0
;; NOTE(review): fragmentary extract; FAIL paths, the n==2 special-case
;; test and some emitted rtx arguments are elided between the embedded
;; line numbers.  Visible flow: bail out unless operand 2 is a positive
;; power of two, then build mask = n-1; the special case compares
;; operand 1 with 0 and conditionally negates the masked value, the
;; general case negates first (subsi3_compare0), masks both, and selects
;; via IF_THEN_ELSE on the GE condition (COND_EXEC is avoided at expand
;; time, per the comment below).
10932 (define_expand "modsi3"
10933 [(match_operand:SI 0 "register_operand")
10934 (match_operand:SI 1 "register_operand")
10935 (match_operand:SI 2 "const_int_operand")]
10938 HOST_WIDE_INT val = INTVAL (operands[2]);
10941 || exact_log2 (val) <= 0)
10944 rtx mask = GEN_INT (val - 1);
10946 /* In the special case of x0 % 2 we can do the even shorter:
10949 rsblt r0, r0, #0.  */
10953 rtx cc_reg = arm_gen_compare_reg (LT,
10954 operands[1], const0_rtx, NULL_RTX);
10955 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10956 rtx masked = gen_reg_rtx (SImode);
10958 emit_insn (gen_andsi3 (masked, operands[1], mask));
10959 emit_move_insn (operands[0],
10960 gen_rtx_IF_THEN_ELSE (SImode, cond,
10961 gen_rtx_NEG (SImode,
10967 rtx neg_op = gen_reg_rtx (SImode);
10968 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10971 /* Extract the condition register and mode.  */
10972 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10973 rtx cc_reg = SET_DEST (cmp);
10974 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10976 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10978 rtx masked_neg = gen_reg_rtx (SImode);
10979 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10981 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10982 during expand does not always work.  Do an IF_THEN_ELSE instead.  */
10983 emit_move_insn (operands[0],
10984 gen_rtx_IF_THEN_ELSE (SImode, cond,
10985 gen_rtx_NEG (SImode, masked_neg),
;; bswapsi2: pre-ARMv6 fallback goes through the legacy rev expanders
;; (thumb_legacy_rev needs four temporaries, arm_legacy_rev two); the
;; arm_arch6 fast path and TARGET_THUMB test are elided in this extract.
10993 (define_expand "bswapsi2"
10994 [(set (match_operand:SI 0 "s_register_operand")
10995 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10996 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11000 rtx op2 = gen_reg_rtx (SImode);
11001 rtx op3 = gen_reg_rtx (SImode);
11005 rtx op4 = gen_reg_rtx (SImode);
11006 rtx op5 = gen_reg_rtx (SImode);
11008 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11009 op2, op3, op4, op5));
11013 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11022 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11023 ;; and unsigned variants, respectively.  For rev16, expose
11024 ;; byte-swapping in the lower 16 bits only.
;; NOTE(review): fragmentary extract; insn conditions and output templates
;; (the revsh/rev16 strings) are elided between the embedded line numbers.
;; *arm_revsh: sign-extended HImode byte swap (REVSH), t1/t2/ARM variants.
11025 (define_insn "*arm_revsh"
11026 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11027 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11033 [(set_attr "arch" "t1,t2,32")
11034 (set_attr "length" "2,2,4")
11035 (set_attr "type" "rev")]
;; *arm_rev16: plain HImode byte swap (REV16 semantics on the low half).
11038 (define_insn "*arm_rev16"
11039 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11040 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11046 [(set_attr "arch" "t1,t2,32")
11047 (set_attr "length" "2,2,4")
11048 (set_attr "type" "rev")]
11051 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11052 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11053 ;; each valid permutation.
;; arm_rev16si2 / _alt: SImode REV16 recognized from its shift/mask form;
;; operands 2/3 are validated by the aarch_rev16_*_mask_imm_p helpers.
;; The two patterns differ only in which shift appears first in the IOR.
11055 (define_insn "arm_rev16si2"
11056 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11057 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11059 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11060 (and:SI (lshiftrt:SI (match_dup 1)
11062 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11064 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11065 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11067 [(set_attr "arch" "t1,t2,32")
11068 (set_attr "length" "2,2,4")
11069 (set_attr "type" "rev")]
11072 (define_insn "arm_rev16si2_alt"
11073 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11074 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11076 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11077 (and:SI (ashift:SI (match_dup 1)
11079 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11081 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11082 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11084 [(set_attr "arch" "t1,t2,32")
11085 (set_attr "length" "2,2,4")
11086 (set_attr "type" "rev")]
;; bswaphi2: named expander mapping onto the HImode bswap pattern; its
;; condition and body are elided in this extract.
11089 (define_expand "bswaphi2"
11090 [(set (match_operand:HI 0 "s_register_operand")
11091 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11096 ;; Patterns for LDRD/STRD in Thumb2 mode
;; NOTE(review): fragmentary extract; a few lines (closing offsets, some
;; operands_ok_ldrd_strd arguments) are elided between the embedded line
;; numbers.  All six patterns fuse two adjacent SImode accesses into one
;; LDRD/STRD after reload, guarded by operands_ok_ldrd_strd which checks
;; the register-pair/offset constraints.
;; LDRD at [base, imm] / [base, imm+4].
11098 (define_insn "*thumb2_ldrd"
11099 [(set (match_operand:SI 0 "s_register_operand" "=r")
11100 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11101 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11102 (set (match_operand:SI 3 "s_register_operand" "=r")
11103 (mem:SI (plus:SI (match_dup 1)
11104 (match_operand:SI 4 "const_int_operand" ""))))]
11105 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11106 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11107 && (operands_ok_ldrd_strd (operands[0], operands[3],
11108 operands[1], INTVAL (operands[2]),
11110 "ldrd%?\t%0, %3, [%1, %2]"
11111 [(set_attr "type" "load_8")
11112 (set_attr "predicable" "yes")])
;; LDRD at [base] / [base, #4].
11114 (define_insn "*thumb2_ldrd_base"
11115 [(set (match_operand:SI 0 "s_register_operand" "=r")
11116 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11117 (set (match_operand:SI 2 "s_register_operand" "=r")
11118 (mem:SI (plus:SI (match_dup 1)
11120 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11121 && (operands_ok_ldrd_strd (operands[0], operands[2],
11122 operands[1], 0, false, true))"
11123 "ldrd%?\t%0, %2, [%1]"
11124 [(set_attr "type" "load_8")
11125 (set_attr "predicable" "yes")])
;; LDRD at [base, #-4] / [base].
11127 (define_insn "*thumb2_ldrd_base_neg"
11128 [(set (match_operand:SI 0 "s_register_operand" "=r")
11129 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11131 (set (match_operand:SI 2 "s_register_operand" "=r")
11132 (mem:SI (match_dup 1)))]
11133 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11134 && (operands_ok_ldrd_strd (operands[0], operands[2],
11135 operands[1], -4, false, true))"
11136 "ldrd%?\t%0, %2, [%1, #-4]"
11137 [(set_attr "type" "load_8")
11138 (set_attr "predicable" "yes")])
;; STRD mirrors of the three LDRD forms above.
11140 (define_insn "*thumb2_strd"
11141 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11142 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11143 (match_operand:SI 2 "s_register_operand" "r"))
11144 (set (mem:SI (plus:SI (match_dup 0)
11145 (match_operand:SI 3 "const_int_operand" "")))
11146 (match_operand:SI 4 "s_register_operand" "r"))]
11147 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11148 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11149 && (operands_ok_ldrd_strd (operands[2], operands[4],
11150 operands[0], INTVAL (operands[1]),
11152 "strd%?\t%2, %4, [%0, %1]"
11153 [(set_attr "type" "store_8")
11154 (set_attr "predicable" "yes")])
11156 (define_insn "*thumb2_strd_base"
11157 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11158 (match_operand:SI 1 "s_register_operand" "r"))
11159 (set (mem:SI (plus:SI (match_dup 0)
11161 (match_operand:SI 2 "s_register_operand" "r"))]
11162 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11163 && (operands_ok_ldrd_strd (operands[1], operands[2],
11164 operands[0], 0, false, false))"
11165 "strd%?\t%1, %2, [%0]"
11166 [(set_attr "type" "store_8")
11167 (set_attr "predicable" "yes")])
11169 (define_insn "*thumb2_strd_base_neg"
11170 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11172 (match_operand:SI 1 "s_register_operand" "r"))
11173 (set (mem:SI (match_dup 0))
11174 (match_operand:SI 2 "s_register_operand" "r"))]
11175 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11176 && (operands_ok_ldrd_strd (operands[1], operands[2],
11177 operands[0], -4, false, false))"
11178 "strd%?\t%1, %2, [%0, #-4]"
11179 [(set_attr "type" "store_8")
11180 (set_attr "predicable" "yes")])
11182 ;; ARMv8 CRC32 instructions.
11183 (define_insn "arm_<crc_variant>"
11184 [(set (match_operand:SI 0 "s_register_operand" "=r")
11185 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11186 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11189 "<crc_variant>\\t%0, %1, %2"
11190 [(set_attr "type" "crc")
11191 (set_attr "conds" "unconditional")]
11194 ;; Load the load/store double peephole optimizations.
11195 (include "ldrdstrd.md")
11197 ;; Load the load/store multiple patterns
11198 (include "ldmstm.md")
11200 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11201 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11202 ;; The operands are validated through the load_multiple_operation
11203 ;; match_parallel predicate rather than through constraints so enable it only
11205 (define_insn "*load_multiple"
11206 [(match_parallel 0 "load_multiple_operation"
11207 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11208 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11210 "TARGET_32BIT && reload_completed"
11213 arm_output_multireg_pop (operands, /*return_pc=*/false,
11214 /*cond=*/const_true_rtx,
11220 [(set_attr "predicable" "yes")]
11223 (define_expand "copysignsf3"
11224 [(match_operand:SF 0 "register_operand")
11225 (match_operand:SF 1 "register_operand")
11226 (match_operand:SF 2 "register_operand")]
11227 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11229 emit_move_insn (operands[0], operands[2]);
11230 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11231 GEN_INT (31), GEN_INT (0),
11232 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; copysigndf3: double-precision copysign for soft-float Thumb-2.  The
;; result's low word is op1's low word unchanged; the high word is op1's
;; high word with its sign bit (bit 31) replaced by op2's sign bit.
11237 (define_expand "copysigndf3"
11238 [(match_operand:DF 0 "register_operand")
11239 (match_operand:DF 1 "register_operand")
11240 (match_operand:DF 2 "register_operand")]
11241 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11243 rtx op0_low = gen_lowpart (SImode, operands[0]);
11244 rtx op0_high = gen_highpart (SImode, operands[0]);
11245 rtx op1_low = gen_lowpart (SImode, operands[1]);
11246 rtx op1_high = gen_highpart (SImode, operands[1]);
11247 rtx op2_high = gen_highpart (SImode, operands[2]);
11249 rtx scratch1 = gen_reg_rtx (SImode);
11250 rtx scratch2 = gen_reg_rtx (SImode);
11251 emit_move_insn (scratch1, op2_high);
11252 emit_move_insn (scratch2, op1_high);
/* Shift op2's high word right by 31 so scratch1 holds only its sign bit,
   then insert that single bit at position 31 of scratch2 (a copy of op1's
   high word).  NOTE(review): the earlier move of op2_high into scratch1
   is immediately overwritten by this SET and looks redundant — confirm
   against upstream before touching it.  */
11254 emit_insn(gen_rtx_SET(scratch1,
11255 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11256 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11257 emit_move_insn (op0_low, op1_low);
11258 emit_move_insn (op0_high, scratch2);
11264 ;; movmisalign patterns for HImode and SImode.
;; Expander for misaligned moves: routes through the unaligned_load* /
;; unaligned_store* patterns.  HImode loads go via an SImode temporary
;; (gen_unaligned_loadhiu — presumably zero-extending, per the "hiu"
;; name) and are narrowed back to HImode afterwards.
11265 (define_expand "movmisalign<mode>"
11266 [(match_operand:HSI 0 "general_operand")
11267 (match_operand:HSI 1 "general_operand")]
11270 /* This pattern is not permitted to fail during expansion: if both arguments
11271 are non-registers (e.g. memory := constant), force operand 1 into a
11273 rtx (* gen_unaligned_load)(rtx, rtx);
11274 rtx tmp_dest = operands[0];
11275 if (!s_register_operand (operands[0], <MODE>mode)
11276 && !s_register_operand (operands[1], <MODE>mode))
11277 operands[1] = force_reg (<MODE>mode, operands[1]);
/* Choose the load generator per mode; HImode also needs a fresh SImode
   destination temporary.  */
11279 if (<MODE>mode == HImode)
11281 gen_unaligned_load = gen_unaligned_loadhiu;
11282 tmp_dest = gen_reg_rtx (SImode);
11285 gen_unaligned_load = gen_unaligned_loadsi;
/* Memory source => unaligned load (narrow back for HImode); otherwise
   emit an unaligned store of operand 1 into operand 0.  */
11287 if (MEM_P (operands[1]))
11289 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11290 if (<MODE>mode == HImode)
11291 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11294 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor data-processing instruction; the mnemonic comes from the
;; <cdp> attribute of the CDPI iterator (defined elsewhere — presumably
;; CDP/CDP2).  All six operands are immediates; arm_const_bounds
;; range-checks each at output time: coprocessor number and opc1 < 16,
;; CR numbers < 32, opc2 < 8.
11299 (define_insn "arm_<cdp>"
11300 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11301 (match_operand:SI 1 "immediate_operand" "n")
11302 (match_operand:SI 2 "immediate_operand" "n")
11303 (match_operand:SI 3 "immediate_operand" "n")
11304 (match_operand:SI 4 "immediate_operand" "n")
11305 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11306 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11308 arm_const_bounds (operands[0], 0, 16);
11309 arm_const_bounds (operands[1], 0, 16);
11310 arm_const_bounds (operands[2], 0, (1 << 5));
11311 arm_const_bounds (operands[3], 0, (1 << 5));
11312 arm_const_bounds (operands[4], 0, (1 << 5));
11313 arm_const_bounds (operands[5], 0, 8);
11314 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11316 [(set_attr "length" "4")
11317 (set_attr "type" "coproc")])
;; Coprocessor load (<ldc> mnemonic) from a memory operand with the "Uz"
;; constraint.  Unnamed ("*ldc"), so it is only reached through a named
;; expander — see arm_<ldc> below, which builds the mem:SI wrapper.
11319 (define_insn "*ldc"
11320 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11321 (match_operand:SI 1 "immediate_operand" "n")
11322 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11323 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11325 arm_const_bounds (operands[0], 0, 16);
11326 arm_const_bounds (operands[1], 0, (1 << 5));
11327 return "<ldc>\\tp%c0, CR%c1, %2";
11329 [(set_attr "length" "4")
11330 (set_attr "type" "coproc")])
;; Coprocessor store (<stc> mnemonic) to a memory operand ("=Uz" output
;; constraint).  Unnamed ("*stc"); matched only via the arm_<stc>
;; expander below.  Mirrors *ldc above.
11332 (define_insn "*stc"
11333 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11334 (match_operand:SI 1 "immediate_operand" "n")
11335 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11336 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11338 arm_const_bounds (operands[0], 0, 16);
11339 arm_const_bounds (operands[1], 0, (1 << 5));
11340 return "<stc>\\tp%c0, CR%c1, %2";
11342 [(set_attr "length" "4")
11343 (set_attr "type" "coproc")])
;; Named expander for the coprocessor-load builtins: wraps the address
;; register (operand 2) in a (mem:SI ...) so the unnamed *ldc insn above
;; can match.
11345 (define_expand "arm_<ldc>"
11346 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11347 (match_operand:SI 1 "immediate_operand")
11348 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11349 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Named expander for the coprocessor-store builtins; same shape as
;; arm_<ldc> above but feeding the unnamed *stc insn.
11351 (define_expand "arm_<stc>"
11352 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11353 (match_operand:SI 1 "immediate_operand")
11354 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11355 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Move from a core register (operand 2) to a coprocessor (<mcr>
;; mnemonic).  The (use (match_dup 2)) keeps the source register live
;; across the unspec_volatile.  Immediate operands are range-checked at
;; output time with arm_const_bounds.
11357 (define_insn "arm_<mcr>"
11358 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11359 (match_operand:SI 1 "immediate_operand" "n")
11360 (match_operand:SI 2 "s_register_operand" "r")
11361 (match_operand:SI 3 "immediate_operand" "n")
11362 (match_operand:SI 4 "immediate_operand" "n")
11363 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11364 (use (match_dup 2))]
11365 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11367 arm_const_bounds (operands[0], 0, 16);
11368 arm_const_bounds (operands[1], 0, 8);
11369 arm_const_bounds (operands[3], 0, (1 << 5));
11370 arm_const_bounds (operands[4], 0, (1 << 5));
11371 arm_const_bounds (operands[5], 0, 8);
11372 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11374 [(set_attr "length" "4")
11375 (set_attr "type" "coproc")])
;; Move from a coprocessor into a core register (operand 0, <mrc>
;; mnemonic).  Counterpart of arm_<mcr> above; same immediate bounds
;; checks, but operand 0 is the SET destination rather than an unspec
;; input.
11377 (define_insn "arm_<mrc>"
11378 [(set (match_operand:SI 0 "s_register_operand" "=r")
11379 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11380 (match_operand:SI 2 "immediate_operand" "n")
11381 (match_operand:SI 3 "immediate_operand" "n")
11382 (match_operand:SI 4 "immediate_operand" "n")
11383 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11384 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11386 arm_const_bounds (operands[1], 0, 16);
11387 arm_const_bounds (operands[2], 0, 8);
11388 arm_const_bounds (operands[3], 0, (1 << 5));
11389 arm_const_bounds (operands[4], 0, (1 << 5));
11390 arm_const_bounds (operands[5], 0, 8);
11391 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11393 [(set_attr "length" "4")
11394 (set_attr "type" "coproc")])
;; Move a DImode core-register pair (operand 2; printed as %Q2/%R2 —
;; low/high word output modifiers) to a coprocessor (<mcrr> mnemonic).
;; The (use (match_dup 2)) keeps the pair live across the
;; unspec_volatile.
11396 (define_insn "arm_<mcrr>"
11397 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11398 (match_operand:SI 1 "immediate_operand" "n")
11399 (match_operand:DI 2 "s_register_operand" "r")
11400 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11401 (use (match_dup 2))]
11402 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11404 arm_const_bounds (operands[0], 0, 16);
11405 arm_const_bounds (operands[1], 0, 8);
11406 arm_const_bounds (operands[3], 0, (1 << 5));
11407 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11409 [(set_attr "length" "4")
11410 (set_attr "type" "coproc")])
;; Move from a coprocessor into a DImode core-register pair (operand 0,
;; <mrrc> mnemonic).  Counterpart of arm_<mcrr> above.
11412 (define_insn "arm_<mrrc>"
11413 [(set (match_operand:DI 0 "s_register_operand" "=r")
11414 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11415 (match_operand:SI 2 "immediate_operand" "n")
11416 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11417 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11419 arm_const_bounds (operands[1], 0, 16);
11420 arm_const_bounds (operands[2], 0, 8);
11421 arm_const_bounds (operands[3], 0, (1 << 5));
11422 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11424 [(set_attr "length" "4")
11425 (set_attr "type" "coproc")])
;; Expand a speculation barrier.  On Armv7/Armv8 the hard barrier insn
;; below matches directly; on older architectures fall back to a libgcc
;; helper call emitted here.
11427 (define_expand "speculation_barrier"
11428 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11431 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11432 have a usable barrier (and probably don't need one in practice).
11433 But to be safe if such code is run on later architectures, call a
11434 helper function in libgcc that will do the thing for the active
11436 if (!(arm_arch7 || arm_arch8))
11438 arm_emit_speculation_barrier_function ();
11444 ;; Generate a hard speculation barrier when we have not enabled speculation
;; NOTE(review): the insn's output template line is not visible in this
;; extract; length 8 implies a two-instruction sequence — confirm against
;; upstream.
11446 (define_insn "*speculation_barrier_insn"
11447 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11448 "arm_arch7 || arm_arch8"
11450 [(set_attr "type" "block")
11451 (set_attr "length" "8")]
;; -- Remaining pattern files included at the end of arm.md ------------------
11454 ;; Vector bits common to IWMMXT and Neon
11455 (include "vec-common.md")
11456 ;; Load the Intel Wireless Multimedia Extension patterns
11457 (include "iwmmxt.md")
11458 ;; Load the VFP co-processor patterns
;; NOTE(review): the include line for the VFP patterns (vfp.md) appears to
;; be missing from this extract — only its comment survives.
11460 ;; Thumb-1 patterns
11461 (include "thumb1.md")
11462 ;; Thumb-2 patterns
11463 (include "thumb2.md")
11465 (include "neon.md")
11467 (include "crypto.md")
11468 ;; Synchronization Primitives
11469 (include "sync.md")
11470 ;; Fixed-point patterns
11471 (include "arm-fixed.md")