1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
; The value is derived from the TARGET_THUMB flag via the symbol_ref below.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
; Unlike "is_thumb" above, this tests TARGET_THUMB1 only, so it is 'no'
; for Thumb-2.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted.  Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
;; registers; defaults to "no".
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
; The value tracks the arm_fpu_attr flag set up by the ARM back end.
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
; Each value is mapped to a target-flag test by the "arch_enabled"
; attribute defined below.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
; Restrict an alternative to functions being optimized for speed or for
; size; "any" (the default) places no restriction.  Tested by the
; "opt_enabled" attribute below.
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
; "yes" when an FP load (type f_loads/f_loadd) takes a constant source
; operand, i.e. the value has to be fetched from the literal pool.
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME: opt_enabled has been temporarily removed until the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; The generic pool_range/neg_pool_range attributes select between the ARM
; and Thumb-2 values above, keyed on the "is_thumb" attribute.
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
; Default attribute values assumed for inline assembler statements.
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable,
264 ; initialized by arm_option_override().
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
; (Contrast with the "predicated" attribute above, which marks an insn
; form that is itself conditionally executed.)
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
; The value tracks the arm_tune_wbuf tuning flag.
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
; Any type not in the list below defaults to "multi".
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
; Defaults to 1 (a single machine instruction).
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
491 (define_expand "addv<mode>4"
492 [(match_operand:SIDI 0 "register_operand")
493 (match_operand:SIDI 1 "register_operand")
494 (match_operand:SIDI 2 "register_operand")
495 (match_operand 3 "")]
498 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
499 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
504 (define_expand "uaddv<mode>4"
505 [(match_operand:SIDI 0 "register_operand")
506 (match_operand:SIDI 1 "register_operand")
507 (match_operand:SIDI 2 "register_operand")
508 (match_operand 3 "")]
511 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
512 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
517 (define_expand "addsi3"
518 [(set (match_operand:SI 0 "s_register_operand")
519 (plus:SI (match_operand:SI 1 "s_register_operand")
520 (match_operand:SI 2 "reg_or_int_operand")))]
523 if (TARGET_32BIT && CONST_INT_P (operands[2]))
525 arm_split_constant (PLUS, SImode, NULL_RTX,
526 INTVAL (operands[2]), operands[0], operands[1],
527 optimize && can_create_pseudo_p ());
533 ; If there is a scratch available, this will be faster than synthesizing the
536 [(match_scratch:SI 3 "r")
537 (set (match_operand:SI 0 "arm_general_register_operand" "")
538 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
539 (match_operand:SI 2 "const_int_operand" "")))]
541 !(const_ok_for_arm (INTVAL (operands[2]))
542 || const_ok_for_arm (-INTVAL (operands[2])))
543 && const_ok_for_arm (~INTVAL (operands[2]))"
544 [(set (match_dup 3) (match_dup 2))
545 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
549 ;; The r/r/k alternative is required when reloading the address
550 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
551 ;; put the duplicated register first, and not try the commutative version.
552 (define_insn_and_split "*arm_addsi3"
553 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
554 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
555 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
571 subw%?\\t%0, %1, #%n2
572 subw%?\\t%0, %1, #%n2
575 && CONST_INT_P (operands[2])
576 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
577 && (reload_completed || !arm_eliminable_register (operands[1]))"
578 [(clobber (const_int 0))]
580 arm_split_constant (PLUS, SImode, curr_insn,
581 INTVAL (operands[2]), operands[0],
585 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
586 (set_attr "predicable" "yes")
587 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
588 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
589 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
590 (const_string "alu_imm")
591 (const_string "alu_sreg")))
595 (define_insn "adddi3_compareV"
596 [(set (reg:CC_V CC_REGNUM)
599 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
600 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
601 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
602 (set (match_operand:DI 0 "s_register_operand" "=&r")
603 (plus:DI (match_dup 1) (match_dup 2)))]
605 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
606 [(set_attr "conds" "set")
607 (set_attr "length" "8")
608 (set_attr "type" "multiple")]
611 (define_insn "addsi3_compareV"
612 [(set (reg:CC_V CC_REGNUM)
615 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
616 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
617 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
618 (set (match_operand:SI 0 "register_operand" "=r")
619 (plus:SI (match_dup 1) (match_dup 2)))]
621 "adds%?\\t%0, %1, %2"
622 [(set_attr "conds" "set")
623 (set_attr "type" "alus_sreg")]
626 (define_insn "adddi3_compareC"
627 [(set (reg:CC_C CC_REGNUM)
630 (match_operand:DI 1 "register_operand" "r")
631 (match_operand:DI 2 "register_operand" "r"))
633 (set (match_operand:DI 0 "register_operand" "=&r")
634 (plus:DI (match_dup 1) (match_dup 2)))]
636 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
637 [(set_attr "conds" "set")
638 (set_attr "length" "8")
639 (set_attr "type" "multiple")]
642 (define_insn "addsi3_compareC"
643 [(set (reg:CC_C CC_REGNUM)
644 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
645 (match_operand:SI 2 "register_operand" "r"))
647 (set (match_operand:SI 0 "register_operand" "=r")
648 (plus:SI (match_dup 1) (match_dup 2)))]
650 "adds%?\\t%0, %1, %2"
651 [(set_attr "conds" "set")
652 (set_attr "type" "alus_sreg")]
655 (define_insn "addsi3_compare0"
656 [(set (reg:CC_NOOV CC_REGNUM)
658 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
659 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
661 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
662 (plus:SI (match_dup 1) (match_dup 2)))]
666 subs%?\\t%0, %1, #%n2
668 [(set_attr "conds" "set")
669 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
672 (define_insn "*addsi3_compare0_scratch"
673 [(set (reg:CC_NOOV CC_REGNUM)
675 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
676 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
683 [(set_attr "conds" "set")
684 (set_attr "predicable" "yes")
685 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
688 (define_insn "*compare_negsi_si"
689 [(set (reg:CC_Z CC_REGNUM)
691 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
692 (match_operand:SI 1 "s_register_operand" "l,r")))]
695 [(set_attr "conds" "set")
696 (set_attr "predicable" "yes")
697 (set_attr "arch" "t2,*")
698 (set_attr "length" "2,4")
699 (set_attr "predicable_short_it" "yes,no")
700 (set_attr "type" "alus_sreg")]
703 ;; This is the canonicalization of subsi3_compare when the
704 ;; addend is a constant.
705 (define_insn "cmpsi2_addneg"
706 [(set (reg:CC CC_REGNUM)
708 (match_operand:SI 1 "s_register_operand" "r,r")
709 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
711 (plus:SI (match_dup 1)
712 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
714 && (INTVAL (operands[2])
715 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
717 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
718 in different condition codes (like cmn rather than like cmp), so that
719 alternative comes first. Both alternatives can match for any 0x??000000
720 where except for 0 and INT_MIN it doesn't matter what we choose, and also
721 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
723 if (which_alternative == 0 && operands[3] != const1_rtx)
724 return "subs%?\\t%0, %1, #%n3";
726 return "adds%?\\t%0, %1, %3";
728 [(set_attr "conds" "set")
729 (set_attr "type" "alus_sreg")]
732 ;; Convert the sequence
734 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
738 ;; bcs dest ((unsigned)rn >= 1)
739 ;; similarly for the beq variant using bcc.
740 ;; This is a common looping idiom (while (n--))
742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
743 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
745 (set (match_operand 2 "cc_register" "")
746 (compare (match_dup 0) (const_int -1)))
748 (if_then_else (match_operator 3 "equality_operator"
749 [(match_dup 2) (const_int 0)])
750 (match_operand 4 "" "")
751 (match_operand 5 "" "")))]
752 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
756 (match_dup 1) (const_int 1)))
757 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
759 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
762 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
763 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
766 operands[2], const0_rtx);"
769 ;; The next four insns work because they compare the result with one of
770 ;; the operands, and we know that the use of the condition code is
771 ;; either GEU or LTU, so we can use the carry flag from the addition
772 ;; instead of doing the compare a second time.
773 (define_insn "*addsi3_compare_op1"
774 [(set (reg:CC_C CC_REGNUM)
776 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
777 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
779 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
780 (plus:SI (match_dup 1) (match_dup 2)))]
785 subs%?\\t%0, %1, #%n2
786 subs%?\\t%0, %0, #%n2
788 subs%?\\t%0, %1, #%n2
790 [(set_attr "conds" "set")
791 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
792 (set_attr "length" "2,2,2,2,4,4,4")
794 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
797 (define_insn "*addsi3_compare_op2"
798 [(set (reg:CC_C CC_REGNUM)
800 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
801 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
803 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
804 (plus:SI (match_dup 1) (match_dup 2)))]
809 subs%?\\t%0, %1, #%n2
810 subs%?\\t%0, %0, #%n2
812 subs%?\\t%0, %1, #%n2
814 [(set_attr "conds" "set")
815 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
816 (set_attr "length" "2,2,2,2,4,4,4")
818 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
821 (define_insn "*compare_addsi2_op0"
822 [(set (reg:CC_C CC_REGNUM)
824 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
825 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
834 [(set_attr "conds" "set")
835 (set_attr "predicable" "yes")
836 (set_attr "arch" "t2,t2,*,*,*")
837 (set_attr "predicable_short_it" "yes,yes,no,no,no")
838 (set_attr "length" "2,2,4,4,4")
839 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
842 (define_insn "*compare_addsi2_op1"
843 [(set (reg:CC_C CC_REGNUM)
845 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
846 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
855 [(set_attr "conds" "set")
856 (set_attr "predicable" "yes")
857 (set_attr "arch" "t2,t2,*,*,*")
858 (set_attr "predicable_short_it" "yes,yes,no,no,no")
859 (set_attr "length" "2,2,4,4,4")
860 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
863 (define_insn "addsi3_carryin"
864 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
865 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
866 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
867 (match_operand:SI 3 "arm_carry_operation" "")))]
872 sbc%?\\t%0, %1, #%B2"
873 [(set_attr "conds" "use")
874 (set_attr "predicable" "yes")
875 (set_attr "arch" "t2,*,*")
876 (set_attr "length" "4")
877 (set_attr "predicable_short_it" "yes,no,no")
878 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
881 ;; Canonicalization of the above when the immediate is zero.
882 (define_insn "add0si3_carryin"
883 [(set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
885 (match_operand:SI 1 "arm_not_operand" "r")))]
888 [(set_attr "conds" "use")
889 (set_attr "predicable" "yes")
890 (set_attr "length" "4")
891 (set_attr "type" "adc_imm")]
894 (define_insn "*addsi3_carryin_alt2"
895 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
896 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
897 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
898 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
903 sbc%?\\t%0, %1, #%B2"
904 [(set_attr "conds" "use")
905 (set_attr "predicable" "yes")
906 (set_attr "arch" "t2,*,*")
907 (set_attr "length" "4")
908 (set_attr "predicable_short_it" "yes,no,no")
909 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
912 (define_insn "*addsi3_carryin_shift"
913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
915 (match_operator:SI 2 "shift_operator"
916 [(match_operand:SI 3 "s_register_operand" "r,r")
917 (match_operand:SI 4 "shift_amount_operand" "M,r")])
918 (match_operand:SI 5 "arm_carry_operation" ""))
919 (match_operand:SI 1 "s_register_operand" "r,r")))]
921 "adc%?\\t%0, %1, %3%S2"
922 [(set_attr "conds" "use")
923 (set_attr "arch" "32,a")
924 (set_attr "shift" "3")
925 (set_attr "predicable" "yes")
926 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
927 (const_string "alu_shift_imm")
928 (const_string "alu_shift_reg")))]
931 (define_insn "*addsi3_carryin_clobercc"
932 [(set (match_operand:SI 0 "s_register_operand" "=r")
933 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
934 (match_operand:SI 2 "arm_rhs_operand" "rI"))
935 (match_operand:SI 3 "arm_carry_operation" "")))
936 (clobber (reg:CC CC_REGNUM))]
938 "adcs%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "type" "adcs_reg")]
943 (define_expand "subv<mode>4"
944 [(match_operand:SIDI 0 "register_operand")
945 (match_operand:SIDI 1 "register_operand")
946 (match_operand:SIDI 2 "register_operand")
947 (match_operand 3 "")]
950 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
951 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
956 (define_expand "usubv<mode>4"
957 [(match_operand:SIDI 0 "register_operand")
958 (match_operand:SIDI 1 "register_operand")
959 (match_operand:SIDI 2 "register_operand")
960 (match_operand 3 "")]
963 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
964 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
969 (define_insn "subdi3_compare1"
970 [(set (reg:CC CC_REGNUM)
972 (match_operand:DI 1 "s_register_operand" "r")
973 (match_operand:DI 2 "s_register_operand" "r")))
974 (set (match_operand:DI 0 "s_register_operand" "=&r")
975 (minus:DI (match_dup 1) (match_dup 2)))]
977 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
978 [(set_attr "conds" "set")
979 (set_attr "length" "8")
980 (set_attr "type" "multiple")]
983 (define_insn "subsi3_compare1"
984 [(set (reg:CC CC_REGNUM)
986 (match_operand:SI 1 "register_operand" "r")
987 (match_operand:SI 2 "register_operand" "r")))
988 (set (match_operand:SI 0 "register_operand" "=r")
989 (minus:SI (match_dup 1) (match_dup 2)))]
991 "subs%?\\t%0, %1, %2"
992 [(set_attr "conds" "set")
993 (set_attr "type" "alus_sreg")]
996 (define_insn "subsi3_carryin"
997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
998 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
999 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1000 (match_operand:SI 3 "arm_borrow_operation" "")))]
1005 sbc%?\\t%0, %2, %2, lsl #1"
1006 [(set_attr "conds" "use")
1007 (set_attr "arch" "*,a,t2")
1008 (set_attr "predicable" "yes")
1009 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1012 (define_insn "*subsi3_carryin_const"
1013 [(set (match_operand:SI 0 "s_register_operand" "=r")
1015 (match_operand:SI 1 "s_register_operand" "r")
1016 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1017 (match_operand:SI 3 "arm_borrow_operation" "")))]
1019 "sbc\\t%0, %1, #%n2"
1020 [(set_attr "conds" "use")
1021 (set_attr "type" "adc_imm")]
1024 (define_insn "*subsi3_carryin_const0"
1025 [(set (match_operand:SI 0 "s_register_operand" "=r")
1026 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1027 (match_operand:SI 2 "arm_borrow_operation" "")))]
1030 [(set_attr "conds" "use")
1031 (set_attr "type" "adc_imm")]
1034 (define_insn "*subsi3_carryin_shift"
1035 [(set (match_operand:SI 0 "s_register_operand" "=r")
1037 (match_operand:SI 1 "s_register_operand" "r")
1038 (match_operator:SI 2 "shift_operator"
1039 [(match_operand:SI 3 "s_register_operand" "r")
1040 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1041 (match_operand:SI 5 "arm_borrow_operation" "")))]
1043 "sbc%?\\t%0, %1, %3%S2"
1044 [(set_attr "conds" "use")
1045 (set_attr "predicable" "yes")
1046 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1047 (const_string "alu_shift_imm")
1048 (const_string "alu_shift_reg")))]
1051 (define_insn "*subsi3_carryin_shift_alt"
1052 [(set (match_operand:SI 0 "s_register_operand" "=r")
1054 (match_operand:SI 1 "s_register_operand" "r")
1055 (match_operand:SI 5 "arm_borrow_operation" ""))
1056 (match_operator:SI 2 "shift_operator"
1057 [(match_operand:SI 3 "s_register_operand" "r")
1058 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1060 "sbc%?\\t%0, %1, %3%S2"
1061 [(set_attr "conds" "use")
1062 (set_attr "predicable" "yes")
1063 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1064 (const_string "alu_shift_imm")
1065 (const_string "alu_shift_reg")))]
1068 (define_insn "*rsbsi3_carryin_shift"
1069 [(set (match_operand:SI 0 "s_register_operand" "=r")
1071 (match_operator:SI 2 "shift_operator"
1072 [(match_operand:SI 3 "s_register_operand" "r")
1073 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1074 (match_operand:SI 1 "s_register_operand" "r"))
1075 (match_operand:SI 5 "arm_borrow_operation" "")))]
1077 "rsc%?\\t%0, %1, %3%S2"
1078 [(set_attr "conds" "use")
1079 (set_attr "predicable" "yes")
1080 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1081 (const_string "alu_shift_imm")
1082 (const_string "alu_shift_reg")))]
1085 (define_insn "*rsbsi3_carryin_shift_alt"
1086 [(set (match_operand:SI 0 "s_register_operand" "=r")
1088 (match_operator:SI 2 "shift_operator"
1089 [(match_operand:SI 3 "s_register_operand" "r")
1090 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1091 (match_operand:SI 5 "arm_borrow_operation" ""))
1092 (match_operand:SI 1 "s_register_operand" "r")))]
1094 "rsc%?\\t%0, %1, %3%S2"
1095 [(set_attr "conds" "use")
1096 (set_attr "predicable" "yes")
1097 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1098 (const_string "alu_shift_imm")
1099 (const_string "alu_shift_reg")))]
1102 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1104 [(set (match_operand:SI 0 "s_register_operand" "")
1105 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1106 (match_operand:SI 2 "s_register_operand" ""))
1108 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1110 [(set (match_dup 3) (match_dup 1))
1111 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1113 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1116 (define_expand "addsf3"
1117 [(set (match_operand:SF 0 "s_register_operand")
1118 (plus:SF (match_operand:SF 1 "s_register_operand")
1119 (match_operand:SF 2 "s_register_operand")))]
1120 "TARGET_32BIT && TARGET_HARD_FLOAT"
1124 (define_expand "adddf3"
1125 [(set (match_operand:DF 0 "s_register_operand")
1126 (plus:DF (match_operand:DF 1 "s_register_operand")
1127 (match_operand:DF 2 "s_register_operand")))]
1128 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1132 (define_expand "subdi3"
1134 [(set (match_operand:DI 0 "s_register_operand")
1135 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1136 (match_operand:DI 2 "s_register_operand")))
1137 (clobber (reg:CC CC_REGNUM))])]
1142 if (!REG_P (operands[1]))
1143 operands[1] = force_reg (DImode, operands[1]);
1147 rtx lo_result, hi_result, lo_dest, hi_dest;
1148 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1151 /* Since operands[1] may be an integer, pass it second, so that
1152 any necessary simplifications will be done on the decomposed
1154 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1156 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1157 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1159 if (!arm_rhs_operand (lo_op1, SImode))
1160 lo_op1 = force_reg (SImode, lo_op1);
1162 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1163 || !arm_rhs_operand (hi_op1, SImode))
1164 hi_op1 = force_reg (SImode, hi_op1);
1167 if (lo_op1 == const0_rtx)
1169 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1170 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1172 else if (CONST_INT_P (lo_op1))
1174 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1175 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1176 GEN_INT (~UINTVAL (lo_op1))));
1180 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1181 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1184 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1186 if (hi_op1 == const0_rtx)
1187 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1189 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1191 if (lo_result != lo_dest)
1192 emit_move_insn (lo_result, lo_dest);
1194 if (hi_result != hi_dest)
1195 emit_move_insn (hi_result, hi_dest);
1202 (define_expand "subsi3"
1203 [(set (match_operand:SI 0 "s_register_operand")
1204 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1205 (match_operand:SI 2 "s_register_operand")))]
1208 if (CONST_INT_P (operands[1]))
1212 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1213 operands[1] = force_reg (SImode, operands[1]);
1216 arm_split_constant (MINUS, SImode, NULL_RTX,
1217 INTVAL (operands[1]), operands[0],
1219 optimize && can_create_pseudo_p ());
1223 else /* TARGET_THUMB1 */
1224 operands[1] = force_reg (SImode, operands[1]);
1229 ; ??? Check Thumb-2 split length
1230 (define_insn_and_split "*arm_subsi3_insn"
1231 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1232 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1233 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1245 "&& (CONST_INT_P (operands[1])
1246 && !const_ok_for_arm (INTVAL (operands[1])))"
1247 [(clobber (const_int 0))]
1249 arm_split_constant (MINUS, SImode, curr_insn,
1250 INTVAL (operands[1]), operands[0], operands[2], 0);
1253 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1254 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1255 (set_attr "predicable" "yes")
1256 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1257 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1261 [(match_scratch:SI 3 "r")
1262 (set (match_operand:SI 0 "arm_general_register_operand" "")
1263 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1264 (match_operand:SI 2 "arm_general_register_operand" "")))]
1266 && !const_ok_for_arm (INTVAL (operands[1]))
1267 && const_ok_for_arm (~INTVAL (operands[1]))"
1268 [(set (match_dup 3) (match_dup 1))
1269 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1273 (define_insn "subsi3_compare0"
1274 [(set (reg:CC_NOOV CC_REGNUM)
1276 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1277 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1279 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1280 (minus:SI (match_dup 1) (match_dup 2)))]
1285 rsbs%?\\t%0, %2, %1"
1286 [(set_attr "conds" "set")
1287 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1290 (define_insn "subsi3_compare"
1291 [(set (reg:CC CC_REGNUM)
1292 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1293 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1294 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1295 (minus:SI (match_dup 1) (match_dup 2)))]
1300 rsbs%?\\t%0, %2, %1"
1301 [(set_attr "conds" "set")
1302 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1305 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1306 ;; rather than (0 cmp reg). This gives the same results for unsigned
1307 ;; and equality compares which is what we mostly need here.
1308 (define_insn "rsb_imm_compare"
1309 [(set (reg:CC_RSB CC_REGNUM)
1310 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1311 (match_operand 3 "const_int_operand" "")))
1312 (set (match_operand:SI 0 "s_register_operand" "=r")
1313 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1315 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1317 [(set_attr "conds" "set")
1318 (set_attr "type" "alus_imm")]
1321 (define_expand "subsf3"
1322 [(set (match_operand:SF 0 "s_register_operand")
1323 (minus:SF (match_operand:SF 1 "s_register_operand")
1324 (match_operand:SF 2 "s_register_operand")))]
1325 "TARGET_32BIT && TARGET_HARD_FLOAT"
1329 (define_expand "subdf3"
1330 [(set (match_operand:DF 0 "s_register_operand")
1331 (minus:DF (match_operand:DF 1 "s_register_operand")
1332 (match_operand:DF 2 "s_register_operand")))]
1333 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1338 ;; Multiplication insns
1340 (define_expand "mulhi3"
1341 [(set (match_operand:HI 0 "s_register_operand")
1342 (mult:HI (match_operand:HI 1 "s_register_operand")
1343 (match_operand:HI 2 "s_register_operand")))]
1344 "TARGET_DSP_MULTIPLY"
1347 rtx result = gen_reg_rtx (SImode);
1348 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1349 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1354 (define_expand "mulsi3"
1355 [(set (match_operand:SI 0 "s_register_operand")
1356 (mult:SI (match_operand:SI 2 "s_register_operand")
1357 (match_operand:SI 1 "s_register_operand")))]
1362 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1364 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1365 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1366 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1368 "mul%?\\t%0, %2, %1"
1369 [(set_attr "type" "mul")
1370 (set_attr "predicable" "yes")
1371 (set_attr "arch" "t2,v6,nov6,nov6")
1372 (set_attr "length" "4")
1373 (set_attr "predicable_short_it" "yes,no,*,*")]
1376 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
1377 ;; reusing the same register.
1380 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1382 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1383 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1384 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1386 "mla%?\\t%0, %3, %2, %1"
1387 [(set_attr "type" "mla")
1388 (set_attr "predicable" "yes")
1389 (set_attr "arch" "v6,nov6,nov6,nov6")]
1393 [(set (match_operand:SI 0 "s_register_operand" "=r")
1395 (match_operand:SI 1 "s_register_operand" "r")
1396 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1397 (match_operand:SI 2 "s_register_operand" "r"))))]
1398 "TARGET_32BIT && arm_arch_thumb2"
1399 "mls%?\\t%0, %3, %2, %1"
1400 [(set_attr "type" "mla")
1401 (set_attr "predicable" "yes")]
1404 (define_insn "*mulsi3_compare0"
1405 [(set (reg:CC_NOOV CC_REGNUM)
1406 (compare:CC_NOOV (mult:SI
1407 (match_operand:SI 2 "s_register_operand" "r,r")
1408 (match_operand:SI 1 "s_register_operand" "%0,r"))
1410 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1411 (mult:SI (match_dup 2) (match_dup 1)))]
1412 "TARGET_ARM && !arm_arch6"
1413 "muls%?\\t%0, %2, %1"
1414 [(set_attr "conds" "set")
1415 (set_attr "type" "muls")]
1418 (define_insn "*mulsi3_compare0_v6"
1419 [(set (reg:CC_NOOV CC_REGNUM)
1420 (compare:CC_NOOV (mult:SI
1421 (match_operand:SI 2 "s_register_operand" "r")
1422 (match_operand:SI 1 "s_register_operand" "r"))
1424 (set (match_operand:SI 0 "s_register_operand" "=r")
1425 (mult:SI (match_dup 2) (match_dup 1)))]
1426 "TARGET_ARM && arm_arch6 && optimize_size"
1427 "muls%?\\t%0, %2, %1"
1428 [(set_attr "conds" "set")
1429 (set_attr "type" "muls")]
1432 (define_insn "*mulsi_compare0_scratch"
1433 [(set (reg:CC_NOOV CC_REGNUM)
1434 (compare:CC_NOOV (mult:SI
1435 (match_operand:SI 2 "s_register_operand" "r,r")
1436 (match_operand:SI 1 "s_register_operand" "%0,r"))
1438 (clobber (match_scratch:SI 0 "=&r,&r"))]
1439 "TARGET_ARM && !arm_arch6"
1440 "muls%?\\t%0, %2, %1"
1441 [(set_attr "conds" "set")
1442 (set_attr "type" "muls")]
1445 (define_insn "*mulsi_compare0_scratch_v6"
1446 [(set (reg:CC_NOOV CC_REGNUM)
1447 (compare:CC_NOOV (mult:SI
1448 (match_operand:SI 2 "s_register_operand" "r")
1449 (match_operand:SI 1 "s_register_operand" "r"))
1451 (clobber (match_scratch:SI 0 "=r"))]
1452 "TARGET_ARM && arm_arch6 && optimize_size"
1453 "muls%?\\t%0, %2, %1"
1454 [(set_attr "conds" "set")
1455 (set_attr "type" "muls")]
1458 (define_insn "*mulsi3addsi_compare0"
1459 [(set (reg:CC_NOOV CC_REGNUM)
1462 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1463 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1464 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1466 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1467 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1469 "TARGET_ARM && arm_arch6"
1470 "mlas%?\\t%0, %2, %1, %3"
1471 [(set_attr "conds" "set")
1472 (set_attr "type" "mlas")]
1475 (define_insn "*mulsi3addsi_compare0_v6"
1476 [(set (reg:CC_NOOV CC_REGNUM)
1479 (match_operand:SI 2 "s_register_operand" "r")
1480 (match_operand:SI 1 "s_register_operand" "r"))
1481 (match_operand:SI 3 "s_register_operand" "r"))
1483 (set (match_operand:SI 0 "s_register_operand" "=r")
1484 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1486 "TARGET_ARM && arm_arch6 && optimize_size"
1487 "mlas%?\\t%0, %2, %1, %3"
1488 [(set_attr "conds" "set")
1489 (set_attr "type" "mlas")]
1492 (define_insn "*mulsi3addsi_compare0_scratch"
1493 [(set (reg:CC_NOOV CC_REGNUM)
1496 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1497 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1498 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1500 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1501 "TARGET_ARM && !arm_arch6"
1502 "mlas%?\\t%0, %2, %1, %3"
1503 [(set_attr "conds" "set")
1504 (set_attr "type" "mlas")]
1507 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1508 [(set (reg:CC_NOOV CC_REGNUM)
1511 (match_operand:SI 2 "s_register_operand" "r")
1512 (match_operand:SI 1 "s_register_operand" "r"))
1513 (match_operand:SI 3 "s_register_operand" "r"))
1515 (clobber (match_scratch:SI 0 "=r"))]
1516 "TARGET_ARM && arm_arch6 && optimize_size"
1517 "mlas%?\\t%0, %2, %1, %3"
1518 [(set_attr "conds" "set")
1519 (set_attr "type" "mlas")]
1522 ;; 32x32->64 widening multiply.
1523 ;; The only difference between the v3-5 and v6+ versions is the requirement
1524 ;; that the output does not overlap with either input.
1526 (define_expand "<Us>mulsidi3"
1527 [(set (match_operand:DI 0 "s_register_operand")
1529 (SE:DI (match_operand:SI 1 "s_register_operand"))
1530 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1533 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1534 gen_highpart (SImode, operands[0]),
1535 operands[1], operands[2]));
1540 (define_insn "<US>mull"
1541 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1543 (match_operand:SI 2 "s_register_operand" "%r,r")
1544 (match_operand:SI 3 "s_register_operand" "r,r")))
1545 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1548 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1551 "<US>mull%?\\t%0, %1, %2, %3"
1552 [(set_attr "type" "umull")
1553 (set_attr "predicable" "yes")
1554 (set_attr "arch" "v6,nov6")]
1557 (define_expand "<Us>maddsidi4"
1558 [(set (match_operand:DI 0 "s_register_operand")
1561 (SE:DI (match_operand:SI 1 "s_register_operand"))
1562 (SE:DI (match_operand:SI 2 "s_register_operand")))
1563 (match_operand:DI 3 "s_register_operand")))]
1566 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1567 gen_lowpart (SImode, operands[3]),
1568 gen_highpart (SImode, operands[0]),
1569 gen_highpart (SImode, operands[3]),
1570 operands[1], operands[2]));
1575 (define_insn "<US>mlal"
1576 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1579 (match_operand:SI 4 "s_register_operand" "%r,r")
1580 (match_operand:SI 5 "s_register_operand" "r,r"))
1581 (match_operand:SI 1 "s_register_operand" "0,0")))
1582 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1587 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1588 (zero_extend:DI (match_dup 1)))
1590 (match_operand:SI 3 "s_register_operand" "2,2")))]
1592 "<US>mlal%?\\t%0, %2, %4, %5"
1593 [(set_attr "type" "umlal")
1594 (set_attr "predicable" "yes")
1595 (set_attr "arch" "v6,nov6")]
1598 (define_expand "<US>mulsi3_highpart"
1600 [(set (match_operand:SI 0 "s_register_operand")
1604 (SE:DI (match_operand:SI 1 "s_register_operand"))
1605 (SE:DI (match_operand:SI 2 "s_register_operand")))
1607 (clobber (match_scratch:SI 3 ""))])]
1612 (define_insn "*<US>mull_high"
1613 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1617 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1618 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1620 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1622 "<US>mull%?\\t%3, %0, %2, %1"
1623 [(set_attr "type" "umull")
1624 (set_attr "predicable" "yes")
1625 (set_attr "arch" "v6,nov6,nov6")]
1628 (define_insn "mulhisi3"
1629 [(set (match_operand:SI 0 "s_register_operand" "=r")
1630 (mult:SI (sign_extend:SI
1631 (match_operand:HI 1 "s_register_operand" "%r"))
1633 (match_operand:HI 2 "s_register_operand" "r"))))]
1634 "TARGET_DSP_MULTIPLY"
1635 "smulbb%?\\t%0, %1, %2"
1636 [(set_attr "type" "smulxy")
1637 (set_attr "predicable" "yes")]
1640 (define_insn "*mulhisi3tb"
1641 [(set (match_operand:SI 0 "s_register_operand" "=r")
1642 (mult:SI (ashiftrt:SI
1643 (match_operand:SI 1 "s_register_operand" "r")
1646 (match_operand:HI 2 "s_register_operand" "r"))))]
1647 "TARGET_DSP_MULTIPLY"
1648 "smultb%?\\t%0, %1, %2"
1649 [(set_attr "type" "smulxy")
1650 (set_attr "predicable" "yes")]
1653 (define_insn "*mulhisi3bt"
1654 [(set (match_operand:SI 0 "s_register_operand" "=r")
1655 (mult:SI (sign_extend:SI
1656 (match_operand:HI 1 "s_register_operand" "r"))
1658 (match_operand:SI 2 "s_register_operand" "r")
1660 "TARGET_DSP_MULTIPLY"
1661 "smulbt%?\\t%0, %1, %2"
1662 [(set_attr "type" "smulxy")
1663 (set_attr "predicable" "yes")]
1666 (define_insn "*mulhisi3tt"
1667 [(set (match_operand:SI 0 "s_register_operand" "=r")
1668 (mult:SI (ashiftrt:SI
1669 (match_operand:SI 1 "s_register_operand" "r")
1672 (match_operand:SI 2 "s_register_operand" "r")
1674 "TARGET_DSP_MULTIPLY"
1675 "smultt%?\\t%0, %1, %2"
1676 [(set_attr "type" "smulxy")
1677 (set_attr "predicable" "yes")]
1680 (define_insn "maddhisi4"
1681 [(set (match_operand:SI 0 "s_register_operand" "=r")
1682 (plus:SI (mult:SI (sign_extend:SI
1683 (match_operand:HI 1 "s_register_operand" "r"))
1685 (match_operand:HI 2 "s_register_operand" "r")))
1686 (match_operand:SI 3 "s_register_operand" "r")))]
1687 "TARGET_DSP_MULTIPLY"
1688 "smlabb%?\\t%0, %1, %2, %3"
1689 [(set_attr "type" "smlaxy")
1690 (set_attr "predicable" "yes")]
1693 ;; Note: there is no maddhisi4ibt because this one is canonical form
1694 (define_insn "*maddhisi4tb"
1695 [(set (match_operand:SI 0 "s_register_operand" "=r")
1696 (plus:SI (mult:SI (ashiftrt:SI
1697 (match_operand:SI 1 "s_register_operand" "r")
1700 (match_operand:HI 2 "s_register_operand" "r")))
1701 (match_operand:SI 3 "s_register_operand" "r")))]
1702 "TARGET_DSP_MULTIPLY"
1703 "smlatb%?\\t%0, %1, %2, %3"
1704 [(set_attr "type" "smlaxy")
1705 (set_attr "predicable" "yes")]
1708 (define_insn "*maddhisi4tt"
1709 [(set (match_operand:SI 0 "s_register_operand" "=r")
1710 (plus:SI (mult:SI (ashiftrt:SI
1711 (match_operand:SI 1 "s_register_operand" "r")
1714 (match_operand:SI 2 "s_register_operand" "r")
1716 (match_operand:SI 3 "s_register_operand" "r")))]
1717 "TARGET_DSP_MULTIPLY"
1718 "smlatt%?\\t%0, %1, %2, %3"
1719 [(set_attr "type" "smlaxy")
1720 (set_attr "predicable" "yes")]
1723 (define_insn "maddhidi4"
1724 [(set (match_operand:DI 0 "s_register_operand" "=r")
1726 (mult:DI (sign_extend:DI
1727 (match_operand:HI 1 "s_register_operand" "r"))
1729 (match_operand:HI 2 "s_register_operand" "r")))
1730 (match_operand:DI 3 "s_register_operand" "0")))]
1731 "TARGET_DSP_MULTIPLY"
1732 "smlalbb%?\\t%Q0, %R0, %1, %2"
1733 [(set_attr "type" "smlalxy")
1734 (set_attr "predicable" "yes")])
1736 ;; Note: there is no maddhidi4ibt because this one is canonical form
1737 (define_insn "*maddhidi4tb"
1738 [(set (match_operand:DI 0 "s_register_operand" "=r")
1740 (mult:DI (sign_extend:DI
1742 (match_operand:SI 1 "s_register_operand" "r")
1745 (match_operand:HI 2 "s_register_operand" "r")))
1746 (match_operand:DI 3 "s_register_operand" "0")))]
1747 "TARGET_DSP_MULTIPLY"
1748 "smlaltb%?\\t%Q0, %R0, %1, %2"
1749 [(set_attr "type" "smlalxy")
1750 (set_attr "predicable" "yes")])
1752 (define_insn "*maddhidi4tt"
1753 [(set (match_operand:DI 0 "s_register_operand" "=r")
1755 (mult:DI (sign_extend:DI
1757 (match_operand:SI 1 "s_register_operand" "r")
1761 (match_operand:SI 2 "s_register_operand" "r")
1763 (match_operand:DI 3 "s_register_operand" "0")))]
1764 "TARGET_DSP_MULTIPLY"
1765 "smlaltt%?\\t%Q0, %R0, %1, %2"
1766 [(set_attr "type" "smlalxy")
1767 (set_attr "predicable" "yes")])
1769 (define_expand "mulsf3"
1770 [(set (match_operand:SF 0 "s_register_operand")
1771 (mult:SF (match_operand:SF 1 "s_register_operand")
1772 (match_operand:SF 2 "s_register_operand")))]
1773 "TARGET_32BIT && TARGET_HARD_FLOAT"
1777 (define_expand "muldf3"
1778 [(set (match_operand:DF 0 "s_register_operand")
1779 (mult:DF (match_operand:DF 1 "s_register_operand")
1780 (match_operand:DF 2 "s_register_operand")))]
1781 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1787 (define_expand "divsf3"
1788 [(set (match_operand:SF 0 "s_register_operand")
1789 (div:SF (match_operand:SF 1 "s_register_operand")
1790 (match_operand:SF 2 "s_register_operand")))]
1791 "TARGET_32BIT && TARGET_HARD_FLOAT"
1794 (define_expand "divdf3"
1795 [(set (match_operand:DF 0 "s_register_operand")
1796 (div:DF (match_operand:DF 1 "s_register_operand")
1797 (match_operand:DF 2 "s_register_operand")))]
1798 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1802 ; Expand logical operations. The mid-end expander does not split off memory
1803 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1804 ; So an explicit expander is needed to generate better code.
1806 (define_expand "<LOGICAL:optab>di3"
1807 [(set (match_operand:DI 0 "s_register_operand")
1808 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1809 (match_operand:DI 2 "arm_<optab>di_operand")))]
1812 rtx low = simplify_gen_binary (<CODE>, SImode,
1813 gen_lowpart (SImode, operands[1]),
1814 gen_lowpart (SImode, operands[2]));
1815 rtx high = simplify_gen_binary (<CODE>, SImode,
1816 gen_highpart (SImode, operands[1]),
1817 gen_highpart_mode (SImode, DImode,
1820 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1821 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1826 (define_expand "one_cmpldi2"
1827 [(set (match_operand:DI 0 "s_register_operand")
1828 (not:DI (match_operand:DI 1 "s_register_operand")))]
1831 rtx low = simplify_gen_unary (NOT, SImode,
1832 gen_lowpart (SImode, operands[1]),
1834 rtx high = simplify_gen_unary (NOT, SImode,
1835 gen_highpart_mode (SImode, DImode,
1839 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1840 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1845 ;; Split DImode and, ior, xor operations. Simply perform the logical
1846 ;; operation on the upper and lower halves of the registers.
1847 ;; This is needed for atomic operations in arm_split_atomic_op.
1848 ;; Avoid splitting IWMMXT instructions.
1850 [(set (match_operand:DI 0 "s_register_operand" "")
1851 (match_operator:DI 6 "logical_binary_operator"
1852 [(match_operand:DI 1 "s_register_operand" "")
1853 (match_operand:DI 2 "s_register_operand" "")]))]
1854 "TARGET_32BIT && reload_completed
1855 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1856 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1857 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1860 operands[3] = gen_highpart (SImode, operands[0]);
1861 operands[0] = gen_lowpart (SImode, operands[0]);
1862 operands[4] = gen_highpart (SImode, operands[1]);
1863 operands[1] = gen_lowpart (SImode, operands[1]);
1864 operands[5] = gen_highpart (SImode, operands[2]);
1865 operands[2] = gen_lowpart (SImode, operands[2]);
1869 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1870 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1872 [(set (match_operand:DI 0 "s_register_operand")
1873 (not:DI (match_operand:DI 1 "s_register_operand")))]
1875 [(set (match_dup 0) (not:SI (match_dup 1)))
1876 (set (match_dup 2) (not:SI (match_dup 3)))]
1879 operands[2] = gen_highpart (SImode, operands[0]);
1880 operands[0] = gen_lowpart (SImode, operands[0]);
1881 operands[3] = gen_highpart (SImode, operands[1]);
1882 operands[1] = gen_lowpart (SImode, operands[1]);
1886 (define_expand "andsi3"
1887 [(set (match_operand:SI 0 "s_register_operand")
1888 (and:SI (match_operand:SI 1 "s_register_operand")
1889 (match_operand:SI 2 "reg_or_int_operand")))]
1894 if (CONST_INT_P (operands[2]))
1896 if (INTVAL (operands[2]) == 255 && arm_arch6)
1898 operands[1] = convert_to_mode (QImode, operands[1], 1);
1899 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1903 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1904 operands[2] = force_reg (SImode, operands[2]);
1907 arm_split_constant (AND, SImode, NULL_RTX,
1908 INTVAL (operands[2]), operands[0],
1910 optimize && can_create_pseudo_p ());
1916 else /* TARGET_THUMB1 */
1918 if (!CONST_INT_P (operands[2]))
1920 rtx tmp = force_reg (SImode, operands[2]);
1921 if (rtx_equal_p (operands[0], operands[1]))
1925 operands[2] = operands[1];
1933 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1935 operands[2] = force_reg (SImode,
1936 GEN_INT (~INTVAL (operands[2])));
1938 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1943 for (i = 9; i <= 31; i++)
1945 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1947 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1951 else if ((HOST_WIDE_INT_1 << i) - 1
1952 == ~INTVAL (operands[2]))
1954 rtx shift = GEN_INT (i);
1955 rtx reg = gen_reg_rtx (SImode);
1957 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1958 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1964 operands[2] = force_reg (SImode, operands[2]);
1970 ; ??? Check split length for Thumb-2
1971 (define_insn_and_split "*arm_andsi3_insn"
1972 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1973 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1974 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1979 bic%?\\t%0, %1, #%B2
1983 && CONST_INT_P (operands[2])
1984 && !(const_ok_for_arm (INTVAL (operands[2]))
1985 || const_ok_for_arm (~INTVAL (operands[2])))"
1986 [(clobber (const_int 0))]
1988 arm_split_constant (AND, SImode, curr_insn,
1989 INTVAL (operands[2]), operands[0], operands[1], 0);
1992 [(set_attr "length" "4,4,4,4,16")
1993 (set_attr "predicable" "yes")
1994 (set_attr "predicable_short_it" "no,yes,no,no,no")
1995 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1998 (define_insn "*andsi3_compare0"
1999 [(set (reg:CC_NOOV CC_REGNUM)
2001 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2002 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2004 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2005 (and:SI (match_dup 1) (match_dup 2)))]
2009 bics%?\\t%0, %1, #%B2
2010 ands%?\\t%0, %1, %2"
2011 [(set_attr "conds" "set")
2012 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2015 (define_insn "*andsi3_compare0_scratch"
2016 [(set (reg:CC_NOOV CC_REGNUM)
2018 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2019 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2021 (clobber (match_scratch:SI 2 "=X,r,X"))]
2025 bics%?\\t%2, %0, #%B1
2027 [(set_attr "conds" "set")
2028 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2031 (define_insn "*zeroextractsi_compare0_scratch"
2032 [(set (reg:CC_NOOV CC_REGNUM)
2033 (compare:CC_NOOV (zero_extract:SI
2034 (match_operand:SI 0 "s_register_operand" "r")
2035 (match_operand 1 "const_int_operand" "n")
2036 (match_operand 2 "const_int_operand" "n"))
2039 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2040 && INTVAL (operands[1]) > 0
2041 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2042 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2044 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2045 << INTVAL (operands[2]));
2046 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2049 [(set_attr "conds" "set")
2050 (set_attr "predicable" "yes")
2051 (set_attr "type" "logics_imm")]
2054 (define_insn_and_split "*ne_zeroextractsi"
2055 [(set (match_operand:SI 0 "s_register_operand" "=r")
2056 (ne:SI (zero_extract:SI
2057 (match_operand:SI 1 "s_register_operand" "r")
2058 (match_operand:SI 2 "const_int_operand" "n")
2059 (match_operand:SI 3 "const_int_operand" "n"))
2061 (clobber (reg:CC CC_REGNUM))]
2063 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2064 && INTVAL (operands[2]) > 0
2065 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2066 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2069 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2070 && INTVAL (operands[2]) > 0
2071 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2072 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2073 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2074 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2076 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2078 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2079 (match_dup 0) (const_int 1)))]
2081 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2082 << INTVAL (operands[3]));
2084 [(set_attr "conds" "clob")
2085 (set (attr "length")
2086 (if_then_else (eq_attr "is_thumb" "yes")
2089 (set_attr "type" "multiple")]
2092 (define_insn_and_split "*ne_zeroextractsi_shifted"
2093 [(set (match_operand:SI 0 "s_register_operand" "=r")
2094 (ne:SI (zero_extract:SI
2095 (match_operand:SI 1 "s_register_operand" "r")
2096 (match_operand:SI 2 "const_int_operand" "n")
2099 (clobber (reg:CC CC_REGNUM))]
2103 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2104 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2106 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2108 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2109 (match_dup 0) (const_int 1)))]
2111 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2113 [(set_attr "conds" "clob")
2114 (set_attr "length" "8")
2115 (set_attr "type" "multiple")]
2118 (define_insn_and_split "*ite_ne_zeroextractsi"
2119 [(set (match_operand:SI 0 "s_register_operand" "=r")
2120 (if_then_else:SI (ne (zero_extract:SI
2121 (match_operand:SI 1 "s_register_operand" "r")
2122 (match_operand:SI 2 "const_int_operand" "n")
2123 (match_operand:SI 3 "const_int_operand" "n"))
2125 (match_operand:SI 4 "arm_not_operand" "rIK")
2127 (clobber (reg:CC CC_REGNUM))]
2129 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2130 && INTVAL (operands[2]) > 0
2131 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2132 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2133 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2136 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2137 && INTVAL (operands[2]) > 0
2138 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2139 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2140 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2141 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2142 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2144 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2146 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2147 (match_dup 0) (match_dup 4)))]
2149 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2150 << INTVAL (operands[3]));
2152 [(set_attr "conds" "clob")
2153 (set_attr "length" "8")
2154 (set_attr "type" "multiple")]
;; *ite_ne_zeroextractsi_shifted: as *ite_ne_zeroextractsi but for a field
;; whose extraction can be done with a single left shift; the split produces
;; ASHIFT-and-set-flags followed by a conditional move.  NOTE(review): this
;; extract has missing source lines — verify against upstream arm.md.
2157 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2158 [(set (match_operand:SI 0 "s_register_operand" "=r")
2159 (if_then_else:SI (ne (zero_extract:SI
2160 (match_operand:SI 1 "s_register_operand" "r")
2161 (match_operand:SI 2 "const_int_operand" "n")
2164 (match_operand:SI 3 "arm_not_operand" "rIK")
2166 (clobber (reg:CC CC_REGNUM))]
2167 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2169 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2170 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2171 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2173 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2175 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2176 (match_dup 0) (match_dup 3)))]
;; Convert the field width into the left-shift count that places the field
;; at the top of the word.
2178 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2180 [(set_attr "conds" "clob")
2181 (set_attr "length" "8")
2182 (set_attr "type" "multiple")]
2185 ;; ??? Thumb-2 has bitfield insert/extract instructions; consider using them here.
;; Two splitters that rewrite (op (zero_extract ...) reg) and
;; (op (sign_extract ...) reg) as a pair of shifts: shift the field to the
;; top of the word, then logical/arithmetic shift it back down, feeding the
;; shiftable operator.  NOTE(review): the "(define_split" opener lines are
;; missing from this extract — verify against upstream arm.md.
2187 [(set (match_operand:SI 0 "s_register_operand" "")
2188 (match_operator:SI 1 "shiftable_operator"
2189 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2190 (match_operand:SI 3 "const_int_operand" "")
2191 (match_operand:SI 4 "const_int_operand" ""))
2192 (match_operand:SI 5 "s_register_operand" "")]))
2193 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2195 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2198 [(lshiftrt:SI (match_dup 6) (match_dup 4))
;; Recompute operands 3/4 as shift counts from (width, position).
2201 HOST_WIDE_INT temp = INTVAL (operands[3]);
2203 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2204 operands[4] = GEN_INT (32 - temp);
;; Signed variant: identical shape but uses ashiftrt for the extraction.
2209 [(set (match_operand:SI 0 "s_register_operand" "")
2210 (match_operator:SI 1 "shiftable_operator"
2211 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2212 (match_operand:SI 3 "const_int_operand" "")
2213 (match_operand:SI 4 "const_int_operand" ""))
2214 (match_operand:SI 5 "s_register_operand" "")]))
2215 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2217 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2220 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2223 HOST_WIDE_INT temp = INTVAL (operands[3]);
2225 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2226 operands[4] = GEN_INT (32 - temp);
2230 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2231 ;;; represented by the bitfield, then this will produce incorrect results.
2232 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2233 ;;; which have a real bit-field insert instruction, the truncation happens
2234 ;;; in the bit-field insert instruction itself. Since arm does not have a
2235 ;;; bit-field insert instruction, we would have to emit code here to truncate
2236 ;;; the value before we insert. This loses some of the advantage of having
2237 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; insv: expander for bit-field insertion.  Strategy (as visible here):
;; on Thumb-2, prefer unaligned SI/HI stores for byte-aligned 16/32-bit
;; fields into memory, BFC (insv_zero) for inserting a zero constant, and
;; BFI (insv_t2) otherwise; on plain ARM, synthesize the insert with
;; AND/ORR/shift/rotate sequences, choosing among several constant tricks.
;; NOTE(review): this extract is missing many source lines (numbering gaps),
;; including brace/condition lines — do not edit without consulting the
;; complete upstream arm.md.
2239 (define_expand "insv"
2240 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2241 (match_operand 1 "general_operand")
2242 (match_operand 2 "general_operand"))
2243 (match_operand 3 "reg_or_int_operand"))]
2244 "TARGET_ARM || arm_arch_thumb2"
2247 int start_bit = INTVAL (operands[2]);
2248 int width = INTVAL (operands[1]);
2249 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2250 rtx target, subtarget;
2252 if (arm_arch_thumb2)
2254 if (unaligned_access && MEM_P (operands[0])
2255 && s_register_operand (operands[3], GET_MODE (operands[3]))
2256 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2260 if (BYTES_BIG_ENDIAN)
2261 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2266 base_addr = adjust_address (operands[0], SImode,
2267 start_bit / BITS_PER_UNIT);
2268 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2272 rtx tmp = gen_reg_rtx (HImode);
2274 base_addr = adjust_address (operands[0], HImode,
2275 start_bit / BITS_PER_UNIT);
2276 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2277 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2281 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2283 bool use_bfi = TRUE;
2285 if (CONST_INT_P (operands[3]))
2287 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2291 emit_insn (gen_insv_zero (operands[0], operands[1],
2296 /* See if the set can be done with a single orr instruction. */
2297 if (val == mask && const_ok_for_arm (val << start_bit))
2303 if (!REG_P (operands[3]))
2304 operands[3] = force_reg (SImode, operands[3]);
2306 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2315 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2318 target = copy_rtx (operands[0]);
2319 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2320 subreg as the final target. */
2321 if (GET_CODE (target) == SUBREG)
2323 subtarget = gen_reg_rtx (SImode);
2324 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2325 < GET_MODE_SIZE (SImode))
2326 target = SUBREG_REG (target);
2331 if (CONST_INT_P (operands[3]))
2333 /* Since we are inserting a known constant, we may be able to
2334 reduce the number of bits that we have to clear so that
2335 the mask becomes simple. */
2336 /* ??? This code does not check to see if the new mask is actually
2337 simpler. It may not be. */
2338 rtx op1 = gen_reg_rtx (SImode);
2339 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2340 start of this pattern. */
2341 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2342 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2344 emit_insn (gen_andsi3 (op1, operands[0],
2345 gen_int_mode (~mask2, SImode)));
2346 emit_insn (gen_iorsi3 (subtarget, op1,
2347 gen_int_mode (op3_value << start_bit, SImode)));
2349 else if (start_bit == 0
2350 && !(const_ok_for_arm (mask)
2351 || const_ok_for_arm (~mask)))
2353 /* A Trick, since we are setting the bottom bits in the word,
2354 we can shift operand[3] up, operand[0] down, OR them together
2355 and rotate the result back again. This takes 3 insns, and
2356 the third might be mergeable into another op. */
2357 /* The shift up copes with the possibility that operand[3] is
2358 wider than the bitfield. */
2359 rtx op0 = gen_reg_rtx (SImode);
2360 rtx op1 = gen_reg_rtx (SImode);
2362 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2363 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2364 emit_insn (gen_iorsi3 (op1, op1, op0));
2365 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2367 else if ((width + start_bit == 32)
2368 && !(const_ok_for_arm (mask)
2369 || const_ok_for_arm (~mask)))
2371 /* Similar trick, but slightly less efficient. */
2373 rtx op0 = gen_reg_rtx (SImode);
2374 rtx op1 = gen_reg_rtx (SImode);
2376 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2377 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2378 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2379 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2383 rtx op0 = gen_int_mode (mask, SImode);
2384 rtx op1 = gen_reg_rtx (SImode);
2385 rtx op2 = gen_reg_rtx (SImode);
2387 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2389 rtx tmp = gen_reg_rtx (SImode);
2391 emit_insn (gen_movsi (tmp, op0));
2395 /* Mask out any bits in operand[3] that are not needed. */
2396 emit_insn (gen_andsi3 (op1, operands[3], op0));
2398 if (CONST_INT_P (op0)
2399 && (const_ok_for_arm (mask << start_bit)
2400 || const_ok_for_arm (~(mask << start_bit))))
2402 op0 = gen_int_mode (~(mask << start_bit), SImode);
2403 emit_insn (gen_andsi3 (op2, operands[0], op0));
2407 if (CONST_INT_P (op0))
2409 rtx tmp = gen_reg_rtx (SImode);
2411 emit_insn (gen_movsi (tmp, op0));
2416 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2418 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2422 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2424 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2427 if (subtarget != target)
2429 /* If TARGET is still a SUBREG, then it must be wider than a word,
2430 so we must be careful only to set the subword we were asked to. */
2431 if (GET_CODE (target) == SUBREG)
2432 emit_move_insn (target, subtarget)
2434 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; insv_zero: insert a zero constant into a register bitfield.
;; NOTE(review): the source value / condition / template lines (presumably
;; a BFC) are missing from this extract — verify against upstream arm.md.
2441 (define_insn "insv_zero"
2442 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2443 (match_operand:SI 1 "const_int_M_operand" "M")
2444 (match_operand:SI 2 "const_int_M_operand" "M"))
2448 [(set_attr "length" "4")
2449 (set_attr "predicable" "yes")
2450 (set_attr "type" "bfm")]
;; insv_t2: Thumb-2 bitfield insert of a register source, emitting BFI.
2453 (define_insn "insv_t2"
2454 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2455 (match_operand:SI 1 "const_int_M_operand" "M")
2456 (match_operand:SI 2 "const_int_M_operand" "M"))
2457 (match_operand:SI 3 "s_register_operand" "r"))]
2459 "bfi%?\t%0, %3, %2, %1"
2460 [(set_attr "length" "4")
2461 (set_attr "predicable" "yes")
2462 (set_attr "type" "bfm")]
;; andsi_notsi_si: AND with complemented operand, i.e. the BIC instruction
;; (dest = op1 & ~op2).  Note the operand numbering is swapped relative to
;; the RTL order so the template reads "bic %0, %1, %2".
2465 (define_insn "andsi_notsi_si"
2466 [(set (match_operand:SI 0 "s_register_operand" "=r")
2467 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2468 (match_operand:SI 1 "s_register_operand" "r")))]
2470 "bic%?\\t%0, %1, %2"
2471 [(set_attr "predicable" "yes")
2472 (set_attr "type" "logic_reg")]
;; andsi_not_shiftsi_si: BIC where the complemented operand is itself
;; shifted (shift operator %S4 folded into the second source operand).
2475 (define_insn "andsi_not_shiftsi_si"
2476 [(set (match_operand:SI 0 "s_register_operand" "=r")
2477 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2478 [(match_operand:SI 2 "s_register_operand" "r")
2479 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2480 (match_operand:SI 1 "s_register_operand" "r")))]
2482 "bic%?\\t%0, %1, %2%S4"
2483 [(set_attr "predicable" "yes")
2484 (set_attr "shift" "2")
;; Cost type depends on whether the shift amount is an immediate.
2485 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2486 (const_string "logic_shift_imm")
2487 (const_string "logic_shift_reg")))]
2490 ;; Shifted bics pattern used to set up CC status register and not reusing
2491 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
2492 ;; does not support shift by register.
2493 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2494 [(set (reg:CC_NOOV CC_REGNUM)
2496 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2497 [(match_operand:SI 1 "s_register_operand" "r")
2498 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2499 (match_operand:SI 3 "s_register_operand" "r"))
;; The BICS value itself is discarded into a scratch register.
2501 (clobber (match_scratch:SI 4 "=r"))]
2502 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2503 "bics%?\\t%4, %3, %1%S0"
2504 [(set_attr "predicable" "yes")
2505 (set_attr "conds" "set")
2506 (set_attr "shift" "1")
2507 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2508 (const_string "logic_shift_imm")
2509 (const_string "logic_shift_reg")))]
2512 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2513 ;; getting reused later.
2514 (define_insn "andsi_not_shiftsi_si_scc"
2515 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2517 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2518 [(match_operand:SI 1 "s_register_operand" "r")
2519 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2520 (match_operand:SI 3 "s_register_operand" "r"))
;; Second set keeps the BICS result live in operand 4.
2522 (set (match_operand:SI 4 "s_register_operand" "=r")
2523 (and:SI (not:SI (match_op_dup 0
2527 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2528 "bics%?\\t%4, %3, %1%S0"
2529 [(set_attr "predicable" "yes")
2530 (set_attr "conds" "set")
2531 (set_attr "shift" "1")
2532 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2533 (const_string "logic_shift_imm")
2534 (const_string "logic_shift_reg")))]
;; *andsi_notsi_si_compare0: BICS that both sets the flags and keeps the
;; result (operand 0).  NOTE(review): compare/condition/template lines are
;; missing from this extract — verify against upstream arm.md.
2537 (define_insn "*andsi_notsi_si_compare0"
2538 [(set (reg:CC_NOOV CC_REGNUM)
2540 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2541 (match_operand:SI 1 "s_register_operand" "r"))
2543 (set (match_operand:SI 0 "s_register_operand" "=r")
2544 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2547 [(set_attr "conds" "set")
2548 (set_attr "type" "logics_shift_reg")]
;; Flag-setting variant whose result is discarded into a scratch register.
2551 (define_insn "*andsi_notsi_si_compare0_scratch"
2552 [(set (reg:CC_NOOV CC_REGNUM)
2554 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2555 (match_operand:SI 1 "s_register_operand" "r"))
2557 (clobber (match_scratch:SI 0 "=r"))]
2560 [(set_attr "conds" "set")
2561 (set_attr "type" "logics_shift_reg")]
;; iorsi3: expander for 32-bit inclusive OR.  For constant operands it
;; either forces the constant into a register (when early splitting is
;; undesirable) or lets arm_split_constant synthesize the OR; the Thumb-1
;; leg forces a register and canonicalizes the operand order.
;; NOTE(review): several lines are missing from this extract (numbering
;; gaps) — verify against upstream arm.md.
2564 (define_expand "iorsi3"
2565 [(set (match_operand:SI 0 "s_register_operand")
2566 (ior:SI (match_operand:SI 1 "s_register_operand")
2567 (match_operand:SI 2 "reg_or_int_operand")))]
2570 if (CONST_INT_P (operands[2]))
2574 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2575 operands[2] = force_reg (SImode, operands[2]);
2578 arm_split_constant (IOR, SImode, NULL_RTX,
2579 INTVAL (operands[2]), operands[0],
2581 optimize && can_create_pseudo_p ());
2585 else /* TARGET_THUMB1 */
2587 rtx tmp = force_reg (SImode, operands[2]);
2588 if (rtx_equal_p (operands[0], operands[1]))
2592 operands[2] = operands[1];
;; *iorsi3_insn: ORR / ORN alternatives plus a 16-byte worst-case constant
;; alternative that is split via arm_split_constant when the immediate is
;; not encodable (even via ORN on Thumb-2).
2600 (define_insn_and_split "*iorsi3_insn"
2601 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2602 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2603 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2608 orn%?\\t%0, %1, #%B2
2612 && CONST_INT_P (operands[2])
2613 && !(const_ok_for_arm (INTVAL (operands[2]))
2614 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2615 [(clobber (const_int 0))]
2617 arm_split_constant (IOR, SImode, curr_insn,
2618 INTVAL (operands[2]), operands[0], operands[1], 0);
2621 [(set_attr "length" "4,4,4,4,16")
2622 (set_attr "arch" "32,t2,t2,32,32")
2623 (set_attr "predicable" "yes")
2624 (set_attr "predicable_short_it" "no,yes,no,no,no")
2625 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole/split (opener line missing from this extract): when an IOR
;; immediate is not encodable but its complement is, load the constant into
;; a scratch register first and OR with that.  NOTE(review): verify the
;; define_* header against upstream arm.md.
2629 [(match_scratch:SI 3 "r")
2630 (set (match_operand:SI 0 "arm_general_register_operand" "")
2631 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2632 (match_operand:SI 2 "const_int_operand" "")))]
2634 && !const_ok_for_arm (INTVAL (operands[2]))
2635 && const_ok_for_arm (~INTVAL (operands[2]))"
2636 [(set (match_dup 3) (match_dup 2))
2637 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: OR that also sets the flags and keeps the result.
2641 (define_insn "*iorsi3_compare0"
2642 [(set (reg:CC_NOOV CC_REGNUM)
2644 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2645 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2647 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2648 (ior:SI (match_dup 1) (match_dup 2)))]
2650 "orrs%?\\t%0, %1, %2"
2651 [(set_attr "conds" "set")
2652 (set_attr "arch" "*,t2,*")
2653 (set_attr "length" "4,2,4")
2654 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; ORRS variant whose result is discarded into a scratch register.
2657 (define_insn "*iorsi3_compare0_scratch"
2658 [(set (reg:CC_NOOV CC_REGNUM)
2660 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2661 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2663 (clobber (match_scratch:SI 0 "=r,l,r"))]
2665 "orrs%?\\t%0, %1, %2"
2666 [(set_attr "conds" "set")
2667 (set_attr "arch" "*,t2,*")
2668 (set_attr "length" "4,2,4")
2669 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; xorsi3: expander for 32-bit exclusive OR; same constant-splitting
;; strategy as iorsi3 (force to register or synthesize via
;; arm_split_constant), with a Thumb-1 fallback leg.  NOTE(review): lines
;; are missing from this extract — verify against upstream arm.md.
2672 (define_expand "xorsi3"
2673 [(set (match_operand:SI 0 "s_register_operand")
2674 (xor:SI (match_operand:SI 1 "s_register_operand")
2675 (match_operand:SI 2 "reg_or_int_operand")))]
2677 "if (CONST_INT_P (operands[2]))
2681 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2682 operands[2] = force_reg (SImode, operands[2]);
2685 arm_split_constant (XOR, SImode, NULL_RTX,
2686 INTVAL (operands[2]), operands[0],
2688 optimize && can_create_pseudo_p ());
2692 else /* TARGET_THUMB1 */
2694 rtx tmp = force_reg (SImode, operands[2]);
2695 if (rtx_equal_p (operands[0], operands[1]))
2699 operands[2] = operands[1];
;; *arm_xorsi3: EOR alternatives plus a constant alternative split through
;; arm_split_constant when the immediate is not encodable.
2706 (define_insn_and_split "*arm_xorsi3"
2707 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2708 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2709 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2717 && CONST_INT_P (operands[2])
2718 && !const_ok_for_arm (INTVAL (operands[2]))"
2719 [(clobber (const_int 0))]
2721 arm_split_constant (XOR, SImode, curr_insn,
2722 INTVAL (operands[2]), operands[0], operands[1], 0);
2725 [(set_attr "length" "4,4,4,16")
2726 (set_attr "predicable" "yes")
2727 (set_attr "predicable_short_it" "no,yes,no,no")
2728 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; EORS: XOR that sets the flags and keeps the result.
2731 (define_insn "*xorsi3_compare0"
2732 [(set (reg:CC_NOOV CC_REGNUM)
2733 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2734 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2736 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2737 (xor:SI (match_dup 1) (match_dup 2)))]
2739 "eors%?\\t%0, %1, %2"
2740 [(set_attr "conds" "set")
2741 (set_attr "type" "logics_imm,logics_reg")]
;; TEQ-style variant: flags only, result discarded.
2744 (define_insn "*xorsi3_compare0_scratch"
2745 [(set (reg:CC_NOOV CC_REGNUM)
2746 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2747 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2751 [(set_attr "conds" "set")
2752 (set_attr "type" "logics_imm,logics_reg")]
2755 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2756 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; Split implementing the De Morgan rewrite described above; the
;; "(define_split" opener line is missing from this extract.
2760 [(set (match_operand:SI 0 "s_register_operand" "")
2761 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2762 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2763 (match_operand:SI 3 "arm_rhs_operand" "")))
2764 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2766 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2767 (not:SI (match_dup 3))))
2768 (set (match_dup 0) (not:SI (match_dup 4)))]
;; *andsi_iorsi3_notsi: (A | B) & ~C, emitted as ORR followed by BIC after
;; reload; the prep statement folds the NOT into a constant operand 3 so no
;; (not (const_int)) RTX is created.
2772 (define_insn_and_split "*andsi_iorsi3_notsi"
2773 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2774 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2775 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2776 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2778 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2779 "&& reload_completed"
2780 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2781 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2783 /* If operands[3] is a constant make sure to fold the NOT into it
2784 to avoid creating a NOT of a CONST_INT. */
2785 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2786 if (CONST_INT_P (not_rtx))
2788 operands[4] = operands[0];
2789 operands[5] = not_rtx;
2793 operands[5] = operands[0];
2794 operands[4] = not_rtx;
2797 [(set_attr "length" "8")
2798 (set_attr "ce_count" "2")
2799 (set_attr "predicable" "yes")
2800 (set_attr "type" "multiple")]
2803 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2804 ; insns are available?
;; Four splitters (their "(define_split" openers are missing from this
;; extract) that rewrite a logical op combining an extracted bitfield with
;; a shifted operand as plain shift pairs, in both operand orders and for
;; both zero_extract (lshiftrt) and sign_extract (ashiftrt).  Each requires
;; the outer and inner logical operators to match and the field width to be
;; the complement of the shift count.
2806 [(set (match_operand:SI 0 "s_register_operand" "")
2807 (match_operator:SI 1 "logical_binary_operator"
2808 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2809 (match_operand:SI 3 "const_int_operand" "")
2810 (match_operand:SI 4 "const_int_operand" ""))
2811 (match_operator:SI 9 "logical_binary_operator"
2812 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2813 (match_operand:SI 6 "const_int_operand" ""))
2814 (match_operand:SI 7 "s_register_operand" "")])]))
2815 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2817 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2818 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2821 [(ashift:SI (match_dup 2) (match_dup 4))
2825 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2828 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Same, with the zero_extract as the second operand of the outer operator.
2832 [(set (match_operand:SI 0 "s_register_operand" "")
2833 (match_operator:SI 1 "logical_binary_operator"
2834 [(match_operator:SI 9 "logical_binary_operator"
2835 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2836 (match_operand:SI 6 "const_int_operand" ""))
2837 (match_operand:SI 7 "s_register_operand" "")])
2838 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2839 (match_operand:SI 3 "const_int_operand" "")
2840 (match_operand:SI 4 "const_int_operand" ""))]))
2841 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2843 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2844 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2847 [(ashift:SI (match_dup 2) (match_dup 4))
2851 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2854 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed variants: sign_extract paired with ashiftrt.
2858 [(set (match_operand:SI 0 "s_register_operand" "")
2859 (match_operator:SI 1 "logical_binary_operator"
2860 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2861 (match_operand:SI 3 "const_int_operand" "")
2862 (match_operand:SI 4 "const_int_operand" ""))
2863 (match_operator:SI 9 "logical_binary_operator"
2864 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2865 (match_operand:SI 6 "const_int_operand" ""))
2866 (match_operand:SI 7 "s_register_operand" "")])]))
2867 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2869 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2870 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2873 [(ashift:SI (match_dup 2) (match_dup 4))
2877 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2880 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed variant with the sign_extract as the second operand.
2884 [(set (match_operand:SI 0 "s_register_operand" "")
2885 (match_operator:SI 1 "logical_binary_operator"
2886 [(match_operator:SI 9 "logical_binary_operator"
2887 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2888 (match_operand:SI 6 "const_int_operand" ""))
2889 (match_operand:SI 7 "s_register_operand" "")])
2890 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2891 (match_operand:SI 3 "const_int_operand" "")
2892 (match_operand:SI 4 "const_int_operand" ""))]))
2893 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2895 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2896 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2899 [(ashift:SI (match_dup 2) (match_dup 4))
2903 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2906 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2910 ;; Minimum and maximum insns
;; smaxsi3: expander.  smax with 0 or -1 has flag-free forms (*smax_0 /
;; *smax_m1 below), so those cases are emitted without the CC clobber.
2912 (define_expand "smaxsi3"
2914 (set (match_operand:SI 0 "s_register_operand")
2915 (smax:SI (match_operand:SI 1 "s_register_operand")
2916 (match_operand:SI 2 "arm_rhs_operand")))
2917 (clobber (reg:CC CC_REGNUM))])]
2920 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2922 /* No need for a clobber of the condition code register here. */
2923 emit_insn (gen_rtx_SET (operands[0],
2924 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear the low bits when the sign bit is set — BIC with the
;; arithmetic-shifted sign mask.
2930 (define_insn "*smax_0"
2931 [(set (match_operand:SI 0 "s_register_operand" "=r")
2932 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2935 "bic%?\\t%0, %1, %1, asr #31"
2936 [(set_attr "predicable" "yes")
2937 (set_attr "type" "logic_shift_reg")]
;; max(x, -1): ORR with the sign mask.
2940 (define_insn "*smax_m1"
2941 [(set (match_operand:SI 0 "s_register_operand" "=r")
2942 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2945 "orr%?\\t%0, %1, %1, asr #31"
2946 [(set_attr "predicable" "yes")
2947 (set_attr "type" "logic_shift_reg")]
;; General smax: cmp plus conditional moves, split after reload into a
;; compare and an if_then_else on GE.
2950 (define_insn_and_split "*arm_smax_insn"
2951 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2952 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2953 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2954 (clobber (reg:CC CC_REGNUM))]
2957 ; cmp\\t%1, %2\;movlt\\t%0, %2
2958 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2960 [(set (reg:CC CC_REGNUM)
2961 (compare:CC (match_dup 1) (match_dup 2)))
2963 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2967 [(set_attr "conds" "clob")
2968 (set_attr "length" "8,12")
2969 (set_attr "type" "multiple")]
;; sminsi3: expander; smin with 0 has a flag-free AND form (*smin_0), so
;; that case is emitted without the CC clobber.
2972 (define_expand "sminsi3"
2974 (set (match_operand:SI 0 "s_register_operand")
2975 (smin:SI (match_operand:SI 1 "s_register_operand")
2976 (match_operand:SI 2 "arm_rhs_operand")))
2977 (clobber (reg:CC CC_REGNUM))])]
2980 if (operands[2] == const0_rtx)
2982 /* No need for a clobber of the condition code register here. */
2983 emit_insn (gen_rtx_SET (operands[0],
2984 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): AND with the arithmetic-shifted sign mask.
2990 (define_insn "*smin_0"
2991 [(set (match_operand:SI 0 "s_register_operand" "=r")
2992 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2995 "and%?\\t%0, %1, %1, asr #31"
2996 [(set_attr "predicable" "yes")
2997 (set_attr "type" "logic_shift_reg")]
;; General smin: cmp plus conditional moves, split after reload into a
;; compare and an if_then_else on LT.
3000 (define_insn_and_split "*arm_smin_insn"
3001 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3002 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3003 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3004 (clobber (reg:CC CC_REGNUM))]
3007 ; cmp\\t%1, %2\;movge\\t%0, %2
3008 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3010 [(set (reg:CC CC_REGNUM)
3011 (compare:CC (match_dup 1) (match_dup 2)))
3013 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3017 [(set_attr "conds" "clob")
3018 (set_attr "length" "8,12")
3019 (set_attr "type" "multiple,multiple")]
;; Unsigned max/min expanders and their cmp + conditional-move splits
;; (GEU for umax, LTU for umin).  NOTE(review): both *arm_umaxsi3 and
;; *arm_uminsi3 carry (set_attr "type" "store_4") even though they emit a
;; cmp/mov sequence, not a store — this looks like a copy-paste from
;; *store_minmaxsi and probably should be "multiple"; verify against
;; upstream arm.md before changing.
3022 (define_expand "umaxsi3"
3024 (set (match_operand:SI 0 "s_register_operand")
3025 (umax:SI (match_operand:SI 1 "s_register_operand")
3026 (match_operand:SI 2 "arm_rhs_operand")))
3027 (clobber (reg:CC CC_REGNUM))])]
3032 (define_insn_and_split "*arm_umaxsi3"
3033 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3034 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3035 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3036 (clobber (reg:CC CC_REGNUM))]
3039 ; cmp\\t%1, %2\;movcc\\t%0, %2
3040 ; cmp\\t%1, %2\;movcs\\t%0, %1
3041 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3043 [(set (reg:CC CC_REGNUM)
3044 (compare:CC (match_dup 1) (match_dup 2)))
3046 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3050 [(set_attr "conds" "clob")
3051 (set_attr "length" "8,8,12")
3052 (set_attr "type" "store_4")]
3055 (define_expand "uminsi3"
3057 (set (match_operand:SI 0 "s_register_operand")
3058 (umin:SI (match_operand:SI 1 "s_register_operand")
3059 (match_operand:SI 2 "arm_rhs_operand")))
3060 (clobber (reg:CC CC_REGNUM))])]
3065 (define_insn_and_split "*arm_uminsi3"
3066 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3067 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3068 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3069 (clobber (reg:CC CC_REGNUM))]
3072 ; cmp\\t%1, %2\;movcs\\t%0, %2
3073 ; cmp\\t%1, %2\;movcc\\t%0, %1
3074 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3076 [(set (reg:CC CC_REGNUM)
3077 (compare:CC (match_dup 1) (match_dup 2)))
3079 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3083 [(set_attr "conds" "clob")
3084 (set_attr "length" "8,8,12")
3085 (set_attr "type" "store_4")]
;; *store_minmaxsi: store min/max of two registers directly to memory via
;; cmp and two conditionally-executed STRs; enabled only when optimizing
;; for size and IT-block restrictions do not apply.  The output code builds
;; the comparison operator at assembly time.
3088 (define_insn "*store_minmaxsi"
3089 [(set (match_operand:SI 0 "memory_operand" "=m")
3090 (match_operator:SI 3 "minmax_operator"
3091 [(match_operand:SI 1 "s_register_operand" "r")
3092 (match_operand:SI 2 "s_register_operand" "r")]))
3093 (clobber (reg:CC CC_REGNUM))]
3094 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3096 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3097 operands[1], operands[2]);
3098 output_asm_insn (\"cmp\\t%1, %2\", operands);
3100 output_asm_insn (\"ite\t%d3\", operands);
3101 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3102 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3105 [(set_attr "conds" "clob")
3106 (set (attr "length")
3107 (if_then_else (eq_attr "is_thumb" "yes")
3110 (set_attr "type" "store_4")]
3113 ; Reject the frame pointer in operand[1], since reloading this after
3114 ; it has been eliminated can cause carnage.
;; *minmax_arithsi: shiftable operator applied to a min/max result,
;; implemented as cmp plus two conditionally-executed ALU ops; the Thumb-2
;; path emits an IT/ITE block first.
3115 (define_insn "*minmax_arithsi"
3116 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3117 (match_operator:SI 4 "shiftable_operator"
3118 [(match_operator:SI 5 "minmax_operator"
3119 [(match_operand:SI 2 "s_register_operand" "r,r")
3120 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3121 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3122 (clobber (reg:CC CC_REGNUM))]
3123 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3126 enum rtx_code code = GET_CODE (operands[4]);
;; Fast path check: alternative 0 with op3 == 0 and an idempotent-with-zero
;; operator (PLUS/IOR/XOR) can skip one conditional instruction.
3129 if (which_alternative != 0 || operands[3] != const0_rtx
3130 || (code != PLUS && code != IOR && code != XOR))
3135 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3136 operands[2], operands[3]);
3137 output_asm_insn (\"cmp\\t%2, %3\", operands);
3141 output_asm_insn (\"ite\\t%d5\", operands);
3143 output_asm_insn (\"it\\t%d5\", operands);
3145 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3147 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3150 [(set_attr "conds" "clob")
3151 (set (attr "length")
3152 (if_then_else (eq_attr "is_thumb" "yes")
3155 (set_attr "type" "multiple")]
3158 ; Reject the frame pointer in operand[1], since reloading this after
3159 ; it has been eliminated can cause carnage.
;; *minmax_arithsi_non_canon: non-canonical form (minus with min/max as
;; second operand); split after reload into a compare plus cond_exec'd
;; operations, with the prep statement building both the condition and its
;; reverse (using the unordered-aware reversal for FP compare modes).
3160 (define_insn_and_split "*minmax_arithsi_non_canon"
3161 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3163 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3164 (match_operator:SI 4 "minmax_operator"
3165 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3166 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3167 (clobber (reg:CC CC_REGNUM))]
3168 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3169 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3171 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3172 [(set (reg:CC CC_REGNUM)
3173 (compare:CC (match_dup 2) (match_dup 3)))
3175 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3177 (minus:SI (match_dup 1)
3179 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3183 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3184 operands[2], operands[3]);
3185 enum rtx_code rc = minmax_code (operands[4]);
3186 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3187 operands[2], operands[3]);
3189 if (mode == CCFPmode || mode == CCFPEmode)
3190 rc = reverse_condition_maybe_unordered (rc);
3192 rc = reverse_condition (rc);
3193 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
;; operand 6 is the subtraction, folded to plus_constant for immediates.
3194 if (CONST_INT_P (operands[3]))
3195 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3197 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3199 [(set_attr "conds" "clob")
3200 (set (attr "length")
3201 (if_then_else (eq_attr "is_thumb" "yes")
3204 (set_attr "type" "multiple")]
;; Saturation patterns: a smin/smax pair whose bounds match a signed or
;; unsigned saturation range is emitted as a single SSAT/USAT.  The SAT
;; iterator and its attributes map operand positions for both nestings.
3207 (define_code_iterator SAT [smin smax])
3208 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3209 (define_code_attr SATlo [(smin "1") (smax "2")])
3210 (define_code_attr SAThi [(smin "2") (smax "1")])
3212 (define_insn "*satsi_<SAT:code>"
3213 [(set (match_operand:SI 0 "s_register_operand" "=r")
3214 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3215 (match_operand:SI 1 "const_int_operand" "i"))
3216 (match_operand:SI 2 "const_int_operand" "i")))]
3217 "TARGET_32BIT && arm_arch6
3218 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
;; Re-run the match to recover the bit-width and signedness for output.
3222 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3223 &mask, &signed_sat))
3226 operands[1] = GEN_INT (mask);
3228 return "ssat%?\t%0, %1, %3";
3230 return "usat%?\t%0, %1, %3";
3232 [(set_attr "predicable" "yes")
3233 (set_attr "type" "alus_imm")]
;; Shifted variant: the saturated value is itself a shifted register
;; (%S3 appended to the source operand).
3236 (define_insn "*satsi_<SAT:code>_shift"
3237 [(set (match_operand:SI 0 "s_register_operand" "=r")
3238 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3239 [(match_operand:SI 4 "s_register_operand" "r")
3240 (match_operand:SI 5 "const_int_operand" "i")])
3241 (match_operand:SI 1 "const_int_operand" "i"))
3242 (match_operand:SI 2 "const_int_operand" "i")))]
3243 "TARGET_32BIT && arm_arch6
3244 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3248 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3249 &mask, &signed_sat))
3252 operands[1] = GEN_INT (mask);
3254 return "ssat%?\t%0, %1, %4%S3";
3256 return "usat%?\t%0, %1, %4%S3";
3258 [(set_attr "predicable" "yes")
3259 (set_attr "shift" "3")
3260 (set_attr "type" "logic_shift_reg")])
3262 ;; Shift and rotation insns
;; 64-bit left shift: lowered to a core-register sequence by
;; arm_emit_coreregs_64bit_shift, which needs two SImode scratches.
3264 (define_expand "ashldi3"
3265 [(set (match_operand:DI 0 "s_register_operand")
3266 (ashift:DI (match_operand:DI 1 "s_register_operand")
3267 (match_operand:SI 2 "reg_or_int_operand")))]
3270 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3271 operands[2], gen_reg_rtx (SImode),
3272 gen_reg_rtx (SImode));
;; 32-bit left shift: a constant shift amount greater than 31 is
;; undefined at the RTL level, so emit a plain zero move instead.
3276 (define_expand "ashlsi3"
3277 [(set (match_operand:SI 0 "s_register_operand")
3278 (ashift:SI (match_operand:SI 1 "s_register_operand")
3279 (match_operand:SI 2 "arm_rhs_operand")))]
3282 if (CONST_INT_P (operands[2])
3283 && (UINTVAL (operands[2])) > 31)
3285 emit_insn (gen_movsi (operands[0], const0_rtx));
;; 64-bit arithmetic right shift, lowered like ashldi3 via
;; arm_emit_coreregs_64bit_shift with two scratch registers.
3291 (define_expand "ashrdi3"
3292 [(set (match_operand:DI 0 "s_register_operand")
3293 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3294 (match_operand:SI 2 "reg_or_int_operand")))]
3297 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3298 operands[2], gen_reg_rtx (SImode),
3299 gen_reg_rtx (SImode));
;; 32-bit arithmetic right shift: constant amounts above 31 are
;; clamped to 31 (which replicates the sign bit across the result).
3303 (define_expand "ashrsi3"
3304 [(set (match_operand:SI 0 "s_register_operand")
3305 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3306 (match_operand:SI 2 "arm_rhs_operand")))]
3309 if (CONST_INT_P (operands[2])
3310 && UINTVAL (operands[2]) > 31)
3311 operands[2] = GEN_INT (31);
;; 64-bit logical right shift, lowered via
;; arm_emit_coreregs_64bit_shift with two scratch registers.
3315 (define_expand "lshrdi3"
3316 [(set (match_operand:DI 0 "s_register_operand")
3317 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3318 (match_operand:SI 2 "reg_or_int_operand")))]
3321 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3322 operands[2], gen_reg_rtx (SImode),
3323 gen_reg_rtx (SImode));
;; 32-bit logical right shift: constant amounts above 31 produce
;; zero, so emit a plain zero move in that case.
3327 (define_expand "lshrsi3"
3328 [(set (match_operand:SI 0 "s_register_operand")
3329 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3330 (match_operand:SI 2 "arm_rhs_operand")))]
3333 if (CONST_INT_P (operands[2])
3334 && (UINTVAL (operands[2])) > 31)
3336 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Rotate left is implemented via rotate right: a constant amount n
;; becomes rotatert by (32 - n) % 32; a register amount is first
;; subtracted from 32 into a fresh register.
3342 (define_expand "rotlsi3"
3343 [(set (match_operand:SI 0 "s_register_operand")
3344 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3345 (match_operand:SI 2 "reg_or_int_operand")))]
3348 if (CONST_INT_P (operands[2]))
3349 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3352 rtx reg = gen_reg_rtx (SImode);
3353 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant amounts are reduced modulo 32; on Thumb-1
;; (no immediate rotate form) constants are forced into a register.
3359 (define_expand "rotrsi3"
3360 [(set (match_operand:SI 0 "s_register_operand")
3361 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3362 (match_operand:SI 2 "arm_rhs_operand")))]
3367 if (CONST_INT_P (operands[2])
3368 && UINTVAL (operands[2]) > 31)
3369 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3371 else /* TARGET_THUMB1 */
3373 if (CONST_INT_P (operands [2]))
3374 operands [2] = force_reg (SImode, operands[2]);
;; Generic SImode shift by register or immediate, covering every
;; shift_operator code.  The assembly is produced by
;; arm_output_shift; alternatives 0/1 are 16-bit Thumb-2 encodings.
3379 (define_insn "*arm_shiftsi3"
3380 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3381 (match_operator:SI 3 "shift_operator"
3382 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3383 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3385 "* return arm_output_shift(operands, 0);"
3386 [(set_attr "predicable" "yes")
3387 (set_attr "arch" "t2,t2,*,*")
3388 (set_attr "predicable_short_it" "yes,yes,no,no")
3389 (set_attr "length" "4")
3390 (set_attr "shift" "1")
3391 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
;; Shift that also sets the condition codes (CC_NOOV compare of the
;; shifted result) and writes the shifted value to operand 0.
;; NOTE(review): the compare's second argument line is missing from
;; this extract — presumably (const_int 0); confirm in full arm.md.
3394 (define_insn "*shiftsi3_compare0"
3395 [(set (reg:CC_NOOV CC_REGNUM)
3396 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3397 [(match_operand:SI 1 "s_register_operand" "r,r")
3398 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3400 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3401 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3403 "* return arm_output_shift(operands, 1);"
3404 [(set_attr "conds" "set")
3405 (set_attr "shift" "1")
3406 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; Same flag-setting shift, but the shifted value itself is discarded
;; (scratch destination) — only the condition codes are wanted.
3409 (define_insn "*shiftsi3_compare0_scratch"
3410 [(set (reg:CC_NOOV CC_REGNUM)
3411 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3412 [(match_operand:SI 1 "s_register_operand" "r,r")
3413 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3415 (clobber (match_scratch:SI 0 "=r,r"))]
3417 "* return arm_output_shift(operands, 1);"
3418 [(set_attr "conds" "set")
3419 (set_attr "shift" "1")
3420 (set_attr "type" "shift_imm,shift_reg")]
;; Bitwise NOT of a shifted register — maps onto MVN with a shift
;; operand.  NOTE(review): the output template line for this first
;; pattern is missing from this extract (the flag-setting variants
;; below use "mvns").
3423 (define_insn "*not_shiftsi"
3424 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3425 (not:SI (match_operator:SI 3 "shift_operator"
3426 [(match_operand:SI 1 "s_register_operand" "r,r")
3427 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3430 [(set_attr "predicable" "yes")
3431 (set_attr "shift" "1")
3432 (set_attr "arch" "32,a")
3433 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; As above but also sets the condition codes (MVNS) and keeps the
;; result.
3435 (define_insn "*not_shiftsi_compare0"
3436 [(set (reg:CC_NOOV CC_REGNUM)
3438 (not:SI (match_operator:SI 3 "shift_operator"
3439 [(match_operand:SI 1 "s_register_operand" "r,r")
3440 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3442 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3443 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3445 "mvns%?\\t%0, %1%S3"
3446 [(set_attr "conds" "set")
3447 (set_attr "shift" "1")
3448 (set_attr "arch" "32,a")
3449 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; Flag-setting variant whose result is discarded into a scratch.
3451 (define_insn "*not_shiftsi_compare0_scratch"
3452 [(set (reg:CC_NOOV CC_REGNUM)
3454 (not:SI (match_operator:SI 3 "shift_operator"
3455 [(match_operand:SI 1 "s_register_operand" "r,r")
3456 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3458 (clobber (match_scratch:SI 0 "=r,r"))]
3460 "mvns%?\\t%0, %1%S3"
3461 [(set_attr "conds" "set")
3462 (set_attr "shift" "1")
3463 (set_attr "arch" "32,a")
3464 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3466 ;; We don't really have extzv, but defining this using shifts helps
3467 ;; to reduce register pressure later on.
;; Zero-extract a bitfield.  Thumb-2 targets try, in order:
;; an unaligned 16/32-bit load for byte-aligned memory fields, then
;; a UBFX via extzv_t2 for register operands.  Other targets fall
;; back to a left shift + logical right shift pair (extzv_t1).
3469 (define_expand "extzv"
3470 [(set (match_operand 0 "s_register_operand")
3471 (zero_extract (match_operand 1 "nonimmediate_operand")
3472 (match_operand 2 "const_int_operand")
3473 (match_operand 3 "const_int_operand")))]
3474 "TARGET_THUMB1 || arm_arch_thumb2"
;; lshift/rshift implement the two-shift fallback: shift the field up
;; to the top of the word, then shift it back down zero-filled.
3477 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3478 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3480 if (arm_arch_thumb2)
3482 HOST_WIDE_INT width = INTVAL (operands[2]);
3483 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
;; Byte-aligned 16/32-bit fields in memory can use the unaligned
;; load instructions directly.
3485 if (unaligned_access && MEM_P (operands[1])
3486 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3490 if (BYTES_BIG_ENDIAN)
3491 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3496 base_addr = adjust_address (operands[1], SImode,
3497 bitpos / BITS_PER_UNIT);
3498 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3502 rtx dest = operands[0];
3503 rtx tmp = gen_reg_rtx (SImode);
3505 /* We may get a paradoxical subreg here. Strip it off. */
3506 if (GET_CODE (dest) == SUBREG
3507 && GET_MODE (dest) == SImode
3508 && GET_MODE (SUBREG_REG (dest)) == HImode)
3509 dest = SUBREG_REG (dest);
3511 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3514 base_addr = adjust_address (operands[1], HImode,
3515 bitpos / BITS_PER_UNIT);
3516 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3517 emit_move_insn (gen_lowpart (SImode, dest), tmp);
;; Register source on Thumb-2: emit a UBFX (extzv_t2).
3521 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3523 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
;; Fallback path: two-shift sequence for register operands.
3531 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3534 operands[3] = GEN_INT (rshift);
3538 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3542 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3543 operands[3], gen_reg_rtx (SImode)));
3548 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Shift the field up so its top bit reaches bit 31 (operand 2),
;; then logical-shift right (operand 3) to zero-extend it in place.
;; Operand 4 is the intermediate scratch register.
3550 (define_expand "extzv_t1"
3551 [(set (match_operand:SI 4 "s_register_operand")
3552 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3553 (match_operand:SI 2 "const_int_operand")))
3554 (set (match_operand:SI 0 "s_register_operand")
3555 (lshiftrt:SI (match_dup 4)
3556 (match_operand:SI 3 "const_int_operand")))]
;; Sign-extract a bitfield (signed counterpart of extzv above):
;; byte-aligned 16/32-bit memory fields use the unaligned signed
;; loads; SImode register operands use SBFX via extv_regsi.
3560 (define_expand "extv"
3561 [(set (match_operand 0 "s_register_operand")
3562 (sign_extract (match_operand 1 "nonimmediate_operand")
3563 (match_operand 2 "const_int_operand")
3564 (match_operand 3 "const_int_operand")))]
3567 HOST_WIDE_INT width = INTVAL (operands[2]);
3568 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3570 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3571 && (bitpos % BITS_PER_UNIT) == 0)
;; Big-endian bit positions count from the other end of the value.
3575 if (BYTES_BIG_ENDIAN)
3576 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3580 base_addr = adjust_address (operands[1], SImode,
3581 bitpos / BITS_PER_UNIT);
3582 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3586 rtx dest = operands[0];
3587 rtx tmp = gen_reg_rtx (SImode);
3589 /* We may get a paradoxical subreg here. Strip it off. */
3590 if (GET_CODE (dest) == SUBREG
3591 && GET_MODE (dest) == SImode
3592 && GET_MODE (SUBREG_REG (dest)) == HImode)
3593 dest = SUBREG_REG (dest);
3595 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3598 base_addr = adjust_address (operands[1], HImode,
3599 bitpos / BITS_PER_UNIT);
3600 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3601 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3606 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
;; SImode register source: use the SBFX helper pattern.
3608 else if (GET_MODE (operands[0]) == SImode
3609 && GET_MODE (operands[1]) == SImode)
3611 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3619 ; Helper to expand register forms of extv with the proper modes.
;; Thin wrapper giving extv's register-register case explicit SImode
;; operands so the *extv_reg insn below can match it.
3621 (define_expand "extv_regsi"
3622 [(set (match_operand:SI 0 "s_register_operand")
3623 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3624 (match_operand 2 "const_int_operand")
3625 (match_operand 3 "const_int_operand")))]
3630 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; 64-bit unaligned load via a register pair (LDRD-capable targets);
;; output_move_double emits the two-instruction sequence.
3632 (define_insn "unaligned_loaddi"
3633 [(set (match_operand:DI 0 "s_register_operand" "=r")
3634 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3635 UNSPEC_UNALIGNED_LOAD))]
3636 "TARGET_32BIT && TARGET_LDRD"
3638 return output_move_double (operands, true, NULL);
3640 [(set_attr "length" "8")
3641 (set_attr "type" "load_8")])
;; 32-bit unaligned load; alternatives cover Thumb-1 (16-bit LDR),
;; Thumb-2 and full 32-bit encodings.
3643 (define_insn "unaligned_loadsi"
3644 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3645 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3646 UNSPEC_UNALIGNED_LOAD))]
3649 ldr\t%0, %1\t@ unaligned
3650 ldr%?\t%0, %1\t@ unaligned
3651 ldr%?\t%0, %1\t@ unaligned"
3652 [(set_attr "arch" "t1,t2,32")
3653 (set_attr "length" "2,2,4")
3654 (set_attr "predicable" "no,yes,yes")
3655 (set_attr "predicable_short_it" "no,yes,no")
3656 (set_attr "type" "load_4")])
3658 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3659 ;; address (there's no immediate format). That's tricky to support
3660 ;; here and we don't really need this pattern for that case, so only
3661 ;; enable for 32-bit ISAs.
;; Unaligned signed halfword load (LDRSH), sign-extended to SImode.
3662 (define_insn "unaligned_loadhis"
3663 [(set (match_operand:SI 0 "s_register_operand" "=r")
3665 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3666 UNSPEC_UNALIGNED_LOAD)))]
3667 "unaligned_access && TARGET_32BIT"
3668 "ldrsh%?\t%0, %1\t@ unaligned"
3669 [(set_attr "predicable" "yes")
3670 (set_attr "type" "load_byte")])
;; Unaligned unsigned halfword load (LDRH), zero-extended to SImode.
3672 (define_insn "unaligned_loadhiu"
3673 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3675 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3676 UNSPEC_UNALIGNED_LOAD)))]
3679 ldrh\t%0, %1\t@ unaligned
3680 ldrh%?\t%0, %1\t@ unaligned
3681 ldrh%?\t%0, %1\t@ unaligned"
3682 [(set_attr "arch" "t1,t2,32")
3683 (set_attr "length" "2,2,4")
3684 (set_attr "predicable" "no,yes,yes")
3685 (set_attr "predicable_short_it" "no,yes,no")
3686 (set_attr "type" "load_byte")])
;; 64-bit unaligned store, the mirror of unaligned_loaddi: a
;; register-pair store emitted by output_move_double.
3688 (define_insn "unaligned_storedi"
3689 [(set (match_operand:DI 0 "memory_operand" "=m")
3690 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3691 UNSPEC_UNALIGNED_STORE))]
3692 "TARGET_32BIT && TARGET_LDRD"
3694 return output_move_double (operands, true, NULL);
3696 [(set_attr "length" "8")
3697 (set_attr "type" "store_8")])
;; 32-bit unaligned store (STR); Thumb-1/Thumb-2/32-bit alternatives.
3699 (define_insn "unaligned_storesi"
3700 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3701 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3702 UNSPEC_UNALIGNED_STORE))]
3705 str\t%1, %0\t@ unaligned
3706 str%?\t%1, %0\t@ unaligned
3707 str%?\t%1, %0\t@ unaligned"
3708 [(set_attr "arch" "t1,t2,32")
3709 (set_attr "length" "2,2,4")
3710 (set_attr "predicable" "no,yes,yes")
3711 (set_attr "predicable_short_it" "no,yes,no")
3712 (set_attr "type" "store_4")])
;; Unaligned halfword store (STRH).
3714 (define_insn "unaligned_storehi"
3715 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3716 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3717 UNSPEC_UNALIGNED_STORE))]
3720 strh\t%1, %0\t@ unaligned
3721 strh%?\t%1, %0\t@ unaligned
3722 strh%?\t%1, %0\t@ unaligned"
3723 [(set_attr "arch" "t1,t2,32")
3724 (set_attr "length" "2,2,4")
3725 (set_attr "predicable" "no,yes,yes")
3726 (set_attr "predicable_short_it" "no,yes,no")
3727 (set_attr "type" "store_4")])
;; Signed bitfield extract from a register: SBFX dst, src, lsb, width.
;; The condition restricts lsb to 0..31 and width to 1..(32-lsb).
;; NOTE(review): the leading target-condition line is missing from
;; this extract.
3730 (define_insn "*extv_reg"
3731 [(set (match_operand:SI 0 "s_register_operand" "=r")
3732 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3733 (match_operand:SI 2 "const_int_operand" "n")
3734 (match_operand:SI 3 "const_int_operand" "n")))]
3736 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3737 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3738 "sbfx%?\t%0, %1, %3, %2"
3739 [(set_attr "length" "4")
3740 (set_attr "predicable" "yes")
3741 (set_attr "type" "bfm")]
;; Unsigned counterpart: UBFX, used as the helper for extzv above.
3744 (define_insn "extzv_t2"
3745 [(set (match_operand:SI 0 "s_register_operand" "=r")
3746 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3747 (match_operand:SI 2 "const_int_operand" "n")
3748 (match_operand:SI 3 "const_int_operand" "n")))]
3750 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3751 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3752 "ubfx%?\t%0, %1, %3, %2"
3753 [(set_attr "length" "4")
3754 (set_attr "predicable" "yes")
3755 (set_attr "type" "bfm")]
3759 ;; Division instructions
;; Signed hardware division (SDIV); the second alternative is the
;; ARMv8-M Baseline encoding.  NOTE(review): the condition and
;; output-template lines are missing from this extract.
3760 (define_insn "divsi3"
3761 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3762 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3763 (match_operand:SI 2 "s_register_operand" "r,r")))]
3768 [(set_attr "arch" "32,v8mb")
3769 (set_attr "predicable" "yes")
3770 (set_attr "type" "sdiv")]
;; Unsigned hardware division (UDIV), same alternatives as divsi3.
3773 (define_insn "udivsi3"
3774 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3775 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3776 (match_operand:SI 2 "s_register_operand" "r,r")))]
3781 [(set_attr "arch" "32,v8mb")
3782 (set_attr "predicable" "yes")
3783 (set_attr "type" "udiv")]
3787 ;; Unary arithmetic insns
;; Overflow-checking SImode negation: negate via a flag-setting
;; (0 - x) subtraction, then branch to operand 2 on signed overflow
;; (CC_V mode, NE).
3789 (define_expand "negvsi3"
3790 [(match_operand:SI 0 "register_operand")
3791 (match_operand:SI 1 "register_operand")
3792 (match_operand 2 "")]
3795 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3796 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; DImode overflow-checking negation, built on negdi2_compare below.
3801 (define_expand "negvdi3"
3802 [(match_operand:DI 0 "s_register_operand")
3803 (match_operand:DI 1 "s_register_operand")
3804 (match_operand 2 "")]
3807 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3808 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; Flag-setting 64-bit negation: RSBS on the low word followed by a
;; reverse-subtract/subtract-with-carry on the high word (ARM vs
;; Thumb-2 alternatives).
3814 (define_insn "negdi2_compare"
3815 [(set (reg:CC CC_REGNUM)
3818 (match_operand:DI 1 "register_operand" "r,r")))
3819 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3820 (minus:DI (const_int 0) (match_dup 1)))]
3823 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3824 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3825 [(set_attr "conds" "set")
3826 (set_attr "arch" "a,t2")
3827 (set_attr "length" "8")
3828 (set_attr "type" "multiple")]
;; Plain SImode negation expander.
3831 (define_expand "negsi2"
3832 [(set (match_operand:SI 0 "s_register_operand")
3833 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; Negation as reverse-subtract from zero (RSB); the first
;; alternative is the 16-bit Thumb-2 form.
3838 (define_insn "*arm_negsi2"
3839 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3840 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3842 "rsb%?\\t%0, %1, #0"
3843 [(set_attr "predicable" "yes")
3844 (set_attr "predicable_short_it" "yes,no")
3845 (set_attr "arch" "t2,*")
3846 (set_attr "length" "4")
3847 (set_attr "type" "alu_imm")]
3850 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3851 ;; rather than (0 cmp reg). This gives the same results for unsigned
3852 ;; and equality compares which is what we mostly need here.
;; Flag-setting negation (CC_RSB mode); result is also kept in
;; operand 0.  NOTE(review): the compare's second argument and the
;; output templates are missing from this extract.
3853 (define_insn "negsi2_0compare"
3854 [(set (reg:CC_RSB CC_REGNUM)
3855 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3857 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3858 (neg:SI (match_dup 1)))]
3863 [(set_attr "conds" "set")
3864 (set_attr "arch" "t2,*")
3865 (set_attr "length" "2,*")
3866 (set_attr "type" "alus_imm")]
;; Negate with borrow consumed: -(x) - borrow, used when building
;; multi-word negations (the SBC form folds x + x<<1 tricks on T2).
3869 (define_insn "negsi2_carryin"
3870 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3871 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3872 (match_operand:SI 2 "arm_borrow_operation" "")))]
3876 sbc\\t%0, %1, %1, lsl #1"
3877 [(set_attr "conds" "use")
3878 (set_attr "arch" "a,t2")
3879 (set_attr "type" "adc_imm,adc_reg")]
;; Floating-point negation expanders; matched by the VFP patterns
;; elsewhere.  DFmode requires double-precision hardware.
3882 (define_expand "negsf2"
3883 [(set (match_operand:SF 0 "s_register_operand")
3884 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3885 "TARGET_32BIT && TARGET_HARD_FLOAT"
3889 (define_expand "negdf2"
3890 [(set (match_operand:DF 0 "s_register_operand")
3891 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3892 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3895 ;; abssi2 doesn't really clobber the condition codes if a different register
3896 ;; is being set. To keep things simple, assume during rtl manipulations that
3897 ;; it does, but tell the final scan operator the truth. Similarly for
;; Integer absolute value.  Operand 2 is either a harmless SCRATCH or
;; the CC register, per the comment above (the condition choosing
;; between them is missing from this extract).
3900 (define_expand "abssi2"
3902 [(set (match_operand:SI 0 "s_register_operand")
3903 (abs:SI (match_operand:SI 1 "s_register_operand")))
3904 (clobber (match_dup 2))])]
3908 operands[2] = gen_rtx_SCRATCH (SImode);
3910 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; Split after reload into one of two sequences:
;;  same source/dest: cmp + conditional rsb (uses the flags);
;;  distinct regs:    eor with asr #31 then subtract asr #31
;;                    (branch-free, flag-free).
3913 (define_insn_and_split "*arm_abssi2"
3914 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3915 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3916 (clobber (reg:CC CC_REGNUM))]
3919 "&& reload_completed"
3922 /* if (which_alternative == 0) */
3923 if (REGNO(operands[0]) == REGNO(operands[1]))
3925 /* Emit the pattern:
3926 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3927 [(set (reg:CC CC_REGNUM)
3928 (compare:CC (match_dup 0) (const_int 0)))
3929 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3930 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3932 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3933 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3934 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3935 (gen_rtx_LT (SImode,
3936 gen_rtx_REG (CCmode, CC_REGNUM),
3938 (gen_rtx_SET (operands[0],
3939 (gen_rtx_MINUS (SImode,
3946 /* Emit the pattern:
3947 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3949 (xor:SI (match_dup 1)
3950 (ashiftrt:SI (match_dup 1) (const_int 31))))
3952 (minus:SI (match_dup 0)
3953 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3955 emit_insn (gen_rtx_SET (operands[0],
3956 gen_rtx_XOR (SImode,
3957 gen_rtx_ASHIFTRT (SImode,
3961 emit_insn (gen_rtx_SET (operands[0],
3962 gen_rtx_MINUS (SImode,
3964 gen_rtx_ASHIFTRT (SImode,
3970 [(set_attr "conds" "clob,*")
3971 (set_attr "shift" "1")
3972 (set_attr "predicable" "no, yes")
3973 (set_attr "length" "8")
3974 (set_attr "type" "multiple")]
;; Negated absolute value (-|x|); the mirror of *arm_abssi2 above:
;; same-register alternative uses cmp + rsbgt, the two-register
;; alternative uses eor/rsb with asr #31.
3977 (define_insn_and_split "*arm_neg_abssi2"
3978 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3979 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3980 (clobber (reg:CC CC_REGNUM))]
3983 "&& reload_completed"
3986 /* if (which_alternative == 0) */
3987 if (REGNO (operands[0]) == REGNO (operands[1]))
3989 /* Emit the pattern:
3990 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3992 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3993 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3994 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3996 gen_rtx_REG (CCmode, CC_REGNUM),
3998 gen_rtx_SET (operands[0],
3999 (gen_rtx_MINUS (SImode,
4005 /* Emit the pattern:
4006 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4008 emit_insn (gen_rtx_SET (operands[0],
4009 gen_rtx_XOR (SImode,
4010 gen_rtx_ASHIFTRT (SImode,
4014 emit_insn (gen_rtx_SET (operands[0],
4015 gen_rtx_MINUS (SImode,
4016 gen_rtx_ASHIFTRT (SImode,
4023 [(set_attr "conds" "clob,*")
4024 (set_attr "shift" "1")
4025 (set_attr "predicable" "no, yes")
4026 (set_attr "length" "8")
4027 (set_attr "type" "multiple")]
;; Floating-point absolute value and square root expanders; matched
;; by the VFP patterns elsewhere.  DFmode forms need double-precision
;; hardware.
4030 (define_expand "abssf2"
4031 [(set (match_operand:SF 0 "s_register_operand")
4032 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4033 "TARGET_32BIT && TARGET_HARD_FLOAT"
4036 (define_expand "absdf2"
4037 [(set (match_operand:DF 0 "s_register_operand")
4038 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4039 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4042 (define_expand "sqrtsf2"
4043 [(set (match_operand:SF 0 "s_register_operand")
4044 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4045 "TARGET_32BIT && TARGET_HARD_FLOAT"
4048 (define_expand "sqrtdf2"
4049 [(set (match_operand:DF 0 "s_register_operand")
4050 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4051 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; Bitwise NOT (one's complement) expander and insns — MVN.
4054 (define_expand "one_cmplsi2"
4055 [(set (match_operand:SI 0 "s_register_operand")
4056 (not:SI (match_operand:SI 1 "s_register_operand")))]
;; Register NOT; 16-bit Thumb-2 alternative first.  NOTE(review):
;; the output template line is missing from this extract.
4061 (define_insn "*arm_one_cmplsi2"
4062 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4063 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4066 [(set_attr "predicable" "yes")
4067 (set_attr "predicable_short_it" "yes,no")
4068 (set_attr "arch" "t2,*")
4069 (set_attr "length" "4")
4070 (set_attr "type" "mvn_reg")]
;; Flag-setting NOT that keeps its result (MVNS).
4073 (define_insn "*notsi_compare0"
4074 [(set (reg:CC_NOOV CC_REGNUM)
4075 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4077 (set (match_operand:SI 0 "s_register_operand" "=r")
4078 (not:SI (match_dup 1)))]
4081 [(set_attr "conds" "set")
4082 (set_attr "type" "mvn_reg")]
;; Flag-setting NOT whose result is discarded into a scratch.
4085 (define_insn "*notsi_compare0_scratch"
4086 [(set (reg:CC_NOOV CC_REGNUM)
4087 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4089 (clobber (match_scratch:SI 0 "=r"))]
4092 [(set_attr "conds" "set")
4093 (set_attr "type" "mvn_reg")]
4096 ;; Fixed <--> Floating conversion insns
;; int -> HFmode conversions have no direct instruction: go through
;; SFmode (expand_float) and then narrow to HFmode.
4098 (define_expand "floatsihf2"
4099 [(set (match_operand:HF 0 "general_operand")
4100 (float:HF (match_operand:SI 1 "general_operand")))]
4104 rtx op1 = gen_reg_rtx (SFmode);
4105 expand_float (op1, operands[1], 0);
4106 op1 = convert_to_mode (HFmode, op1, 0);
4107 emit_move_insn (operands[0], op1);
;; DImode variant of the same two-step int -> HF conversion.
4112 (define_expand "floatdihf2"
4113 [(set (match_operand:HF 0 "general_operand")
4114 (float:HF (match_operand:DI 1 "general_operand")))]
4118 rtx op1 = gen_reg_rtx (SFmode);
4119 expand_float (op1, operands[1], 0);
4120 op1 = convert_to_mode (HFmode, op1, 0);
4121 emit_move_insn (operands[0], op1);
;; int -> SF/DF conversions, handled directly by hard-float patterns.
4126 (define_expand "floatsisf2"
4127 [(set (match_operand:SF 0 "s_register_operand")
4128 (float:SF (match_operand:SI 1 "s_register_operand")))]
4129 "TARGET_32BIT && TARGET_HARD_FLOAT"
4133 (define_expand "floatsidf2"
4134 [(set (match_operand:DF 0 "s_register_operand")
4135 (float:DF (match_operand:SI 1 "s_register_operand")))]
4136 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HF -> int truncation: widen to SFmode first, then expand_fix.
4140 (define_expand "fix_trunchfsi2"
4141 [(set (match_operand:SI 0 "general_operand")
4142 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4146 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4147 expand_fix (operands[0], op1, 0);
4152 (define_expand "fix_trunchfdi2"
4153 [(set (match_operand:DI 0 "general_operand")
4154 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4158 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4159 expand_fix (operands[0], op1, 0);
;; SF/DF -> int truncation, handled by hard-float patterns.
4164 (define_expand "fix_truncsfsi2"
4165 [(set (match_operand:SI 0 "s_register_operand")
4166 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4167 "TARGET_32BIT && TARGET_HARD_FLOAT"
4171 (define_expand "fix_truncdfsi2"
4172 [(set (match_operand:SI 0 "s_register_operand")
4173 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4174 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; DF -> SF narrowing, handled by hard-float patterns.
4180 (define_expand "truncdfsf2"
4181 [(set (match_operand:SF 0 "s_register_operand")
4183 (match_operand:DF 1 "s_register_operand")))]
4184 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4188 ;; DFmode to HFmode conversions on targets without a single-step hardware
4189 ;; instruction for it would have to go through SFmode. This is dangerous
4190 ;; as it introduces double rounding.
4192 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4193 ;; a single-step instruction.
4195 (define_expand "truncdfhf2"
4196 [(set (match_operand:HF 0 "s_register_operand")
4198 (match_operand:DF 1 "s_register_operand")))]
4199 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4200 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4202 /* We don't have a direct instruction for this, so we must be in
4203 an unsafe math mode, and going via SFmode. */
4205 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4208 op1 = convert_to_mode (SFmode, operands[1], 0);
4209 op1 = convert_to_mode (HFmode, op1, 0);
4210 emit_move_insn (operands[0], op1);
4213 /* Otherwise, we will pick this up as a single instruction with
4214 no intermediary rounding. */
4218 ;; Zero and sign extension instructions.
;; Zero-extend QI/HI/SI to DImode: extend (or copy) into the low
;; word, set the high word to zero.  Pseudos are used as
;; intermediates when available so the two halves stay independent.
4220 (define_expand "zero_extend<mode>di2"
4221 [(set (match_operand:DI 0 "s_register_operand" "")
4222 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4223 "TARGET_32BIT <qhs_zextenddi_cond>"
4225 rtx res_lo, res_hi, op0_lo, op0_hi;
4226 res_lo = gen_lowpart (SImode, operands[0]);
4227 res_hi = gen_highpart (SImode, operands[0]);
4228 if (can_create_pseudo_p ())
4230 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4231 op0_hi = gen_reg_rtx (SImode);
4235 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4238 if (<MODE>mode != SImode)
4239 emit_insn (gen_rtx_SET (op0_lo,
4240 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4241 emit_insn (gen_movsi (op0_hi, const0_rtx));
4242 if (res_lo != op0_lo)
4243 emit_move_insn (res_lo, op0_lo);
4244 if (res_hi != op0_hi)
4245 emit_move_insn (res_hi, op0_hi);
;; Sign-extend QI/HI/SI to DImode: same shape as above, but the high
;; word is the low word arithmetically shifted right by 31.
4250 (define_expand "extend<mode>di2"
4251 [(set (match_operand:DI 0 "s_register_operand" "")
4252 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4253 "TARGET_32BIT <qhs_sextenddi_cond>"
4255 rtx res_lo, res_hi, op0_lo, op0_hi;
4256 res_lo = gen_lowpart (SImode, operands[0]);
4257 res_hi = gen_highpart (SImode, operands[0]);
4258 if (can_create_pseudo_p ())
4260 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4261 op0_hi = gen_reg_rtx (SImode);
4265 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4268 if (<MODE>mode != SImode)
4269 emit_insn (gen_rtx_SET (op0_lo,
4270 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4271 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4272 if (res_lo != op0_lo)
4273 emit_move_insn (res_lo, op0_lo);
4274 if (res_hi != op0_hi)
4275 emit_move_insn (res_hi, op0_hi);
4280 ;; Splits for all extensions to DImode
;; Split a DImode zero-extension: low part gets the (extended) source,
;; high part becomes a zero move.
4282 [(set (match_operand:DI 0 "s_register_operand" "")
4283 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4285 [(set (match_dup 0) (match_dup 1))]
4287 rtx lo_part = gen_lowpart (SImode, operands[0]);
4288 machine_mode src_mode = GET_MODE (operands[1]);
4290 if (src_mode == SImode)
4291 emit_move_insn (lo_part, operands[1]);
4293 emit_insn (gen_rtx_SET (lo_part,
4294 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4295 operands[0] = gen_highpart (SImode, operands[0]);
4296 operands[1] = const0_rtx;
;; Split a DImode sign-extension: low part gets the (extended)
;; source, high part is the low part shifted right arithmetically
;; by 31 (sign replication).
4300 [(set (match_operand:DI 0 "s_register_operand" "")
4301 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4303 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4305 rtx lo_part = gen_lowpart (SImode, operands[0]);
4306 machine_mode src_mode = GET_MODE (operands[1]);
4308 if (src_mode == SImode)
4309 emit_move_insn (lo_part, operands[1]);
4311 emit_insn (gen_rtx_SET (lo_part,
4312 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4313 operands[1] = lo_part;
4314 operands[0] = gen_highpart (SImode, operands[0]);
;; Zero-extend HImode to SImode.  Pre-v4 ARM has no ldrh, so memory
;; sources go through movhi_bytes; pre-v6 register sources use a
;; shift-up/shift-down pair (no uxth).
4317 (define_expand "zero_extendhisi2"
4318 [(set (match_operand:SI 0 "s_register_operand")
4319 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4322 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4324 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4327 if (!arm_arch6 && !MEM_P (operands[1]))
4329 rtx t = gen_lowpart (SImode, operands[1]);
4330 rtx tmp = gen_reg_rtx (SImode);
4331 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4332 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split for pre-v6 register zero-extension: lsl #16 then lsr #16.
4338 [(set (match_operand:SI 0 "s_register_operand" "")
4339 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4340 "!TARGET_THUMB2 && !arm_arch6"
4341 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4342 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4344 operands[2] = gen_lowpart (SImode, operands[1]);
;; v4-v5 insn: register form split later; memory form is ldrh.
;; NOTE(review): the output templates are missing from this extract.
4347 (define_insn "*arm_zero_extendhisi2"
4348 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4349 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4350 "TARGET_ARM && arm_arch4 && !arm_arch6"
4354 [(set_attr "type" "alu_shift_reg,load_byte")
4355 (set_attr "predicable" "yes")]
;; v6+ insn: uxth for registers, ldrh for memory.
4358 (define_insn "*arm_zero_extendhisi2_v6"
4359 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4360 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4361 "TARGET_ARM && arm_arch6"
4365 [(set_attr "predicable" "yes")
4366 (set_attr "type" "extend,load_byte")]
;; Zero-extend-and-add fused into UXTAH.
4369 (define_insn "*arm_zero_extendhisi2addsi"
4370 [(set (match_operand:SI 0 "s_register_operand" "=r")
4371 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4372 (match_operand:SI 2 "s_register_operand" "r")))]
4374 "uxtah%?\\t%0, %2, %1"
4375 [(set_attr "type" "alu_shift_reg")
4376 (set_attr "predicable" "yes")]
;; Zero-extend QImode to SImode.  Pre-v6 ARM register sources use an
;; AND with 255; other pre-v6 register cases use a shift pair.
4379 (define_expand "zero_extendqisi2"
4380 [(set (match_operand:SI 0 "s_register_operand")
4381 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4384 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4386 emit_insn (gen_andsi3 (operands[0],
4387 gen_lowpart (SImode, operands[1]),
4391 if (!arm_arch6 && !MEM_P (operands[1]))
4393 rtx t = gen_lowpart (SImode, operands[1]);
4394 rtx tmp = gen_reg_rtx (SImode);
4395 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4396 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split for pre-v6 register zero-extension: lsl #24 / lsr #24, with
;; an AND-255 alternative emitted from the C fragment.
4402 [(set (match_operand:SI 0 "s_register_operand" "")
4403 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4405 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4408 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4411 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Pre-v6 insn: register form is a two-instruction sequence (length
;; 8), memory form is a single ldrb.
4416 (define_insn "*arm_zero_extendqisi2"
4417 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4418 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4419 "TARGET_ARM && !arm_arch6"
4422 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4423 [(set_attr "length" "8,4")
4424 (set_attr "type" "alu_shift_reg,load_byte")
4425 (set_attr "predicable" "yes")]
;; v6+ insn: uxtb for registers, ldrb for memory.
4428 (define_insn "*arm_zero_extendqisi2_v6"
4429 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4430 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4431 "TARGET_ARM && arm_arch6"
4434 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4435 [(set_attr "type" "extend,load_byte")
4436 (set_attr "predicable" "yes")]
;; Zero-extend-and-add fused into UXTAB.
4439 (define_insn "*arm_zero_extendqisi2addsi"
4440 [(set (match_operand:SI 0 "s_register_operand" "=r")
4441 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4442 (match_operand:SI 2 "s_register_operand" "r")))]
4444 "uxtab%?\\t%0, %2, %1"
4445 [(set_attr "predicable" "yes")
4446 (set_attr "type" "alu_shift_reg")]
;; Split zero-extension of the low byte of an SImode register
;; (subreg byte 0 on little-endian) into move + AND 255.
4450 [(set (match_operand:SI 0 "s_register_operand" "")
4451 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4452 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4453 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4454 [(set (match_dup 2) (match_dup 1))
4455 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Big-endian variant: the low byte is subreg byte 3.
4460 [(set (match_operand:SI 0 "s_register_operand" "")
4461 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4462 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4463 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4464 [(set (match_dup 2) (match_dup 1))
4465 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4471 [(set (match_operand:SI 0 "s_register_operand" "")
4472 (IOR_XOR:SI (and:SI (ashift:SI
4473 (match_operand:SI 1 "s_register_operand" "")
4474 (match_operand:SI 2 "const_int_operand" ""))
4475 (match_operand:SI 3 "const_int_operand" ""))
4477 (match_operator 5 "subreg_lowpart_operator"
4478 [(match_operand:SI 4 "s_register_operand" "")]))))]
4480 && (UINTVAL (operands[3])
4481 == (GET_MODE_MASK (GET_MODE (operands[5]))
4482 & (GET_MODE_MASK (GET_MODE (operands[5]))
4483 << (INTVAL (operands[2])))))"
4484 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4486 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4487 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4490 (define_insn "*compareqi_eq0"
4491 [(set (reg:CC_Z CC_REGNUM)
4492 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4496 [(set_attr "conds" "set")
4497 (set_attr "predicable" "yes")
4498 (set_attr "type" "logic_imm")]
4501 (define_expand "extendhisi2"
4502 [(set (match_operand:SI 0 "s_register_operand")
4503 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4508 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4511 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4513 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4517 if (!arm_arch6 && !MEM_P (operands[1]))
4519 rtx t = gen_lowpart (SImode, operands[1]);
4520 rtx tmp = gen_reg_rtx (SImode);
4521 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4522 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4529 [(set (match_operand:SI 0 "register_operand" "")
4530 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4531 (clobber (match_scratch:SI 2 ""))])]
4533 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4534 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4536 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4539 ;; This pattern will only be used when ldsh is not available
4540 (define_expand "extendhisi2_mem"
4541 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4543 (zero_extend:SI (match_dup 7)))
4544 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4545 (set (match_operand:SI 0 "" "")
4546 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4551 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4553 mem1 = change_address (operands[1], QImode, addr);
4554 mem2 = change_address (operands[1], QImode,
4555 plus_constant (Pmode, addr, 1));
4556 operands[0] = gen_lowpart (SImode, operands[0]);
4558 operands[2] = gen_reg_rtx (SImode);
4559 operands[3] = gen_reg_rtx (SImode);
4560 operands[6] = gen_reg_rtx (SImode);
4563 if (BYTES_BIG_ENDIAN)
4565 operands[4] = operands[2];
4566 operands[5] = operands[3];
4570 operands[4] = operands[3];
4571 operands[5] = operands[2];
4577 [(set (match_operand:SI 0 "register_operand" "")
4578 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4580 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4581 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4583 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4586 (define_insn "*arm_extendhisi2"
4587 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4588 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4589 "TARGET_ARM && arm_arch4 && !arm_arch6"
4593 [(set_attr "length" "8,4")
4594 (set_attr "type" "alu_shift_reg,load_byte")
4595 (set_attr "predicable" "yes")]
4598 ;; ??? Check Thumb-2 pool range
4599 (define_insn "*arm_extendhisi2_v6"
4600 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4601 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4602 "TARGET_32BIT && arm_arch6"
4606 [(set_attr "type" "extend,load_byte")
4607 (set_attr "predicable" "yes")]
4610 (define_insn "*arm_extendhisi2addsi"
4611 [(set (match_operand:SI 0 "s_register_operand" "=r")
4612 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4613 (match_operand:SI 2 "s_register_operand" "r")))]
4615 "sxtah%?\\t%0, %2, %1"
4616 [(set_attr "type" "alu_shift_reg")]
4619 (define_expand "extendqihi2"
4621 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4623 (set (match_operand:HI 0 "s_register_operand")
4624 (ashiftrt:SI (match_dup 2)
4629 if (arm_arch4 && MEM_P (operands[1]))
4631 emit_insn (gen_rtx_SET (operands[0],
4632 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4635 if (!s_register_operand (operands[1], QImode))
4636 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4637 operands[0] = gen_lowpart (SImode, operands[0]);
4638 operands[1] = gen_lowpart (SImode, operands[1]);
4639 operands[2] = gen_reg_rtx (SImode);
4643 (define_insn "*arm_extendqihi_insn"
4644 [(set (match_operand:HI 0 "s_register_operand" "=r")
4645 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4646 "TARGET_ARM && arm_arch4"
4648 [(set_attr "type" "load_byte")
4649 (set_attr "predicable" "yes")]
4652 (define_expand "extendqisi2"
4653 [(set (match_operand:SI 0 "s_register_operand")
4654 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4657 if (!arm_arch4 && MEM_P (operands[1]))
4658 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4660 if (!arm_arch6 && !MEM_P (operands[1]))
4662 rtx t = gen_lowpart (SImode, operands[1]);
4663 rtx tmp = gen_reg_rtx (SImode);
4664 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4665 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4671 [(set (match_operand:SI 0 "register_operand" "")
4672 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4674 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4675 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4677 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4680 (define_insn "*arm_extendqisi"
4681 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4682 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4683 "TARGET_ARM && arm_arch4 && !arm_arch6"
4687 [(set_attr "length" "8,4")
4688 (set_attr "type" "alu_shift_reg,load_byte")
4689 (set_attr "predicable" "yes")]
4692 (define_insn "*arm_extendqisi_v6"
4693 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4695 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4696 "TARGET_ARM && arm_arch6"
4700 [(set_attr "type" "extend,load_byte")
4701 (set_attr "predicable" "yes")]
4704 (define_insn "*arm_extendqisi2addsi"
4705 [(set (match_operand:SI 0 "s_register_operand" "=r")
4706 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4707 (match_operand:SI 2 "s_register_operand" "r")))]
4709 "sxtab%?\\t%0, %2, %1"
4710 [(set_attr "type" "alu_shift_reg")
4711 (set_attr "predicable" "yes")]
4714 (define_insn "arm_<sup>xtb16"
4715 [(set (match_operand:SI 0 "s_register_operand" "=r")
4717 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4719 "<sup>xtb16%?\\t%0, %1"
4720 [(set_attr "predicable" "yes")
4721 (set_attr "type" "alu_dsp_reg")])
4723 (define_insn "arm_<simd32_op>"
4724 [(set (match_operand:SI 0 "s_register_operand" "=r")
4726 [(match_operand:SI 1 "s_register_operand" "r")
4727 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4729 "<simd32_op>%?\\t%0, %1, %2"
4730 [(set_attr "predicable" "yes")
4731 (set_attr "type" "alu_dsp_reg")])
4733 (define_insn "arm_usada8"
4734 [(set (match_operand:SI 0 "s_register_operand" "=r")
4736 [(match_operand:SI 1 "s_register_operand" "r")
4737 (match_operand:SI 2 "s_register_operand" "r")
4738 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4740 "usada8%?\\t%0, %1, %2, %3"
4741 [(set_attr "predicable" "yes")
4742 (set_attr "type" "alu_dsp_reg")])
4744 (define_insn "arm_<simd32_op>"
4745 [(set (match_operand:DI 0 "s_register_operand" "=r")
4747 [(match_operand:SI 1 "s_register_operand" "r")
4748 (match_operand:SI 2 "s_register_operand" "r")
4749 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4751 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4752 [(set_attr "predicable" "yes")
4753 (set_attr "type" "smlald")])
4755 (define_expand "extendsfdf2"
4756 [(set (match_operand:DF 0 "s_register_operand")
4757 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4758 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4762 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4763 ;; must go through SFmode.
4765 ;; This is always safe for an extend.
4767 (define_expand "extendhfdf2"
4768 [(set (match_operand:DF 0 "s_register_operand")
4769 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4772 /* We don't have a direct instruction for this, so go via SFmode. */
4773 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4776 op1 = convert_to_mode (SFmode, operands[1], 0);
4777 op1 = convert_to_mode (DFmode, op1, 0);
4778 emit_insn (gen_movdf (operands[0], op1));
4781 /* Otherwise, we're done producing RTL and will pick up the correct
4782 pattern to do this with one rounding-step in a single instruction. */
4786 ;; Move insns (including loads and stores)
4788 ;; XXX Just some ideas about movti.
4789 ;; I don't think these are a good idea on the arm, there just aren't enough
4791 ;;(define_expand "loadti"
4792 ;; [(set (match_operand:TI 0 "s_register_operand")
4793 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4796 ;;(define_expand "storeti"
4797 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4798 ;; (match_operand:TI 1 "s_register_operand"))]
4801 ;;(define_expand "movti"
4802 ;; [(set (match_operand:TI 0 "general_operand")
4803 ;; (match_operand:TI 1 "general_operand"))]
4809 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4810 ;; operands[1] = copy_to_reg (operands[1]);
4811 ;; if (MEM_P (operands[0]))
4812 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4813 ;; else if (MEM_P (operands[1]))
4814 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4818 ;; emit_insn (insn);
4822 ;; Recognize garbage generated above.
4825 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4826 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4830 ;; register mem = (which_alternative < 3);
4831 ;; register const char *template;
4833 ;; operands[mem] = XEXP (operands[mem], 0);
4834 ;; switch (which_alternative)
4836 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4837 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4838 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4839 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4840 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4841 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4843 ;; output_asm_insn (template, operands);
4847 (define_expand "movdi"
4848 [(set (match_operand:DI 0 "general_operand")
4849 (match_operand:DI 1 "general_operand"))]
4852 gcc_checking_assert (aligned_operand (operands[0], DImode));
4853 gcc_checking_assert (aligned_operand (operands[1], DImode));
4854 if (can_create_pseudo_p ())
4856 if (!REG_P (operands[0]))
4857 operands[1] = force_reg (DImode, operands[1]);
4859 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4860 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4862 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4863 when expanding function calls. */
4864 gcc_assert (can_create_pseudo_p ());
4865 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4867 /* Perform load into legal reg pair first, then move. */
4868 rtx reg = gen_reg_rtx (DImode);
4869 emit_insn (gen_movdi (reg, operands[1]));
4872 emit_move_insn (gen_lowpart (SImode, operands[0]),
4873 gen_lowpart (SImode, operands[1]));
4874 emit_move_insn (gen_highpart (SImode, operands[0]),
4875 gen_highpart (SImode, operands[1]));
4878 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4879 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4881 /* Avoid STRD's from an odd-numbered register pair in ARM state
4882 when expanding function prologue. */
4883 gcc_assert (can_create_pseudo_p ());
4884 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4885 ? gen_reg_rtx (DImode)
4887 emit_move_insn (gen_lowpart (SImode, split_dest),
4888 gen_lowpart (SImode, operands[1]));
4889 emit_move_insn (gen_highpart (SImode, split_dest),
4890 gen_highpart (SImode, operands[1]));
4891 if (split_dest != operands[0])
4892 emit_insn (gen_movdi (operands[0], split_dest));
4898 (define_insn "*arm_movdi"
4899 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4900 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4902 && !(TARGET_HARD_FLOAT)
4904 && ( register_operand (operands[0], DImode)
4905 || register_operand (operands[1], DImode))"
4907 switch (which_alternative)
4914 /* Cannot load it directly, split to load it via MOV / MOVT. */
4915 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4919 return output_move_double (operands, true, NULL);
4922 [(set_attr "length" "8,12,16,8,8")
4923 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4924 (set_attr "arm_pool_range" "*,*,*,1020,*")
4925 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4926 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4927 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4931 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4932 (match_operand:ANY64 1 "immediate_operand" ""))]
4935 && (arm_disable_literal_pool
4936 || (arm_const_double_inline_cost (operands[1])
4937 <= arm_max_const_double_inline_cost ()))"
4940 arm_split_constant (SET, SImode, curr_insn,
4941 INTVAL (gen_lowpart (SImode, operands[1])),
4942 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4943 arm_split_constant (SET, SImode, curr_insn,
4944 INTVAL (gen_highpart_mode (SImode,
4945 GET_MODE (operands[0]),
4947 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4952 ; If optimizing for size, or if we have load delay slots, then
4953 ; we want to split the constant into two separate operations.
4954 ; In both cases this may split a trivial part into a single data op
4955 ; leaving a single complex constant to load. We can also get longer
4956 ; offsets in a LDR which means we get better chances of sharing the pool
4957 ; entries. Finally, we can normally do a better job of scheduling
4958 ; LDR instructions than we can with LDM.
4959 ; This pattern will only match if the one above did not.
4961 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4962 (match_operand:ANY64 1 "const_double_operand" ""))]
4963 "TARGET_ARM && reload_completed
4964 && arm_const_double_by_parts (operands[1])"
4965 [(set (match_dup 0) (match_dup 1))
4966 (set (match_dup 2) (match_dup 3))]
4968 operands[2] = gen_highpart (SImode, operands[0]);
4969 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4971 operands[0] = gen_lowpart (SImode, operands[0]);
4972 operands[1] = gen_lowpart (SImode, operands[1]);
4977 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4978 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4979 "TARGET_EITHER && reload_completed"
4980 [(set (match_dup 0) (match_dup 1))
4981 (set (match_dup 2) (match_dup 3))]
4983 operands[2] = gen_highpart (SImode, operands[0]);
4984 operands[3] = gen_highpart (SImode, operands[1]);
4985 operands[0] = gen_lowpart (SImode, operands[0]);
4986 operands[1] = gen_lowpart (SImode, operands[1]);
4988 /* Handle a partial overlap. */
4989 if (rtx_equal_p (operands[0], operands[3]))
4991 rtx tmp0 = operands[0];
4992 rtx tmp1 = operands[1];
4994 operands[0] = operands[2];
4995 operands[1] = operands[3];
5002 ;; We can't actually do base+index doubleword loads if the index and
5003 ;; destination overlap. Split here so that we at least have chance to
5006 [(set (match_operand:DI 0 "s_register_operand" "")
5007 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5008 (match_operand:SI 2 "s_register_operand" ""))))]
5010 && reg_overlap_mentioned_p (operands[0], operands[1])
5011 && reg_overlap_mentioned_p (operands[0], operands[2])"
5013 (plus:SI (match_dup 1)
5016 (mem:DI (match_dup 4)))]
5018 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5022 (define_expand "movsi"
5023 [(set (match_operand:SI 0 "general_operand")
5024 (match_operand:SI 1 "general_operand"))]
5028 rtx base, offset, tmp;
5030 gcc_checking_assert (aligned_operand (operands[0], SImode));
5031 gcc_checking_assert (aligned_operand (operands[1], SImode));
5032 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5034 /* Everything except mem = const or mem = mem can be done easily. */
5035 if (MEM_P (operands[0]))
5036 operands[1] = force_reg (SImode, operands[1]);
5037 if (arm_general_register_operand (operands[0], SImode)
5038 && CONST_INT_P (operands[1])
5039 && !(const_ok_for_arm (INTVAL (operands[1]))
5040 || const_ok_for_arm (~INTVAL (operands[1]))))
5042 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5044 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5049 arm_split_constant (SET, SImode, NULL_RTX,
5050 INTVAL (operands[1]), operands[0], NULL_RTX,
5051 optimize && can_create_pseudo_p ());
5056 else /* Target doesn't have MOVT... */
5058 if (can_create_pseudo_p ())
5060 if (!REG_P (operands[0]))
5061 operands[1] = force_reg (SImode, operands[1]);
5065 split_const (operands[1], &base, &offset);
5066 if (INTVAL (offset) != 0
5067 && targetm.cannot_force_const_mem (SImode, operands[1]))
5069 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5070 emit_move_insn (tmp, base);
5071 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5075 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5077 /* Recognize the case where operand[1] is a reference to thread-local
5078 data and load its address to a register. Offsets have been split off
5080 if (arm_tls_referenced_p (operands[1]))
5081 operands[1] = legitimize_tls_address (operands[1], tmp);
5083 && (CONSTANT_P (operands[1])
5084 || symbol_mentioned_p (operands[1])
5085 || label_mentioned_p (operands[1])))
5087 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5092 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5093 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5094 ;; so this does not matter.
5095 (define_insn "*arm_movt"
5096 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5097 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5098 (match_operand:SI 2 "general_operand" "i,i")))]
5099 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5101 movt%?\t%0, #:upper16:%c2
5102 movt\t%0, #:upper16:%c2"
5103 [(set_attr "arch" "32,v8mb")
5104 (set_attr "predicable" "yes")
5105 (set_attr "length" "4")
5106 (set_attr "type" "alu_sreg")]
5109 (define_insn "*arm_movsi_insn"
5110 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5111 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5112 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5113 && ( register_operand (operands[0], SImode)
5114 || register_operand (operands[1], SImode))"
5122 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5123 (set_attr "predicable" "yes")
5124 (set_attr "arch" "*,*,*,v6t2,*,*")
5125 (set_attr "pool_range" "*,*,*,*,4096,*")
5126 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5130 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5131 (match_operand:SI 1 "const_int_operand" ""))]
5132 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5133 && (!(const_ok_for_arm (INTVAL (operands[1]))
5134 || const_ok_for_arm (~INTVAL (operands[1]))))"
5135 [(clobber (const_int 0))]
5137 arm_split_constant (SET, SImode, NULL_RTX,
5138 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5143 ;; A normal way to do (symbol + offset) requires three instructions at least
5144 ;; (depends on how big the offset is) as below:
5145 ;; movw r0, #:lower16:g
5146 ;; movt r0, #:upper16:g
5149 ;; A better way would be:
5150 ;; movw r0, #:lower16:g+4
5151 ;; movt r0, #:upper16:g+4
5153 ;; The limitation of this way is that the length of offset should be a 16-bit
5154 ;; signed value, because current assembler only supports REL type relocation for
5155 ;; such case. If the more powerful RELA type is supported in future, we should
5156 ;; update this pattern to go with better way.
5158 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5159 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5160 (match_operand:SI 2 "const_int_operand" ""))))]
5163 && arm_disable_literal_pool
5165 && GET_CODE (operands[1]) == SYMBOL_REF"
5166 [(clobber (const_int 0))]
5168 int offset = INTVAL (operands[2]);
5170 if (offset < -0x8000 || offset > 0x7fff)
5172 arm_emit_movpair (operands[0], operands[1]);
5173 emit_insn (gen_rtx_SET (operands[0],
5174 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5178 rtx op = gen_rtx_CONST (SImode,
5179 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5180 arm_emit_movpair (operands[0], op);
5185 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5186 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5187 ;; and lo_sum would be merged back into memory load at cprop. However,
5188 ;; if the default is to prefer movt/movw rather than a load from the constant
5189 ;; pool, the performance is better.
5191 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5192 (match_operand:SI 1 "general_operand" ""))]
5193 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5194 && !target_word_relocations
5195 && !arm_tls_referenced_p (operands[1])"
5196 [(clobber (const_int 0))]
5198 arm_emit_movpair (operands[0], operands[1]);
5202 ;; When generating pic, we need to load the symbol offset into a register.
5203 ;; So that the optimizer does not confuse this with a normal symbol load
5204 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5205 ;; since that is the only type of relocation we can use.
5207 ;; Wrap calculation of the whole PIC address in a single pattern for the
5208 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5209 ;; a PIC address involves two loads from memory, so we want to CSE it
5210 ;; as often as possible.
5211 ;; This pattern will be split into one of the pic_load_addr_* patterns
5212 ;; and a move after GCSE optimizations.
5214 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5215 (define_expand "calculate_pic_address"
5216 [(set (match_operand:SI 0 "register_operand")
5217 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5218 (unspec:SI [(match_operand:SI 2 "" "")]
5223 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5225 [(set (match_operand:SI 0 "register_operand" "")
5226 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5227 (unspec:SI [(match_operand:SI 2 "" "")]
5230 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5231 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5232 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5235 ;; operand1 is the memory address to go into
5236 ;; pic_load_addr_32bit.
5237 ;; operand2 is the PIC label to be emitted
5238 ;; from pic_add_dot_plus_eight.
5239 ;; We do this to allow hoisting of the entire insn.
5240 (define_insn_and_split "pic_load_addr_unified"
5241 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5242 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5243 (match_operand:SI 2 "" "")]
5244 UNSPEC_PIC_UNIFIED))]
5247 "&& reload_completed"
5248 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5249 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5250 (match_dup 2)] UNSPEC_PIC_BASE))]
5251 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5252 [(set_attr "type" "load_4,load_4,load_4")
5253 (set_attr "pool_range" "4096,4094,1022")
5254 (set_attr "neg_pool_range" "4084,0,0")
5255 (set_attr "arch" "a,t2,t1")
5256 (set_attr "length" "8,6,4")]
5259 ;; The rather odd constraints on the following are to force reload to leave
5260 ;; the insn alone, and to force the minipool generation pass to then move
5261 ;; the GOT symbol to memory.
5263 (define_insn "pic_load_addr_32bit"
5264 [(set (match_operand:SI 0 "s_register_operand" "=r")
5265 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5266 "TARGET_32BIT && flag_pic"
5268 [(set_attr "type" "load_4")
5269 (set (attr "pool_range")
5270 (if_then_else (eq_attr "is_thumb" "no")
5273 (set (attr "neg_pool_range")
5274 (if_then_else (eq_attr "is_thumb" "no")
5279 (define_insn "pic_load_addr_thumb1"
5280 [(set (match_operand:SI 0 "s_register_operand" "=l")
5281 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5282 "TARGET_THUMB1 && flag_pic"
5284 [(set_attr "type" "load_4")
5285 (set (attr "pool_range") (const_int 1018))]
5288 (define_insn "pic_add_dot_plus_four"
5289 [(set (match_operand:SI 0 "register_operand" "=r")
5290 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5292 (match_operand 2 "" "")]
5296 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5297 INTVAL (operands[2]));
5298 return \"add\\t%0, %|pc\";
5300 [(set_attr "length" "2")
5301 (set_attr "type" "alu_sreg")]
5304 (define_insn "pic_add_dot_plus_eight"
5305 [(set (match_operand:SI 0 "register_operand" "=r")
5306 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5308 (match_operand 2 "" "")]
5312 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5313 INTVAL (operands[2]));
5314 return \"add%?\\t%0, %|pc, %1\";
5316 [(set_attr "predicable" "yes")
5317 (set_attr "type" "alu_sreg")]
5320 (define_insn "tls_load_dot_plus_eight"
5321 [(set (match_operand:SI 0 "register_operand" "=r")
5322 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5324 (match_operand 2 "" "")]
5328 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5329 INTVAL (operands[2]));
5330 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5332 [(set_attr "predicable" "yes")
5333 (set_attr "type" "load_4")]
5336 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5337 ;; followed by a load. These sequences can be crunched down to
5338 ;; tls_load_dot_plus_eight by a peephole.
5341 [(set (match_operand:SI 0 "register_operand" "")
5342 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5344 (match_operand 1 "" "")]
5346 (set (match_operand:SI 2 "arm_general_register_operand" "")
5347 (mem:SI (match_dup 0)))]
5348 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5350 (mem:SI (unspec:SI [(match_dup 3)
5357 (define_insn "pic_offset_arm"
;; Single-ldr load of a word at [operand 1 + PIC offset], used only for
;; VxWorks RTP PIC code in ARM state (see the insn condition below).
;; Operand 2 (presumably the symbol whose GOT/PIC offset is being applied —
;; confirm against the expander that generates this) is wrapped in
;; UNSPEC_PIC_OFFSET so the optimizers treat the offset as opaque and do not
;; confuse it with an ordinary symbol reference.
5358 [(set (match_operand:SI 0 "register_operand" "=r")
5359 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5360 (unspec:SI [(match_operand:SI 2 "" "X")]
5361 UNSPEC_PIC_OFFSET))))]
5362 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5363 "ldr%?\\t%0, [%1,%2]"
5364 [(set_attr "type" "load_4")]
5367 (define_expand "builtin_setjmp_receiver"
5368 [(label_ref (match_operand 0 "" ""))]
5372 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5374 if (arm_pic_register != INVALID_REGNUM)
5375 arm_load_pic_register (1UL << 3, NULL_RTX);
5379 ;; If copying one reg to another we can set the condition codes according to
5380 ;; its value. Such a move is common after a return from subroutine and the
5381 ;; result is being tested against zero.
5383 (define_insn "*movsi_compare0"
5384 [(set (reg:CC CC_REGNUM)
5385 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5387 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5392 subs%?\\t%0, %1, #0"
5393 [(set_attr "conds" "set")
5394 (set_attr "type" "alus_imm,alus_imm")]
5397 ;; Subroutine to store a half word from a register into memory.
5398 ;; Operand 0 is the source register (HImode)
5399 ;; Operand 1 is the destination address in a register (SImode)
5401 ;; In both this routine and the next, we must be careful not to spill
5402 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5403 ;; can generate unrecognizable rtl.
5405 (define_expand "storehi"
5406 [;; store the low byte
5407 (set (match_operand 1 "" "") (match_dup 3))
5408 ;; extract the high byte
5410 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5411 ;; store the high byte
5412 (set (match_dup 4) (match_dup 5))]
5416 rtx op1 = operands[1];
5417 rtx addr = XEXP (op1, 0);
5418 enum rtx_code code = GET_CODE (addr);
5420 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5422 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5424 operands[4] = adjust_address (op1, QImode, 1);
5425 operands[1] = adjust_address (operands[1], QImode, 0);
5426 operands[3] = gen_lowpart (QImode, operands[0]);
5427 operands[0] = gen_lowpart (SImode, operands[0]);
5428 operands[2] = gen_reg_rtx (SImode);
5429 operands[5] = gen_lowpart (QImode, operands[2]);
5433 (define_expand "storehi_bigend"
5434 [(set (match_dup 4) (match_dup 3))
5436 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5437 (set (match_operand 1 "" "") (match_dup 5))]
5441 rtx op1 = operands[1];
5442 rtx addr = XEXP (op1, 0);
5443 enum rtx_code code = GET_CODE (addr);
5445 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5447 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5449 operands[4] = adjust_address (op1, QImode, 1);
5450 operands[1] = adjust_address (operands[1], QImode, 0);
5451 operands[3] = gen_lowpart (QImode, operands[0]);
5452 operands[0] = gen_lowpart (SImode, operands[0]);
5453 operands[2] = gen_reg_rtx (SImode);
5454 operands[5] = gen_lowpart (QImode, operands[2]);
5458 ;; Subroutine to store a half word integer constant into memory.
5459 (define_expand "storeinthi"
5460 [(set (match_operand 0 "" "")
5461 (match_operand 1 "" ""))
5462 (set (match_dup 3) (match_dup 2))]
5466 HOST_WIDE_INT value = INTVAL (operands[1]);
5467 rtx addr = XEXP (operands[0], 0);
5468 rtx op0 = operands[0];
5469 enum rtx_code code = GET_CODE (addr);
5471 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5473 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5475 operands[1] = gen_reg_rtx (SImode);
5476 if (BYTES_BIG_ENDIAN)
5478 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5479 if ((value & 255) == ((value >> 8) & 255))
5480 operands[2] = operands[1];
5483 operands[2] = gen_reg_rtx (SImode);
5484 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5489 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5490 if ((value & 255) == ((value >> 8) & 255))
5491 operands[2] = operands[1];
5494 operands[2] = gen_reg_rtx (SImode);
5495 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5499 operands[3] = adjust_address (op0, QImode, 1);
5500 operands[0] = adjust_address (operands[0], QImode, 0);
5501 operands[2] = gen_lowpart (QImode, operands[2]);
5502 operands[1] = gen_lowpart (QImode, operands[1]);
;; NOTE(review): HImode store that must be a single instruction (strh needs
;; arm_arch4); the preparation simply forces a non-register source into a
;; register so the store has a register operand.
5506 (define_expand "storehi_single_op"
5507 [(set (match_operand:HI 0 "memory_operand")
5508 (match_operand:HI 1 "general_operand"))]
5509 "TARGET_32BIT && arm_arch4"
5511 if (!s_register_operand (operands[1], HImode))
5512 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; NOTE(review): HImode move expander.  The preparation dispatches on target
;; (32-bit ARM, Thumb-2, Thumb-1) and on can_create_pseudo_p ():
;;  - memory destinations go through storehi_single_op / storeinthi /
;;    storehi(_bigend) helpers;
;;  - constants are massaged so const_ok_for_arm holds, kept in an SImode
;;    pseudo and re-exposed as its HI lowpart;
;;  - pre-ARMv4 loads use an aligned SImode widened access (with a 16-bit
;;    shift for the odd-halfword/endian case) or movhi_bytes as fallback;
;;  - during reload, large integers are moved via an SImode SUBREG.
;; Control-flow lines (else arms, braces) appear elided in this extract;
;; confirm against the full arm.md before modifying.
5516 (define_expand "movhi"
5517 [(set (match_operand:HI 0 "general_operand")
5518 (match_operand:HI 1 "general_operand"))]
5521 gcc_checking_assert (aligned_operand (operands[0], HImode));
5522 gcc_checking_assert (aligned_operand (operands[1], HImode));
5525 if (can_create_pseudo_p ())
5527 if (MEM_P (operands[0]))
5531 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5534 if (CONST_INT_P (operands[1]))
5535 emit_insn (gen_storeinthi (operands[0], operands[1]));
5538 if (MEM_P (operands[1]))
5539 operands[1] = force_reg (HImode, operands[1]);
5540 if (BYTES_BIG_ENDIAN)
5541 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5543 emit_insn (gen_storehi (operands[1], operands[0]));
5547 /* Sign extend a constant, and keep it in an SImode reg. */
5548 else if (CONST_INT_P (operands[1]))
5550 rtx reg = gen_reg_rtx (SImode);
5551 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5553 /* If the constant is already valid, leave it alone. */
5554 if (!const_ok_for_arm (val))
5556 /* If setting all the top bits will make the constant
5557 loadable in a single instruction, then set them.
5558 Otherwise, sign extend the number. */
5560 if (const_ok_for_arm (~(val | ~0xffff)))
5562 else if (val & 0x8000)
5566 emit_insn (gen_movsi (reg, GEN_INT (val)));
5567 operands[1] = gen_lowpart (HImode, reg);
5569 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5570 && MEM_P (operands[1]))
5572 rtx reg = gen_reg_rtx (SImode);
5574 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5575 operands[1] = gen_lowpart (HImode, reg);
5577 else if (!arm_arch4)
5579 if (MEM_P (operands[1]))
5582 rtx offset = const0_rtx;
5583 rtx reg = gen_reg_rtx (SImode);
5585 if ((REG_P (base = XEXP (operands[1], 0))
5586 || (GET_CODE (base) == PLUS
5587 && (CONST_INT_P (offset = XEXP (base, 1)))
5588 && ((INTVAL(offset) & 1) != 1)
5589 && REG_P (base = XEXP (base, 0))))
5590 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5594 new_rtx = widen_memory_access (operands[1], SImode,
5595 ((INTVAL (offset) & ~3)
5596 - INTVAL (offset)));
5597 emit_insn (gen_movsi (reg, new_rtx));
5598 if (((INTVAL (offset) & 2) != 0)
5599 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5601 rtx reg2 = gen_reg_rtx (SImode);
5603 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5608 emit_insn (gen_movhi_bytes (reg, operands[1]));
5610 operands[1] = gen_lowpart (HImode, reg);
5614 /* Handle loading a large integer during reload. */
5615 else if (CONST_INT_P (operands[1])
5616 && !const_ok_for_arm (INTVAL (operands[1]))
5617 && !const_ok_for_arm (~INTVAL (operands[1])))
5619 /* Writing a constant to memory needs a scratch, which should
5620 be handled with SECONDARY_RELOADs. */
5621 gcc_assert (REG_P (operands[0]));
5623 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5624 emit_insn (gen_movsi (operands[0], operands[1]));
5628 else if (TARGET_THUMB2)
5630 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5631 if (can_create_pseudo_p ())
5633 if (!REG_P (operands[0]))
5634 operands[1] = force_reg (HImode, operands[1]);
5635 /* Zero extend a constant, and keep it in an SImode reg. */
5636 else if (CONST_INT_P (operands[1]))
5638 rtx reg = gen_reg_rtx (SImode);
5639 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5641 emit_insn (gen_movsi (reg, GEN_INT (val)));
5642 operands[1] = gen_lowpart (HImode, reg);
5646 else /* TARGET_THUMB1 */
5648 if (can_create_pseudo_p ())
5650 if (CONST_INT_P (operands[1]))
5652 rtx reg = gen_reg_rtx (SImode);
5654 emit_insn (gen_movsi (reg, operands[1]));
5655 operands[1] = gen_lowpart (HImode, reg);
5658 /* ??? We shouldn't really get invalid addresses here, but this can
5659 happen if we are passed a SP (never OK for HImode/QImode) or
5660 virtual register (also rejected as illegitimate for HImode/QImode)
5661 relative address. */
5662 /* ??? This should perhaps be fixed elsewhere, for instance, in
5663 fixup_stack_1, by checking for other kinds of invalid addresses,
5664 e.g. a bare reference to a virtual register. This may confuse the
5665 alpha though, which must handle this case differently. */
5666 if (MEM_P (operands[0])
5667 && !memory_address_p (GET_MODE (operands[0]),
5668 XEXP (operands[0], 0)))
5670 = replace_equiv_address (operands[0],
5671 copy_to_reg (XEXP (operands[0], 0)));
5673 if (MEM_P (operands[1])
5674 && !memory_address_p (GET_MODE (operands[1]),
5675 XEXP (operands[1], 0)))
5677 = replace_equiv_address (operands[1],
5678 copy_to_reg (XEXP (operands[1], 0)));
5680 if (MEM_P (operands[1]) && optimize > 0)
5682 rtx reg = gen_reg_rtx (SImode);
5684 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5685 operands[1] = gen_lowpart (HImode, reg);
5688 if (MEM_P (operands[0]))
5689 operands[1] = force_reg (HImode, operands[1]);
5691 else if (CONST_INT_P (operands[1])
5692 && !satisfies_constraint_I (operands[1]))
5694 /* Handle loading a large integer during reload. */
5696 /* Writing a constant to memory needs a scratch, which should
5697 be handled with SECONDARY_RELOADs. */
5698 gcc_assert (REG_P (operands[0]))
5700 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5701 emit_insn (gen_movsi (operands[0], operands[1]));
;; NOTE(review): load a halfword as two zero-extended QImode loads from
;; addr and addr+1, then combine as (high << 8) | low.  Operands 4/5 pick
;; which loaded byte is the high part, giving endian-correct assembly
;; (BYTES_BIG_ENDIAN swaps the roles of operands 2 and 3).
5708 (define_expand "movhi_bytes"
5709 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5711 (zero_extend:SI (match_dup 6)))
5712 (set (match_operand:SI 0 "" "")
5713 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5718 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5720 mem1 = change_address (operands[1], QImode, addr);
5721 mem2 = change_address (operands[1], QImode,
5722 plus_constant (Pmode, addr, 1));
5723 operands[0] = gen_lowpart (SImode, operands[0]);
5725 operands[2] = gen_reg_rtx (SImode);
5726 operands[3] = gen_reg_rtx (SImode);
5729 if (BYTES_BIG_ENDIAN)
5731 operands[4] = operands[2];
5732 operands[5] = operands[3];
5736 operands[4] = operands[3];
5737 operands[5] = operands[2];
;; NOTE(review): big-endian HImode load: rotate the SImode view of the
;; memory, arithmetic-shift right by 16 into a fresh SImode temp, then
;; take the HI lowpart as the result (operands 2-4 are the temporaries).
5742 (define_expand "movhi_bigend"
5744 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5747 (ashiftrt:SI (match_dup 2) (const_int 16)))
5748 (set (match_operand:HI 0 "s_register_operand")
5752 operands[2] = gen_reg_rtx (SImode);
5753 operands[3] = gen_reg_rtx (SImode);
5754 operands[4] = gen_lowpart (HImode, operands[3]);
;; NOTE(review): ARMv4+ HImode move.  Alternatives: mov reg/imm, mvn of the
;; bitwise-inverted constant (%B1), movw (gated to v6t2 by the "arch"
;; attribute), strh to memory, ldrh from memory (with literal-pool ranges).
;; Requires at least one register operand so mem=mem never matches.
5758 ;; Pattern to recognize insn generated default case above
5759 (define_insn "*movhi_insn_arch4"
5760 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5761 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5763 && arm_arch4 && !TARGET_HARD_FLOAT
5764 && (register_operand (operands[0], HImode)
5765 || register_operand (operands[1], HImode))"
5767 mov%?\\t%0, %1\\t%@ movhi
5768 mvn%?\\t%0, #%B1\\t%@ movhi
5769 movw%?\\t%0, %L1\\t%@ movhi
5770 strh%?\\t%1, %0\\t%@ movhi
5771 ldrh%?\\t%0, %1\\t%@ movhi"
5772 [(set_attr "predicable" "yes")
5773 (set_attr "pool_range" "*,*,*,*,256")
5774 (set_attr "neg_pool_range" "*,*,*,*,244")
5775 (set_attr "arch" "*,*,v6t2,*,*")
5776 (set_attr_alternative "type"
5777 [(if_then_else (match_operand 1 "const_int_operand" "")
5778 (const_string "mov_imm" )
5779 (const_string "mov_reg"))
5780 (const_string "mvn_imm")
5781 (const_string "mov_imm")
5782 (const_string "store_4")
5783 (const_string "load_4")])]
;; NOTE(review): register/immediate HImode move for pre-halfword-capable
;; ARM (no memory alternatives): mov for I-constants and registers, mvn of
;; the inverted constant (%B1) for K-constants.
5786 (define_insn "*movhi_bytes"
5787 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5788 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5789 "TARGET_ARM && !TARGET_HARD_FLOAT"
5791 mov%?\\t%0, %1\\t%@ movhi
5792 mov%?\\t%0, %1\\t%@ movhi
5793 mvn%?\\t%0, #%B1\\t%@ movhi"
5794 [(set_attr "predicable" "yes")
5795 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
;; NOTE(review): output-reload helper for HImode stores; dispatches to
;; arm_reload_out_hi or thumb_reload_out_hi (the TARGET_ARM test line is
;; elided in this extract).  Operand 2 is the DImode scratch described in
;; the comment above.
5798 ;; We use a DImode scratch because we may occasionally need an additional
5799 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5800 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5801 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5802 ;; to be correctly handled in default_secondary_reload function.
5803 (define_expand "reload_outhi"
5804 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5805 (match_operand:HI 1 "s_register_operand" "r")
5806 (match_operand:DI 2 "s_register_operand" "=&l")])]
5809 arm_reload_out_hi (operands);
5811 thumb_reload_out_hi (operands);
;; NOTE(review): input-reload helper for HImode loads.  The Thumb branch
;; intentionally calls thumb_reload_out_hi as well — the Thumb helper
;; handles both directions; do not "fix" this to a nonexistent _in_ variant.
5816 (define_expand "reload_inhi"
5817 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5818 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5819 (match_operand:DI 2 "s_register_operand" "=&r")])]
5823 arm_reload_in_hi (operands);
5825 thumb_reload_out_hi (operands);
;; NOTE(review): QImode move expander.  Before reload, constants are moved
;; through an SImode pseudo (masked to 0..255 for Thumb so a movs can be
;; used); invalid addresses are legitimized by copying to a register; with
;; optimization, byte loads go through zero_extendqisi2; memory destinations
;; force the source into a register.  During reload (Thumb), large integers
;; are written via an SImode SUBREG of the destination register.  Else-arm
;; and brace lines appear elided in this extract.
5829 (define_expand "movqi"
5830 [(set (match_operand:QI 0 "general_operand")
5831 (match_operand:QI 1 "general_operand"))]
5834 /* Everything except mem = const or mem = mem can be done easily */
5836 if (can_create_pseudo_p ())
5838 if (CONST_INT_P (operands[1]))
5840 rtx reg = gen_reg_rtx (SImode);
5842 /* For thumb we want an unsigned immediate, then we are more likely
5843 to be able to use a movs insn. */
5845 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5847 emit_insn (gen_movsi (reg, operands[1]));
5848 operands[1] = gen_lowpart (QImode, reg);
5853 /* ??? We shouldn't really get invalid addresses here, but this can
5854 happen if we are passed a SP (never OK for HImode/QImode) or
5855 virtual register (also rejected as illegitimate for HImode/QImode)
5856 relative address. */
5857 /* ??? This should perhaps be fixed elsewhere, for instance, in
5858 fixup_stack_1, by checking for other kinds of invalid addresses,
5859 e.g. a bare reference to a virtual register. This may confuse the
5860 alpha though, which must handle this case differently. */
5861 if (MEM_P (operands[0])
5862 && !memory_address_p (GET_MODE (operands[0]),
5863 XEXP (operands[0], 0)))
5865 = replace_equiv_address (operands[0],
5866 copy_to_reg (XEXP (operands[0], 0)));
5867 if (MEM_P (operands[1])
5868 && !memory_address_p (GET_MODE (operands[1]),
5869 XEXP (operands[1], 0)))
5871 = replace_equiv_address (operands[1],
5872 copy_to_reg (XEXP (operands[1], 0)));
5875 if (MEM_P (operands[1]) && optimize > 0)
5877 rtx reg = gen_reg_rtx (SImode);
5879 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5880 operands[1] = gen_lowpart (QImode, reg);
5883 if (MEM_P (operands[0]))
5884 operands[1] = force_reg (QImode, operands[1]);
5886 else if (TARGET_THUMB
5887 && CONST_INT_P (operands[1])
5888 && !satisfies_constraint_I (operands[1]))
5890 /* Handle loading a large integer during reload. */
5892 /* Writing a constant to memory needs a scratch, which should
5893 be handled with SECONDARY_RELOADs. */
5894 gcc_assert (REG_P (operands[0]));
5896 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5897 emit_insn (gen_movsi (operands[0], operands[1]));
;; NOTE(review): QImode move insn with mixed ARM/Thumb-2 alternatives (the
;; output-template lines are elided in this extract).  The "arch" and
;; "length" attributes mark which alternatives are 16-bit Thumb-2 encodings
;; (length 2) versus 32-bit ones.
5903 (define_insn "*arm_movqi_insn"
5904 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5905 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5907 && ( register_operand (operands[0], QImode)
5908 || register_operand (operands[1], QImode))"
5919 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5920 (set_attr "predicable" "yes")
5921 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5922 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5923 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; NOTE(review): HFmode (__fp16) move expander: asserts alignment, then for
;; memory destinations (and for non-register destinations on Thumb-1 while
;; pseudos are available) forces the source into an HFmode register.
5927 (define_expand "movhf"
5928 [(set (match_operand:HF 0 "general_operand")
5929 (match_operand:HF 1 "general_operand"))]
5932 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5933 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5936 if (MEM_P (operands[0]))
5937 operands[1] = force_reg (HFmode, operands[1]);
5939 else /* TARGET_THUMB1 */
5941 if (can_create_pseudo_p ())
5943 if (!REG_P (operands[0]))
5944 operands[1] = force_reg (HFmode, operands[1]);
;; NOTE(review): soft-float __fp16 move: ldrh/strh for memory, mov for
;; register-to-register, and for constants the 16-bit image is materialized
;; with movw on Thumb-2-capable cores or mov+orr of the two bytes otherwise.
5950 (define_insn "*arm32_movhf"
5951 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5952 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5953 "TARGET_32BIT && !TARGET_HARD_FLOAT
5954 && ( s_register_operand (operands[0], HFmode)
5955 || s_register_operand (operands[1], HFmode))"
5957 switch (which_alternative)
5959 case 0: /* ARM register from memory */
5960 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5961 case 1: /* memory from ARM register */
5962 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5963 case 2: /* ARM register from ARM register */
5964 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5965 case 3: /* ARM register from constant */
5970 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5972 ops[0] = operands[0];
5973 ops[1] = GEN_INT (bits);
5974 ops[2] = GEN_INT (bits & 0xff00);
5975 ops[3] = GEN_INT (bits & 0x00ff);
5977 if (arm_arch_thumb2)
5978 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5980 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5987 [(set_attr "conds" "unconditional")
5988 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5989 (set_attr "length" "4,4,4,8")
5990 (set_attr "predicable" "yes")]
;; NOTE(review): SFmode move expander.  Memory destinations force the source
;; into a register; when the literal pool is disabled and the constant is
;; not representable as a VFP immediate, the move is emitted as a
;; no_literal_pool_sf_immediate insn with an SFmode clobber register so it
;; can later be loaded through a GPR with MOV/MOVT.
5993 (define_expand "movsf"
5994 [(set (match_operand:SF 0 "general_operand")
5995 (match_operand:SF 1 "general_operand"))]
5998 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5999 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6002 if (MEM_P (operands[0]))
6003 operands[1] = force_reg (SFmode, operands[1]);
6005 else /* TARGET_THUMB1 */
6007 if (can_create_pseudo_p ())
6009 if (!REG_P (operands[0]))
6010 operands[1] = force_reg (SFmode, operands[1]);
6014 /* Cannot load it directly, generate a load with clobber so that it can be
6015 loaded via GPR with MOV / MOVT. */
6016 if (arm_disable_literal_pool
6017 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6018 && CONST_DOUBLE_P (operands[1])
6019 && TARGET_HARD_FLOAT
6020 && !vfp3_const_double_rtx (operands[1]))
6022 rtx clobreg = gen_reg_rtx (SFmode);
6023 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
;; NOTE(review): splitter that rewrites an SF-constant move into a core
;; register as the equivalent SImode move of the bit image; fails the split
;; (FAIL, line elided) when either lowpart cannot be formed.
6030 ;; Transform a floating-point move of a constant into a core register into
6031 ;; an SImode operation.
6033 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6034 (match_operand:SF 1 "immediate_operand" ""))]
6037 && CONST_DOUBLE_P (operands[1])"
6038 [(set (match_dup 2) (match_dup 3))]
6040 operands[2] = gen_lowpart (SImode, operands[0]);
6041 operands[3] = gen_lowpart (SImode, operands[1]);
6042 if (operands[2] == 0 || operands[3] == 0)
;; NOTE(review): soft-float SFmode move through core registers: mov, ldr
;; (with literal-pool ranges), str.  With arm_disable_literal_pool a
;; non-memory source in alternative 1 is rejected here so the splitter
;; below handles it via MOV/MOVT.
6047 (define_insn "*arm_movsf_soft_insn"
6048 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6049 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6051 && TARGET_SOFT_FLOAT
6052 && (!MEM_P (operands[0])
6053 || register_operand (operands[1], SFmode))"
6055 switch (which_alternative)
6057 case 0: return \"mov%?\\t%0, %1\";
6059 /* Cannot load it directly, split to load it via MOV / MOVT. */
6060 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6062 return \"ldr%?\\t%0, %1\\t%@ float\";
6063 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6064 default: gcc_unreachable ();
6067 [(set_attr "predicable" "yes")
6068 (set_attr "type" "mov_reg,load_4,store_4")
6069 (set_attr "arm_pool_range" "*,4096,*")
6070 (set_attr "thumb2_pool_range" "*,4094,*")
6071 (set_attr "arm_neg_pool_range" "*,4084,*")
6072 (set_attr "thumb2_neg_pool_range" "*,0,*")]
;; NOTE(review): when the literal pool is disabled, convert the SF constant
;; to its 32-bit target image with real_to_target and move it as an SImode
;; integer into the SImode subreg of the destination.
6075 ;; Splitter for the above.
6077 [(set (match_operand:SF 0 "s_register_operand")
6078 (match_operand:SF 1 "const_double_operand"))]
6079 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6083 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6084 rtx cst = gen_int_mode (buf, SImode);
6085 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; NOTE(review): DFmode move expander; mirrors movsf.  With the literal pool
;; disabled and a constant not loadable as a (VFP) immediate, emits
;; no_literal_pool_df_immediate with a DFmode clobber register so the value
;; can be built in GPRs with MOV/MOVT.
6090 (define_expand "movdf"
6091 [(set (match_operand:DF 0 "general_operand")
6092 (match_operand:DF 1 "general_operand"))]
6095 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6096 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6099 if (MEM_P (operands[0]))
6100 operands[1] = force_reg (DFmode, operands[1]);
6102 else /* TARGET_THUMB */
6104 if (can_create_pseudo_p ())
6106 if (!REG_P (operands[0]))
6107 operands[1] = force_reg (DFmode, operands[1]);
6111 /* Cannot load it directly, generate a load with clobber so that it can be
6112 loaded via GPR with MOV / MOVT. */
6113 if (arm_disable_literal_pool
6114 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6115 && CONSTANT_P (operands[1])
6116 && TARGET_HARD_FLOAT
6117 && !arm_const_double_rtx (operands[1])
6118 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1]))
6120 rtx clobreg = gen_reg_rtx (DFmode);
6121 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
;; NOTE(review): output reload for DFmode stored via integer registers.
;; Uses the address directly for REG/pre-modify forms, falls back to a
;; DImode move for POST_INC/PRE_DEC, adds 8 first for PRE_INC, and for the
;; general case computes the address into scratch operand 2, stores through
;; it, then undoes the POST_DEC adjustment (-8) afterwards.
6128 ;; Reloading a df mode value stored in integer regs to memory can require a
6130 ;; Another reload_out<m> pattern that requires special constraints.
6131 (define_expand "reload_outdf"
6132 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6133 (match_operand:DF 1 "s_register_operand" "r")
6134 (match_operand:SI 2 "s_register_operand" "=&r")]
6138 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6141 operands[2] = XEXP (operands[0], 0);
6142 else if (code == POST_INC || code == PRE_DEC)
6144 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6145 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6146 emit_insn (gen_movdi (operands[0], operands[1]));
6149 else if (code == PRE_INC)
6151 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6153 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6156 else if (code == POST_DEC)
6157 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6159 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6160 XEXP (XEXP (operands[0], 0), 1)));
6162 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6165 if (code == POST_DEC)
6166 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; NOTE(review): soft-float DFmode move; register alternatives are expanded
;; by output_move_double, memory alternatives are ldrd/ldm-style pairs with
;; literal-pool ranges.  Constants are rejected here when the literal pool
;; is disabled so the splitter below handles them.
6172 (define_insn "*movdf_soft_insn"
6173 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6174 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6175 "TARGET_32BIT && TARGET_SOFT_FLOAT
6176 && ( register_operand (operands[0], DFmode)
6177 || register_operand (operands[1], DFmode))"
6179 switch (which_alternative)
6186 /* Cannot load it directly, split to load it via MOV / MOVT. */
6187 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6191 return output_move_double (operands, true, NULL);
6194 [(set_attr "length" "8,12,16,8,8")
6195 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6196 (set_attr "arm_pool_range" "*,*,*,1020,*")
6197 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6198 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6199 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; NOTE(review): build the 64-bit image of the DF constant with
;; real_to_target, assemble the two 32-bit halves in endian order into a
;; DImode constant, and move it into the DImode subreg of the destination.
6202 ;; Splitter for the above.
6204 [(set (match_operand:DF 0 "s_register_operand")
6205 (match_operand:DF 1 "const_double_operand"))]
6206 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6210 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6211 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6212 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6213 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6214 rtx cst = gen_int_mode (ival, DImode);
6215 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
;; NOTE(review): ldm expander.  Validates that the count is a constant in
;; [2, MAX_LDM_STM_OPS], the base register sequence stays within the core
;; register file (<= LAST_ARM_REGNUM), then delegates to
;; arm_gen_load_multiple with the address forced into a register.
6221 ;; load- and store-multiple insns
6222 ;; The arm can load/store any set of registers, provided that they are in
6223 ;; ascending order, but these expanders assume a contiguous set.
6225 (define_expand "load_multiple"
6226 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6227 (match_operand:SI 1 "" ""))
6228 (use (match_operand:SI 2 "" ""))])]
6231 HOST_WIDE_INT offset = 0;
6233 /* Support only fixed point registers. */
6234 if (!CONST_INT_P (operands[2])
6235 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6236 || INTVAL (operands[2]) < 2
6237 || !MEM_P (operands[1])
6238 || !REG_P (operands[0])
6239 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6240 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6244 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6245 INTVAL (operands[2]),
6246 force_reg (SImode, XEXP (operands[1], 0)),
6247 FALSE, operands[1], &offset);
;; NOTE(review): stm expander; same validation as load_multiple with the
;; register/memory roles swapped, delegating to arm_gen_store_multiple.
6250 (define_expand "store_multiple"
6251 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6252 (match_operand:SI 1 "" ""))
6253 (use (match_operand:SI 2 "" ""))])]
6256 HOST_WIDE_INT offset = 0;
6258 /* Support only fixed point registers. */
6259 if (!CONST_INT_P (operands[2])
6260 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6261 || INTVAL (operands[2]) < 2
6262 || !REG_P (operands[1])
6263 || !MEM_P (operands[0])
6264 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6265 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6269 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6270 INTVAL (operands[2]),
6271 force_reg (SImode, XEXP (operands[0], 0)),
6272 FALSE, operands[0], &offset);
;; NOTE(review): memset expander — operand 0 is the block, 1 the length,
;; 2 the fill value, 3 the alignment; succeeds only if arm_gen_setmem can
;; emit the sequence (the DONE/FAIL lines are elided in this extract).
6276 (define_expand "setmemsi"
6277 [(match_operand:BLK 0 "general_operand")
6278 (match_operand:SI 1 "const_int_operand")
6279 (match_operand:SI 2 "const_int_operand")
6280 (match_operand:SI 3 "const_int_operand")]
6283 if (arm_gen_setmem (operands))
;; NOTE(review): block-copy expander.  On 32-bit targets prefers the
;; ldrd/strd expansion when tuned for it and not optimizing for size, then
;; falls back to arm_gen_cpymemqi; Thumb-1 only handles 4-byte-aligned
;; blocks of at most 48 bytes via thumb_expand_cpymemqi.
6290 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6291 ;; We could let this apply for blocks of less than this, but it clobbers so
6292 ;; many registers that there is then probably a better way.
6294 (define_expand "cpymemqi"
6295 [(match_operand:BLK 0 "general_operand")
6296 (match_operand:BLK 1 "general_operand")
6297 (match_operand:SI 2 "const_int_operand")
6298 (match_operand:SI 3 "const_int_operand")]
6303 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6304 && !optimize_function_for_size_p (cfun))
6306 if (gen_cpymem_ldrd_strd (operands))
6311 if (arm_gen_cpymemqi (operands))
6315 else /* TARGET_THUMB1 */
6317 if ( INTVAL (operands[3]) != 4
6318 || INTVAL (operands[2]) > 48)
6321 thumb_expand_cpymemqi (operands);
;; NOTE(review): SImode compare-and-branch expander.  After validizing the
;; comparison it emits cbranch_cc; on the Thumb-1 path a negatable constant
;; uses cbranchsi4_scratch, and other non-cmp-able constants are forced
;; into a register.  Target-test lines are elided in this extract.
6328 ;; Compare & branch insns
6329 ;; The range calculations are based as follows:
6330 ;; For forward branches, the address calculation returns the address of
6331 ;; the next instruction. This is 2 beyond the branch instruction.
6332 ;; For backward branches, the address calculation returns the address of
6333 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6334 ;; instruction for the shortest sequence, and 4 before the branch instruction
6335 ;; if we have to jump around an unconditional branch.
6336 ;; To the basic branch range the PC offset must be added (this is +4).
6337 ;; So for forward branches we have
6338 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6339 ;; And for backward branches we have
6340 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6342 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6343 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6345 (define_expand "cbranchsi4"
6346 [(set (pc) (if_then_else
6347 (match_operator 0 "expandable_comparison_operator"
6348 [(match_operand:SI 1 "s_register_operand")
6349 (match_operand:SI 2 "nonmemory_operand")])
6350 (label_ref (match_operand 3 "" ""))
6356 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6358 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6362 if (thumb1_cmpneg_operand (operands[2], SImode))
6364 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6365 operands[3], operands[0]));
6368 if (!thumb1_cmp_operand (operands[2], SImode))
6369 operands[2] = force_reg (SImode, operands[2]);
;; NOTE(review): SFmode compare-and-branch — delegates directly to
;; cbranch_cc; requires hard float.
6372 (define_expand "cbranchsf4"
6373 [(set (pc) (if_then_else
6374 (match_operator 0 "expandable_comparison_operator"
6375 [(match_operand:SF 1 "s_register_operand")
6376 (match_operand:SF 2 "vfp_compare_operand")])
6377 (label_ref (match_operand 3 "" ""))
6379 "TARGET_32BIT && TARGET_HARD_FLOAT"
6380 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6381 operands[3])); DONE;"
;; NOTE(review): DFmode compare-and-branch — same as cbranchsf4 but also
;; requires double-precision VFP (!TARGET_VFP_SINGLE).
6384 (define_expand "cbranchdf4"
6385 [(set (pc) (if_then_else
6386 (match_operator 0 "expandable_comparison_operator"
6387 [(match_operand:DF 1 "s_register_operand")
6388 (match_operand:DF 2 "vfp_compare_operand")])
6389 (label_ref (match_operand 3 "" ""))
6391 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6392 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6393 operands[3])); DONE;"
;; NOTE(review): DImode compare-and-branch — validizes the comparison
;; (which may fail and FAIL the expand; that line is elided) and emits
;; cbranch_cc.
6396 (define_expand "cbranchdi4"
6397 [(set (pc) (if_then_else
6398 (match_operator 0 "expandable_comparison_operator"
6399 [(match_operand:DI 1 "s_register_operand")
6400 (match_operand:DI 2 "reg_or_int_operand")])
6401 (label_ref (match_operand 3 "" ""))
6405 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6407 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
;; NOTE(review): SImode compare setting CC; alternatives cover 16-bit
;; Thumb-2 encodings (length 2) and cmp/cmn with register, I and L
;; constants (output templates elided in this extract).
6413 ;; Comparison and test insns
6415 (define_insn "*arm_cmpsi_insn"
6416 [(set (reg:CC CC_REGNUM)
6417 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6418 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6426 [(set_attr "conds" "set")
6427 (set_attr "arch" "t2,t2,any,any,any")
6428 (set_attr "length" "2,2,4,4,4")
6429 (set_attr "predicable" "yes")
6430 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6431 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; NOTE(review): compare of a register against a shifted register
;; (cmp rn, rm, <shift>); register-shift alternative is ARM-only per the
;; "arch" attribute ("a").
6434 (define_insn "*cmpsi_shiftsi"
6435 [(set (reg:CC CC_REGNUM)
6436 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6437 (match_operator:SI 3 "shift_operator"
6438 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6439 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6442 [(set_attr "conds" "set")
6443 (set_attr "shift" "1")
6444 (set_attr "arch" "32,a,a")
6445 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; NOTE(review): same comparison with operands swapped, hence the CC_SWP
;; mode on the condition register.
6447 (define_insn "*cmpsi_shiftsi_swp"
6448 [(set (reg:CC_SWP CC_REGNUM)
6449 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6450 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6451 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6452 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6455 [(set_attr "conds" "set")
6456 (set_attr "shift" "1")
6457 (set_attr "arch" "32,a,a")
6458 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; NOTE(review): compare against the negation of a shifted register,
;; producing only the Z flag (CC_Z); the "type" attribute distinguishes
;; immediate vs. register shift amounts.
6460 (define_insn "*arm_cmpsi_negshiftsi_si"
6461 [(set (reg:CC_Z CC_REGNUM)
6463 (neg:SI (match_operator:SI 1 "shift_operator"
6464 [(match_operand:SI 2 "s_register_operand" "r")
6465 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6466 (match_operand:SI 0 "s_register_operand" "r")))]
6469 [(set_attr "conds" "set")
6470 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6471 (const_string "alus_shift_imm")
6472 (const_string "alus_shift_reg")))
6473 (set_attr "predicable" "yes")]
;; NOTE(review): signed DImode compare: cmp of the low words followed by
;; sbcs of the high words into a scratch, yielding N/C/V (CC_NCV mode).
6476 ;; DImode comparisons. The generic code generates branches that
6477 ;; if-conversion cannot reduce to a conditional compare, so we do
6480 (define_insn "*arm_cmpdi_insn"
6481 [(set (reg:CC_NCV CC_REGNUM)
6482 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6483 (match_operand:DI 1 "arm_di_operand" "rDi")))
6484 (clobber (match_scratch:SI 2 "=r"))]
6486 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6487 [(set_attr "conds" "set")
6488 (set_attr "length" "8")
6489 (set_attr "type" "multiple")]
;; NOTE(review): unsigned DImode compare (C and Z only, CC_CZ).  Split
;; after reload into a compare of the high words and a conditional (eq)
;; compare of the low words; the swap of high/low assignment for operand 0
;; happens in the preparation code (operands[2] = highpart before
;; operands[0] is narrowed to its lowpart).
6492 (define_insn_and_split "*arm_cmpdi_unsigned"
6493 [(set (reg:CC_CZ CC_REGNUM)
6494 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6495 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6498 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6499 "&& reload_completed"
6500 [(set (reg:CC CC_REGNUM)
6501 (compare:CC (match_dup 2) (match_dup 3)))
6502 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6503 (set (reg:CC CC_REGNUM)
6504 (compare:CC (match_dup 0) (match_dup 1))))]
6506 operands[2] = gen_highpart (SImode, operands[0]);
6507 operands[0] = gen_lowpart (SImode, operands[0]);
6508 if (CONST_INT_P (operands[1]))
6509 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6511 operands[3] = gen_highpart (SImode, operands[1]);
6512 operands[1] = gen_lowpart (SImode, operands[1]);
6514 [(set_attr "conds" "set")
6515 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6516 (set_attr "arch" "t2,t2,t2,a")
6517 (set_attr "length" "6,6,10,8")
6518 (set_attr "type" "multiple")]
;; NOTE(review): zero-length no-op pattern; exists only so CSE can match
;; (set cc cc) and delete redundant compares, as the comment explains.
6521 ; This insn allows redundant compares to be removed by cse, nothing should
6522 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6523 ; is deleted later on. The match_dup will match the mode here, so that
6524 ; mode changes of the condition codes aren't lost by this even though we don't
6525 ; specify what they are.
6527 (define_insn "*deleted_compare"
6528 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6530 "\\t%@ deleted compare"
6531 [(set_attr "conds" "set")
6532 (set_attr "length" "0")
6533 (set_attr "type" "no_insn")]
;; NOTE(review): common branch-on-comparison expander — materializes the
;; flags with arm_gen_compare_reg and rewrites the comparison to be against
;; zero on the CC register.
6537 ;; Conditional branch insns
6539 (define_expand "cbranch_cc"
6541 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6542 (match_operand 2 "" "")])
6543 (label_ref (match_operand 3 "" ""))
6546 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6547 operands[1], operands[2], NULL_RTX);
6548 operands[2] = const0_rtx;"
;; NOTE(review): conditional branch on the CC register (b%d1); bumps
;; arm_ccfsm_state when the conditional-execution FSM is active, and the
;; "length" attribute picks the short Thumb-2 encoding when the target is
;; within -250..256 bytes.
6552 ;; Patterns to match conditional branch insns.
6555 (define_insn "arm_cond_branch"
6557 (if_then_else (match_operator 1 "arm_comparison_operator"
6558 [(match_operand 2 "cc_register" "") (const_int 0)])
6559 (label_ref (match_operand 0 "" ""))
6563 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6565 arm_ccfsm_state += 2;
6568 return \"b%d1\\t%l0\";
6570 [(set_attr "conds" "use")
6571 (set_attr "type" "branch")
6572 (set (attr "length")
6574 (and (match_test "TARGET_THUMB2")
6575 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6576 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; NOTE(review): same as arm_cond_branch but with the label in the
;; else-arm, so it emits the reversed condition (b%D1).
6581 (define_insn "*arm_cond_branch_reversed"
6583 (if_then_else (match_operator 1 "arm_comparison_operator"
6584 [(match_operand 2 "cc_register" "") (const_int 0)])
6586 (label_ref (match_operand 0 "" ""))))]
6589 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6591 arm_ccfsm_state += 2;
6594 return \"b%D1\\t%l0\";
6596 [(set_attr "conds" "use")
6597 (set_attr "type" "branch")
6598 (set (attr "length")
6600 (and (match_test "TARGET_THUMB2")
6601 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6602 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; NOTE(review): store-flag expander mirroring cbranch_cc: compute the
;; flags with arm_gen_compare_reg, then compare the CC register against 0.
6611 (define_expand "cstore_cc"
6612 [(set (match_operand:SI 0 "s_register_operand")
6613 (match_operator:SI 1 "" [(match_operand 2 "" "")
6614 (match_operand 3 "" "")]))]
6616 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6617 operands[2], operands[3], NULL_RTX);
6618 operands[3] = const0_rtx;"
;; NOTE(review): store-flag as 0/1; deferred ("#") and split into a
;; conditional if_then_else move (the mov%D1 #0 / mov%d1 #1 pair shown in
;; the trailing comment).
6621 (define_insn_and_split "*mov_scc"
6622 [(set (match_operand:SI 0 "s_register_operand" "=r")
6623 (match_operator:SI 1 "arm_comparison_operator_mode"
6624 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6626 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6629 (if_then_else:SI (match_dup 1)
6633 [(set_attr "conds" "use")
6634 (set_attr "length" "8")
6635 (set_attr "type" "multiple")]
;; NOTE(review): negated store-flag where the operand is already a borrow
;; (arm_borrow_operation); single 4-byte adc-class instruction.
6638 (define_insn "*negscc_borrow"
6639 [(set (match_operand:SI 0 "s_register_operand" "=r")
6640 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6643 [(set_attr "conds" "use")
6644 (set_attr "length" "4")
6645 (set_attr "type" "adc_reg")]
;; NOTE(review): store-flag as 0/-1; explicitly excludes the borrow case
;; handled by *negscc_borrow above, and splits into a conditional move of
;; 0 / ~0 (operands[3] = GEN_INT (~0)).
6648 (define_insn_and_split "*mov_negscc"
6649 [(set (match_operand:SI 0 "s_register_operand" "=r")
6650 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6651 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6652 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6653 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6656 (if_then_else:SI (match_dup 1)
6660 operands[3] = GEN_INT (~0);
6662 [(set_attr "conds" "use")
6663 (set_attr "length" "8")
6664 (set_attr "type" "multiple")]
;; NOTE(review): complemented store-flag; splits into a conditional move of
;; ~1 / ~0 (i.e. mvn #1 / mvn #0 as the trailing comment shows).
6667 (define_insn_and_split "*mov_notscc"
6668 [(set (match_operand:SI 0 "s_register_operand" "=r")
6669 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6670 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6672 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6675 (if_then_else:SI (match_dup 1)
6679 operands[3] = GEN_INT (~1);
6680 operands[4] = GEN_INT (~0);
6682 [(set_attr "conds" "use")
6683 (set_attr "length" "8")
6684 (set_attr "type" "multiple")]
;; cstoresi4: SImode compare-and-store-condition.  For TARGET_32BIT the
;; visible code funnels into cstore_cc; the remaining (Thumb-1) code
;; open-codes each comparison with shift/add/subtract sequences or the
;; cstoresi_*_thumb1 helpers.  NOTE(review): the extraction has dropped
;; the switch-case labels, braces and several closing lines here, so
;; which arm handles which rtx code must be confirmed against the full
;; machine description.
6687 (define_expand "cstoresi4"
6688 [(set (match_operand:SI 0 "s_register_operand")
6689 (match_operator:SI 1 "expandable_comparison_operator"
6690 [(match_operand:SI 2 "s_register_operand")
6691 (match_operand:SI 3 "reg_or_int_operand")]))]
6692 "TARGET_32BIT || TARGET_THUMB1"
6694 rtx op3, scratch, scratch2;
6698 if (!arm_add_operand (operands[3], SImode))
6699 operands[3] = force_reg (SImode, operands[3]);
6700 emit_insn (gen_cstore_cc (operands[0], operands[1],
6701 operands[2], operands[3]));
6705 if (operands[3] == const0_rtx)
6707 switch (GET_CODE (operands[1]))
6710 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6714 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6718 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6719 NULL_RTX, 0, OPTAB_WIDEN);
6720 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6721 NULL_RTX, 0, OPTAB_WIDEN);
6722 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6723 operands[0], 1, OPTAB_WIDEN);
6727 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6729 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6730 NULL_RTX, 1, OPTAB_WIDEN);
6734 scratch = expand_binop (SImode, ashr_optab, operands[2],
6735 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6736 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6737 NULL_RTX, 0, OPTAB_WIDEN);
6738 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6742 /* LT is handled by generic code. No need for unsigned with 0. */
6749 switch (GET_CODE (operands[1]))
6752 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6753 NULL_RTX, 0, OPTAB_WIDEN);
6754 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6758 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6759 NULL_RTX, 0, OPTAB_WIDEN);
6760 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6764 op3 = force_reg (SImode, operands[3]);
6766 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6767 NULL_RTX, 1, OPTAB_WIDEN);
6768 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6769 NULL_RTX, 0, OPTAB_WIDEN);
6770 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6776 if (!thumb1_cmp_operand (op3, SImode))
6777 op3 = force_reg (SImode, op3);
6778 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6779 NULL_RTX, 0, OPTAB_WIDEN);
6780 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6781 NULL_RTX, 1, OPTAB_WIDEN);
6782 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6787 op3 = force_reg (SImode, operands[3]);
6788 scratch = force_reg (SImode, const0_rtx);
6789 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6795 if (!thumb1_cmp_operand (op3, SImode))
6796 op3 = force_reg (SImode, op3);
6797 scratch = force_reg (SImode, const0_rtx);
6798 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6804 if (!thumb1_cmp_operand (op3, SImode))
6805 op3 = force_reg (SImode, op3);
6806 scratch = gen_reg_rtx (SImode);
6807 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6811 op3 = force_reg (SImode, operands[3]);
6812 scratch = gen_reg_rtx (SImode);
6813 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6816 /* No good sequences for GT, LT. */
;; cstorehf4: HFmode compare-and-store; validates the comparison (call
;; truncated in this extraction) then delegates to cstore_cc.
6823 (define_expand "cstorehf4"
6824 [(set (match_operand:SI 0 "s_register_operand")
6825 (match_operator:SI 1 "expandable_comparison_operator"
6826 [(match_operand:HF 2 "s_register_operand")
6827 (match_operand:HF 3 "vfp_compare_operand")]))]
6828 "TARGET_VFP_FP16INST"
6830 if (!arm_validize_comparison (&operands[1],
6835 emit_insn (gen_cstore_cc (operands[0], operands[1],
6836 operands[2], operands[3]));

;; cstoresf4: SFmode variant; straight delegation to cstore_cc.
6841 (define_expand "cstoresf4"
6842 [(set (match_operand:SI 0 "s_register_operand")
6843 (match_operator:SI 1 "expandable_comparison_operator"
6844 [(match_operand:SF 2 "s_register_operand")
6845 (match_operand:SF 3 "vfp_compare_operand")]))]
6846 "TARGET_32BIT && TARGET_HARD_FLOAT"
6847 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6848 operands[2], operands[3])); DONE;"

;; cstoredf4: DFmode variant; requires double-precision VFP.
6851 (define_expand "cstoredf4"
6852 [(set (match_operand:SI 0 "s_register_operand")
6853 (match_operator:SI 1 "expandable_comparison_operator"
6854 [(match_operand:DF 2 "s_register_operand")
6855 (match_operand:DF 3 "vfp_compare_operand")]))]
6856 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6857 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6858 operands[2], operands[3])); DONE;"

;; cstoredi4: DImode variant; validates then delegates.  NOTE(review):
;; enabling condition and trailing argument lines are missing here.
6861 (define_expand "cstoredi4"
6862 [(set (match_operand:SI 0 "s_register_operand")
6863 (match_operator:SI 1 "expandable_comparison_operator"
6864 [(match_operand:DI 2 "s_register_operand")
6865 (match_operand:DI 3 "reg_or_int_operand")]))]
6868 if (!arm_validize_comparison (&operands[1],
6872 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6879 ;; Conditional move insns

;; movsicc/movhfcc/movsfcc/movdfcc: conditional-move expanders.  Each
;; visible body follows the same shape: validate the comparison,
;; regenerate it against a CC register with arm_gen_compare_reg, and
;; rewrite operand 1 as (code ccreg 0).  NOTE(review): enabling
;; conditions and some declarations are missing from this extraction.
6881 (define_expand "movsicc"
6882 [(set (match_operand:SI 0 "s_register_operand")
6883 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6884 (match_operand:SI 2 "arm_not_operand")
6885 (match_operand:SI 3 "arm_not_operand")))]
6892 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6893 &XEXP (operands[1], 1)))
6896 code = GET_CODE (operands[1]);
6897 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6898 XEXP (operands[1], 1), NULL_RTX);
6899 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; movhfcc: HFmode conditional move (FP16 instructions required).
6903 (define_expand "movhfcc"
6904 [(set (match_operand:HF 0 "s_register_operand")
6905 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6906 (match_operand:HF 2 "s_register_operand")
6907 (match_operand:HF 3 "s_register_operand")))]
6908 "TARGET_VFP_FP16INST"
6911 enum rtx_code code = GET_CODE (operands[1]);
6914 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6915 &XEXP (operands[1], 1)))
6918 code = GET_CODE (operands[1]);
6919 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6920 XEXP (operands[1], 1), NULL_RTX);
6921 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; movsfcc: SFmode conditional move.
6925 (define_expand "movsfcc"
6926 [(set (match_operand:SF 0 "s_register_operand")
6927 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6928 (match_operand:SF 2 "s_register_operand")
6929 (match_operand:SF 3 "s_register_operand")))]
6930 "TARGET_32BIT && TARGET_HARD_FLOAT"
6933 enum rtx_code code = GET_CODE (operands[1]);
6936 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6937 &XEXP (operands[1], 1)))
6940 code = GET_CODE (operands[1]);
6941 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6942 XEXP (operands[1], 1), NULL_RTX);
6943 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);

;; movdfcc: DFmode conditional move; needs double-precision VFP.
6947 (define_expand "movdfcc"
6948 [(set (match_operand:DF 0 "s_register_operand")
6949 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6950 (match_operand:DF 2 "s_register_operand")
6951 (match_operand:DF 3 "s_register_operand")))]
6952 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6955 enum rtx_code code = GET_CODE (operands[1]);
6958 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6959 &XEXP (operands[1], 1)))
6961 code = GET_CODE (operands[1]);
6962 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6963 XEXP (operands[1], 1), NULL_RTX);
6964 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; *cmov<mode>: SF/DF conditional move via the VSEL instruction
;; (ARMv8 VFP, TARGET_VFP5).  The output code checks the condition via
;; maybe_get_arm_condition_code and emits vsel with either operand
;; order, inverting the condition for the swapped form.
6968 (define_insn "*cmov<mode>"
6969 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6970 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6971 [(match_operand 2 "cc_register" "") (const_int 0)])
6972 (match_operand:SDF 3 "s_register_operand"
6974 (match_operand:SDF 4 "s_register_operand"
6975 "<F_constraint>")))]
6976 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6979 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6986 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6991 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6997 [(set_attr "conds" "use")
6998 (set_attr "type" "fcsel")]

;; *cmovhf: same VSEL idea for HFmode (FP16 instructions).
7001 (define_insn "*cmovhf"
7002 [(set (match_operand:HF 0 "s_register_operand" "=t")
7003 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7004 [(match_operand 2 "cc_register" "") (const_int 0)])
7005 (match_operand:HF 3 "s_register_operand" "t")
7006 (match_operand:HF 4 "s_register_operand" "t")))]
7007 "TARGET_VFP_FP16INST"
7010 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7017 return \"vsel%d1.f16\\t%0, %3, %4\";
7022 return \"vsel%D1.f16\\t%0, %4, %3\";
7028 [(set_attr "conds" "use")
7029 (set_attr "type" "fcsel")]
;; *movsicc_insn: SImode conditional move implemented with predicated
;; mov/mvn pairs (8 constraint alternatives).  After reload it splits
;; into two COND_EXEC sets, reversing the condition for the second arm
;; (using reverse_condition_maybe_unordered for FP CC modes).
;; NOTE(review): several lines (the main output templates, mode/rev_cond
;; declarations) are missing from this extraction.
7032 (define_insn_and_split "*movsicc_insn"
7033 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7035 (match_operator 3 "arm_comparison_operator"
7036 [(match_operand 4 "cc_register" "") (const_int 0)])
7037 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7038 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7049 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7050 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7051 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7052 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7053 "&& reload_completed"
7056 enum rtx_code rev_code;
7060 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7062 gen_rtx_SET (operands[0], operands[1])));
7064 rev_code = GET_CODE (operands[3]);
7065 mode = GET_MODE (operands[4]);
7066 if (mode == CCFPmode || mode == CCFPEmode)
7067 rev_code = reverse_condition_maybe_unordered (rev_code);
7069 rev_code = reverse_condition (rev_code);
7071 rev_cond = gen_rtx_fmt_ee (rev_code,
7075 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7077 gen_rtx_SET (operands[0], operands[2])));
7080 [(set_attr "length" "4,4,4,4,8,8,8,8")
7081 (set_attr "conds" "use")
7082 (set_attr_alternative "type"
7083 [(if_then_else (match_operand 2 "const_int_operand" "")
7084 (const_string "mov_imm")
7085 (const_string "mov_reg"))
7086 (const_string "mvn_imm")
7087 (if_then_else (match_operand 1 "const_int_operand" "")
7088 (const_string "mov_imm")
7089 (const_string "mov_reg"))
7090 (const_string "mvn_imm")
7091 (const_string "multiple")
7092 (const_string "multiple")
7093 (const_string "multiple")
7094 (const_string "multiple")])]

;; *movsfcc_soft_insn: SFmode conditional move when using soft-float —
;; SF values live in core registers, so a predicated register mov works.
7097 (define_insn "*movsfcc_soft_insn"
7098 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7099 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7100 [(match_operand 4 "cc_register" "") (const_int 0)])
7101 (match_operand:SF 1 "s_register_operand" "0,r")
7102 (match_operand:SF 2 "s_register_operand" "r,0")))]
7103 "TARGET_ARM && TARGET_SOFT_FLOAT"
7107 [(set_attr "conds" "use")
7108 (set_attr "type" "mov_reg")]
7112 ;; Jump and linkage insns

;; jump: generic unconditional-jump expander.
7114 (define_expand "jump"
7116 (label_ref (match_operand 0 "" "")))]

;; *arm_jump: unconditional branch.  The ccfsm-state check interacts
;; with the ARM conditional-execution state machine in the backend;
;; the length attribute picks a short encoding for near Thumb-2 targets.
7121 (define_insn "*arm_jump"
7123 (label_ref (match_operand 0 "" "")))]
7127 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7129 arm_ccfsm_state += 2;
7132 return \"b%?\\t%l0\";
7135 [(set_attr "predicable" "yes")
7136 (set (attr "length")
7138 (and (match_test "TARGET_THUMB2")
7139 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7140 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7143 (set_attr "type" "branch")]
;; call: expander for a void call.  Visible steps: default operand 2 to
;; const0_rtx for untyped calls; force long calls indirect; for FDPIC,
;; load the function descriptor into r9 for indirect calls and restore
;; r9 afterwards; route CMSE non-secure calls to the dedicated pattern.
7146 (define_expand "call"
7147 [(parallel [(call (match_operand 0 "memory_operand")
7148 (match_operand 1 "general_operand"))
7149 (use (match_operand 2 "" ""))
7150 (clobber (reg:SI LR_REGNUM))])]
7155 tree addr = MEM_EXPR (operands[0]);
7157 /* In an untyped call, we can get NULL for operand 2. */
7158 if (operands[2] == NULL_RTX)
7159 operands[2] = const0_rtx;
7161 /* Decide if we should generate indirect calls by loading the
7162 32-bit address of the callee into a register before performing the
7164 callee = XEXP (operands[0], 0);
7165 if (GET_CODE (callee) == SYMBOL_REF
7166 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7168 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7170 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7171 /* Indirect call: set r9 with FDPIC value of callee. */
7172 XEXP (operands[0], 0)
7173 = arm_load_function_descriptor (XEXP (operands[0], 0));
7175 if (detect_cmse_nonsecure_call (addr))
7177 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7179 emit_call_insn (pat);
7183 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7184 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7187 /* Restore FDPIC register (r9) after call. */
7190 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7191 rtx initial_fdpic_reg
7192 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7194 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7195 initial_fdpic_reg));

;; restore_pic_register_after_call: reload the PIC/FDPIC register from
;; either a register or memory copy of its entry value.
7202 (define_insn "restore_pic_register_after_call"
7203 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7204 (unspec:SI [(match_dup 0)
7205 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7206 UNSPEC_PIC_RESTORE))]

;; call_internal: plain call parallel used by the "call" expander.
7213 (define_expand "call_internal"
7214 [(parallel [(call (match_operand 0 "memory_operand")
7215 (match_operand 1 "general_operand"))
7216 (use (match_operand 2 "" ""))
7217 (clobber (reg:SI LR_REGNUM))])])

;; nonsecure_call_internal: CMSE non-secure call; the callee address is
;; forced into r4 (copy_to_suggested_reg) per the non-secure call ABI
;; visible here.
7219 (define_expand "nonsecure_call_internal"
7220 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7221 UNSPEC_NONSECURE_MEM)
7222 (match_operand 1 "general_operand"))
7223 (use (match_operand 2 "" ""))
7224 (clobber (reg:SI LR_REGNUM))])]
7229 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7230 gen_rtx_REG (SImode, R4_REGNUM),
7233 operands[0] = replace_equiv_address (operands[0], tmp);

;; *call_reg_armv5: register-indirect call on ARMv5T+ (BLX available).
7236 (define_insn "*call_reg_armv5"
7237 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7238 (match_operand 1 "" ""))
7239 (use (match_operand 2 "" ""))
7240 (clobber (reg:SI LR_REGNUM))]
7241 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7243 [(set_attr "type" "call")]

;; *call_reg_arm: register-indirect call pre-ARMv5T; output_call emits
;; the mov-lr/branch sequence, hence the 12-byte worst-case length.
7246 (define_insn "*call_reg_arm"
7247 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7248 (match_operand 1 "" ""))
7249 (use (match_operand 2 "" ""))
7250 (clobber (reg:SI LR_REGNUM))]
7251 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7253 return output_call (operands);
7255 ;; length is worst case, normally it is only two
7256 [(set_attr "length" "12")
7257 (set_attr "type" "call")]
;; call_value: value-returning counterpart of "call"; identical flow
;; (long-call forcing, FDPIC descriptor load + r9 restore, CMSE
;; non-secure routing) with the callee in operand 1 and the result in
;; operand 0.
7261 (define_expand "call_value"
7262 [(parallel [(set (match_operand 0 "" "")
7263 (call (match_operand 1 "memory_operand")
7264 (match_operand 2 "general_operand")))
7265 (use (match_operand 3 "" ""))
7266 (clobber (reg:SI LR_REGNUM))])]
7271 tree addr = MEM_EXPR (operands[1]);
7273 /* In an untyped call, we can get NULL for operand 2. */
7274 if (operands[3] == 0)
7275 operands[3] = const0_rtx;
7277 /* Decide if we should generate indirect calls by loading the
7278 32-bit address of the callee into a register before performing the
7280 callee = XEXP (operands[1], 0);
7281 if (GET_CODE (callee) == SYMBOL_REF
7282 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7284 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7286 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7287 /* Indirect call: set r9 with FDPIC value of callee. */
7288 XEXP (operands[1], 0)
7289 = arm_load_function_descriptor (XEXP (operands[1], 0));
7291 if (detect_cmse_nonsecure_call (addr))
7293 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7294 operands[2], operands[3]);
7295 emit_call_insn (pat);
7299 pat = gen_call_value_internal (operands[0], operands[1],
7300 operands[2], operands[3]);
7301 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7304 /* Restore FDPIC register (r9) after call. */
7307 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7308 rtx initial_fdpic_reg
7309 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7311 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7312 initial_fdpic_reg));

;; call_value_internal: plain value-returning call parallel.
7319 (define_expand "call_value_internal"
7320 [(parallel [(set (match_operand 0 "" "")
7321 (call (match_operand 1 "memory_operand")
7322 (match_operand 2 "general_operand")))
7323 (use (match_operand 3 "" ""))
7324 (clobber (reg:SI LR_REGNUM))])])

;; nonsecure_call_value_internal: CMSE non-secure value call; callee
;; address forced into r4, mirroring nonsecure_call_internal.
7326 (define_expand "nonsecure_call_value_internal"
7327 [(parallel [(set (match_operand 0 "" "")
7328 (call (unspec:SI [(match_operand 1 "memory_operand")]
7329 UNSPEC_NONSECURE_MEM)
7330 (match_operand 2 "general_operand")))
7331 (use (match_operand 3 "" ""))
7332 (clobber (reg:SI LR_REGNUM))])]
7337 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7338 gen_rtx_REG (SImode, R4_REGNUM),
7341 operands[1] = replace_equiv_address (operands[1], tmp);

;; *call_value_reg_armv5: register-indirect value call, ARMv5T+.
7344 (define_insn "*call_value_reg_armv5"
7345 [(set (match_operand 0 "" "")
7346 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7347 (match_operand 2 "" "")))
7348 (use (match_operand 3 "" ""))
7349 (clobber (reg:SI LR_REGNUM))]
7350 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7352 [(set_attr "type" "call")]

;; *call_value_reg_arm: pre-ARMv5T variant via output_call (note the
;; &operands[1] — the callee operand subarray is passed).
7355 (define_insn "*call_value_reg_arm"
7356 [(set (match_operand 0 "" "")
7357 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7358 (match_operand 2 "" "")))
7359 (use (match_operand 3 "" ""))
7360 (clobber (reg:SI LR_REGNUM))]
7361 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7363 return output_call (&operands[1]);
7365 [(set_attr "length" "12")
7366 (set_attr "type" "call")]
7369 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7370 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.

;; *call_symbol: direct call to a non-long-call SYMBOL_REF.  Uses blx
;; for local functions when an ARM/Thumb mode switch is possible
;; (arm_change_mode_p), otherwise bl, with (PLT) when NEED_PLT_RELOC.
7372 (define_insn "*call_symbol"
7373 [(call (mem:SI (match_operand:SI 0 "" ""))
7374 (match_operand 1 "" ""))
7375 (use (match_operand 2 "" ""))
7376 (clobber (reg:SI LR_REGNUM))]
7378 && !SIBLING_CALL_P (insn)
7379 && (GET_CODE (operands[0]) == SYMBOL_REF)
7380 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7383 rtx op = operands[0];
7385 /* Switch mode now when possible. */
7386 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7387 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7388 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7390 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7392 [(set_attr "type" "call")]

;; *call_value_symbol: value-returning direct symbol call; same output
;; logic on operand 1.
7395 (define_insn "*call_value_symbol"
7396 [(set (match_operand 0 "" "")
7397 (call (mem:SI (match_operand:SI 1 "" ""))
7398 (match_operand:SI 2 "" "")))
7399 (use (match_operand 3 "" ""))
7400 (clobber (reg:SI LR_REGNUM))]
7402 && !SIBLING_CALL_P (insn)
7403 && (GET_CODE (operands[1]) == SYMBOL_REF)
7404 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7407 rtx op = operands[1];
7409 /* Switch mode now when possible. */
7410 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7411 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7412 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7414 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7416 [(set_attr "type" "call")]
;; sibcall_internal: tail-call parallel (no LR clobber — LR is not
;; live across a sibling call).
7419 (define_expand "sibcall_internal"
7420 [(parallel [(call (match_operand 0 "memory_operand")
7421 (match_operand 1 "general_operand"))
7423 (use (match_operand 2 "" ""))])])

7425 ;; We may also be able to do sibcalls for Thumb, but it's much harder...

;; sibcall: expander — forces non-register, non-symbol (or long-call
;; symbol) callees into a register, defaults operand 2, then emits via
;; arm_emit_call_insn with sibcall=true.
7426 (define_expand "sibcall"
7427 [(parallel [(call (match_operand 0 "memory_operand")
7428 (match_operand 1 "general_operand"))
7430 (use (match_operand 2 "" ""))])]
7436 if ((!REG_P (XEXP (operands[0], 0))
7437 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7438 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7439 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7440 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7442 if (operands[2] == NULL_RTX)
7443 operands[2] = const0_rtx;
7445 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7446 arm_emit_call_insn (pat, operands[0], true);

;; sibcall_value_internal: value-returning tail-call parallel.
7451 (define_expand "sibcall_value_internal"
7452 [(parallel [(set (match_operand 0 "" "")
7453 (call (match_operand 1 "memory_operand")
7454 (match_operand 2 "general_operand")))
7456 (use (match_operand 3 "" ""))])])

;; sibcall_value: value-returning sibcall expander, same callee
;; normalisation as "sibcall" on operand 1.
7458 (define_expand "sibcall_value"
7459 [(parallel [(set (match_operand 0 "" "")
7460 (call (match_operand 1 "memory_operand")
7461 (match_operand 2 "general_operand")))
7463 (use (match_operand 3 "" ""))])]
7469 if ((!REG_P (XEXP (operands[1], 0))
7470 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7471 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7472 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7473 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7475 if (operands[3] == NULL_RTX)
7476 operands[3] = const0_rtx;
7478 pat = gen_sibcall_value_internal (operands[0], operands[1],
7479 operands[2], operands[3]);
7480 arm_emit_call_insn (pat, operands[1], true);

;; *sibcall_insn: direct (b) or register-indirect (bx / mov pc) tail
;; call, chosen by constraint alternative and architecture level.
7485 (define_insn "*sibcall_insn"
7486 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7487 (match_operand 1 "" ""))
7489 (use (match_operand 2 "" ""))]
7490 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7492 if (which_alternative == 1)
7493 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7496 if (arm_arch5t || arm_arch4t)
7497 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7499 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7502 [(set_attr "type" "call")]

;; *sibcall_value_insn: value-returning counterpart of *sibcall_insn.
7505 (define_insn "*sibcall_value_insn"
7506 [(set (match_operand 0 "" "")
7507 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7508 (match_operand 2 "" "")))
7510 (use (match_operand 3 "" ""))]
7511 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7513 if (which_alternative == 1)
7514 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7517 if (arm_arch5t || arm_arch4t)
7518 return \"bx%?\\t%1\";
7520 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7523 [(set_attr "type" "call")]
;; <return_str>return: return/simple_return expander (iterator-driven);
;; Thumb-2 path goes through thumb2_expand_return, excluding
;; stack-aligned and non-normal function types per the visible condition.
7526 (define_expand "<return_str>return"
7528 "(TARGET_ARM || (TARGET_THUMB2
7529 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7530 && !IS_STACKALIGN (arm_current_func_type ())))
7531 <return_cond_false>"
7536 thumb2_expand_return (<return_simple_p>);

7543 ;; Often the return insn will be the same as loading from memory, so set attr

;; *arm_return: unconditional ARM-mode return via
;; output_return_instruction; ccfsm state bookkeeping as in branches.
7544 (define_insn "*arm_return"
7546 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7549 if (arm_ccfsm_state == 2)
7551 arm_ccfsm_state += 2;
7554 return output_return_instruction (const_true_rtx, true, false, false);
7556 [(set_attr "type" "load_4")
7557 (set_attr "length" "12")
7558 (set_attr "predicable" "yes")]

;; *cond_<return_str>return: conditional return when the condition in
;; operand 0 holds.
7561 (define_insn "*cond_<return_str>return"
7563 (if_then_else (match_operator 0 "arm_comparison_operator"
7564 [(match_operand 1 "cc_register" "") (const_int 0)])
7567 "TARGET_ARM <return_cond_true>"
7570 if (arm_ccfsm_state == 2)
7572 arm_ccfsm_state += 2;
7575 return output_return_instruction (operands[0], true, false,
7578 [(set_attr "conds" "use")
7579 (set_attr "length" "12")
7580 (set_attr "type" "load_4")]

;; *cond_<return_str>return_inverted: as above but the return is on the
;; else-arm (third arg to output_return_instruction is true = inverted).
7583 (define_insn "*cond_<return_str>return_inverted"
7585 (if_then_else (match_operator 0 "arm_comparison_operator"
7586 [(match_operand 1 "cc_register" "") (const_int 0)])
7589 "TARGET_ARM <return_cond_true>"
7592 if (arm_ccfsm_state == 2)
7594 arm_ccfsm_state += 2;
7597 return output_return_instruction (operands[0], true, true,
7600 [(set_attr "conds" "use")
7601 (set_attr "length" "12")
7602 (set_attr "type" "load_4")]

;; *arm_simple_return: 4-byte simple return (last arg true selects the
;; "simple" form of output_return_instruction).
7605 (define_insn "*arm_simple_return"
7610 if (arm_ccfsm_state == 2)
7612 arm_ccfsm_state += 2;
7615 return output_return_instruction (const_true_rtx, true, false, true);
7617 [(set_attr "type" "branch")
7618 (set_attr "length" "4")
7619 (set_attr "predicable" "yes")]
7622 ;; Generate a sequence of instructions to determine if the processor is
7623 ;; in 26-bit or 32-bit mode, and return the appropriate return address

;; return_addr_mask: sets operand 0 to the return-address mask
;; (0x03fffffc for 26-bit mode per the visible constant) based on the
;; UNSPEC_CHECK_ARCH probe stored in the CC register.
7626 (define_expand "return_addr_mask"
7628 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7630 (set (match_operand:SI 0 "s_register_operand")
7631 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7633 (const_int 67108860)))] ; 0x03fffffc
7636 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);

;; *check_arch2: the teq r0,r0 / teq pc,pc probe — distinguishes
;; 26-bit from 32-bit mode by how pc compares against itself.
7639 (define_insn "*check_arch2"
7640 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7641 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7644 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7645 [(set_attr "length" "8")
7646 (set_attr "conds" "set")
7647 (set_attr "type" "multiple")]
7650 ;; Call subroutine returning any type.

;; untyped_call: call a function whose return type is unknown at the
;; call site; gathers all potential result registers (widening r0 to
;; TImode to cover up to four core registers), performs the call, then
;; stores each result register into the memory result block.  A
;; blockage insn stops the optimiser moving code across the stores.
7652 (define_expand "untyped_call"
7653 [(parallel [(call (match_operand 0 "" "")
7655 (match_operand 1 "" "")
7656 (match_operand 2 "" "")])]
7657 "TARGET_EITHER && !TARGET_FDPIC"
7661 rtx par = gen_rtx_PARALLEL (VOIDmode,
7662 rtvec_alloc (XVECLEN (operands[2], 0)));
7663 rtx addr = gen_reg_rtx (Pmode);
7667 emit_move_insn (addr, XEXP (operands[1], 0));
7668 mem = change_address (operands[1], BLKmode, addr);
7670 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7672 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7674 /* Default code only uses r0 as a return value, but we could
7675 be using anything up to 4 registers. */
7676 if (REGNO (src) == R0_REGNUM)
7677 src = gen_rtx_REG (TImode, R0_REGNUM);
7679 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7681 size += GET_MODE_SIZE (GET_MODE (src));
7684 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7688 for (i = 0; i < XVECLEN (par, 0); i++)
7690 HOST_WIDE_INT offset = 0;
7691 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7694 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7696 mem = change_address (mem, GET_MODE (reg), NULL);
7697 if (REGNO (reg) == R0_REGNUM)
7699 /* On thumb we have to use a write-back instruction. */
7700 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7701 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7702 size = TARGET_ARM ? 16 : 0;
7706 emit_move_insn (mem, reg);
7707 size = GET_MODE_SIZE (GET_MODE (reg));
7711 /* The optimizer does not know that the call sets the function value
7712 registers we stored in the result block. We avoid problems by
7713 claiming that all hard registers are used and clobbered at this
7715 emit_insn (gen_blockage ());

;; untyped_return: mirror of untyped_call for __builtin_return —
;; reloads each result register from the memory block, emits USEs so
;; the values stay live, then expands a naked return.
7721 (define_expand "untyped_return"
7722 [(match_operand:BLK 0 "memory_operand")
7723 (match_operand 1 "" "")]
7724 "TARGET_EITHER && !TARGET_FDPIC"
7728 rtx addr = gen_reg_rtx (Pmode);
7732 emit_move_insn (addr, XEXP (operands[0], 0));
7733 mem = change_address (operands[0], BLKmode, addr);
7735 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7737 HOST_WIDE_INT offset = 0;
7738 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7741 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7743 mem = change_address (mem, GET_MODE (reg), NULL);
7744 if (REGNO (reg) == R0_REGNUM)
7746 /* On thumb we have to use a write-back instruction. */
7747 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7748 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7749 size = TARGET_ARM ? 16 : 0;
7753 emit_move_insn (reg, mem);
7754 size = GET_MODE_SIZE (GET_MODE (reg));
7758 /* Emit USE insns before the return. */
7759 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7760 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7762 /* Construct the return. */
7763 expand_naked_return ();
7769 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7770 ;; all of memory. This blocks insns from being moved across this point.

;; blockage: zero-length scheduling barrier (see comment above).
7772 (define_insn "blockage"
7773 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7776 [(set_attr "length" "0")
7777 (set_attr "type" "block")]

7780 ;; Since we hard code r0 here use the 'o' constraint to prevent
7781 ;; provoking undefined behaviour in the hardware with putting out
7782 ;; auto-increment operations with potentially r0 as the base register.

;; probe_stack: single stack-probe store (stack-clash protection).
7783 (define_insn "probe_stack"
7784 [(set (match_operand:SI 0 "memory_operand" "=o")
7785 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7788 [(set_attr "type" "store_4")
7789 (set_attr "predicable" "yes")]

;; probe_stack_range: probe every page between operand 1 and operand 2;
;; sequence emitted by output_probe_stack_range.
7792 (define_insn "probe_stack_range"
7793 [(set (match_operand:SI 0 "register_operand" "=r")
7794 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7795 (match_operand:SI 2 "register_operand" "r")]
7796 VUNSPEC_PROBE_STACK_RANGE))]
7799 return output_probe_stack_range (operands[0], operands[2]);
7801 [(set_attr "type" "multiple")
7802 (set_attr "conds" "clob")]
7805 ;; Named patterns for stack smashing protection.

;; stack_protect_combined_set: combined guard-load + canary-store
;; expander; scratches are allocated by the splitter below.
7806 (define_expand "stack_protect_combined_set"
7808 [(set (match_operand:SI 0 "memory_operand")
7809 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7811 (clobber (match_scratch:SI 2 ""))
7812 (clobber (match_scratch:SI 3 ""))])]

7817 ;; Use a separate insn from the above expand to be able to have the mem outside
7818 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7819 ;; try to reload the guard since we need to control how PIC access is done in
7820 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7821 ;; legitimize_pic_address ()).
;; Split computes the guard address (PIC-aware; FDPIC uses r9 as the
;; PIC register) into scratch 2, then hands off to the set insn below.
7822 (define_insn_and_split "*stack_protect_combined_set_insn"
7823 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7824 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7826 (clobber (match_scratch:SI 2 "=&l,&r"))
7827 (clobber (match_scratch:SI 3 "=&l,&r"))]
7831 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7833 (clobber (match_dup 2))])]
7841 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7843 pic_reg = operands[3];
7845 /* Forces recomputing of GOT base now. */
7846 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7847 true /*compute_now*/);
7851 if (address_operand (operands[1], SImode))
7852 operands[2] = operands[1];
7855 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7856 emit_move_insn (operands[2], mem);
7860 [(set_attr "arch" "t1,32")]

7863 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7864 ;; canary value does not live beyond the life of this sequence.
;; *stack_protect_set_insn: load canary, store it, then zero the
;; scratch register so the canary value dies immediately.
7865 (define_insn "*stack_protect_set_insn"
7866 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7867 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7869 (clobber (match_dup 1))]
7872 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7873 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7874 [(set_attr "length" "8,12")
7875 (set_attr "conds" "clob,nocond")
7876 (set_attr "type" "multiple")
7877 (set_attr "arch" "t1,32")]

;; stack_protect_combined_test: combined guard-load + compare + branch
;; expander for the epilogue canary check.
7880 (define_expand "stack_protect_combined_test"
7884 (eq (match_operand:SI 0 "memory_operand")
7885 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7887 (label_ref (match_operand 2))
7889 (clobber (match_scratch:SI 3 ""))
7890 (clobber (match_scratch:SI 4 ""))
7891 (clobber (reg:CC CC_REGNUM))])]

7896 ;; Use a separate insn from the above expand to be able to have the mem outside
7897 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7898 ;; try to reload the guard since we need to control how PIC access is done in
7899 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7900 ;; legitimize_pic_address ()).
;; Split resolves the guard address (PIC/FDPIC-aware) and then emits
;; either the 32-bit CC-based test + conditional branch, or the
;; Thumb-1 eors-based test + cbranch.
7901 (define_insn_and_split "*stack_protect_combined_test_insn"
7904 (eq (match_operand:SI 0 "memory_operand" "m,m")
7905 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7907 (label_ref (match_operand 2))
7909 (clobber (match_scratch:SI 3 "=&l,&r"))
7910 (clobber (match_scratch:SI 4 "=&l,&r"))
7911 (clobber (reg:CC CC_REGNUM))]
7924 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7926 pic_reg = operands[4];
7928 /* Forces recomputing of GOT base now. */
7929 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7930 true /*compute_now*/);
7934 if (address_operand (operands[1], SImode))
7935 operands[3] = operands[1];
7938 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7939 emit_move_insn (operands[3], mem);
7944 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7946 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7947 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7948 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7952 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7954 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7955 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7960 [(set_attr "arch" "t1,32")]

;; arm_stack_protect_test_insn: ldr/ldr/eors sequence comparing the
;; stored canary with the guard; sets CC_Z, clobbers both scratches so
;; the canary does not survive.
7963 (define_insn "arm_stack_protect_test_insn"
7964 [(set (reg:CC_Z CC_REGNUM)
7965 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7966 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7969 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7970 (clobber (match_dup 2))]
7972 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7973 [(set_attr "length" "8,12")
7974 (set_attr "conds" "set")
7975 (set_attr "type" "multiple")
7976 (set_attr "arch" "t,32")]
7979 (define_expand "casesi"
7980 [(match_operand:SI 0 "s_register_operand") ; index to jump on
7981 (match_operand:SI 1 "const_int_operand") ; lower bound
7982 (match_operand:SI 2 "const_int_operand") ; total range
7983 (match_operand:SI 3 "" "") ; table label
7984 (match_operand:SI 4 "" "")] ; Out of range label
7985 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7988 enum insn_code code;
7989 if (operands[1] != const0_rtx)
7991 rtx reg = gen_reg_rtx (SImode);
7993 emit_insn (gen_addsi3 (reg, operands[0],
7994 gen_int_mode (-INTVAL (operands[1]),
8000 code = CODE_FOR_arm_casesi_internal;
8001 else if (TARGET_THUMB1)
8002 code = CODE_FOR_thumb1_casesi_internal_pic;
8004 code = CODE_FOR_thumb2_casesi_internal_pic;
8006 code = CODE_FOR_thumb2_casesi_internal;
8008 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8009 operands[2] = force_reg (SImode, operands[2]);
8011 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8012 operands[3], operands[4]));
8017 ;; The USE in this pattern is needed to tell flow analysis that this is
8018 ;; a CASESI insn. It has no other purpose.
8019 (define_expand "arm_casesi_internal"
8020 [(parallel [(set (pc)
8022 (leu (match_operand:SI 0 "s_register_operand")
8023 (match_operand:SI 1 "arm_rhs_operand"))
8025 (label_ref:SI (match_operand 3 ""))))
8026 (clobber (reg:CC CC_REGNUM))
8027 (use (label_ref:SI (match_operand 2 "")))])]
8030 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8031 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8032 gen_rtx_LABEL_REF (SImode, operands[2]));
8033 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8034 MEM_READONLY_P (operands[4]) = 1;
8035 MEM_NOTRAP_P (operands[4]) = 1;
8038 (define_insn "*arm_casesi_internal"
8039 [(parallel [(set (pc)
8041 (leu (match_operand:SI 0 "s_register_operand" "r")
8042 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8043 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8044 (label_ref:SI (match_operand 2 "" ""))))
8045 (label_ref:SI (match_operand 3 "" ""))))
8046 (clobber (reg:CC CC_REGNUM))
8047 (use (label_ref:SI (match_dup 2)))])]
8051 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8052 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8054 [(set_attr "conds" "clob")
8055 (set_attr "length" "12")
8056 (set_attr "type" "multiple")]
8059 (define_expand "indirect_jump"
8061 (match_operand:SI 0 "s_register_operand"))]
8064 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8065 address and use bx. */
8069 tmp = gen_reg_rtx (SImode);
8070 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8076 ;; NB Never uses BX.
8077 (define_insn "*arm_indirect_jump"
8079 (match_operand:SI 0 "s_register_operand" "r"))]
8081 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8082 [(set_attr "predicable" "yes")
8083 (set_attr "type" "branch")]
8086 (define_insn "*load_indirect_jump"
8088 (match_operand:SI 0 "memory_operand" "m"))]
8090 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8091 [(set_attr "type" "load_4")
8092 (set_attr "pool_range" "4096")
8093 (set_attr "neg_pool_range" "4084")
8094 (set_attr "predicable" "yes")]
8104 [(set (attr "length")
8105 (if_then_else (eq_attr "is_thumb" "yes")
8108 (set_attr "type" "mov_reg")]
8112 [(trap_if (const_int 1) (const_int 0))]
8116 return \".inst\\t0xe7f000f0\";
8118 return \".inst\\t0xdeff\";
8120 [(set (attr "length")
8121 (if_then_else (eq_attr "is_thumb" "yes")
8124 (set_attr "type" "trap")
8125 (set_attr "conds" "unconditional")]
8129 ;; Patterns to allow combination of arithmetic, cond code and shifts
8131 (define_insn "*<arith_shift_insn>_multsi"
8132 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8134 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8135 (match_operand:SI 3 "power_of_two_operand" ""))
8136 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8138 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8139 [(set_attr "predicable" "yes")
8140 (set_attr "shift" "2")
8141 (set_attr "arch" "a,t2")
8142 (set_attr "type" "alu_shift_imm")])
8144 (define_insn "*<arith_shift_insn>_shiftsi"
8145 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8147 (match_operator:SI 2 "shift_nomul_operator"
8148 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8149 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8150 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8151 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8152 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8153 [(set_attr "predicable" "yes")
8154 (set_attr "shift" "3")
8155 (set_attr "arch" "a,t2,a")
8156 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8159 [(set (match_operand:SI 0 "s_register_operand" "")
8160 (match_operator:SI 1 "shiftable_operator"
8161 [(match_operator:SI 2 "shiftable_operator"
8162 [(match_operator:SI 3 "shift_operator"
8163 [(match_operand:SI 4 "s_register_operand" "")
8164 (match_operand:SI 5 "reg_or_int_operand" "")])
8165 (match_operand:SI 6 "s_register_operand" "")])
8166 (match_operand:SI 7 "arm_rhs_operand" "")]))
8167 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8170 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8173 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8176 (define_insn "*arith_shiftsi_compare0"
8177 [(set (reg:CC_NOOV CC_REGNUM)
8179 (match_operator:SI 1 "shiftable_operator"
8180 [(match_operator:SI 3 "shift_operator"
8181 [(match_operand:SI 4 "s_register_operand" "r,r")
8182 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8183 (match_operand:SI 2 "s_register_operand" "r,r")])
8185 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8186 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8189 "%i1s%?\\t%0, %2, %4%S3"
8190 [(set_attr "conds" "set")
8191 (set_attr "shift" "4")
8192 (set_attr "arch" "32,a")
8193 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8195 (define_insn "*arith_shiftsi_compare0_scratch"
8196 [(set (reg:CC_NOOV CC_REGNUM)
8198 (match_operator:SI 1 "shiftable_operator"
8199 [(match_operator:SI 3 "shift_operator"
8200 [(match_operand:SI 4 "s_register_operand" "r,r")
8201 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8202 (match_operand:SI 2 "s_register_operand" "r,r")])
8204 (clobber (match_scratch:SI 0 "=r,r"))]
8206 "%i1s%?\\t%0, %2, %4%S3"
8207 [(set_attr "conds" "set")
8208 (set_attr "shift" "4")
8209 (set_attr "arch" "32,a")
8210 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8212 (define_insn "*sub_shiftsi"
8213 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8214 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8215 (match_operator:SI 2 "shift_operator"
8216 [(match_operand:SI 3 "s_register_operand" "r,r")
8217 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8219 "sub%?\\t%0, %1, %3%S2"
8220 [(set_attr "predicable" "yes")
8221 (set_attr "predicable_short_it" "no")
8222 (set_attr "shift" "3")
8223 (set_attr "arch" "32,a")
8224 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8226 (define_insn "*sub_shiftsi_compare0"
8227 [(set (reg:CC_NOOV CC_REGNUM)
8229 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8230 (match_operator:SI 2 "shift_operator"
8231 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8232 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8234 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8235 (minus:SI (match_dup 1)
8236 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8238 "subs%?\\t%0, %1, %3%S2"
8239 [(set_attr "conds" "set")
8240 (set_attr "shift" "3")
8241 (set_attr "arch" "32,a,a")
8242 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8244 (define_insn "*sub_shiftsi_compare0_scratch"
8245 [(set (reg:CC_NOOV CC_REGNUM)
8247 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8248 (match_operator:SI 2 "shift_operator"
8249 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8250 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8252 (clobber (match_scratch:SI 0 "=r,r,r"))]
8254 "subs%?\\t%0, %1, %3%S2"
8255 [(set_attr "conds" "set")
8256 (set_attr "shift" "3")
8257 (set_attr "arch" "32,a,a")
8258 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8261 (define_insn_and_split "*and_scc"
8262 [(set (match_operand:SI 0 "s_register_operand" "=r")
8263 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8264 [(match_operand 2 "cc_register" "") (const_int 0)])
8265 (match_operand:SI 3 "s_register_operand" "r")))]
8267 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8268 "&& reload_completed"
8269 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8270 (cond_exec (match_dup 4) (set (match_dup 0)
8271 (and:SI (match_dup 3) (const_int 1))))]
8273 machine_mode mode = GET_MODE (operands[2]);
8274 enum rtx_code rc = GET_CODE (operands[1]);
8276 /* Note that operands[4] is the same as operands[1],
8277 but with VOIDmode as the result. */
8278 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8279 if (mode == CCFPmode || mode == CCFPEmode)
8280 rc = reverse_condition_maybe_unordered (rc);
8282 rc = reverse_condition (rc);
8283 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8285 [(set_attr "conds" "use")
8286 (set_attr "type" "multiple")
8287 (set_attr "length" "8")]
8290 (define_insn_and_split "*ior_scc"
8291 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8292 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8293 [(match_operand 2 "cc_register" "") (const_int 0)])
8294 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8299 "&& reload_completed
8300 && REGNO (operands [0]) != REGNO (operands[3])"
8301 ;; && which_alternative == 1
8302 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8303 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8304 (cond_exec (match_dup 4) (set (match_dup 0)
8305 (ior:SI (match_dup 3) (const_int 1))))]
8307 machine_mode mode = GET_MODE (operands[2]);
8308 enum rtx_code rc = GET_CODE (operands[1]);
8310 /* Note that operands[4] is the same as operands[1],
8311 but with VOIDmode as the result. */
8312 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8313 if (mode == CCFPmode || mode == CCFPEmode)
8314 rc = reverse_condition_maybe_unordered (rc);
8316 rc = reverse_condition (rc);
8317 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8319 [(set_attr "conds" "use")
8320 (set_attr "length" "4,8")
8321 (set_attr "type" "logic_imm,multiple")]
8324 ; A series of splitters for the compare_scc pattern below. Note that
8325 ; order is important.
8327 [(set (match_operand:SI 0 "s_register_operand" "")
8328 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8330 (clobber (reg:CC CC_REGNUM))]
8331 "TARGET_32BIT && reload_completed"
8332 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8335 [(set (match_operand:SI 0 "s_register_operand" "")
8336 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8338 (clobber (reg:CC CC_REGNUM))]
8339 "TARGET_32BIT && reload_completed"
8340 [(set (match_dup 0) (not:SI (match_dup 1)))
8341 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8344 [(set (match_operand:SI 0 "s_register_operand" "")
8345 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8347 (clobber (reg:CC CC_REGNUM))]
8348 "arm_arch5t && TARGET_32BIT"
8349 [(set (match_dup 0) (clz:SI (match_dup 1)))
8350 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8354 [(set (match_operand:SI 0 "s_register_operand" "")
8355 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8357 (clobber (reg:CC CC_REGNUM))]
8358 "TARGET_32BIT && reload_completed"
8360 [(set (reg:CC CC_REGNUM)
8361 (compare:CC (const_int 1) (match_dup 1)))
8363 (minus:SI (const_int 1) (match_dup 1)))])
8364 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8365 (set (match_dup 0) (const_int 0)))])
8368 [(set (match_operand:SI 0 "s_register_operand" "")
8369 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8370 (match_operand:SI 2 "const_int_operand" "")))
8371 (clobber (reg:CC CC_REGNUM))]
8372 "TARGET_32BIT && reload_completed"
8374 [(set (reg:CC CC_REGNUM)
8375 (compare:CC (match_dup 1) (match_dup 2)))
8376 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8377 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8378 (set (match_dup 0) (const_int 1)))]
8380 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8384 [(set (match_operand:SI 0 "s_register_operand" "")
8385 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8386 (match_operand:SI 2 "arm_add_operand" "")))
8387 (clobber (reg:CC CC_REGNUM))]
8388 "TARGET_32BIT && reload_completed"
8390 [(set (reg:CC_NOOV CC_REGNUM)
8391 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8393 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8394 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8395 (set (match_dup 0) (const_int 1)))])
8397 (define_insn_and_split "*compare_scc"
8398 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8399 (match_operator:SI 1 "arm_comparison_operator"
8400 [(match_operand:SI 2 "s_register_operand" "r,r")
8401 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8402 (clobber (reg:CC CC_REGNUM))]
8405 "&& reload_completed"
8406 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8407 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8408 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8411 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8412 operands[2], operands[3]);
8413 enum rtx_code rc = GET_CODE (operands[1]);
8415 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8417 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8418 if (mode == CCFPmode || mode == CCFPEmode)
8419 rc = reverse_condition_maybe_unordered (rc);
8421 rc = reverse_condition (rc);
8422 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8424 [(set_attr "type" "multiple")]
8427 ;; Attempt to improve the sequence generated by the compare_scc splitters
8428 ;; not to use conditional execution.
8430 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
8434 [(set (reg:CC CC_REGNUM)
8435 (compare:CC (match_operand:SI 1 "register_operand" "")
8437 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8438 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8439 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8440 (set (match_dup 0) (const_int 1)))]
8441 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8442 [(set (match_dup 0) (clz:SI (match_dup 1)))
8443 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8446 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
8450 [(set (reg:CC CC_REGNUM)
8451 (compare:CC (match_operand:SI 1 "register_operand" "")
8453 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8454 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8455 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8456 (set (match_dup 0) (const_int 1)))
8457 (match_scratch:SI 2 "r")]
8458 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8460 [(set (reg:CC CC_REGNUM)
8461 (compare:CC (const_int 0) (match_dup 1)))
8462 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8464 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8465 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8468 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8469 ;; sub Rd, Reg1, reg2
8473 [(set (reg:CC CC_REGNUM)
8474 (compare:CC (match_operand:SI 1 "register_operand" "")
8475 (match_operand:SI 2 "arm_rhs_operand" "")))
8476 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8477 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8478 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8479 (set (match_dup 0) (const_int 1)))]
8480 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8481 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8482 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8483 (set (match_dup 0) (clz:SI (match_dup 0)))
8484 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8488 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8489 ;; sub T1, Reg1, reg2
8493 [(set (reg:CC CC_REGNUM)
8494 (compare:CC (match_operand:SI 1 "register_operand" "")
8495 (match_operand:SI 2 "arm_rhs_operand" "")))
8496 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8497 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8498 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8499 (set (match_dup 0) (const_int 1)))
8500 (match_scratch:SI 3 "r")]
8501 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8502 [(set (match_dup 3) (match_dup 4))
8504 [(set (reg:CC CC_REGNUM)
8505 (compare:CC (const_int 0) (match_dup 3)))
8506 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8508 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8509 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8511 if (CONST_INT_P (operands[2]))
8512 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8514 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8517 (define_insn "*cond_move"
8518 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8519 (if_then_else:SI (match_operator 3 "equality_operator"
8520 [(match_operator 4 "arm_comparison_operator"
8521 [(match_operand 5 "cc_register" "") (const_int 0)])
8523 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8524 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8527 if (GET_CODE (operands[3]) == NE)
8529 if (which_alternative != 1)
8530 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8531 if (which_alternative != 0)
8532 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8535 if (which_alternative != 0)
8536 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8537 if (which_alternative != 1)
8538 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8541 [(set_attr "conds" "use")
8542 (set_attr_alternative "type"
8543 [(if_then_else (match_operand 2 "const_int_operand" "")
8544 (const_string "mov_imm")
8545 (const_string "mov_reg"))
8546 (if_then_else (match_operand 1 "const_int_operand" "")
8547 (const_string "mov_imm")
8548 (const_string "mov_reg"))
8549 (const_string "multiple")])
8550 (set_attr "length" "4,4,8")]
8553 (define_insn "*cond_arith"
8554 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8555 (match_operator:SI 5 "shiftable_operator"
8556 [(match_operator:SI 4 "arm_comparison_operator"
8557 [(match_operand:SI 2 "s_register_operand" "r,r")
8558 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8559 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8560 (clobber (reg:CC CC_REGNUM))]
8563 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8564 return \"%i5\\t%0, %1, %2, lsr #31\";
8566 output_asm_insn (\"cmp\\t%2, %3\", operands);
8567 if (GET_CODE (operands[5]) == AND)
8568 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8569 else if (GET_CODE (operands[5]) == MINUS)
8570 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8571 else if (which_alternative != 0)
8572 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8573 return \"%i5%d4\\t%0, %1, #1\";
8575 [(set_attr "conds" "clob")
8576 (set_attr "length" "12")
8577 (set_attr "type" "multiple")]
8580 (define_insn "*cond_sub"
8581 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8582 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8583 (match_operator:SI 4 "arm_comparison_operator"
8584 [(match_operand:SI 2 "s_register_operand" "r,r")
8585 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8586 (clobber (reg:CC CC_REGNUM))]
8589 output_asm_insn (\"cmp\\t%2, %3\", operands);
8590 if (which_alternative != 0)
8591 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8592 return \"sub%d4\\t%0, %1, #1\";
8594 [(set_attr "conds" "clob")
8595 (set_attr "length" "8,12")
8596 (set_attr "type" "multiple")]
8599 (define_insn "*cmp_ite0"
8600 [(set (match_operand 6 "dominant_cc_register" "")
8603 (match_operator 4 "arm_comparison_operator"
8604 [(match_operand:SI 0 "s_register_operand"
8605 "l,l,l,r,r,r,r,r,r")
8606 (match_operand:SI 1 "arm_add_operand"
8607 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8608 (match_operator:SI 5 "arm_comparison_operator"
8609 [(match_operand:SI 2 "s_register_operand"
8610 "l,r,r,l,l,r,r,r,r")
8611 (match_operand:SI 3 "arm_add_operand"
8612 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8618 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8620 {\"cmp%d5\\t%0, %1\",
8621 \"cmp%d4\\t%2, %3\"},
8622 {\"cmn%d5\\t%0, #%n1\",
8623 \"cmp%d4\\t%2, %3\"},
8624 {\"cmp%d5\\t%0, %1\",
8625 \"cmn%d4\\t%2, #%n3\"},
8626 {\"cmn%d5\\t%0, #%n1\",
8627 \"cmn%d4\\t%2, #%n3\"}
8629 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8634 \"cmn\\t%0, #%n1\"},
8635 {\"cmn\\t%2, #%n3\",
8637 {\"cmn\\t%2, #%n3\",
8640 static const char * const ite[2] =
8645 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8646 CMP_CMP, CMN_CMP, CMP_CMP,
8647 CMN_CMP, CMP_CMN, CMN_CMN};
8649 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8651 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8652 if (TARGET_THUMB2) {
8653 output_asm_insn (ite[swap], operands);
8655 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8658 [(set_attr "conds" "set")
8659 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8660 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8661 (set_attr "type" "multiple")
8662 (set_attr_alternative "length"
8668 (if_then_else (eq_attr "is_thumb" "no")
8671 (if_then_else (eq_attr "is_thumb" "no")
8674 (if_then_else (eq_attr "is_thumb" "no")
8677 (if_then_else (eq_attr "is_thumb" "no")
8682 (define_insn "*cmp_ite1"
8683 [(set (match_operand 6 "dominant_cc_register" "")
8686 (match_operator 4 "arm_comparison_operator"
8687 [(match_operand:SI 0 "s_register_operand"
8688 "l,l,l,r,r,r,r,r,r")
8689 (match_operand:SI 1 "arm_add_operand"
8690 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8691 (match_operator:SI 5 "arm_comparison_operator"
8692 [(match_operand:SI 2 "s_register_operand"
8693 "l,r,r,l,l,r,r,r,r")
8694 (match_operand:SI 3 "arm_add_operand"
8695 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8701 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8705 {\"cmn\\t%0, #%n1\",
8708 \"cmn\\t%2, #%n3\"},
8709 {\"cmn\\t%0, #%n1\",
8712 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8714 {\"cmp%d4\\t%2, %3\",
8715 \"cmp%D5\\t%0, %1\"},
8716 {\"cmp%d4\\t%2, %3\",
8717 \"cmn%D5\\t%0, #%n1\"},
8718 {\"cmn%d4\\t%2, #%n3\",
8719 \"cmp%D5\\t%0, %1\"},
8720 {\"cmn%d4\\t%2, #%n3\",
8721 \"cmn%D5\\t%0, #%n1\"}
8723 static const char * const ite[2] =
8728 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8729 CMP_CMP, CMN_CMP, CMP_CMP,
8730 CMN_CMP, CMP_CMN, CMN_CMN};
8732 comparison_dominates_p (GET_CODE (operands[5]),
8733 reverse_condition (GET_CODE (operands[4])));
8735 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8736 if (TARGET_THUMB2) {
8737 output_asm_insn (ite[swap], operands);
8739 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8742 [(set_attr "conds" "set")
8743 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8744 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8745 (set_attr_alternative "length"
8751 (if_then_else (eq_attr "is_thumb" "no")
8754 (if_then_else (eq_attr "is_thumb" "no")
8757 (if_then_else (eq_attr "is_thumb" "no")
8760 (if_then_else (eq_attr "is_thumb" "no")
8763 (set_attr "type" "multiple")]
8766 (define_insn "*cmp_and"
8767 [(set (match_operand 6 "dominant_cc_register" "")
8770 (match_operator 4 "arm_comparison_operator"
8771 [(match_operand:SI 0 "s_register_operand"
8772 "l,l,l,r,r,r,r,r,r,r")
8773 (match_operand:SI 1 "arm_add_operand"
8774 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8775 (match_operator:SI 5 "arm_comparison_operator"
8776 [(match_operand:SI 2 "s_register_operand"
8777 "l,r,r,l,l,r,r,r,r,r")
8778 (match_operand:SI 3 "arm_add_operand"
8779 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8784 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8786 {\"cmp%d5\\t%0, %1\",
8787 \"cmp%d4\\t%2, %3\"},
8788 {\"cmn%d5\\t%0, #%n1\",
8789 \"cmp%d4\\t%2, %3\"},
8790 {\"cmp%d5\\t%0, %1\",
8791 \"cmn%d4\\t%2, #%n3\"},
8792 {\"cmn%d5\\t%0, #%n1\",
8793 \"cmn%d4\\t%2, #%n3\"}
8795 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8800 \"cmn\\t%0, #%n1\"},
8801 {\"cmn\\t%2, #%n3\",
8803 {\"cmn\\t%2, #%n3\",
8806 static const char *const ite[2] =
8811 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8812 CMP_CMP, CMN_CMP, CMP_CMP,
8813 CMP_CMP, CMN_CMP, CMP_CMN,
8816 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8818 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8819 if (TARGET_THUMB2) {
8820 output_asm_insn (ite[swap], operands);
8822 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8825 [(set_attr "conds" "set")
8826 (set_attr "predicable" "no")
8827 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8828 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8829 (set_attr_alternative "length"
8836 (if_then_else (eq_attr "is_thumb" "no")
8839 (if_then_else (eq_attr "is_thumb" "no")
8842 (if_then_else (eq_attr "is_thumb" "no")
8845 (if_then_else (eq_attr "is_thumb" "no")
8848 (set_attr "type" "multiple")]
8851 (define_insn "*cmp_ior"
8852 [(set (match_operand 6 "dominant_cc_register" "")
8855 (match_operator 4 "arm_comparison_operator"
8856 [(match_operand:SI 0 "s_register_operand"
8857 "l,l,l,r,r,r,r,r,r,r")
8858 (match_operand:SI 1 "arm_add_operand"
8859 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8860 (match_operator:SI 5 "arm_comparison_operator"
8861 [(match_operand:SI 2 "s_register_operand"
8862 "l,r,r,l,l,r,r,r,r,r")
8863 (match_operand:SI 3 "arm_add_operand"
8864 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8869 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8873 {\"cmn\\t%0, #%n1\",
8876 \"cmn\\t%2, #%n3\"},
8877 {\"cmn\\t%0, #%n1\",
8880 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8882 {\"cmp%D4\\t%2, %3\",
8883 \"cmp%D5\\t%0, %1\"},
8884 {\"cmp%D4\\t%2, %3\",
8885 \"cmn%D5\\t%0, #%n1\"},
8886 {\"cmn%D4\\t%2, #%n3\",
8887 \"cmp%D5\\t%0, %1\"},
8888 {\"cmn%D4\\t%2, #%n3\",
8889 \"cmn%D5\\t%0, #%n1\"}
8891 static const char *const ite[2] =
8896 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8897 CMP_CMP, CMN_CMP, CMP_CMP,
8898 CMP_CMP, CMN_CMP, CMP_CMN,
8901 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8903 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8904 if (TARGET_THUMB2) {
8905 output_asm_insn (ite[swap], operands);
8907 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8911 [(set_attr "conds" "set")
8912 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8913 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8914 (set_attr_alternative "length"
8921 (if_then_else (eq_attr "is_thumb" "no")
8924 (if_then_else (eq_attr "is_thumb" "no")
8927 (if_then_else (eq_attr "is_thumb" "no")
8930 (if_then_else (eq_attr "is_thumb" "no")
8933 (set_attr "type" "multiple")]
8936 (define_insn_and_split "*ior_scc_scc"
8937 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8938 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8939 [(match_operand:SI 1 "s_register_operand" "l,r")
8940 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8941 (match_operator:SI 6 "arm_comparison_operator"
8942 [(match_operand:SI 4 "s_register_operand" "l,r")
8943 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8944 (clobber (reg:CC CC_REGNUM))]
8946 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8949 "TARGET_32BIT && reload_completed"
8953 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8954 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8956 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8958 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8961 [(set_attr "conds" "clob")
8962 (set_attr "enabled_for_short_it" "yes,no")
8963 (set_attr "length" "16")
8964 (set_attr "type" "multiple")]
8967 ; If the above pattern is followed by a CMP insn, then the compare is
8968 ; redundant, since we can rework the conditional instruction that follows.
8969 (define_insn_and_split "*ior_scc_scc_cmp"
8970 [(set (match_operand 0 "dominant_cc_register" "")
8971 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8972 [(match_operand:SI 1 "s_register_operand" "l,r")
8973 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8974 (match_operator:SI 6 "arm_comparison_operator"
8975 [(match_operand:SI 4 "s_register_operand" "l,r")
8976 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8978 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8979 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8980 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8983 "TARGET_32BIT && reload_completed"
8987 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8988 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8990 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8992 [(set_attr "conds" "set")
8993 (set_attr "enabled_for_short_it" "yes,no")
8994 (set_attr "length" "16")
8995 (set_attr "type" "multiple")]
8998 (define_insn_and_split "*and_scc_scc"
8999 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9000 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9001 [(match_operand:SI 1 "s_register_operand" "l,r")
9002 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9003 (match_operator:SI 6 "arm_comparison_operator"
9004 [(match_operand:SI 4 "s_register_operand" "l,r")
9005 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9006 (clobber (reg:CC CC_REGNUM))]
9008 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9011 "TARGET_32BIT && reload_completed
9012 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9017 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9018 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9020 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9022 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9025 [(set_attr "conds" "clob")
9026 (set_attr "enabled_for_short_it" "yes,no")
9027 (set_attr "length" "16")
9028 (set_attr "type" "multiple")]
9031 ; If the above pattern is followed by a CMP insn, then the compare is
9032 ; redundant, since we can rework the conditional instruction that follows.
;; Combine two store-condition operations ANDed together with a following
;; compare: the dominant CC register already holds the combined condition,
;; so after reload we re-emit the compare and a single (ne ... 0) store.
;; NOTE(review): interior lines of this definition (e.g. 9041, 9045-9050)
;; are missing from this extract; shown as-is.
9033 (define_insn_and_split "*and_scc_scc_cmp"
9034 [(set (match_operand 0 "dominant_cc_register" "")
9035 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9036 [(match_operand:SI 1 "s_register_operand" "l,r")
9037 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9038 (match_operator:SI 6 "arm_comparison_operator"
9039 [(match_operand:SI 4 "s_register_operand" "l,r")
9040 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9042 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9043 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9044 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9047 "TARGET_32BIT && reload_completed"
9051 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9052 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9054 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9056 [(set_attr "conds" "set")
9057 (set_attr "enabled_for_short_it" "yes,no")
9058 (set_attr "length" "16")
9059 (set_attr "type" "multiple")]
9062 ;; If there is no dominance in the comparison, then we can still save an
9063 ;; instruction in the AND case, since we can know that the second compare
9064 ;; need only zero the value if false (if true, then the value is already
;; AND of two store-condition results when no dominating CC mode exists:
;; split into a clobbering scc for the first condition plus a conditional
;; sequence for the second (the second compare need only zero the result
;; when false).  Earlyclobber (&Ts) keeps the output distinct from inputs.
;; NOTE(review): interior lines (e.g. 9075, 9077-9078, 9084, 9086-9087,
;; 9090, 9092) are missing from this extract; shown as-is.
9066 (define_insn_and_split "*and_scc_scc_nodom"
9067 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9068 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9069 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9070 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9071 (match_operator:SI 6 "arm_comparison_operator"
9072 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9073 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9074 (clobber (reg:CC CC_REGNUM))]
9076 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9079 "TARGET_32BIT && reload_completed"
9080 [(parallel [(set (match_dup 0)
9081 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9082 (clobber (reg:CC CC_REGNUM))])
9083 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9085 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9088 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9089 operands[4], operands[5]),
9091 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9093 [(set_attr "conds" "clob")
9094 (set_attr "length" "20")
9095 (set_attr "type" "multiple")]
;; Split (presumably a define_split -- its header line is not visible in
;; this extract) for a CC_NOOV compare of (ior (and x 1) scc): rewrite
;; using scratch operand 4 so the final test is (and scratch 1) against 0.
9099 [(set (reg:CC_NOOV CC_REGNUM)
9100 (compare:CC_NOOV (ior:SI
9101 (and:SI (match_operand:SI 0 "s_register_operand" "")
9103 (match_operator:SI 1 "arm_comparison_operator"
9104 [(match_operand:SI 2 "s_register_operand" "")
9105 (match_operand:SI 3 "arm_add_operand" "")]))
9107 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9110 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9112 (set (reg:CC_NOOV CC_REGNUM)
9113 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Mirror of the previous split with the IOR operands commuted:
;; scc first, (and x 1) second.  Header line is likewise not visible.
9118 [(set (reg:CC_NOOV CC_REGNUM)
9119 (compare:CC_NOOV (ior:SI
9120 (match_operator:SI 1 "arm_comparison_operator"
9121 [(match_operand:SI 2 "s_register_operand" "")
9122 (match_operand:SI 3 "arm_add_operand" "")])
9123 (and:SI (match_operand:SI 0 "s_register_operand" "")
9126 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9129 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9131 (set (reg:CC_NOOV CC_REGNUM)
9132 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9135 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; Negated store-condition: 0 / -1 depending on the comparison.  The split
;; picks the cheapest sequence: a single ASR #31 for LT-against-0, a
;; subs/mvnne pair for NE, otherwise cmp + two conditional moves.
9137 (define_insn_and_split "*negscc"
9138 [(set (match_operand:SI 0 "s_register_operand" "=r")
9139 (neg:SI (match_operator 3 "arm_comparison_operator"
9140 [(match_operand:SI 1 "s_register_operand" "r")
9141 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9142 (clobber (reg:CC CC_REGNUM))]
9145 "&& reload_completed"
9148 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9150 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9152 /* Emit mov\\t%0, %1, asr #31 */
9153 emit_insn (gen_rtx_SET (operands[0],
9154 gen_rtx_ASHIFTRT (SImode,
9159 else if (GET_CODE (operands[3]) == NE)
9161 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9162 if (CONST_INT_P (operands[2]))
9163 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9164 gen_int_mode (-INTVAL (operands[2]),
9167 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9169 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9173 gen_rtx_SET (operands[0],
9179 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9180 emit_insn (gen_rtx_SET (cc_reg,
9181 gen_rtx_COMPARE (CCmode, operands[1], operands[2])))
9182 enum rtx_code rc = GET_CODE (operands[3]);
9184 rc = reverse_condition (rc);
9185 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9190 gen_rtx_SET (operands[0], const0_rtx)));
9191 rc = GET_CODE (operands[3]);
9192 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9197 gen_rtx_SET (operands[0],
9203 [(set_attr "conds" "clob")
9204 (set_attr "length" "12")
9205 (set_attr "type" "multiple")]
;; Conditional move keyed on a comparison of (reg + addend): split into a
;; CC_NOOV-setting add-compare followed by two cond_exec moves.  The prep
;; code reverses the condition and swaps operands 1/2 when operand 2 is
;; not already the destination, so only one predicated move is needed.
9208 (define_insn_and_split "movcond_addsi"
9209 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9211 (match_operator 5 "comparison_operator"
9212 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9213 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9215 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9216 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9217 (clobber (reg:CC CC_REGNUM))]
9220 "&& reload_completed"
9221 [(set (reg:CC_NOOV CC_REGNUM)
9223 (plus:SI (match_dup 3)
9226 (set (match_dup 0) (match_dup 1))
9227 (cond_exec (match_dup 6)
9228 (set (match_dup 0) (match_dup 2)))]
9231 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9232 operands[3], operands[4]);
9233 enum rtx_code rc = GET_CODE (operands[5]);
9234 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9235 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9236 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9237 rc = reverse_condition (rc);
9239 std::swap (operands[1], operands[2]);
9241 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9244 [(set_attr "conds" "clob")
9245 (set_attr "enabled_for_short_it" "no,yes,yes")
9246 (set_attr "type" "multiple")]
;; General conditional move.  The output code special-cases LT/GE against
;; zero, using AND/BIC with "%3, asr #31" (sign-mask) to avoid a compare
;; when one arm is zero or a register; otherwise it emits cmp (or cmn for
;; a negatable out-of-range constant) plus up to two predicated moves.
9249 (define_insn "movcond"
9250 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9252 (match_operator 5 "arm_comparison_operator"
9253 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9254 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9255 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9256 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9257 (clobber (reg:CC CC_REGNUM))]
9260 if (GET_CODE (operands[5]) == LT
9261 && (operands[4] == const0_rtx))
9263 if (which_alternative != 1 && REG_P (operands[1]))
9265 if (operands[2] == const0_rtx)
9266 return \"and\\t%0, %1, %3, asr #31\";
9267 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9269 else if (which_alternative != 0 && REG_P (operands[2]))
9271 if (operands[1] == const0_rtx)
9272 return \"bic\\t%0, %2, %3, asr #31\";
9273 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9275 /* The only case that falls through to here is when both ops 1 & 2
9279 if (GET_CODE (operands[5]) == GE
9280 && (operands[4] == const0_rtx))
9282 if (which_alternative != 1 && REG_P (operands[1]))
9284 if (operands[2] == const0_rtx)
9285 return \"bic\\t%0, %1, %3, asr #31\";
9286 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9288 else if (which_alternative != 0 && REG_P (operands[2]))
9290 if (operands[1] == const0_rtx)
9291 return \"and\\t%0, %2, %3, asr #31\";
9292 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9294 /* The only case that falls through to here is when both ops 1 & 2
9297 if (CONST_INT_P (operands[4])
9298 && !const_ok_for_arm (INTVAL (operands[4])))
9299 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9301 output_asm_insn (\"cmp\\t%3, %4\", operands);
9302 if (which_alternative != 0)
9303 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9304 if (which_alternative != 1)
9305 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9308 [(set_attr "conds" "clob")
9309 (set_attr "length" "8,8,12")
9310 (set_attr "type" "multiple")]
9313 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; if_then_else of (plus reg op) vs. a plain rhs, with its own compare
;; (clobbers CC).  The "*if_plus_move" pattern below is the variant that
;; reuses an already-set CC register instead.
9315 (define_insn "*ifcompare_plus_move"
9316 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9317 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9318 [(match_operand:SI 4 "s_register_operand" "r,r")
9319 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9321 (match_operand:SI 2 "s_register_operand" "r,r")
9322 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9323 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9324 (clobber (reg:CC CC_REGNUM))]
9327 [(set_attr "conds" "clob")
9328 (set_attr "length" "8,12")
9329 (set_attr "type" "multiple")]
;; Conditional add/sub when the condition codes are already set (operand 5
;; is a cc_register): alternatives cover a predicated add, a predicated
;; sub of a negated constant, and the same plus a fallback move.
9332 (define_insn "*if_plus_move"
9333 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9335 (match_operator 4 "arm_comparison_operator"
9336 [(match_operand 5 "cc_register" "") (const_int 0)])
9338 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9339 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9340 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9344 sub%d4\\t%0, %2, #%n3
9345 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9346 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9347 [(set_attr "conds" "use")
9348 (set_attr "length" "4,4,8,8")
9349 (set_attr_alternative "type"
9350 [(if_then_else (match_operand 3 "const_int_operand" "")
9351 (const_string "alu_imm" )
9352 (const_string "alu_sreg"))
9353 (const_string "alu_imm")
9354 (const_string "multiple")
9355 (const_string "multiple")])]
;; As *ifcompare_plus_move but with the arms swapped: the plain rhs is the
;; "then" value and the (plus ...) is the "else" value.  Clobbers CC.
9358 (define_insn "*ifcompare_move_plus"
9359 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9360 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9361 [(match_operand:SI 4 "s_register_operand" "r,r")
9362 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9363 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9365 (match_operand:SI 2 "s_register_operand" "r,r")
9366 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9367 (clobber (reg:CC CC_REGNUM))]
9370 [(set_attr "conds" "clob")
9371 (set_attr "length" "8,12")
9372 (set_attr "type" "multiple")]
;; CC-reusing variant of *ifcompare_move_plus: predicated add/sub on the
;; inverse condition (%D4), optional move of operand 1 on the condition.
9375 (define_insn "*if_move_plus"
9376 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9378 (match_operator 4 "arm_comparison_operator"
9379 [(match_operand 5 "cc_register" "") (const_int 0)])
9380 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9382 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9383 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9387 sub%D4\\t%0, %2, #%n3
9388 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9389 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9390 [(set_attr "conds" "use")
9391 (set_attr "length" "4,4,8,8")
9392 (set_attr_alternative "type"
9393 [(if_then_else (match_operand 3 "const_int_operand" "")
9394 (const_string "alu_imm" )
9395 (const_string "alu_sreg"))
9396 (const_string "alu_imm")
9397 (const_string "multiple")
9398 (const_string "multiple")])]
;; Select between two shiftable-operator results based on a fresh compare:
;; cmp + two oppositely-predicated ALU ops (12 bytes).  Clobbers CC.
9401 (define_insn "*ifcompare_arith_arith"
9402 [(set (match_operand:SI 0 "s_register_operand" "=r")
9403 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9404 [(match_operand:SI 5 "s_register_operand" "r")
9405 (match_operand:SI 6 "arm_add_operand" "rIL")])
9406 (match_operator:SI 8 "shiftable_operator"
9407 [(match_operand:SI 1 "s_register_operand" "r")
9408 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9409 (match_operator:SI 7 "shiftable_operator"
9410 [(match_operand:SI 3 "s_register_operand" "r")
9411 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9412 (clobber (reg:CC CC_REGNUM))]
9415 [(set_attr "conds" "clob")
9416 (set_attr "length" "12")
9417 (set_attr "type" "multiple")]
;; CC-reusing form of the above: two predicated ALU operations, one on the
;; condition (%d5) and one on its inverse (%D5).  %I expands the operator.
9420 (define_insn "*if_arith_arith"
9421 [(set (match_operand:SI 0 "s_register_operand" "=r")
9422 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9423 [(match_operand 8 "cc_register" "") (const_int 0)])
9424 (match_operator:SI 6 "shiftable_operator"
9425 [(match_operand:SI 1 "s_register_operand" "r")
9426 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9427 (match_operator:SI 7 "shiftable_operator"
9428 [(match_operand:SI 3 "s_register_operand" "r")
9429 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9431 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9432 [(set_attr "conds" "use")
9433 (set_attr "length" "8")
9434 (set_attr "type" "multiple")]
;; Conditional ALU-op vs. move with its own compare.  For LT/GE against
;; zero with an identity-capable operator (anything but AND) and suitable
;; register operands, the sign-mask AND/BIC trick saves an instruction;
;; otherwise cmp/cmn + predicated op (+ optional predicated move).
9437 (define_insn "*ifcompare_arith_move"
9438 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9439 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9440 [(match_operand:SI 2 "s_register_operand" "r,r")
9441 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9442 (match_operator:SI 7 "shiftable_operator"
9443 [(match_operand:SI 4 "s_register_operand" "r,r")
9444 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9445 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9446 (clobber (reg:CC CC_REGNUM))]
9449 /* If we have an operation where (op x 0) is the identity operation and
9450 the conditional operator is LT or GE and we are comparing against zero and
9451 everything is in registers then we can do this in two instructions.  */
9452 if (operands[3] == const0_rtx
9453 && GET_CODE (operands[7]) != AND
9454 && REG_P (operands[5])
9455 && REG_P (operands[1])
9456 && REGNO (operands[1]) == REGNO (operands[4])
9457 && REGNO (operands[4]) != REGNO (operands[0]))
9459 if (GET_CODE (operands[6]) == LT)
9460 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9461 else if (GET_CODE (operands[6]) == GE)
9462 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9464 if (CONST_INT_P (operands[3])
9465 && !const_ok_for_arm (INTVAL (operands[3])))
9466 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9468 output_asm_insn (\"cmp\\t%2, %3\", operands);
9469 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9470 if (which_alternative != 0)
9471 return \"mov%D6\\t%0, %1\";
9474 [(set_attr "conds" "clob")
9475 (set_attr "length" "8,12")
9476 (set_attr "type" "multiple")]
;; CC-reusing form: predicated ALU op, plus a predicated move of the
;; fallback value when the destination is not already operand 1.
9479 (define_insn "*if_arith_move"
9480 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9481 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9482 [(match_operand 6 "cc_register" "") (const_int 0)])
9483 (match_operator:SI 5 "shiftable_operator"
9484 [(match_operand:SI 2 "s_register_operand" "r,r")
9485 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9486 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9490 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9491 [(set_attr "conds" "use")
9492 (set_attr "length" "4,8")
9493 (set_attr_alternative "type"
9494 [(if_then_else (match_operand 3 "const_int_operand" "")
9495 (const_string "alu_shift_imm" )
9496 (const_string "alu_shift_reg"))
9497 (const_string "multiple")])]
;; Swapped-arm variant of *ifcompare_arith_move: move on the condition,
;; ALU op on its inverse.  Note GE uses AND and LT uses BIC here -- the
;; opposite of the previous pattern -- because the selected arm is the
;; inverse one.
9500 (define_insn "*ifcompare_move_arith"
9501 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9502 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9503 [(match_operand:SI 4 "s_register_operand" "r,r")
9504 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9505 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9506 (match_operator:SI 7 "shiftable_operator"
9507 [(match_operand:SI 2 "s_register_operand" "r,r")
9508 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9509 (clobber (reg:CC CC_REGNUM))]
9512 /* If we have an operation where (op x 0) is the identity operation and
9513 the conditional operator is LT or GE and we are comparing against zero and
9514 everything is in registers then we can do this in two instructions */
9515 if (operands[5] == const0_rtx
9516 && GET_CODE (operands[7]) != AND
9517 && REG_P (operands[3])
9518 && REG_P (operands[1])
9519 && REGNO (operands[1]) == REGNO (operands[2])
9520 && REGNO (operands[2]) != REGNO (operands[0]))
9522 if (GET_CODE (operands[6]) == GE)
9523 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9524 else if (GET_CODE (operands[6]) == LT)
9525 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9528 if (CONST_INT_P (operands[5])
9529 && !const_ok_for_arm (INTVAL (operands[5])))
9530 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9532 output_asm_insn (\"cmp\\t%4, %5\", operands);
9534 if (which_alternative != 0)
9535 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9536 return \"%I7%D6\\t%0, %2, %3\";
9538 [(set_attr "conds" "clob")
9539 (set_attr "length" "8,12")
9540 (set_attr "type" "multiple")]
;; CC-reusing form of *ifcompare_move_arith: ALU op on the inverse
;; condition, optional predicated move of the fallback value.
9543 (define_insn "*if_move_arith"
9544 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9546 (match_operator 4 "arm_comparison_operator"
9547 [(match_operand 6 "cc_register" "") (const_int 0)])
9548 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9549 (match_operator:SI 5 "shiftable_operator"
9550 [(match_operand:SI 2 "s_register_operand" "r,r")
9551 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9555 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9556 [(set_attr "conds" "use")
9557 (set_attr "length" "4,8")
9558 (set_attr_alternative "type"
9559 [(if_then_else (match_operand 3 "const_int_operand" "")
9560 (const_string "alu_shift_imm" )
9561 (const_string "alu_shift_reg"))
9562 (const_string "multiple")])]
;; Conditional select between a plain value and (not reg), performing its
;; own compare (clobbers CC); the CC-reusing twin is *if_move_not below.
9565 (define_insn "*ifcompare_move_not"
9566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9568 (match_operator 5 "arm_comparison_operator"
9569 [(match_operand:SI 3 "s_register_operand" "r,r")
9570 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9571 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9573 (match_operand:SI 2 "s_register_operand" "r,r"))))
9574 (clobber (reg:CC CC_REGNUM))]
9577 [(set_attr "conds" "clob")
9578 (set_attr "length" "8,12")
9579 (set_attr "type" "multiple")]
;; Conditional MVN reusing already-set condition codes: alternative 1 is a
;; single predicated mvn; alternatives 2 and 3 first materialize operand 1
;; with a predicated mov/mvn and are therefore two-insn sequences.
9582 (define_insn "*if_move_not"
9583 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9585 (match_operator 4 "arm_comparison_operator"
9586 [(match_operand 3 "cc_register" "") (const_int 0)])
9587 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9588 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9592 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9593 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9594 [(set_attr "conds" "use")
9596 (set_attr "length" "4,8,8")
;; The "type" attribute was previously set twice (a scalar "mvn_reg" in
;; addition to the line below); keep only the per-alternative value, which
;; matches the two-insn alternatives and the sibling *if_not_move pattern.
9597 (set_attr "type" "mvn_reg,multiple,multiple")]
;; Swapped-arm twin of *ifcompare_move_not: (not reg) is the "then" value.
;; Performs its own compare, so CC is clobbered.
9600 (define_insn "*ifcompare_not_move"
9601 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9603 (match_operator 5 "arm_comparison_operator"
9604 [(match_operand:SI 3 "s_register_operand" "r,r")
9605 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9607 (match_operand:SI 2 "s_register_operand" "r,r"))
9608 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9609 (clobber (reg:CC CC_REGNUM))]
9612 [(set_attr "conds" "clob")
9613 (set_attr "length" "8,12")
9614 (set_attr "type" "multiple")]
;; CC-reusing twin of *ifcompare_not_move: predicated mvn on the condition
;; with an optional predicated mov/mvn of the fallback on its inverse.
9617 (define_insn "*if_not_move"
9618 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9620 (match_operator 4 "arm_comparison_operator"
9621 [(match_operand 3 "cc_register" "") (const_int 0)])
9622 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9623 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9627 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9628 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9629 [(set_attr "conds" "use")
9630 (set_attr "type" "mvn_reg,multiple,multiple")
9631 (set_attr "length" "4,8,8")]
;; Select a shifted value vs. a plain value with a fresh compare
;; (clobbers CC).  CC-reusing twin: *if_shift_move below.
9634 (define_insn "*ifcompare_shift_move"
9635 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9637 (match_operator 6 "arm_comparison_operator"
9638 [(match_operand:SI 4 "s_register_operand" "r,r")
9639 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9640 (match_operator:SI 7 "shift_operator"
9641 [(match_operand:SI 2 "s_register_operand" "r,r")
9642 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9643 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9644 (clobber (reg:CC CC_REGNUM))]
9647 [(set_attr "conds" "clob")
9648 (set_attr "length" "8,12")
9649 (set_attr "type" "multiple")]
;; CC-reusing conditional shifted move: predicated "mov ... %2%S4" (the
;; %S expands the shift operator) with optional fallback mov/mvn.
9652 (define_insn "*if_shift_move"
9653 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9655 (match_operator 5 "arm_comparison_operator"
9656 [(match_operand 6 "cc_register" "") (const_int 0)])
9657 (match_operator:SI 4 "shift_operator"
9658 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9659 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9660 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9664 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9665 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9666 [(set_attr "conds" "use")
9667 (set_attr "shift" "2")
9668 (set_attr "length" "4,8,8")
9669 (set_attr_alternative "type"
9670 [(if_then_else (match_operand 3 "const_int_operand" "")
9671 (const_string "mov_shift" )
9672 (const_string "mov_shift_reg"))
9673 (const_string "multiple")
9674 (const_string "multiple")])]
;; Swapped-arm twin of *ifcompare_shift_move: plain value on the "then"
;; arm, shifted value on the "else" arm.  Clobbers CC.
9677 (define_insn "*ifcompare_move_shift"
9678 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9680 (match_operator 6 "arm_comparison_operator"
9681 [(match_operand:SI 4 "s_register_operand" "r,r")
9682 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9683 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9684 (match_operator:SI 7 "shift_operator"
9685 [(match_operand:SI 2 "s_register_operand" "r,r")
9686 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9687 (clobber (reg:CC CC_REGNUM))]
9690 [(set_attr "conds" "clob")
9691 (set_attr "length" "8,12")
9692 (set_attr "type" "multiple")]
;; CC-reusing twin of *ifcompare_move_shift: shifted move on the inverse
;; condition (%D5), optional fallback mov/mvn on the condition (%d5).
9695 (define_insn "*if_move_shift"
9696 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9698 (match_operator 5 "arm_comparison_operator"
9699 [(match_operand 6 "cc_register" "") (const_int 0)])
9700 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9701 (match_operator:SI 4 "shift_operator"
9702 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9703 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9707 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9708 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9709 [(set_attr "conds" "use")
9710 (set_attr "shift" "2")
9711 (set_attr "length" "4,8,8")
9712 (set_attr_alternative "type"
9713 [(if_then_else (match_operand 3 "const_int_operand" "")
9714 (const_string "mov_shift" )
9715 (const_string "mov_shift_reg"))
9716 (const_string "multiple")
9717 (const_string "multiple")])]
;; Select between two shifted values with a fresh compare: cmp plus two
;; oppositely-predicated shifted moves (12 bytes).  Clobbers CC.
9720 (define_insn "*ifcompare_shift_shift"
9721 [(set (match_operand:SI 0 "s_register_operand" "=r")
9723 (match_operator 7 "arm_comparison_operator"
9724 [(match_operand:SI 5 "s_register_operand" "r")
9725 (match_operand:SI 6 "arm_add_operand" "rIL")])
9726 (match_operator:SI 8 "shift_operator"
9727 [(match_operand:SI 1 "s_register_operand" "r")
9728 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9729 (match_operator:SI 9 "shift_operator"
9730 [(match_operand:SI 3 "s_register_operand" "r")
9731 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9732 (clobber (reg:CC CC_REGNUM))]
9735 [(set_attr "conds" "clob")
9736 (set_attr "length" "12")
9737 (set_attr "type" "multiple")]
;; CC-reusing form: two predicated shifted moves; "type" resolves to
;; mov_shift only when both shift amounts are constants.
9740 (define_insn "*if_shift_shift"
9741 [(set (match_operand:SI 0 "s_register_operand" "=r")
9743 (match_operator 5 "arm_comparison_operator"
9744 [(match_operand 8 "cc_register" "") (const_int 0)])
9745 (match_operator:SI 6 "shift_operator"
9746 [(match_operand:SI 1 "s_register_operand" "r")
9747 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9748 (match_operator:SI 7 "shift_operator"
9749 [(match_operand:SI 3 "s_register_operand" "r")
9750 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9752 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9753 [(set_attr "conds" "use")
9754 (set_attr "shift" "1")
9755 (set_attr "length" "8")
9756 (set (attr "type") (if_then_else
9757 (and (match_operand 2 "const_int_operand" "")
9758 (match_operand 4 "const_int_operand" ""))
9759 (const_string "mov_shift")
9760 (const_string "mov_shift_reg")))]
;; Select between (not reg) and a shiftable-operator result with a fresh
;; compare (12 bytes).  Clobbers CC; CC-reusing twin is *if_not_arith.
9763 (define_insn "*ifcompare_not_arith"
9764 [(set (match_operand:SI 0 "s_register_operand" "=r")
9766 (match_operator 6 "arm_comparison_operator"
9767 [(match_operand:SI 4 "s_register_operand" "r")
9768 (match_operand:SI 5 "arm_add_operand" "rIL")])
9769 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9770 (match_operator:SI 7 "shiftable_operator"
9771 [(match_operand:SI 2 "s_register_operand" "r")
9772 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9773 (clobber (reg:CC CC_REGNUM))]
9776 [(set_attr "conds" "clob")
9777 (set_attr "length" "12")
9778 (set_attr "type" "multiple")]
;; CC-reusing form: predicated mvn on the condition, predicated ALU op on
;; its inverse.  NOTE(review): "type" is the scalar "mvn_reg" although the
;; template is a two-insn sequence; *if_arith_not below uses "multiple" --
;; possibly intentional (first insn dominates), worth confirming upstream.
9781 (define_insn "*if_not_arith"
9782 [(set (match_operand:SI 0 "s_register_operand" "=r")
9784 (match_operator 5 "arm_comparison_operator"
9785 [(match_operand 4 "cc_register" "") (const_int 0)])
9786 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9787 (match_operator:SI 6 "shiftable_operator"
9788 [(match_operand:SI 2 "s_register_operand" "r")
9789 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9791 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9792 [(set_attr "conds" "use")
9793 (set_attr "type" "mvn_reg")
9794 (set_attr "length" "8")]
;; Swapped-arm twin of *ifcompare_not_arith: ALU result on the "then" arm,
;; (not reg) on the "else" arm.  Clobbers CC.
9797 (define_insn "*ifcompare_arith_not"
9798 [(set (match_operand:SI 0 "s_register_operand" "=r")
9800 (match_operator 6 "arm_comparison_operator"
9801 [(match_operand:SI 4 "s_register_operand" "r")
9802 (match_operand:SI 5 "arm_add_operand" "rIL")])
9803 (match_operator:SI 7 "shiftable_operator"
9804 [(match_operand:SI 2 "s_register_operand" "r")
9805 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9806 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9807 (clobber (reg:CC CC_REGNUM))]
9810 [(set_attr "conds" "clob")
9811 (set_attr "length" "12")
9812 (set_attr "type" "multiple")]
;; CC-reusing twin of *ifcompare_arith_not: mvn on the inverse condition,
;; ALU op on the condition.
9815 (define_insn "*if_arith_not"
9816 [(set (match_operand:SI 0 "s_register_operand" "=r")
9818 (match_operator 5 "arm_comparison_operator"
9819 [(match_operand 4 "cc_register" "") (const_int 0)])
9820 (match_operator:SI 6 "shiftable_operator"
9821 [(match_operand:SI 2 "s_register_operand" "r")
9822 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9823 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9825 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9826 [(set_attr "conds" "use")
9827 (set_attr "type" "multiple")
9828 (set_attr "length" "8")]
;; Select (neg reg) vs. a plain value with a fresh compare (clobbers CC);
;; the CC-reusing, splittable twin is *if_neg_move below.
9831 (define_insn "*ifcompare_neg_move"
9832 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9834 (match_operator 5 "arm_comparison_operator"
9835 [(match_operand:SI 3 "s_register_operand" "r,r")
9836 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9837 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9838 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9839 (clobber (reg:CC CC_REGNUM))]
9842 [(set_attr "conds" "clob")
9843 (set_attr "length" "8,12")
9844 (set_attr "type" "multiple")]
;; Conditional negate when the fallback (operand 1) is already in the
;; destination ("0" constraint): splits after reload into a single
;; cond_exec negate.  "l" alternative enables the short Thumb-2 IT form.
9847 (define_insn_and_split "*if_neg_move"
9848 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9850 (match_operator 4 "arm_comparison_operator"
9851 [(match_operand 3 "cc_register" "") (const_int 0)])
9852 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9853 (match_operand:SI 1 "s_register_operand" "0,0")))]
9856 "&& reload_completed"
9857 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9858 (set (match_dup 0) (neg:SI (match_dup 2))))]
9860 [(set_attr "conds" "use")
9861 (set_attr "length" "4")
9862 (set_attr "arch" "t2,32")
9863 (set_attr "enabled_for_short_it" "yes,no")
9864 (set_attr "type" "logic_shift_imm")]
;; Swapped-arm twin of *ifcompare_neg_move: plain value on the "then" arm,
;; (neg reg) on the "else" arm.  Clobbers CC.
9867 (define_insn "*ifcompare_move_neg"
9868 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9870 (match_operator 5 "arm_comparison_operator"
9871 [(match_operand:SI 3 "s_register_operand" "r,r")
9872 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9873 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9874 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9875 (clobber (reg:CC CC_REGNUM))]
9878 [(set_attr "conds" "clob")
9879 (set_attr "length" "8,12")
9880 (set_attr "type" "multiple")]
;; CC-reusing twin of *ifcompare_move_neg with the fallback already in the
;; destination: splits into a cond_exec negate on the REVERSED condition
;; (using reverse_condition_maybe_unordered for FP compare modes).
9883 (define_insn_and_split "*if_move_neg"
9884 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9886 (match_operator 4 "arm_comparison_operator"
9887 [(match_operand 3 "cc_register" "") (const_int 0)])
9888 (match_operand:SI 1 "s_register_operand" "0,0")
9889 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9892 "&& reload_completed"
9893 [(cond_exec (match_dup 5)
9894 (set (match_dup 0) (neg:SI (match_dup 2))))]
9896 machine_mode mode = GET_MODE (operands[3]);
9897 rtx_code rc = GET_CODE (operands[4]);
9899 if (mode == CCFPmode || mode == CCFPEmode)
9900 rc = reverse_condition_maybe_unordered (rc);
9902 rc = reverse_condition (rc);
9904 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9906 [(set_attr "conds" "use")
9907 (set_attr "length" "4")
9908 (set_attr "arch" "t2,32")
9909 (set_attr "enabled_for_short_it" "yes,no")
9910 (set_attr "type" "logic_shift_imm")]
;; ALU op on two adjacent memory words: loads both with a single ldm when
;; the offsets allow (ldmia/ldmib/ldmda chosen by offset), falling back to
;; an add-immediate + ldm or two ldr's, then applies the operator.  The
;; ldm register list must be ascending, hence the REGNO ordering swap.
9913 (define_insn "*arith_adjacentmem"
9914 [(set (match_operand:SI 0 "s_register_operand" "=r")
9915 (match_operator:SI 1 "shiftable_operator"
9916 [(match_operand:SI 2 "memory_operand" "m")
9917 (match_operand:SI 3 "memory_operand" "m")]))
9918 (clobber (match_scratch:SI 4 "=r"))]
9919 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9925 HOST_WIDE_INT val1 = 0, val2 = 0;
9927 if (REGNO (operands[0]) > REGNO (operands[4]))
9929 ldm[1] = operands[4];
9930 ldm[2] = operands[0];
9934 ldm[1] = operands[0];
9935 ldm[2] = operands[4];
9938 base_reg = XEXP (operands[2], 0);
9940 if (!REG_P (base_reg))
9942 val1 = INTVAL (XEXP (base_reg, 1));
9943 base_reg = XEXP (base_reg, 0);
9946 if (!REG_P (XEXP (operands[3], 0)))
9947 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9949 arith[0] = operands[0];
9950 arith[3] = operands[1];
9964 if (val1 !=0 && val2 != 0)
9968 if (val1 == 4 || val2 == 4)
9969 /* Other val must be 8, since we know they are adjacent and neither
9971 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9972 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9974 ldm[0] = ops[0] = operands[4];
9976 ops[2] = GEN_INT (val1);
9977 output_add_immediate (ops);
9979 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9981 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9985 /* Offset is out of range for a single add, so use two ldr.  */
9988 ops[2] = GEN_INT (val1);
9989 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9991 ops[2] = GEN_INT (val2);
9992 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9998 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10000 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10005 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10007 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10009 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10012 [(set_attr "length" "12")
10013 (set_attr "predicable" "yes")
10014 (set_attr "type" "load_4")]
10017 ; This pattern is never tried by combine, so do it as a peephole
;; Peephole (header line not visible in this extract -- presumably a
;; define_peephole2 per the preceding comment): fuse a register move
;; followed by a compare of the source into a single movs-style parallel.
10020 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10021 (match_operand:SI 1 "arm_general_register_operand" ""))
10022 (set (reg:CC CC_REGNUM)
10023 (compare:CC (match_dup 1) (const_int 0)))]
10025 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10026 (set (match_dup 0) (match_dup 1))])]
;; Split (header not visible) for (and (ge x 0) (neg scc)): materialize
;; ~(x >> 31) in the scratch, then AND it with the scc result.
10031 [(set (match_operand:SI 0 "s_register_operand" "")
10032 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10034 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10035 [(match_operand:SI 3 "s_register_operand" "")
10036 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10037 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10039 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10040 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10045 ;; This split can be used because CC_Z mode implies that the following
10046 ;; branch will be an equality, or an unsigned inequality, so the sign
10047 ;; extension is not needed.
;; Split (header not visible): a CC_Z compare of a left-shifted QImode
;; load against a constant whose low 24 bits are clear is rewritten as a
;; zero-extending load compared with the constant shifted down by 24 --
;; valid because CC_Z implies only equality/unsigned tests follow.
10050 [(set (reg:CC_Z CC_REGNUM)
10052 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10054 (match_operand 1 "const_int_operand" "")))
10055 (clobber (match_scratch:SI 2 ""))]
10057 && ((UINTVAL (operands[1]))
10058 == ((UINTVAL (operands[1])) >> 24) << 24)"
10059 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10060 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10062 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10065 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Function prologue expander: dispatches to the ARM/Thumb-2 or Thumb-1
;; prologue generator.  NOTE(review): the selecting condition lines are
;; missing from this extract.
10067 (define_expand "prologue"
10068 [(clobber (const_int 0))]
10071 arm_expand_prologue ();
10073 thumb1_expand_prologue ();
;; Function epilogue expander.  For eh_return, pin r2 (the stack-adjust
;; register) with a force_register_use first; then emit either the Thumb-1
;; epilogue unspec, a simple return when USE_RETURN_INSN allows it, or the
;; full 32-bit epilogue.
10078 (define_expand "epilogue"
10079 [(clobber (const_int 0))]
10082 if (crtl->calls_eh_return)
10083 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10086 thumb1_expand_epilogue ();
10087 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10088 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10090 else if (HAVE_return)
10092 /* HAVE_return is testing for USE_RETURN_INSN (FALSE).  Hence,
10093 no need for explicit testing again.  */
10094 emit_jump_insn (gen_return ());
10096 else if (TARGET_32BIT)
10098 arm_expand_epilogue (true);
10104 ;; Note - although unspec_volatile's USE all hard registers,
10105 ;; USEs are ignored after reload has completed.  Thus we need
10106 ;; to add an unspec of the link register to ensure that flow
10107 ;; does not think that it is unused by the sibcall branch that
10108 ;; will replace the standard function epilogue.
10109 (define_expand "sibcall_epilogue"
10110 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10111 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10114 arm_expand_epilogue (false);
10119 (define_expand "eh_epilogue"
10120 [(use (match_operand:SI 0 "register_operand"))
10121 (use (match_operand:SI 1 "register_operand"))
10122 (use (match_operand:SI 2 "register_operand"))]
10126 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10127 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10129 rtx ra = gen_rtx_REG (Pmode, 2);
10131 emit_move_insn (ra, operands[2]);
10134 /* This is a hack -- we may have crystallized the function type too
10136 cfun->machine->func_type = 0;
10140 ;; This split is only used during output to reduce the number of patterns
10141 ;; that need assembler instructions adding to them. We allowed the setting
10142 ;; of the conditions to be implicit during rtl generation so that
10143 ;; the conditional compare patterns would work. However this conflicts to
10144 ;; some extent with the conditional data operations, so we have to split them
10147 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10148 ;; conditional execution sufficient?
10151 [(set (match_operand:SI 0 "s_register_operand" "")
10152 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10153 [(match_operand 2 "" "") (match_operand 3 "" "")])
10155 (match_operand 4 "" "")))
10156 (clobber (reg:CC CC_REGNUM))]
10157 "TARGET_ARM && reload_completed"
10158 [(set (match_dup 5) (match_dup 6))
10159 (cond_exec (match_dup 7)
10160 (set (match_dup 0) (match_dup 4)))]
10163 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10164 operands[2], operands[3]);
10165 enum rtx_code rc = GET_CODE (operands[1]);
10167 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10168 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10169 if (mode == CCFPmode || mode == CCFPEmode)
10170 rc = reverse_condition_maybe_unordered (rc);
10172 rc = reverse_condition (rc);
10174 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10179 [(set (match_operand:SI 0 "s_register_operand" "")
10180 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10181 [(match_operand 2 "" "") (match_operand 3 "" "")])
10182 (match_operand 4 "" "")
10184 (clobber (reg:CC CC_REGNUM))]
10185 "TARGET_ARM && reload_completed"
10186 [(set (match_dup 5) (match_dup 6))
10187 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10188 (set (match_dup 0) (match_dup 4)))]
10191 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10192 operands[2], operands[3]);
10194 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10195 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10200 [(set (match_operand:SI 0 "s_register_operand" "")
10201 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10202 [(match_operand 2 "" "") (match_operand 3 "" "")])
10203 (match_operand 4 "" "")
10204 (match_operand 5 "" "")))
10205 (clobber (reg:CC CC_REGNUM))]
10206 "TARGET_ARM && reload_completed"
10207 [(set (match_dup 6) (match_dup 7))
10208 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10209 (set (match_dup 0) (match_dup 4)))
10210 (cond_exec (match_dup 8)
10211 (set (match_dup 0) (match_dup 5)))]
10214 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10215 operands[2], operands[3]);
10216 enum rtx_code rc = GET_CODE (operands[1]);
10218 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10219 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10220 if (mode == CCFPmode || mode == CCFPEmode)
10221 rc = reverse_condition_maybe_unordered (rc);
10223 rc = reverse_condition (rc);
10225 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10230 [(set (match_operand:SI 0 "s_register_operand" "")
10231 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10232 [(match_operand:SI 2 "s_register_operand" "")
10233 (match_operand:SI 3 "arm_add_operand" "")])
10234 (match_operand:SI 4 "arm_rhs_operand" "")
10236 (match_operand:SI 5 "s_register_operand" ""))))
10237 (clobber (reg:CC CC_REGNUM))]
10238 "TARGET_ARM && reload_completed"
10239 [(set (match_dup 6) (match_dup 7))
10240 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10241 (set (match_dup 0) (match_dup 4)))
10242 (cond_exec (match_dup 8)
10243 (set (match_dup 0) (not:SI (match_dup 5))))]
10246 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10247 operands[2], operands[3]);
10248 enum rtx_code rc = GET_CODE (operands[1]);
10250 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10251 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10252 if (mode == CCFPmode || mode == CCFPEmode)
10253 rc = reverse_condition_maybe_unordered (rc);
10255 rc = reverse_condition (rc);
10257 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10261 (define_insn "*cond_move_not"
10262 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10263 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10264 [(match_operand 3 "cc_register" "") (const_int 0)])
10265 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10267 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10271 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10272 [(set_attr "conds" "use")
10273 (set_attr "type" "mvn_reg,multiple")
10274 (set_attr "length" "4,8")]
10277 ;; The next two patterns occur when an AND operation is followed by a
10278 ;; scc insn sequence
10280 (define_insn "*sign_extract_onebit"
10281 [(set (match_operand:SI 0 "s_register_operand" "=r")
10282 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10284 (match_operand:SI 2 "const_int_operand" "n")))
10285 (clobber (reg:CC CC_REGNUM))]
10288 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10289 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10290 return \"mvnne\\t%0, #0\";
10292 [(set_attr "conds" "clob")
10293 (set_attr "length" "8")
10294 (set_attr "type" "multiple")]
10297 (define_insn "*not_signextract_onebit"
10298 [(set (match_operand:SI 0 "s_register_operand" "=r")
10300 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10302 (match_operand:SI 2 "const_int_operand" "n"))))
10303 (clobber (reg:CC CC_REGNUM))]
10306 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10307 output_asm_insn (\"tst\\t%1, %2\", operands);
10308 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10309 return \"movne\\t%0, #0\";
10311 [(set_attr "conds" "clob")
10312 (set_attr "length" "12")
10313 (set_attr "type" "multiple")]
10315 ;; ??? The above patterns need auditing for Thumb-2
10317 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10318 ;; expressions. For simplicity, the first register is also in the unspec
10320 ;; To avoid the use of a GNU extension, the length attribute is computed
10321 ;; in a C function arm_attr_length_push_multi.
10322 (define_insn "*push_multi"
10323 [(match_parallel 2 "multi_register_push"
10324 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10325 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10326 UNSPEC_PUSH_MULT))])]
10330 int num_saves = XVECLEN (operands[2], 0);
10332 /* For the StrongARM at least it is faster to
10333 use STR to store only a single register.
10334 In Thumb mode always use push, and the assembler will pick
10335 something appropriate. */
10336 if (num_saves == 1 && TARGET_ARM)
10337 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10344 strcpy (pattern, \"push%?\\t{%1\");
10346 strcpy (pattern, \"push\\t{%1\");
10348 for (i = 1; i < num_saves; i++)
10350 strcat (pattern, \", %|\");
10352 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10355 strcat (pattern, \"}\");
10356 output_asm_insn (pattern, operands);
10361 [(set_attr "type" "store_16")
10362 (set (attr "length")
10363 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10366 (define_insn "stack_tie"
10367 [(set (mem:BLK (scratch))
10368 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10369 (match_operand:SI 1 "s_register_operand" "rk")]
10373 [(set_attr "length" "0")
10374 (set_attr "type" "block")]
10377 ;; Pop (as used in epilogue RTL)
10379 (define_insn "*load_multiple_with_writeback"
10380 [(match_parallel 0 "load_multiple_operation"
10381 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10382 (plus:SI (match_dup 1)
10383 (match_operand:SI 2 "const_int_I_operand" "I")))
10384 (set (match_operand:SI 3 "s_register_operand" "=rk")
10385 (mem:SI (match_dup 1)))
10387 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10390 arm_output_multireg_pop (operands, /*return_pc=*/false,
10391 /*cond=*/const_true_rtx,
10397 [(set_attr "type" "load_16")
10398 (set_attr "predicable" "yes")
10399 (set (attr "length")
10400 (symbol_ref "arm_attr_length_pop_multi (operands,
10401 /*return_pc=*/false,
10402 /*write_back_p=*/true)"))]
10405 ;; Pop with return (as used in epilogue RTL)
10407 ;; This instruction is generated when the registers are popped at the end of
10408 ;; epilogue. Here, instead of popping the value into LR and then generating
10409 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
10411 (define_insn "*pop_multiple_with_writeback_and_return"
10412 [(match_parallel 0 "pop_multiple_return"
10414 (set (match_operand:SI 1 "s_register_operand" "+rk")
10415 (plus:SI (match_dup 1)
10416 (match_operand:SI 2 "const_int_I_operand" "I")))
10417 (set (match_operand:SI 3 "s_register_operand" "=rk")
10418 (mem:SI (match_dup 1)))
10420 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10423 arm_output_multireg_pop (operands, /*return_pc=*/true,
10424 /*cond=*/const_true_rtx,
10430 [(set_attr "type" "load_16")
10431 (set_attr "predicable" "yes")
10432 (set (attr "length")
10433 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10434 /*write_back_p=*/true)"))]
10437 (define_insn "*pop_multiple_with_return"
10438 [(match_parallel 0 "pop_multiple_return"
10440 (set (match_operand:SI 2 "s_register_operand" "=rk")
10441 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10443 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10446 arm_output_multireg_pop (operands, /*return_pc=*/true,
10447 /*cond=*/const_true_rtx,
10453 [(set_attr "type" "load_16")
10454 (set_attr "predicable" "yes")
10455 (set (attr "length")
10456 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10457 /*write_back_p=*/false)"))]
10460 ;; Load into PC and return
10461 (define_insn "*ldr_with_return"
10463 (set (reg:SI PC_REGNUM)
10464 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10465 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10466 "ldr%?\t%|pc, [%0], #4"
10467 [(set_attr "type" "load_4")
10468 (set_attr "predicable" "yes")]
10470 ;; Pop for floating point registers (as used in epilogue RTL)
10471 (define_insn "*vfp_pop_multiple_with_writeback"
10472 [(match_parallel 0 "pop_multiple_fp"
10473 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10474 (plus:SI (match_dup 1)
10475 (match_operand:SI 2 "const_int_I_operand" "I")))
10476 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10477 (mem:DF (match_dup 1)))])]
10478 "TARGET_32BIT && TARGET_HARD_FLOAT"
10481 int num_regs = XVECLEN (operands[0], 0);
10484 strcpy (pattern, \"vldm\\t\");
10485 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10486 strcat (pattern, \"!, {\");
10487 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10488 strcat (pattern, \"%P0\");
10489 if ((num_regs - 1) > 1)
10491 strcat (pattern, \"-%P1\");
10492 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10495 strcat (pattern, \"}\");
10496 output_asm_insn (pattern, op_list);
10500 [(set_attr "type" "load_16")
10501 (set_attr "conds" "unconditional")
10502 (set_attr "predicable" "no")]
10505 ;; Special patterns for dealing with the constant pool
10507 (define_insn "align_4"
10508 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10511 assemble_align (32);
10514 [(set_attr "type" "no_insn")]
10517 (define_insn "align_8"
10518 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10521 assemble_align (64);
10524 [(set_attr "type" "no_insn")]
10527 (define_insn "consttable_end"
10528 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10531 making_const_table = FALSE;
10534 [(set_attr "type" "no_insn")]
10537 (define_insn "consttable_1"
10538 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10541 making_const_table = TRUE;
10542 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10543 assemble_zeros (3);
10546 [(set_attr "length" "4")
10547 (set_attr "type" "no_insn")]
10550 (define_insn "consttable_2"
10551 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10555 rtx x = operands[0];
10556 making_const_table = TRUE;
10557 switch (GET_MODE_CLASS (GET_MODE (x)))
10560 arm_emit_fp16_const (x);
10563 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10564 assemble_zeros (2);
10569 [(set_attr "length" "4")
10570 (set_attr "type" "no_insn")]
10573 (define_insn "consttable_4"
10574 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10578 rtx x = operands[0];
10579 making_const_table = TRUE;
10580 scalar_float_mode float_mode;
10581 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10582 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10585 /* XXX: Sometimes gcc does something really dumb and ends up with
10586 a HIGH in a constant pool entry, usually because it's trying to
10587 load into a VFP register. We know this will always be used in
10588 combination with a LO_SUM which ignores the high bits, so just
10589 strip off the HIGH. */
10590 if (GET_CODE (x) == HIGH)
10592 assemble_integer (x, 4, BITS_PER_WORD, 1);
10593 mark_symbol_refs_as_used (x);
10597 [(set_attr "length" "4")
10598 (set_attr "type" "no_insn")]
10601 (define_insn "consttable_8"
10602 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10606 making_const_table = TRUE;
10607 scalar_float_mode float_mode;
10608 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10609 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10610 float_mode, BITS_PER_WORD);
10612 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10615 [(set_attr "length" "8")
10616 (set_attr "type" "no_insn")]
10619 (define_insn "consttable_16"
10620 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10624 making_const_table = TRUE;
10625 scalar_float_mode float_mode;
10626 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10627 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10628 float_mode, BITS_PER_WORD);
10630 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10633 [(set_attr "length" "16")
10634 (set_attr "type" "no_insn")]
10637 ;; V5 Instructions.
10639 (define_insn "clzsi2"
10640 [(set (match_operand:SI 0 "s_register_operand" "=r")
10641 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10642 "TARGET_32BIT && arm_arch5t"
10644 [(set_attr "predicable" "yes")
10645 (set_attr "type" "clz")])
10647 (define_insn "rbitsi2"
10648 [(set (match_operand:SI 0 "s_register_operand" "=r")
10649 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10650 "TARGET_32BIT && arm_arch_thumb2"
10652 [(set_attr "predicable" "yes")
10653 (set_attr "type" "clz")])
10655 ;; Keep this as a CTZ expression until after reload and then split
10656 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10657 ;; to fold with any other expression.
10659 (define_insn_and_split "ctzsi2"
10660 [(set (match_operand:SI 0 "s_register_operand" "=r")
10661 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10662 "TARGET_32BIT && arm_arch_thumb2"
10664 "&& reload_completed"
10667 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10668 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10672 ;; V5E instructions.
10674 (define_insn "prefetch"
10675 [(prefetch (match_operand:SI 0 "address_operand" "p")
10676 (match_operand:SI 1 "" "")
10677 (match_operand:SI 2 "" ""))]
10678 "TARGET_32BIT && arm_arch5te"
10680 [(set_attr "type" "load_4")]
10683 ;; General predication pattern
10686 [(match_operator 0 "arm_comparison_operator"
10687 [(match_operand 1 "cc_register" "")
10690 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10692 [(set_attr "predicated" "yes")]
10695 (define_insn "force_register_use"
10696 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10699 [(set_attr "length" "0")
10700 (set_attr "type" "no_insn")]
10704 ;; Patterns for exception handling
10706 (define_expand "eh_return"
10707 [(use (match_operand 0 "general_operand"))]
10712 emit_insn (gen_arm_eh_return (operands[0]));
10714 emit_insn (gen_thumb_eh_return (operands[0]));
10719 ;; We can't expand this before we know where the link register is stored.
10720 (define_insn_and_split "arm_eh_return"
10721 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10723 (clobber (match_scratch:SI 1 "=&r"))]
10726 "&& reload_completed"
10730 arm_set_return_address (operands[0], operands[1]);
10738 (define_insn "load_tp_hard"
10739 [(set (match_operand:SI 0 "register_operand" "=r")
10740 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10742 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10743 [(set_attr "predicable" "yes")
10744 (set_attr "type" "mrs")]
10747 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10748 (define_insn "load_tp_soft_fdpic"
10749 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10750 (clobber (reg:SI FDPIC_REGNUM))
10751 (clobber (reg:SI LR_REGNUM))
10752 (clobber (reg:SI IP_REGNUM))
10753 (clobber (reg:CC CC_REGNUM))]
10754 "TARGET_SOFT_TP && TARGET_FDPIC"
10755 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10756 [(set_attr "conds" "clob")
10757 (set_attr "type" "branch")]
10760 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10761 (define_insn "load_tp_soft"
10762 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10763 (clobber (reg:SI LR_REGNUM))
10764 (clobber (reg:SI IP_REGNUM))
10765 (clobber (reg:CC CC_REGNUM))]
10766 "TARGET_SOFT_TP && !TARGET_FDPIC"
10767 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10768 [(set_attr "conds" "clob")
10769 (set_attr "type" "branch")]
10772 ;; tls descriptor call
10773 (define_insn "tlscall"
10774 [(set (reg:SI R0_REGNUM)
10775 (unspec:SI [(reg:SI R0_REGNUM)
10776 (match_operand:SI 0 "" "X")
10777 (match_operand 1 "" "")] UNSPEC_TLS))
10778 (clobber (reg:SI R1_REGNUM))
10779 (clobber (reg:SI LR_REGNUM))
10780 (clobber (reg:SI CC_REGNUM))]
10783 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10784 INTVAL (operands[1]));
10785 return "bl\\t%c0(tlscall)";
10787 [(set_attr "conds" "clob")
10788 (set_attr "length" "4")
10789 (set_attr "type" "branch")]
10792 ;; For thread pointer builtin
10793 (define_expand "get_thread_pointersi"
10794 [(match_operand:SI 0 "s_register_operand")]
10798 arm_load_tp (operands[0]);
10804 ;; We only care about the lower 16 bits of the constant
10805 ;; being inserted into the upper 16 bits of the register.
10806 (define_insn "*arm_movtas_ze"
10807 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10810 (match_operand:SI 1 "const_int_operand" ""))]
10815 [(set_attr "arch" "32,v8mb")
10816 (set_attr "predicable" "yes")
10817 (set_attr "length" "4")
10818 (set_attr "type" "alu_sreg")]
10821 (define_insn "*arm_rev"
10822 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10823 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10829 [(set_attr "arch" "t1,t2,32")
10830 (set_attr "length" "2,2,4")
10831 (set_attr "predicable" "no,yes,yes")
10832 (set_attr "type" "rev")]
10835 (define_expand "arm_legacy_rev"
10836 [(set (match_operand:SI 2 "s_register_operand")
10837 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10841 (lshiftrt:SI (match_dup 2)
10843 (set (match_operand:SI 3 "s_register_operand")
10844 (rotatert:SI (match_dup 1)
10847 (and:SI (match_dup 2)
10848 (const_int -65281)))
10849 (set (match_operand:SI 0 "s_register_operand")
10850 (xor:SI (match_dup 3)
10856 ;; Reuse temporaries to keep register pressure down.
10857 (define_expand "thumb_legacy_rev"
10858 [(set (match_operand:SI 2 "s_register_operand")
10859 (ashift:SI (match_operand:SI 1 "s_register_operand")
10861 (set (match_operand:SI 3 "s_register_operand")
10862 (lshiftrt:SI (match_dup 1)
10865 (ior:SI (match_dup 3)
10867 (set (match_operand:SI 4 "s_register_operand")
10869 (set (match_operand:SI 5 "s_register_operand")
10870 (rotatert:SI (match_dup 1)
10873 (ashift:SI (match_dup 5)
10876 (lshiftrt:SI (match_dup 5)
10879 (ior:SI (match_dup 5)
10882 (rotatert:SI (match_dup 5)
10884 (set (match_operand:SI 0 "s_register_operand")
10885 (ior:SI (match_dup 5)
10891 ;; ARM-specific expansion of signed mod by power of 2
10892 ;; using conditional negate.
10893 ;; For r0 % n where n is a power of 2 produce:
10895 ;; and r0, r0, #(n - 1)
10896 ;; and r1, r1, #(n - 1)
10897 ;; rsbpl r0, r1, #0
10899 (define_expand "modsi3"
10900 [(match_operand:SI 0 "register_operand")
10901 (match_operand:SI 1 "register_operand")
10902 (match_operand:SI 2 "const_int_operand")]
10905 HOST_WIDE_INT val = INTVAL (operands[2]);
10908 || exact_log2 (val) <= 0)
10911 rtx mask = GEN_INT (val - 1);
10913 /* In the special case of x0 % 2 we can do the even shorter:
10916 rsblt r0, r0, #0. */
10920 rtx cc_reg = arm_gen_compare_reg (LT,
10921 operands[1], const0_rtx, NULL_RTX);
10922 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10923 rtx masked = gen_reg_rtx (SImode);
10925 emit_insn (gen_andsi3 (masked, operands[1], mask));
10926 emit_move_insn (operands[0],
10927 gen_rtx_IF_THEN_ELSE (SImode, cond,
10928 gen_rtx_NEG (SImode,
10934 rtx neg_op = gen_reg_rtx (SImode);
10935 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10938 /* Extract the condition register and mode. */
10939 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10940 rtx cc_reg = SET_DEST (cmp);
10941 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10943 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10945 rtx masked_neg = gen_reg_rtx (SImode);
10946 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10948 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10949 during expand does not always work. Do an IF_THEN_ELSE instead. */
10950 emit_move_insn (operands[0],
10951 gen_rtx_IF_THEN_ELSE (SImode, cond,
10952 gen_rtx_NEG (SImode, masked_neg),
10960 (define_expand "bswapsi2"
10961 [(set (match_operand:SI 0 "s_register_operand")
10962 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10963 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10967 rtx op2 = gen_reg_rtx (SImode);
10968 rtx op3 = gen_reg_rtx (SImode);
10972 rtx op4 = gen_reg_rtx (SImode);
10973 rtx op5 = gen_reg_rtx (SImode);
10975 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10976 op2, op3, op4, op5));
10980 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10989 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10990 ;; and unsigned variants, respectively. For rev16, expose
10991 ;; byte-swapping in the lower 16 bits only.
10992 (define_insn "*arm_revsh"
10993 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10994 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11000 [(set_attr "arch" "t1,t2,32")
11001 (set_attr "length" "2,2,4")
11002 (set_attr "type" "rev")]
11005 (define_insn "*arm_rev16"
11006 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11007 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11013 [(set_attr "arch" "t1,t2,32")
11014 (set_attr "length" "2,2,4")
11015 (set_attr "type" "rev")]
11018 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11019 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11020 ;; each valid permutation.
11022 (define_insn "arm_rev16si2"
11023 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11024 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11026 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11027 (and:SI (lshiftrt:SI (match_dup 1)
11029 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11031 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11032 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11034 [(set_attr "arch" "t1,t2,32")
11035 (set_attr "length" "2,2,4")
11036 (set_attr "type" "rev")]
11039 (define_insn "arm_rev16si2_alt"
11040 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11041 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11043 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11044 (and:SI (ashift:SI (match_dup 1)
11046 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11048 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11049 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11051 [(set_attr "arch" "t1,t2,32")
11052 (set_attr "length" "2,2,4")
11053 (set_attr "type" "rev")]
11056 (define_expand "bswaphi2"
11057 [(set (match_operand:HI 0 "s_register_operand")
11058 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11063 ;; Patterns for LDRD/STRD in Thumb2 mode
11065 (define_insn "*thumb2_ldrd"
11066 [(set (match_operand:SI 0 "s_register_operand" "=r")
11067 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11068 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11069 (set (match_operand:SI 3 "s_register_operand" "=r")
11070 (mem:SI (plus:SI (match_dup 1)
11071 (match_operand:SI 4 "const_int_operand" ""))))]
11072 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11073 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11074 && (operands_ok_ldrd_strd (operands[0], operands[3],
11075 operands[1], INTVAL (operands[2]),
11077 "ldrd%?\t%0, %3, [%1, %2]"
11078 [(set_attr "type" "load_8")
11079 (set_attr "predicable" "yes")])
11081 (define_insn "*thumb2_ldrd_base"
11082 [(set (match_operand:SI 0 "s_register_operand" "=r")
11083 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11084 (set (match_operand:SI 2 "s_register_operand" "=r")
11085 (mem:SI (plus:SI (match_dup 1)
11087 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11088 && (operands_ok_ldrd_strd (operands[0], operands[2],
11089 operands[1], 0, false, true))"
11090 "ldrd%?\t%0, %2, [%1]"
11091 [(set_attr "type" "load_8")
11092 (set_attr "predicable" "yes")])
11094 (define_insn "*thumb2_ldrd_base_neg"
11095 [(set (match_operand:SI 0 "s_register_operand" "=r")
11096 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11098 (set (match_operand:SI 2 "s_register_operand" "=r")
11099 (mem:SI (match_dup 1)))]
11100 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11101 && (operands_ok_ldrd_strd (operands[0], operands[2],
11102 operands[1], -4, false, true))"
11103 "ldrd%?\t%0, %2, [%1, #-4]"
11104 [(set_attr "type" "load_8")
11105 (set_attr "predicable" "yes")])
11107 (define_insn "*thumb2_strd"
11108 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11109 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11110 (match_operand:SI 2 "s_register_operand" "r"))
11111 (set (mem:SI (plus:SI (match_dup 0)
11112 (match_operand:SI 3 "const_int_operand" "")))
11113 (match_operand:SI 4 "s_register_operand" "r"))]
11114 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11115 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11116 && (operands_ok_ldrd_strd (operands[2], operands[4],
11117 operands[0], INTVAL (operands[1]),
11119 "strd%?\t%2, %4, [%0, %1]"
11120 [(set_attr "type" "store_8")
11121 (set_attr "predicable" "yes")])
11123 (define_insn "*thumb2_strd_base"
11124 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11125 (match_operand:SI 1 "s_register_operand" "r"))
11126 (set (mem:SI (plus:SI (match_dup 0)
11128 (match_operand:SI 2 "s_register_operand" "r"))]
11129 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11130 && (operands_ok_ldrd_strd (operands[1], operands[2],
11131 operands[0], 0, false, false))"
11132 "strd%?\t%1, %2, [%0]"
11133 [(set_attr "type" "store_8")
11134 (set_attr "predicable" "yes")])
11136 (define_insn "*thumb2_strd_base_neg"
11137 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11139 (match_operand:SI 1 "s_register_operand" "r"))
11140 (set (mem:SI (match_dup 0))
11141 (match_operand:SI 2 "s_register_operand" "r"))]
11142 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11143 && (operands_ok_ldrd_strd (operands[1], operands[2],
11144 operands[0], -4, false, false))"
11145 "strd%?\t%1, %2, [%0, #-4]"
11146 [(set_attr "type" "store_8")
11147 (set_attr "predicable" "yes")])
11149 ;; ARMv8 CRC32 instructions.
11150 (define_insn "arm_<crc_variant>"
11151 [(set (match_operand:SI 0 "s_register_operand" "=r")
11152 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11153 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11156 "<crc_variant>\\t%0, %1, %2"
11157 [(set_attr "type" "crc")
11158 (set_attr "conds" "unconditional")]
11161 ;; Load the load/store double peephole optimizations.
11162 (include "ldrdstrd.md")
11164 ;; Load the load/store multiple patterns
11165 (include "ldmstm.md")
11167 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11168 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11169 ;; The operands are validated through the load_multiple_operation
11170 ;; match_parallel predicate rather than through constraints so enable it only
11172 (define_insn "*load_multiple"
11173 [(match_parallel 0 "load_multiple_operation"
11174 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11175 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11177 "TARGET_32BIT && reload_completed"
11180 arm_output_multireg_pop (operands, /*return_pc=*/false,
11181 /*cond=*/const_true_rtx,
11187 [(set_attr "predicable" "yes")]
11190 (define_expand "copysignsf3"
11191 [(match_operand:SF 0 "register_operand")
11192 (match_operand:SF 1 "register_operand")
11193 (match_operand:SF 2 "register_operand")]
11194 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11196 emit_move_insn (operands[0], operands[2]);
11197 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11198 GEN_INT (31), GEN_INT (0),
11199 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11204 (define_expand "copysigndf3"
11205 [(match_operand:DF 0 "register_operand")
11206 (match_operand:DF 1 "register_operand")
11207 (match_operand:DF 2 "register_operand")]
11208 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11210 rtx op0_low = gen_lowpart (SImode, operands[0]);
11211 rtx op0_high = gen_highpart (SImode, operands[0]);
11212 rtx op1_low = gen_lowpart (SImode, operands[1]);
11213 rtx op1_high = gen_highpart (SImode, operands[1]);
11214 rtx op2_high = gen_highpart (SImode, operands[2]);
11216 rtx scratch1 = gen_reg_rtx (SImode);
11217 rtx scratch2 = gen_reg_rtx (SImode);
11218 emit_move_insn (scratch1, op2_high);
11219 emit_move_insn (scratch2, op1_high);
11221 emit_insn(gen_rtx_SET(scratch1,
11222 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11223 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11224 emit_move_insn (op0_low, op1_low);
11225 emit_move_insn (op0_high, scratch2);
11231 ;; movmisalign patterns for HImode and SImode.
;; Expander for misaligned HImode/SImode moves.  It must not FAIL, so a
;; mem := constant move first forces operand 1 into a register.  Loads go
;; through the unaligned_load* patterns (HImode loads zero-extend into an
;; SImode temporary, then the low half is moved to the destination);
;; stores go through unaligned_store<mode>.
;; NOTE(review): the expander's condition string (original line ~11235,
;; presumably "unaligned_access") and the "{ ... DONE; }" braces are
;; missing from this extract — confirm against the original arm.md.
11232 (define_expand "movmisalign<mode>"
11233   [(match_operand:HSI 0 "general_operand")
11234    (match_operand:HSI 1 "general_operand")]
11237   /* This pattern is not permitted to fail during expansion: if both arguments
11238      are non-registers (e.g. memory := constant), force operand 1 into a
;; gen_unaligned_load is selected per mode below (loadhiu vs. loadsi).
11240   rtx (* gen_unaligned_load)(rtx, rtx);
11241   rtx tmp_dest = operands[0];
11242   if (!s_register_operand (operands[0], <MODE>mode)
11243       && !s_register_operand (operands[1], <MODE>mode))
11244     operands[1] = force_reg (<MODE>mode, operands[1]);
11246   if (<MODE>mode == HImode)
11248      gen_unaligned_load = gen_unaligned_loadhiu;
;; HImode load produces a zero-extended SImode value; use a fresh temp.
11249      tmp_dest = gen_reg_rtx (SImode);
11252     gen_unaligned_load = gen_unaligned_loadsi;
11254   if (MEM_P (operands[1]))
11256       emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11257       if (<MODE>mode == HImode)
11258 	emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
;; Otherwise operand 0 is the (misaligned) memory: emit an unaligned store.
11261     emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; arm_<cdp>: coprocessor data-processing instruction (CDP / CDP2, chosen
;; by the CDPI iterator) for the ACLE coprocessor builtins.  All six
;; operands are compile-time constants, range-checked at output time:
;; op0 = coprocessor number (< 16), op1 = opcode1 (< 16),
;; ops 2-4 = coprocessor registers CRd/CRn/CRm (< 32), op5 = opcode2 (< 8).
;; unspec_volatile: the coprocessor operation has unknown side effects.
11266 (define_insn "arm_<cdp>"
11267   [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11268 		     (match_operand:SI 1 "immediate_operand" "n")
11269 		     (match_operand:SI 2 "immediate_operand" "n")
11270 		     (match_operand:SI 3 "immediate_operand" "n")
11271 		     (match_operand:SI 4 "immediate_operand" "n")
11272 		     (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11273   "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11275   arm_const_bounds (operands[0], 0, 16);
11276   arm_const_bounds (operands[1], 0, 16);
11277   arm_const_bounds (operands[2], 0, (1 << 5));
11278   arm_const_bounds (operands[3], 0, (1 << 5));
11279   arm_const_bounds (operands[4], 0, (1 << 5));
11280   arm_const_bounds (operands[5], 0, 8);
;; %c prints the constant without a '#'; CR%cN names a coproc register.
11281   return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11283   [(set_attr "length" "4")
11284    (set_attr "type" "coproc")])
;; *ldc: anonymous insn matching the LDC/LDC2(L) coprocessor-load unspec
;; generated by the arm_<ldc> expander once the address has been
;; legitimized into a memory_operand ('Uz' constraint).
;; op0 = coprocessor number (< 16), op1 = CRd (< 32), op2 = source memory.
11286 (define_insn "*ldc"
11287   [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11288 		     (match_operand:SI 1 "immediate_operand" "n")
11289 		     (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11290   "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11292   arm_const_bounds (operands[0], 0, 16);
11293   arm_const_bounds (operands[1], 0, (1 << 5));
11294   return "<ldc>\\tp%c0, CR%c1, %2";
11296   [(set_attr "length" "4")
11297    (set_attr "type" "coproc")])
;; *stc: anonymous insn matching the STC/STC2(L) coprocessor-store unspec
;; generated by the arm_<stc> expander.  Mirror image of *ldc: op2 is the
;; destination memory (note the '=Uz' output constraint on the mem).
;; op0 = coprocessor number (< 16), op1 = CRd (< 32).
11299 (define_insn "*stc"
11300   [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11301 		     (match_operand:SI 1 "immediate_operand" "n")
11302 		     (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11303   "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11305   arm_const_bounds (operands[0], 0, 16);
11306   arm_const_bounds (operands[1], 0, (1 << 5));
11307   return "<stc>\\tp%c0, CR%c1, %2";
11309   [(set_attr "length" "4")
11310    (set_attr "type" "coproc")])
;; arm_<ldc>: named expander for the LDC/LDC2(L) builtins.  Takes the base
;; address in a core register (operand 2); the resulting (mem:SI ...) is
;; later matched by the *ldc insn above.  No preparation code needed.
11312 (define_expand "arm_<ldc>"
11313   [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11314 		     (match_operand:SI 1 "immediate_operand")
11315 		     (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11316   "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; arm_<stc>: named expander for the STC/STC2(L) builtins; the parallel of
;; arm_<ldc> for stores.  The (mem:SI ...) it emits is matched by *stc.
11318 (define_expand "arm_<stc>"
11319   [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11320 		     (match_operand:SI 1 "immediate_operand")
11321 		     (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11322   "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; arm_<mcr>: MCR/MCR2 — move one core register (operand 2) to a
;; coprocessor.  op0 = coproc number (< 16), op1 = opcode1 (< 8),
;; op3/op4 = CRn/CRm (< 32), op5 = opcode2 (< 8).
;; The (use (match_dup 2)) keeps the source register live even though it
;; only appears inside the unspec_volatile.
11324 (define_insn "arm_<mcr>"
11325   [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11326 		     (match_operand:SI 1 "immediate_operand" "n")
11327 		     (match_operand:SI 2 "s_register_operand" "r")
11328 		     (match_operand:SI 3 "immediate_operand" "n")
11329 		     (match_operand:SI 4 "immediate_operand" "n")
11330 		     (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11331    (use (match_dup 2))]
11332   "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11334   arm_const_bounds (operands[0], 0, 16);
11335   arm_const_bounds (operands[1], 0, 8);
11336   arm_const_bounds (operands[3], 0, (1 << 5));
11337   arm_const_bounds (operands[4], 0, (1 << 5));
11338   arm_const_bounds (operands[5], 0, 8);
11339   return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11341   [(set_attr "length" "4")
11342    (set_attr "type" "coproc")])
;; arm_<mrc>: MRC/MRC2 — move from a coprocessor into core register
;; operand 0.  op1 = coproc number (< 16), op2 = opcode1 (< 8),
;; op3/op4 = CRn/CRm (< 32), op5 = opcode2 (< 8).  The value is modelled
;; as a SET from an unspec_volatile so the destination is a real output.
11344 (define_insn "arm_<mrc>"
11345   [(set (match_operand:SI 0 "s_register_operand" "=r")
11346 	(unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11347 			  (match_operand:SI 2 "immediate_operand" "n")
11348 			  (match_operand:SI 3 "immediate_operand" "n")
11349 			  (match_operand:SI 4 "immediate_operand" "n")
11350 			  (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11351   "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11353   arm_const_bounds (operands[1], 0, 16);
11354   arm_const_bounds (operands[2], 0, 8);
11355   arm_const_bounds (operands[3], 0, (1 << 5));
11356   arm_const_bounds (operands[4], 0, (1 << 5));
11357   arm_const_bounds (operands[5], 0, 8);
11358   return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11360   [(set_attr "length" "4")
11361    (set_attr "type" "coproc")])
;; arm_<mcrr>: MCRR/MCRR2 — move a 64-bit core register pair (DImode
;; operand 2) to a coprocessor.  op0 = coproc number (< 16),
;; op1 = opcode (< 8), op3 = CRm (< 32).  %Q2/%R2 print the low and high
;; words of the DImode pair; (use (match_dup 2)) keeps the pair live.
11363 (define_insn "arm_<mcrr>"
11364   [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11365 		     (match_operand:SI 1 "immediate_operand" "n")
11366 		     (match_operand:DI 2 "s_register_operand" "r")
11367 		     (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11368    (use (match_dup 2))]
11369   "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11371   arm_const_bounds (operands[0], 0, 16);
11372   arm_const_bounds (operands[1], 0, 8);
11373   arm_const_bounds (operands[3], 0, (1 << 5));
11374   return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11376   [(set_attr "length" "4")
11377    (set_attr "type" "coproc")])
;; arm_<mrrc>: MRRC/MRRC2 — move from a coprocessor into the 64-bit core
;; register pair operand 0 (DImode).  op1 = coproc number (< 16),
;; op2 = opcode (< 8), op3 = CRm (< 32).  Inverse of arm_<mcrr>.
11379 (define_insn "arm_<mrrc>"
11380   [(set (match_operand:DI 0 "s_register_operand" "=r")
11381 	(unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11382 			  (match_operand:SI 2 "immediate_operand" "n")
11383 			  (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11384   "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11386   arm_const_bounds (operands[1], 0, 16);
11387   arm_const_bounds (operands[2], 0, 8);
11388   arm_const_bounds (operands[3], 0, (1 << 5));
11389   return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11391   [(set_attr "length" "4")
11392    (set_attr "type" "coproc")])
;; speculation_barrier: named expander used by __builtin_speculation_safe_*
;; support.  On Armv7/Armv8 the pattern falls through to the hard-barrier
;; insn below; on older architectures it calls a libgcc helper instead.
;; NOTE(review): the expander's condition line and the "{ ... DONE; }"
;; framing appear to be missing from this extract — confirm against the
;; original arm.md.
11394 (define_expand "speculation_barrier"
11395   [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11398   /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11399      have a usable barrier (and probably don't need one in practice).
11400      But to be safe if such code is run on later architectures, call a
11401      helper function in libgcc that will do the thing for the active
11403   if (!(arm_arch7 || arm_arch8))
11405       arm_emit_speculation_barrier_function ();
11411 ;; Generate a hard speculation barrier when we have not enabled speculation
;; *speculation_barrier_insn: the hard barrier for Armv7/Armv8, 8 bytes
;; long (two 4-byte instructions; the output template line is missing
;; from this extract — presumably a DSB+ISB pair, confirm in arm.md).
11413 (define_insn "*speculation_barrier_insn"
11414   [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11415   "arm_arch7 || arm_arch8"
11417   [(set_attr "type" "block")
11418    (set_attr "length" "8")]
11421 ;; Vector bits common to IWMMXT and Neon
11422 (include "vec-common.md")
11423 ;; Load the Intel Wireless Multimedia Extension patterns
11424 (include "iwmmxt.md")
11425 ;; Load the VFP co-processor patterns
11427 ;; Thumb-1 patterns
11428 (include "thumb1.md")
11429 ;; Thumb-2 patterns
11430 (include "thumb2.md")
11432 (include "neon.md")
11434 (include "crypto.md")
11435 ;; Synchronization Primitives
11436 (include "sync.md")
11437 ;; Fixed-point patterns
11438 (include "arm-fixed.md")