1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
; The value is derived from the TARGET_THUMB predicate via symbol_ref, so it
; follows the per-function ARM/Thumb mode selection.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
; Tracks the arm_arch6 flag directly.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
; Distinct from is_thumb: Thumb-2 insns have is_thumb=yes but is_thumb1=no.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
; Default is 'no'; individual alternatives opt in via set_attr.
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
; Default is 'yes'; patterns that would emit a long IT set it to 'no' so
; they can be disabled when arm_restrict_it is in force (see "enabled").
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
;; registers; shared pipeline descriptions test it by this common name.
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
; Value comes from the target-global arm_fpu_attr flag.
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
;; NOTE(review): the default expression of "length" (normally a const_int)
;; appears to be missing from this extraction -- the embedded numbering
;; jumps from 119 to 122 here.  Confirm against the upstream file.
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
;; Computed attribute: is the per-alternative "arch" value compatible with
;; the current target?  Each cond arm pairs an arch value with the target
;; test that must hold for it.
;; NOTE(review): the (const_string "yes") result lines of the cond arms
;; (and the "iwmmxt" arm) appear to be missing from this extraction; the
;; embedded numbering skips between arms.  Code text left untouched.
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
; Optimization preference of an alternative: "speed", "size", or "any".
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
;; Computed attribute: does the per-alternative "opt" value match the
;; current function's speed/size optimization goal?
;; NOTE(review): the result lines for the first two cond arms appear to be
;; missing from this extraction (numbering skips 192-193 and 196-197).
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
;; 'yes' for FP load alternatives whose source is a constant, i.e. those
;; that must materialise the value from a literal pool entry.
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
;; NOTE(review): the (const_string "no") results of the first two cond arms
;; appear to be missing from this extraction (numbering skips 225-226,
;; 229-230, 232).  Code text left untouched.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
; Per-mode raw ranges; load patterns override these via set_attr.
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Effective ranges: select the Thumb or ARM value based on is_thumb.
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
; These defaults apply to every inline-asm insn in the function.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
;; Default: Thumb-1 insns and calls clobber the flags; otherwise non-Neon
;; insns are "nocond" and Neon insns are "unconditional".
;; NOTE(review): the line opening the default expression (embedded line 288)
;; appears to be missing from this extraction.  Code text left untouched.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
;; NOTE(review): the insn-type list tested here (embedded lines 312-313)
;; appears to be missing from this extraction.  Code text left untouched.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
; The list below enumerates every single-cycle "type" value (ALU, logic,
; shift and iWMMXt operations); anything else is "multi".
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
;; 'yes' when tuning for a Cortex-R4/R4F/R5 core; used below to exclude
;; those cores from the generic scheduler.
;; NOTE(review): interior lines of the three tune attributes below appear
;; to be missing from this extraction (embedded numbering skips, e.g.
;; 368, 370, 374, 376, 384, 388, 394).  Code text left untouched.
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
;; 'no' for any core that has its own pipeline description in the
;; included *.md files; 'yes' otherwise.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
;; True when the generic VFP pipeline model applies: a VFP FPU on a core
;; without its own VFP scheduling description.
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
;; 64-bit add: split into a low-word ADDS that sets the carry flag followed
;; by a high-word ADC-style add consuming it (addsi3_compareC then
;; addsi3_carryin / add0si3_carryin); a zero low half degenerates to a
;; single high-word add.
;; NOTE(review): several lines of this expander (condition string, braces
;; and parts of the C body -- embedded numbers 442-445, 448-450, 454, 457,
;; 459-460, etc.) appear to be missing from this extraction.  Code text
;; left untouched; only comments added.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry))
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
;; Signed add with overflow check: do the add while setting V via
;; add<mode>3_compareV, then branch on NE in CC_Vmode to operand 3.
;; NOTE(review): the condition strings / braces of these expanders (embedded
;; lines 496-497, 500-503, 509-510, etc.) appear to be missing from this
;; extraction.  Code text left untouched.
491 (define_expand "addv<mode>4"
492 [(match_operand:SIDI 0 "register_operand")
493 (match_operand:SIDI 1 "register_operand")
494 (match_operand:SIDI 2 "register_operand")
495 (match_operand 3 "")]
498 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
499 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; Unsigned add with overflow check: set C via add<mode>3_compareC and
;; branch on LTU (carry set) to operand 3.
504 (define_expand "uaddv<mode>4"
505 [(match_operand:SIDI 0 "register_operand")
506 (match_operand:SIDI 1 "register_operand")
507 (match_operand:SIDI 2 "register_operand")
508 (match_operand 3 "")]
511 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
512 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
;; 32-bit add; constant addends that cannot be encoded directly are
;; synthesized by arm_split_constant on 32-bit targets.
517 (define_expand "addsi3"
518 [(set (match_operand:SI 0 "s_register_operand")
519 (plus:SI (match_operand:SI 1 "s_register_operand")
520 (match_operand:SI 2 "reg_or_int_operand")))]
523 if (TARGET_32BIT && CONST_INT_P (operands[2]))
525 arm_split_constant (PLUS, SImode, NULL_RTX,
526 INTVAL (operands[2]), operands[0], operands[1],
527 optimize && can_create_pseudo_p ());
533 ; If there is a scratch available, this will be faster than synthesizing the
; constant in the destination: load the awkward immediate (whose one's
; complement IS encodable) into the scratch and add it.
;; NOTE(review): the opener of this peephole/split (embedded lines 534-535)
;; and some interior lines are missing from this extraction.
536 [(match_scratch:SI 3 "r")
537 (set (match_operand:SI 0 "arm_general_register_operand" "")
538 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
539 (match_operand:SI 2 "const_int_operand" "")))]
541 !(const_ok_for_arm (INTVAL (operands[2]))
542 || const_ok_for_arm (-INTVAL (operands[2])))
543 && const_ok_for_arm (~INTVAL (operands[2]))"
544 [(set (match_dup 3) (match_dup 2))
545 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
549 ;; The r/r/k alternative is required when reloading the address
550 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
551 ;; put the duplicated register first, and not try the commutative version.
;; Main 32-bit add pattern; the final alternative (?n) accepts any constant
;; and is split after reload via arm_split_constant (length 16).
;; NOTE(review): part of the output template and the split condition
;; (embedded lines 556-570, 573-574, 579, 582-584) are missing here.
552 (define_insn_and_split "*arm_addsi3"
553 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
554 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
555 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
571 subw%?\\t%0, %1, #%n2
572 subw%?\\t%0, %1, #%n2
575 && CONST_INT_P (operands[2])
576 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
577 && (reload_completed || !arm_eliminable_register (operands[1]))"
578 [(clobber (const_int 0))]
580 arm_split_constant (PLUS, SImode, curr_insn,
581 INTVAL (operands[2]), operands[0],
585 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
586 (set_attr "predicable" "yes")
587 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
588 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
589 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
590 (const_string "alu_imm")
591 (const_string "alu_sreg")))
;; 64-bit add that also sets V (signed overflow): modelled by comparing the
;; TImode widening sum against the sign-extended DImode sum; emitted as an
;; adds/adcs pair over the low/high words.
;; NOTE(review): some interior lines of these four patterns (embedded
;; numbers 597-598, 604, 613-614, 620, 628-629, 632, 635, 646, 649)
;; are missing from this extraction.  Code text left untouched.
595 (define_insn "adddi3_compareV"
596 [(set (reg:CC_V CC_REGNUM)
599 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
600 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
601 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
602 (set (match_operand:DI 0 "s_register_operand" "=&r")
603 (plus:DI (match_dup 1) (match_dup 2)))]
605 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
606 [(set_attr "conds" "set")
607 (set_attr "length" "8")
608 (set_attr "type" "multiple")]
;; 32-bit analogue: V computed via a DImode widening compare; single adds.
611 (define_insn "addsi3_compareV"
612 [(set (reg:CC_V CC_REGNUM)
615 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
616 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
617 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
618 (set (match_operand:SI 0 "register_operand" "=r")
619 (plus:SI (match_dup 1) (match_dup 2)))]
621 "adds%?\\t%0, %1, %2"
622 [(set_attr "conds" "set")
623 (set_attr "type" "alus_sreg")]
;; 64-bit add setting C (unsigned carry-out); adds/adcs pair.
626 (define_insn "adddi3_compareC"
627 [(set (reg:CC_C CC_REGNUM)
630 (match_operand:DI 1 "register_operand" "r")
631 (match_operand:DI 2 "register_operand" "r"))
633 (set (match_operand:DI 0 "register_operand" "=&r")
634 (plus:DI (match_dup 1) (match_dup 2)))]
636 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
637 [(set_attr "conds" "set")
638 (set_attr "length" "8")
639 (set_attr "type" "multiple")]
;; 32-bit add setting C; used by uaddv and the adddi3 lowering above.
642 (define_insn "addsi3_compareC"
643 [(set (reg:CC_C CC_REGNUM)
644 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
645 (match_operand:SI 2 "register_operand" "r"))
647 (set (match_operand:SI 0 "register_operand" "=r")
648 (plus:SI (match_dup 1) (match_dup 2)))]
650 "adds%?\\t%0, %1, %2"
651 [(set_attr "conds" "set")
652 (set_attr "type" "alus_sreg")]
;; Add that also sets N/Z (CC_NOOV) from the result; negative immediates
;; use the subs form.
;; NOTE(review): interior lines of these three patterns (embedded numbers
;; 657, 660, 663-665, 667, 674, 677-682, 690, 693-694) are missing from
;; this extraction.  Code text left untouched.
655 (define_insn "addsi3_compare0"
656 [(set (reg:CC_NOOV CC_REGNUM)
658 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
659 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
661 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
662 (plus:SI (match_dup 1) (match_dup 2)))]
666 subs%?\\t%0, %1, #%n2
668 [(set_attr "conds" "set")
669 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;; As above but the sum itself is discarded (cmn/cmp-style compare only).
672 (define_insn "*addsi3_compare0_scratch"
673 [(set (reg:CC_NOOV CC_REGNUM)
675 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
676 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
683 [(set_attr "conds" "set")
684 (set_attr "predicable" "yes")
685 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;; Compare -x against y for equality (Z only); 16-bit form in Thumb-2.
688 (define_insn "*compare_negsi_si"
689 [(set (reg:CC_Z CC_REGNUM)
691 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
692 (match_operand:SI 1 "s_register_operand" "l,r")))]
695 [(set_attr "conds" "set")
696 (set_attr "predicable" "yes")
697 (set_attr "arch" "t2,*")
698 (set_attr "length" "2,4")
699 (set_attr "predicable_short_it" "yes,no")
700 (set_attr "type" "alus_sreg")]
703 ;; This is the canonicalization of subsi3_compare when the
704 ;; addend is a constant.
;; Operands 2 and 3 are negations of each other (enforced by the
;; condition); alternative 0 emits subs with the negated immediate,
;; alternative 1 emits adds.
;; NOTE(review): interior lines of this pattern and of the peephole below
;; (embedded numbers 707, 713, 716, 722, 725, 727, 733, 735-737, 741, 744,
;; 747, 753-755, 758, 760-761, 764-765) are missing from this extraction.
705 (define_insn "cmpsi2_addneg"
706 [(set (reg:CC CC_REGNUM)
708 (match_operand:SI 1 "s_register_operand" "r,r")
709 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
711 (plus:SI (match_dup 1)
712 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
714 && (INTVAL (operands[2])
715 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
717 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
718 in different condition codes (like cmn rather than like cmp), so that
719 alternative comes first. Both alternatives can match for any 0x??000000
720 where except for 0 and INT_MIN it doesn't matter what we choose, and also
721 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
723 if (which_alternative == 0 && operands[3] != const1_rtx)
724 return "subs%?\\t%0, %1, #%n3";
726 return "adds%?\\t%0, %1, %3";
728 [(set_attr "conds" "set")
729 (set_attr "type" "alus_sreg")]
732 ;; Convert the sequence
734 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
738 ;; bcs dest ((unsigned)rn >= 1)
739 ;; similarly for the beq variant using bcc.
740 ;; This is a common looping idiom (while (n--))
;; Peephole: fold the decrement and the compare-with-(-1) into a single
;; flag-setting subtract, rewriting the branch condition to GEU/LTU.
742 [(set (match_operand:SI 0 "arm_general_register_operand" "")
743 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
745 (set (match_operand 2 "cc_register" "")
746 (compare (match_dup 0) (const_int -1)))
748 (if_then_else (match_operator 3 "equality_operator"
749 [(match_dup 2) (const_int 0)])
750 (match_operand 4 "" "")
751 (match_operand 5 "" "")))]
752 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
756 (match_dup 1) (const_int 1)))
757 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
759 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
762 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
763 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
766 operands[2], const0_rtx);"
769 ;; The next four insns work because they compare the result with one of
770 ;; the operands, and we know that the use of the condition code is
771 ;; either GEU or LTU, so we can use the carry flag from the addition
772 ;; instead of doing the compare a second time.
;; NOTE(review): interior lines of the four patterns below (embedded
;; numbers 775, 778, 781-784, 787, 789, 793, and the analogous gaps in the
;; later patterns) are missing from this extraction.  Code text untouched.
773 (define_insn "*addsi3_compare_op1"
774 [(set (reg:CC_C CC_REGNUM)
776 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
777 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
779 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
780 (plus:SI (match_dup 1) (match_dup 2)))]
785 subs%?\\t%0, %1, #%n2
786 subs%?\\t%0, %0, #%n2
788 subs%?\\t%0, %1, #%n2
790 [(set_attr "conds" "set")
791 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
792 (set_attr "length" "2,2,2,2,4,4,4")
794 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
;; Same as op1 but the flag comparison is against the other input operand.
797 (define_insn "*addsi3_compare_op2"
798 [(set (reg:CC_C CC_REGNUM)
800 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
801 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
803 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
804 (plus:SI (match_dup 1) (match_dup 2)))]
809 subs%?\\t%0, %1, #%n2
810 subs%?\\t%0, %0, #%n2
812 subs%?\\t%0, %1, #%n2
814 [(set_attr "conds" "set")
815 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
816 (set_attr "length" "2,2,2,2,4,4,4")
818 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
;; Compare-only variant: the sum is not stored, only the flags are used.
821 (define_insn "*compare_addsi2_op0"
822 [(set (reg:CC_C CC_REGNUM)
824 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
825 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
834 [(set_attr "conds" "set")
835 (set_attr "predicable" "yes")
836 (set_attr "arch" "t2,t2,*,*,*")
837 (set_attr "predicable_short_it" "yes,yes,no,no,no")
838 (set_attr "length" "2,2,4,4,4")
839 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
;; As above, comparing against operand 1 instead of operand 0.
842 (define_insn "*compare_addsi2_op1"
843 [(set (reg:CC_C CC_REGNUM)
845 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
846 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
855 [(set_attr "conds" "set")
856 (set_attr "predicable" "yes")
857 (set_attr "arch" "t2,t2,*,*,*")
858 (set_attr "predicable_short_it" "yes,yes,no,no,no")
859 (set_attr "length" "2,2,4,4,4")
860 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
;; Add-with-carry: x + y + C.  The K alternative (negated immediate) is
;; emitted as sbc with the bitwise-inverted constant (%B2).
;; NOTE(review): interior lines of the carry-in patterns below (embedded
;; numbers 868-871, 886-887, 899-902, 914, 920, 937) are missing from this
;; extraction.  Code text left untouched.
863 (define_insn "addsi3_carryin"
864 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
865 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
866 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
867 (match_operand:SI 3 "arm_carry_operation" "")))]
872 sbc%?\\t%0, %1, #%B2"
873 [(set_attr "conds" "use")
874 (set_attr "predicable" "yes")
875 (set_attr "arch" "t2,*,*")
876 (set_attr "length" "4")
877 (set_attr "predicable_short_it" "yes,no,no")
878 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
881 ;; Canonicalization of the above when the immediate is zero.
;; x + 0 + C, i.e. just add the carry flag into a register.
882 (define_insn "add0si3_carryin"
883 [(set (match_operand:SI 0 "s_register_operand" "=r")
884 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
885 (match_operand:SI 1 "arm_not_operand" "r")))]
888 [(set_attr "conds" "use")
889 (set_attr "predicable" "yes")
890 (set_attr "length" "4")
891 (set_attr "type" "adc_imm")]
;; Same as addsi3_carryin with the carry term in the other association.
894 (define_insn "*addsi3_carryin_alt2"
895 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
896 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
897 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
898 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
903 sbc%?\\t%0, %1, #%B2"
904 [(set_attr "conds" "use")
905 (set_attr "predicable" "yes")
906 (set_attr "arch" "t2,*,*")
907 (set_attr "length" "4")
908 (set_attr "predicable_short_it" "yes,no,no")
909 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
;; adc with a shifted second operand: x + (y <shift> n) + C.
912 (define_insn "*addsi3_carryin_shift"
913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
915 (match_operator:SI 2 "shift_operator"
916 [(match_operand:SI 3 "s_register_operand" "r,r")
917 (match_operand:SI 4 "shift_amount_operand" "M,r")])
918 (match_operand:SI 5 "arm_carry_operation" ""))
919 (match_operand:SI 1 "s_register_operand" "r,r")))]
921 "adc%?\\t%0, %1, %3%S2"
922 [(set_attr "conds" "use")
923 (set_attr "arch" "32,a")
924 (set_attr "shift" "3")
925 (set_attr "predicable" "yes")
926 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
927 (const_string "alu_shift_imm")
928 (const_string "alu_shift_reg")))]
;; adcs form: the carry-in add that also (re)sets the flags.
931 (define_insn "*addsi3_carryin_clobercc"
932 [(set (match_operand:SI 0 "s_register_operand" "=r")
933 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
934 (match_operand:SI 2 "arm_rhs_operand" "rI"))
935 (match_operand:SI 3 "arm_carry_operation" "")))
936 (clobber (reg:CC CC_REGNUM))]
938 "adcs%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "type" "adcs_reg")]
;; Signed subtract with overflow check: subtract while setting the flags
;; (sub<mode>3_compare1), then branch on V set (NE in CC_Vmode).
;; NOTE(review): condition strings/braces of these expanders and interior
;; lines of the insns below (embedded numbers 948-949, 952-955, 961-962,
;; 971, 976, 985, 990) are missing from this extraction.
943 (define_expand "subv<mode>4"
944 [(match_operand:SIDI 0 "register_operand")
945 (match_operand:SIDI 1 "register_operand")
946 (match_operand:SIDI 2 "register_operand")
947 (match_operand 3 "")]
950 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
951 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; Unsigned subtract with borrow check: branch on LTU (borrow occurred).
956 (define_expand "usubv<mode>4"
957 [(match_operand:SIDI 0 "register_operand")
958 (match_operand:SIDI 1 "register_operand")
959 (match_operand:SIDI 2 "register_operand")
960 (match_operand 3 "")]
963 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
964 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
;; 64-bit flag-setting subtract: subs/sbcs pair over low/high words.
969 (define_insn "subdi3_compare1"
970 [(set (reg:CC CC_REGNUM)
972 (match_operand:DI 1 "s_register_operand" "r")
973 (match_operand:DI 2 "s_register_operand" "r")))
974 (set (match_operand:DI 0 "s_register_operand" "=&r")
975 (minus:DI (match_dup 1) (match_dup 2)))]
977 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
978 [(set_attr "conds" "set")
979 (set_attr "length" "8")
980 (set_attr "type" "multiple")]
;; 32-bit flag-setting subtract.
983 (define_insn "subsi3_compare1"
984 [(set (reg:CC CC_REGNUM)
986 (match_operand:SI 1 "register_operand" "r")
987 (match_operand:SI 2 "register_operand" "r")))
988 (set (match_operand:SI 0 "register_operand" "=r")
989 (minus:SI (match_dup 1) (match_dup 2)))]
991 "subs%?\\t%0, %1, %2"
992 [(set_attr "conds" "set")
993 (set_attr "type" "alus_sreg")]
;; Subtract with borrow: x - y - B.  The Pz alternative uses the
;; sbc rd, rm, rm, lsl #1 trick for a zero first operand on Thumb-2.
;; NOTE(review): interior lines of the borrow patterns below (embedded
;; numbers 1001-1004, 1014, 1018, 1028-1029, 1036, 1042, 1053, 1059) are
;; missing from this extraction.  Code text left untouched.
996 (define_insn "subsi3_carryin"
997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
998 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
999 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1000 (match_operand:SI 3 "arm_borrow_operation" "")))]
1005 sbc%?\\t%0, %2, %2, lsl #1"
1006 [(set_attr "conds" "use")
1007 (set_attr "arch" "*,a,t2")
1008 (set_attr "predicable" "yes")
1009 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
;; Borrow-in subtract of a negatable immediate, emitted as sbc with #%n2.
1012 (define_insn "*subsi3_carryin_const"
1013 [(set (match_operand:SI 0 "s_register_operand" "=r")
1015 (match_operand:SI 1 "s_register_operand" "r")
1016 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1017 (match_operand:SI 3 "arm_borrow_operation" "")))]
1019 "sbc\\t%0, %1, #%n2"
1020 [(set_attr "conds" "use")
1021 (set_attr "type" "adc_imm")]
;; Canonical form when the immediate is zero: x - B.
1024 (define_insn "*subsi3_carryin_const0"
1025 [(set (match_operand:SI 0 "s_register_operand" "=r")
1026 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1027 (match_operand:SI 2 "arm_borrow_operation" "")))]
1030 [(set_attr "conds" "use")
1031 (set_attr "type" "adc_imm")]
;; sbc with a shifted subtrahend: x - (y <shift> n) - B.
1034 (define_insn "*subsi3_carryin_shift"
1035 [(set (match_operand:SI 0 "s_register_operand" "=r")
1037 (match_operand:SI 1 "s_register_operand" "r")
1038 (match_operator:SI 2 "shift_operator"
1039 [(match_operand:SI 3 "s_register_operand" "r")
1040 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1041 (match_operand:SI 5 "arm_borrow_operation" "")))]
1043 "sbc%?\\t%0, %1, %3%S2"
1044 [(set_attr "conds" "use")
1045 (set_attr "predicable" "yes")
1046 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1047 (const_string "alu_shift_imm")
1048 (const_string "alu_shift_reg")))]
;; Reverse-subtract-with-carry: (y <shift> n) - x - B, emitted as rsc
;; (ARM-state only instruction).
1051 (define_insn "*rsbsi3_carryin_shift"
1052 [(set (match_operand:SI 0 "s_register_operand" "=r")
1054 (match_operator:SI 2 "shift_operator"
1055 [(match_operand:SI 3 "s_register_operand" "r")
1056 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1057 (match_operand:SI 1 "s_register_operand" "r"))
1058 (match_operand:SI 5 "arm_borrow_operation" "")))]
1060 "rsc%?\\t%0, %1, %3%S2"
1061 [(set_attr "conds" "use")
1062 (set_attr "predicable" "yes")
1063 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1064 (const_string "alu_shift_imm")
1065 (const_string "alu_shift_reg")))]
1068 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1070 [(set (match_operand:SI 0 "s_register_operand" "")
1071 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1072 (match_operand:SI 2 "s_register_operand" ""))
1074 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1076 [(set (match_dup 3) (match_dup 1))
1077 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1079 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; SFmode add, available with any 32-bit hard-float configuration.
1082 (define_expand "addsf3"
1083 [(set (match_operand:SF 0 "s_register_operand")
1084 (plus:SF (match_operand:SF 1 "s_register_operand")
1085 (match_operand:SF 2 "s_register_operand")))]
1086 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; DFmode add; excluded on single-precision-only VFP.
1090 (define_expand "adddf3"
1091 [(set (match_operand:DF 0 "s_register_operand")
1092 (plus:DF (match_operand:DF 1 "s_register_operand")
1093 (match_operand:DF 2 "s_register_operand")))]
1094 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; DImode subtract: decompose into a flag-setting low-word subtract
;; (SUBS/RSBS/NEGS depending on the low op1 form) followed by a
;; borrow-consuming high-word subtract (subsi3_carryin / negsi2_carryin).
1098 (define_expand "subdi3"
1100 [(set (match_operand:DI 0 "s_register_operand")
1101 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1102 (match_operand:DI 2 "s_register_operand")))
1103 (clobber (reg:CC CC_REGNUM))])]
1108 if (!REG_P (operands[1]))
1109 operands[1] = force_reg (DImode, operands[1]);
1113 rtx lo_result, hi_result, lo_dest, hi_dest;
1114 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1117 /* Since operands[1] may be an integer, pass it second, so that
1118 any necessary simplifications will be done on the decomposed
1120 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1122 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1123 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1125 if (!arm_rhs_operand (lo_op1, SImode))
1126 lo_op1 = force_reg (SImode, lo_op1);
1128 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1129 || !arm_rhs_operand (hi_op1, SImode))
1130 hi_op1 = force_reg (SImode, hi_op1);
1133 if (lo_op1 == const0_rtx)
1135 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1136 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1138 else if (CONST_INT_P (lo_op1))
1140 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1141 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1142 GEN_INT (~UINTVAL (lo_op1))));
1146 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1147 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1150 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1152 if (hi_op1 == const0_rtx)
1153 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1155 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1157 if (lo_result != lo_dest)
1158 emit_move_insn (lo_result, lo_dest);
1160 if (hi_result != hi_dest)
1161 emit_move_insn (hi_result, hi_dest);
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; SImode subtract expander: constant minuends that are cheap to split are
;; handled via arm_split_constant; otherwise the constant is forced to a reg.
1168 (define_expand "subsi3"
1169 [(set (match_operand:SI 0 "s_register_operand")
1170 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1171 (match_operand:SI 2 "s_register_operand")))]
1174 if (CONST_INT_P (operands[1]))
1178 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1179 operands[1] = force_reg (SImode, operands[1]);
1182 arm_split_constant (MINUS, SImode, NULL_RTX,
1183 INTVAL (operands[1]), operands[0],
1185 optimize && can_create_pseudo_p ());
1189 else /* TARGET_THUMB1 */
1190 operands[1] = force_reg (SImode, operands[1]);
1195 ; ??? Check Thumb-2 split length
;; Main SImode subtract insn; the final "?n" alternative takes an arbitrary
;; constant minuend and is split via arm_split_constant after reload.
1196 (define_insn_and_split "*arm_subsi3_insn"
1197 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1198 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1199 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1211 "&& (CONST_INT_P (operands[1])
1212 && !const_ok_for_arm (INTVAL (operands[1])))"
1213 [(clobber (const_int 0))]
1215 arm_split_constant (MINUS, SImode, curr_insn,
1216 INTVAL (operands[1]), operands[0], operands[2], 0);
1219 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1220 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1221 (set_attr "predicable" "yes")
1222 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1223 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
;; Peephole (header line elided in this listing): (const - reg) where the
;; constant is only representable inverted -> load ~const into a scratch,
;; then subtract.
1227 [(match_scratch:SI 3 "r")
1228 (set (match_operand:SI 0 "arm_general_register_operand" "")
1229 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1230 (match_operand:SI 2 "arm_general_register_operand" "")))]
1232 && !const_ok_for_arm (INTVAL (operands[1]))
1233 && const_ok_for_arm (~INTVAL (operands[1]))"
1234 [(set (match_dup 3) (match_dup 1))
1235 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Subtract setting flags, overflow-invalid compare (CC_NOOV).
1239 (define_insn "subsi3_compare0"
1240 [(set (reg:CC_NOOV CC_REGNUM)
1242 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1243 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1245 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1246 (minus:SI (match_dup 1) (match_dup 2)))]
1251 rsbs%?\\t%0, %2, %1"
1252 [(set_attr "conds" "set")
1253 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;; Subtract setting the full flag set (CCmode); third alternative reverses
;; the operands so an immediate minuend can use RSBS.
1256 (define_insn "subsi3_compare"
1257 [(set (reg:CC CC_REGNUM)
1258 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1259 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1260 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1261 (minus:SI (match_dup 1) (match_dup 2)))]
1266 rsbs%?\\t%0, %2, %1"
1267 [(set_attr "conds" "set")
1268 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1271 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1272 ;; rather than (0 cmp reg). This gives the same results for unsigned
1273 ;; and equality compares which is what we mostly need here.
;; Condition requires operand 3 == ~operand 1, i.e. the compare constant is
;; the bitwise inverse of the immediate minuend.
1274 (define_insn "rsb_imm_compare"
1275 [(set (reg:CC_RSB CC_REGNUM)
1276 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1277 (match_operand 3 "const_int_operand" "")))
1278 (set (match_operand:SI 0 "s_register_operand" "=r")
1279 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1281 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1283 [(set_attr "conds" "set")
1284 (set_attr "type" "alus_imm")]
;; SFmode subtract (hard float).
1287 (define_expand "subsf3"
1288 [(set (match_operand:SF 0 "s_register_operand")
1289 (minus:SF (match_operand:SF 1 "s_register_operand")
1290 (match_operand:SF 2 "s_register_operand")))]
1291 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; DFmode subtract; excluded on single-precision-only VFP.
1295 (define_expand "subdf3"
1296 [(set (match_operand:DF 0 "s_register_operand")
1297 (minus:DF (match_operand:DF 1 "s_register_operand")
1298 (match_operand:DF 2 "s_register_operand")))]
1299 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1304 ;; Multiplication insns
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; HImode multiply: widen to SImode via mulhisi3 (SMULBB) and keep the low half.
1306 (define_expand "mulhi3"
1307 [(set (match_operand:HI 0 "s_register_operand")
1308 (mult:HI (match_operand:HI 1 "s_register_operand")
1309 (match_operand:HI 2 "s_register_operand")))]
1310 "TARGET_DSP_MULTIPLY"
1313 rtx result = gen_reg_rtx (SImode);
1314 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1315 emit_move_insn (operands[0], gen_lowpart (HImode, result));
;; SImode multiply expander (operands written mult(2, 1) to match the insn).
1320 (define_expand "mulsi3"
1321 [(set (match_operand:SI 0 "s_register_operand")
1322 (mult:SI (match_operand:SI 2 "s_register_operand")
1323 (match_operand:SI 1 "s_register_operand")))]
1328 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
;; MUL Rd, Rm, Rs (insn name elided in this listing).  Pre-v6 alternatives
;; use earlyclobber/tying to keep Rd distinct from Rm.
1330 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1331 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1332 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1334 "mul%?\\t%0, %2, %1"
1335 [(set_attr "type" "mul")
1336 (set_attr "predicable" "yes")
1337 (set_attr "arch" "t2,v6,nov6,nov6")
1338 (set_attr "length" "4")
1339 (set_attr "predicable_short_it" "yes,no,*,*")]
1342 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
1343 ;; reusing the same register.
;; MLA Rd, Rm, Rs, Ra: multiply-accumulate (insn name elided in listing).
1346 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1348 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1349 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1350 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1352 "mla%?\\t%0, %3, %2, %1"
1353 [(set_attr "type" "mla")
1354 (set_attr "predicable" "yes")
1355 (set_attr "arch" "v6,nov6,nov6,nov6")]
;; MLS Rd, Rm, Rs, Ra = Ra - Rm*Rs (ARMv6T2+; insn name elided in listing).
1359 [(set (match_operand:SI 0 "s_register_operand" "=r")
1361 (match_operand:SI 1 "s_register_operand" "r")
1362 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1363 (match_operand:SI 2 "s_register_operand" "r"))))]
1364 "TARGET_32BIT && arm_arch_thumb2"
1365 "mls%?\\t%0, %3, %2, %1"
1366 [(set_attr "type" "mla")
1367 (set_attr "predicable" "yes")]
;; Flag-setting multiply, pre-ARMv6 form (earlyclobber output).
1370 (define_insn "*mulsi3_compare0"
1371 [(set (reg:CC_NOOV CC_REGNUM)
1372 (compare:CC_NOOV (mult:SI
1373 (match_operand:SI 2 "s_register_operand" "r,r")
1374 (match_operand:SI 1 "s_register_operand" "%0,r"))
1376 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1377 (mult:SI (match_dup 2) (match_dup 1)))]
1378 "TARGET_ARM && !arm_arch6"
1379 "muls%?\\t%0, %2, %1"
1380 [(set_attr "conds" "set")
1381 (set_attr "type" "muls")]
;; v6 form: no overlap restriction; only used when optimizing for size
;; (MULS is slower than MUL + CMP on many cores).
1384 (define_insn "*mulsi3_compare0_v6"
1385 [(set (reg:CC_NOOV CC_REGNUM)
1386 (compare:CC_NOOV (mult:SI
1387 (match_operand:SI 2 "s_register_operand" "r")
1388 (match_operand:SI 1 "s_register_operand" "r"))
1390 (set (match_operand:SI 0 "s_register_operand" "=r")
1391 (mult:SI (match_dup 2) (match_dup 1)))]
1392 "TARGET_ARM && arm_arch6 && optimize_size"
1393 "muls%?\\t%0, %2, %1"
1394 [(set_attr "conds" "set")
1395 (set_attr "type" "muls")]
;; As *mulsi3_compare0 but the product itself is dead (scratch destination).
1398 (define_insn "*mulsi_compare0_scratch"
1399 [(set (reg:CC_NOOV CC_REGNUM)
1400 (compare:CC_NOOV (mult:SI
1401 (match_operand:SI 2 "s_register_operand" "r,r")
1402 (match_operand:SI 1 "s_register_operand" "%0,r"))
1404 (clobber (match_scratch:SI 0 "=&r,&r"))]
1405 "TARGET_ARM && !arm_arch6"
1406 "muls%?\\t%0, %2, %1"
1407 [(set_attr "conds" "set")
1408 (set_attr "type" "muls")]
;; v6 scratch variant, size-optimizing only.
1411 (define_insn "*mulsi_compare0_scratch_v6"
1412 [(set (reg:CC_NOOV CC_REGNUM)
1413 (compare:CC_NOOV (mult:SI
1414 (match_operand:SI 2 "s_register_operand" "r")
1415 (match_operand:SI 1 "s_register_operand" "r"))
1417 (clobber (match_scratch:SI 0 "=r"))]
1418 "TARGET_ARM && arm_arch6 && optimize_size"
1419 "muls%?\\t%0, %2, %1"
1420 [(set_attr "conds" "set")
1421 (set_attr "type" "muls")]
;; Flag-setting multiply-accumulate: MLAS Rd, Rm, Rs, Ra.
;; The earlyclobber "=&r" outputs and "0"-tied alternatives mark this as the
;; pre-ARMv6 form; the ARMv6 form is the separate *mulsi3addsi_compare0_v6
;; pattern ("arm_arch6 && optimize_size").
;; FIX(review): condition read "TARGET_ARM && arm_arch6", contradicting the
;; matching scratch pattern *mulsi3addsi_compare0_scratch ("!arm_arch6") and
;; the parallel *mulsi3_compare0 pair; restored the missing "!".
1424 (define_insn "*mulsi3addsi_compare0"
1425 [(set (reg:CC_NOOV CC_REGNUM)
1428 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1429 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1430 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1432 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1433 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1435 "TARGET_ARM && !arm_arch6"
1436 "mlas%?\\t%0, %2, %1, %3"
1437 [(set_attr "conds" "set")
1438 (set_attr "type" "mlas")]
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; v6 flag-setting multiply-accumulate; no overlap restriction, used only
;; when optimizing for size.
1441 (define_insn "*mulsi3addsi_compare0_v6"
1442 [(set (reg:CC_NOOV CC_REGNUM)
1445 (match_operand:SI 2 "s_register_operand" "r")
1446 (match_operand:SI 1 "s_register_operand" "r"))
1447 (match_operand:SI 3 "s_register_operand" "r"))
1449 (set (match_operand:SI 0 "s_register_operand" "=r")
1450 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1452 "TARGET_ARM && arm_arch6 && optimize_size"
1453 "mlas%?\\t%0, %2, %1, %3"
1454 [(set_attr "conds" "set")
1455 (set_attr "type" "mlas")]
;; Pre-v6 MLAS where only the flags are needed (scratch destination).
1458 (define_insn "*mulsi3addsi_compare0_scratch"
1459 [(set (reg:CC_NOOV CC_REGNUM)
1462 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1463 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1464 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1466 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1467 "TARGET_ARM && !arm_arch6"
1468 "mlas%?\\t%0, %2, %1, %3"
1469 [(set_attr "conds" "set")
1470 (set_attr "type" "mlas")]
;; v6 scratch variant, size-optimizing only.
1473 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1474 [(set (reg:CC_NOOV CC_REGNUM)
1477 (match_operand:SI 2 "s_register_operand" "r")
1478 (match_operand:SI 1 "s_register_operand" "r"))
1479 (match_operand:SI 3 "s_register_operand" "r"))
1481 (clobber (match_scratch:SI 0 "=r"))]
1482 "TARGET_ARM && arm_arch6 && optimize_size"
1483 "mlas%?\\t%0, %2, %1, %3"
1484 [(set_attr "conds" "set")
1485 (set_attr "type" "mlas")]
1488 ;; 32x32->64 widening multiply.
1489 ;; The only difference between the v3-5 and v6+ versions is the requirement
1490 ;; that the output does not overlap with either input.
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; Expander for [su]mulsidi3: emits <US>mull on the two SImode halves of the
;; DI destination.
1492 (define_expand "<Us>mulsidi3"
1493 [(set (match_operand:DI 0 "s_register_operand")
1495 (SE:DI (match_operand:SI 1 "s_register_operand"))
1496 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1499 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1500 gen_highpart (SImode, operands[0]),
1501 operands[1], operands[2]));
;; SMULL/UMULL: op0 = low word, op1 = high word of the 64-bit product.
1506 (define_insn "<US>mull"
1507 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1509 (match_operand:SI 2 "s_register_operand" "%r,r")
1510 (match_operand:SI 3 "s_register_operand" "r,r")))
1511 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1514 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1517 "<US>mull%?\\t%0, %1, %2, %3"
1518 [(set_attr "type" "umull")
1519 (set_attr "predicable" "yes")
1520 (set_attr "arch" "v6,nov6")]
;; Expander for [su]maddsidi4: widening multiply-accumulate via <US>mlal.
1523 (define_expand "<Us>maddsidi4"
1524 [(set (match_operand:DI 0 "s_register_operand")
1527 (SE:DI (match_operand:SI 1 "s_register_operand"))
1528 (SE:DI (match_operand:SI 2 "s_register_operand")))
1529 (match_operand:DI 3 "s_register_operand")))]
1532 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1533 gen_lowpart (SImode, operands[3]),
1534 gen_highpart (SImode, operands[0]),
1535 gen_highpart (SImode, operands[3]),
1536 operands[1], operands[2]));
;; SMLAL/UMLAL: 64-bit accumulator held in the op1/op3-tied register pair.
1541 (define_insn "<US>mlal"
1542 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1545 (match_operand:SI 4 "s_register_operand" "%r,r")
1546 (match_operand:SI 5 "s_register_operand" "r,r"))
1547 (match_operand:SI 1 "s_register_operand" "0,0")))
1548 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1553 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1554 (zero_extend:DI (match_dup 1)))
1556 (match_operand:SI 3 "s_register_operand" "2,2")))]
1558 "<US>mlal%?\\t%0, %2, %4, %5"
1559 [(set_attr "type" "umlal")
1560 (set_attr "predicable" "yes")
1561 (set_attr "arch" "v6,nov6")]
;; High-part multiply expander: the low word goes into a scratch.
1564 (define_expand "<US>mulsi3_highpart"
1566 [(set (match_operand:SI 0 "s_register_operand")
1570 (SE:DI (match_operand:SI 1 "s_register_operand"))
1571 (SE:DI (match_operand:SI 2 "s_register_operand")))
1573 (clobber (match_scratch:SI 3 ""))])]
;; SMULL/UMULL with only the high word live (%3 receives the dead low word).
1578 (define_insn "*<US>mull_high"
1579 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1583 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1584 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1586 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1588 "<US>mull%?\\t%3, %0, %2, %1"
1589 [(set_attr "type" "umull")
1590 (set_attr "predicable" "yes")
1591 (set_attr "arch" "v6,nov6,nov6")]
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; DSP 16x16->32 signed multiplies.  The b/t suffix names which half of each
;; source register is used (bottom/top 16 bits).
1594 (define_insn "mulhisi3"
1595 [(set (match_operand:SI 0 "s_register_operand" "=r")
1596 (mult:SI (sign_extend:SI
1597 (match_operand:HI 1 "s_register_operand" "%r"))
1599 (match_operand:HI 2 "s_register_operand" "r"))))]
1600 "TARGET_DSP_MULTIPLY"
1601 "smulbb%?\\t%0, %1, %2"
1602 [(set_attr "type" "smulxy")
1603 (set_attr "predicable" "yes")]
;; Top half of op1 (via arithmetic shift) times bottom half of op2.
1606 (define_insn "*mulhisi3tb"
1607 [(set (match_operand:SI 0 "s_register_operand" "=r")
1608 (mult:SI (ashiftrt:SI
1609 (match_operand:SI 1 "s_register_operand" "r")
1612 (match_operand:HI 2 "s_register_operand" "r"))))]
1613 "TARGET_DSP_MULTIPLY"
1614 "smultb%?\\t%0, %1, %2"
1615 [(set_attr "type" "smulxy")
1616 (set_attr "predicable" "yes")]
;; Bottom half of op1 times top half of op2.
1619 (define_insn "*mulhisi3bt"
1620 [(set (match_operand:SI 0 "s_register_operand" "=r")
1621 (mult:SI (sign_extend:SI
1622 (match_operand:HI 1 "s_register_operand" "r"))
1624 (match_operand:SI 2 "s_register_operand" "r")
1626 "TARGET_DSP_MULTIPLY"
1627 "smulbt%?\\t%0, %1, %2"
1628 [(set_attr "type" "smulxy")
1629 (set_attr "predicable" "yes")]
;; Top halves of both operands.
1632 (define_insn "*mulhisi3tt"
1633 [(set (match_operand:SI 0 "s_register_operand" "=r")
1634 (mult:SI (ashiftrt:SI
1635 (match_operand:SI 1 "s_register_operand" "r")
1638 (match_operand:SI 2 "s_register_operand" "r")
1640 "TARGET_DSP_MULTIPLY"
1641 "smultt%?\\t%0, %1, %2"
1642 [(set_attr "type" "smulxy")
1643 (set_attr "predicable" "yes")]
;; 16x16+32 multiply-accumulate: SMLABB.
1646 (define_insn "maddhisi4"
1647 [(set (match_operand:SI 0 "s_register_operand" "=r")
1648 (plus:SI (mult:SI (sign_extend:SI
1649 (match_operand:HI 1 "s_register_operand" "r"))
1651 (match_operand:HI 2 "s_register_operand" "r")))
1652 (match_operand:SI 3 "s_register_operand" "r")))]
1653 "TARGET_DSP_MULTIPLY"
1654 "smlabb%?\\t%0, %1, %2, %3"
1655 [(set_attr "type" "smlaxy")
1656 (set_attr "predicable" "yes")]
1659 ;; Note: there is no maddhisi4ibt because this one is canonical form
1660 (define_insn "*maddhisi4tb"
1661 [(set (match_operand:SI 0 "s_register_operand" "=r")
1662 (plus:SI (mult:SI (ashiftrt:SI
1663 (match_operand:SI 1 "s_register_operand" "r")
1666 (match_operand:HI 2 "s_register_operand" "r")))
1667 (match_operand:SI 3 "s_register_operand" "r")))]
1668 "TARGET_DSP_MULTIPLY"
1669 "smlatb%?\\t%0, %1, %2, %3"
1670 [(set_attr "type" "smlaxy")
1671 (set_attr "predicable" "yes")]
;; Top x top multiply-accumulate: SMLATT.
1674 (define_insn "*maddhisi4tt"
1675 [(set (match_operand:SI 0 "s_register_operand" "=r")
1676 (plus:SI (mult:SI (ashiftrt:SI
1677 (match_operand:SI 1 "s_register_operand" "r")
1680 (match_operand:SI 2 "s_register_operand" "r")
1682 (match_operand:SI 3 "s_register_operand" "r")))]
1683 "TARGET_DSP_MULTIPLY"
1684 "smlatt%?\\t%0, %1, %2, %3"
1685 [(set_attr "type" "smlaxy")
1686 (set_attr "predicable" "yes")]
;; 16x16+64 long multiply-accumulate: SMLALBB (DI accumulator tied to dest).
1689 (define_insn "maddhidi4"
1690 [(set (match_operand:DI 0 "s_register_operand" "=r")
1692 (mult:DI (sign_extend:DI
1693 (match_operand:HI 1 "s_register_operand" "r"))
1695 (match_operand:HI 2 "s_register_operand" "r")))
1696 (match_operand:DI 3 "s_register_operand" "0")))]
1697 "TARGET_DSP_MULTIPLY"
1698 "smlalbb%?\\t%Q0, %R0, %1, %2"
1699 [(set_attr "type" "smlalxy")
1700 (set_attr "predicable" "yes")])
1702 ;; Note: there is no maddhidi4ibt because this one is canonical form
1703 (define_insn "*maddhidi4tb"
1704 [(set (match_operand:DI 0 "s_register_operand" "=r")
1706 (mult:DI (sign_extend:DI
1708 (match_operand:SI 1 "s_register_operand" "r")
1711 (match_operand:HI 2 "s_register_operand" "r")))
1712 (match_operand:DI 3 "s_register_operand" "0")))]
1713 "TARGET_DSP_MULTIPLY"
1714 "smlaltb%?\\t%Q0, %R0, %1, %2"
1715 [(set_attr "type" "smlalxy")
1716 (set_attr "predicable" "yes")])
;; Top x top long multiply-accumulate: SMLALTT.
1718 (define_insn "*maddhidi4tt"
1719 [(set (match_operand:DI 0 "s_register_operand" "=r")
1721 (mult:DI (sign_extend:DI
1723 (match_operand:SI 1 "s_register_operand" "r")
1727 (match_operand:SI 2 "s_register_operand" "r")
1729 (match_operand:DI 3 "s_register_operand" "0")))]
1730 "TARGET_DSP_MULTIPLY"
1731 "smlaltt%?\\t%Q0, %R0, %1, %2"
1732 [(set_attr "type" "smlalxy")
1733 (set_attr "predicable" "yes")])
;; Floating-point multiply and divide expanders; bodies (if any) are elided
;; in this listing, only pattern and condition are visible.
1735 (define_expand "mulsf3"
1736 [(set (match_operand:SF 0 "s_register_operand")
1737 (mult:SF (match_operand:SF 1 "s_register_operand")
1738 (match_operand:SF 2 "s_register_operand")))]
1739 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; DFmode multiply; excluded on single-precision-only VFP.
1743 (define_expand "muldf3"
1744 [(set (match_operand:DF 0 "s_register_operand")
1745 (mult:DF (match_operand:DF 1 "s_register_operand")
1746 (match_operand:DF 2 "s_register_operand")))]
1747 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; SFmode divide.
1753 (define_expand "divsf3"
1754 [(set (match_operand:SF 0 "s_register_operand")
1755 (div:SF (match_operand:SF 1 "s_register_operand")
1756 (match_operand:SF 2 "s_register_operand")))]
1757 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; DFmode divide; requires double-precision VFP.
1760 (define_expand "divdf3"
1761 [(set (match_operand:DF 0 "s_register_operand")
1762 (div:DF (match_operand:DF 1 "s_register_operand")
1763 (match_operand:DF 2 "s_register_operand")))]
1764 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1768 ; Expand logical operations. The mid-end expander does not split off memory
1769 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1770 ; So an explicit expander is needed to generate better code.
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; DImode and/ior/xor: perform the operation independently on the low and
;; high SImode halves.
1772 (define_expand "<LOGICAL:optab>di3"
1773 [(set (match_operand:DI 0 "s_register_operand")
1774 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1775 (match_operand:DI 2 "arm_<optab>di_operand")))]
1778 rtx low = simplify_gen_binary (<CODE>, SImode,
1779 gen_lowpart (SImode, operands[1]),
1780 gen_lowpart (SImode, operands[2]));
1781 rtx high = simplify_gen_binary (<CODE>, SImode,
1782 gen_highpart (SImode, operands[1]),
1783 gen_highpart_mode (SImode, DImode,
1786 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1787 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
;; DImode one's complement: NOT each half separately.
1792 (define_expand "one_cmpldi2"
1793 [(set (match_operand:DI 0 "s_register_operand")
1794 (not:DI (match_operand:DI 1 "s_register_operand")))]
1797 rtx low = simplify_gen_unary (NOT, SImode,
1798 gen_lowpart (SImode, operands[1]),
1800 rtx high = simplify_gen_unary (NOT, SImode,
1801 gen_highpart_mode (SImode, DImode,
1805 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1806 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1811 ;; Split DImode and, ior, xor operations. Simply perform the logical
1812 ;; operation on the upper and lower halves of the registers.
1813 ;; This is needed for atomic operations in arm_split_atomic_op.
1814 ;; Avoid splitting IWMMXT instructions.
1816 [(set (match_operand:DI 0 "s_register_operand" "")
1817 (match_operator:DI 6 "logical_binary_operator"
1818 [(match_operand:DI 1 "s_register_operand" "")
1819 (match_operand:DI 2 "s_register_operand" "")]))]
1820 "TARGET_32BIT && reload_completed
1821 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1822 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1823 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1826 operands[3] = gen_highpart (SImode, operands[0]);
1827 operands[0] = gen_lowpart (SImode, operands[0]);
1828 operands[4] = gen_highpart (SImode, operands[1]);
1829 operands[1] = gen_lowpart (SImode, operands[1]);
1830 operands[5] = gen_highpart (SImode, operands[2]);
1831 operands[2] = gen_lowpart (SImode, operands[2]);
1835 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1836 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1838 [(set (match_operand:DI 0 "s_register_operand")
1839 (not:DI (match_operand:DI 1 "s_register_operand")))]
1841 [(set (match_dup 0) (not:SI (match_dup 1)))
1842 (set (match_dup 2) (not:SI (match_dup 3)))]
1845 operands[2] = gen_highpart (SImode, operands[0]);
1846 operands[0] = gen_lowpart (SImode, operands[0]);
1847 operands[3] = gen_highpart (SImode, operands[1]);
1848 operands[1] = gen_lowpart (SImode, operands[1]);
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; SImode AND expander.  Special-cases AND-with-255 (zero-extend), cheap
;; split constants, and Thumb-1 tricks (BIC with inverted constant, EXTZV
;; for low masks, shift pairs for high masks).
1852 (define_expand "andsi3"
1853 [(set (match_operand:SI 0 "s_register_operand")
1854 (and:SI (match_operand:SI 1 "s_register_operand")
1855 (match_operand:SI 2 "reg_or_int_operand")))]
1860 if (CONST_INT_P (operands[2]))
1862 if (INTVAL (operands[2]) == 255 && arm_arch6)
1864 operands[1] = convert_to_mode (QImode, operands[1], 1);
1865 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1869 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1870 operands[2] = force_reg (SImode, operands[2]);
1873 arm_split_constant (AND, SImode, NULL_RTX,
1874 INTVAL (operands[2]), operands[0],
1876 optimize && can_create_pseudo_p ());
1882 else /* TARGET_THUMB1 */
1884 if (!CONST_INT_P (operands[2]))
1886 rtx tmp = force_reg (SImode, operands[2]);
1887 if (rtx_equal_p (operands[0], operands[1]))
1891 operands[2] = operands[1];
1899 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1901 operands[2] = force_reg (SImode,
1902 GEN_INT (~INTVAL (operands[2])));
1904 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1909 for (i = 9; i <= 31; i++)
1911 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1913 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1917 else if ((HOST_WIDE_INT_1 << i) - 1
1918 == ~INTVAL (operands[2]))
1920 rtx shift = GEN_INT (i);
1921 rtx reg = gen_reg_rtx (SImode);
1923 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1924 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1930 operands[2] = force_reg (SImode, operands[2]);
1936 ; ??? Check split length for Thumb-2
;; Main SImode AND insn.  "K" constants use BIC with the inverted immediate
;; (#%B2); arbitrary "?n" constants are split via arm_split_constant.
1937 (define_insn_and_split "*arm_andsi3_insn"
1938 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1939 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1940 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1945 bic%?\\t%0, %1, #%B2
1949 && CONST_INT_P (operands[2])
1950 && !(const_ok_for_arm (INTVAL (operands[2]))
1951 || const_ok_for_arm (~INTVAL (operands[2])))"
1952 [(clobber (const_int 0))]
1954 arm_split_constant (AND, SImode, curr_insn,
1955 INTVAL (operands[2]), operands[0], operands[1], 0);
1958 [(set_attr "length" "4,4,4,4,16")
1959 (set_attr "predicable" "yes")
1960 (set_attr "predicable_short_it" "no,yes,no,no,no")
1961 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
;; AND setting flags (ANDS/BICS depending on constant form).
1964 (define_insn "*andsi3_compare0"
1965 [(set (reg:CC_NOOV CC_REGNUM)
1967 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1968 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1970 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1971 (and:SI (match_dup 1) (match_dup 2)))]
1975 bics%?\\t%0, %1, #%B2
1976 ands%?\\t%0, %1, %2"
1977 [(set_attr "conds" "set")
1978 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; AND for flags only; result discarded (TST, or BICS into a scratch when
;; the constant is only representable inverted).
1981 (define_insn "*andsi3_compare0_scratch"
1982 [(set (reg:CC_NOOV CC_REGNUM)
1984 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1985 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1987 (clobber (match_scratch:SI 2 "=X,r,X"))]
1991 bics%?\\t%2, %0, #%B1
1993 [(set_attr "conds" "set")
1994 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Test a bitfield against zero: rebuild the mask ((1<<len)-1) << pos and
;; emit TST.  The "(pos & 1)" term keeps the mask a valid ARM immediate.
1997 (define_insn "*zeroextractsi_compare0_scratch"
1998 [(set (reg:CC_NOOV CC_REGNUM)
1999 (compare:CC_NOOV (zero_extract:SI
2000 (match_operand:SI 0 "s_register_operand" "r")
2001 (match_operand 1 "const_int_operand" "n")
2002 (match_operand 2 "const_int_operand" "n"))
2005 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2006 && INTVAL (operands[1]) > 0
2007 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2008 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2010 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2011 << INTVAL (operands[2]));
2012 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2015 [(set_attr "conds" "set")
2016 (set_attr "predicable" "yes")
2017 (set_attr "type" "logics_imm")]
;; NOTE(review): listing appears to elide some original lines (embedded
;; numbering skips); code text kept verbatim, comments only added.
;; (bitfield != 0) as a value: split into flag-setting AND with the field
;; mask, then a conditional move of 1 over the nonzero AND result.
2020 (define_insn_and_split "*ne_zeroextractsi"
2021 [(set (match_operand:SI 0 "s_register_operand" "=r")
2022 (ne:SI (zero_extract:SI
2023 (match_operand:SI 1 "s_register_operand" "r")
2024 (match_operand:SI 2 "const_int_operand" "n")
2025 (match_operand:SI 3 "const_int_operand" "n"))
2027 (clobber (reg:CC CC_REGNUM))]
2029 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2030 && INTVAL (operands[2]) > 0
2031 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2032 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2035 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2036 && INTVAL (operands[2]) > 0
2037 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2038 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2039 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2040 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2042 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2044 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2045 (match_dup 0) (const_int 1)))]
2047 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2048 << INTVAL (operands[3]));
2050 [(set_attr "conds" "clob")
2051 (set (attr "length")
2052 (if_then_else (eq_attr "is_thumb" "yes")
2055 (set_attr "type" "multiple")]
;; Same, for a field that reaches bit 31: replace the masked AND by a left
;; shift that discards the bits above the field.
2058 (define_insn_and_split "*ne_zeroextractsi_shifted"
2059 [(set (match_operand:SI 0 "s_register_operand" "=r")
2060 (ne:SI (zero_extract:SI
2061 (match_operand:SI 1 "s_register_operand" "r")
2062 (match_operand:SI 2 "const_int_operand" "n")
2065 (clobber (reg:CC CC_REGNUM))]
2069 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2070 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2072 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2074 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2075 (match_dup 0) (const_int 1)))]
2077 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2079 [(set_attr "conds" "clob")
2080 (set_attr "length" "8")
2081 (set_attr "type" "multiple")]
;; if-then-else on (bitfield != 0): flag-setting AND then conditional move
;; of operand 4 over the nonzero field value.  Operand 4 must not overlap
;; the destination since it is read after the AND writes it.
2084 (define_insn_and_split "*ite_ne_zeroextractsi"
2085 [(set (match_operand:SI 0 "s_register_operand" "=r")
2086 (if_then_else:SI (ne (zero_extract:SI
2087 (match_operand:SI 1 "s_register_operand" "r")
2088 (match_operand:SI 2 "const_int_operand" "n")
2089 (match_operand:SI 3 "const_int_operand" "n"))
2091 (match_operand:SI 4 "arm_not_operand" "rIK")
2093 (clobber (reg:CC CC_REGNUM))]
2095 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2096 && INTVAL (operands[2]) > 0
2097 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2098 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2099 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2102 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2103 && INTVAL (operands[2]) > 0
2104 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2105 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2106 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2107 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2108 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2110 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2112 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2113 (match_dup 0) (match_dup 4)))]
2115 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2116 << INTVAL (operands[3]));
2118 [(set_attr "conds" "clob")
2119 (set_attr "length" "8")
2120 (set_attr "type" "multiple")]
;; Shifted variant of the above, for fields that reach bit 31.
2123 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2124 [(set (match_operand:SI 0 "s_register_operand" "=r")
2125 (if_then_else:SI (ne (zero_extract:SI
2126 (match_operand:SI 1 "s_register_operand" "r")
2127 (match_operand:SI 2 "const_int_operand" "n")
2130 (match_operand:SI 3 "arm_not_operand" "rIK")
2132 (clobber (reg:CC CC_REGNUM))]
2133 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2135 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2136 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2137 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2139 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2141 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2142 (match_dup 0) (match_dup 3)))]
2144 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2146 [(set_attr "conds" "clob")
2147 (set_attr "length" "8")
2148 (set_attr "type" "multiple")]
2151 ;; ??? Use Thumb-2 bitfield insert/extract instructions.
;; Split (op (zero_extract r len pos) x): materialise the field by shifting
;; left to place its top bit at bit 31, then logical-shift-right, feeding
;; the result into the shiftable operator.  Uses a scratch (operand 6).
2153 [(set (match_operand:SI 0 "s_register_operand" "")
2154 (match_operator:SI 1 "shiftable_operator"
2155 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2156 (match_operand:SI 3 "const_int_operand" "")
2157 (match_operand:SI 4 "const_int_operand" ""))
2158 (match_operand:SI 5 "s_register_operand" "")]))
2159 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2161 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2164 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2167 HOST_WIDE_INT temp = INTVAL (operands[3]);
2169 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2170 operands[4] = GEN_INT (32 - temp);
;; Same transformation for sign_extract, using an arithmetic shift right to
;; propagate the field's sign bit.
2175 [(set (match_operand:SI 0 "s_register_operand" "")
2176 (match_operator:SI 1 "shiftable_operator"
2177 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2178 (match_operand:SI 3 "const_int_operand" "")
2179 (match_operand:SI 4 "const_int_operand" ""))
2180 (match_operand:SI 5 "s_register_operand" "")]))
2181 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2183 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2186 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2189 HOST_WIDE_INT temp = INTVAL (operands[3]);
2191 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2192 operands[4] = GEN_INT (32 - temp);
2196 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2197 ;;; represented by the bitfield, then this will produce incorrect results.
2198 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2199 ;;; which have a real bit-field insert instruction, the truncation happens
2200 ;;; in the bit-field insert instruction itself. Since arm does not have a
2201 ;;; bit-field insert instruction, we would have to emit code here to truncate
2202 ;;; the value before we insert. This loses some of the advantage of having
2203 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Strategy visible in the C body:
;;  - Thumb-2 + unaligned_access + MEM destination and a 16/32-bit
;;    byte-aligned field: emit an unaligned store (SImode or HImode).
;;  - Thumb-2 register destination: use insv_zero (bfc) for an all-zero
;;    constant field, a single ORR when the field is all-ones and encodable,
;;    otherwise insv_t2 (bfi).
;;  - Generic ARM path: mask/shift/or sequences, with special tricks for
;;    start_bit == 0 and for fields that end at bit 31 when the mask is not
;;    an encodable immediate.
;; NOTE(review): interior lines are missing from this extraction (gaps in
;; the embedded numbering), so some control-flow context is not visible.
2205 (define_expand "insv"
2206 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2207 (match_operand 1 "general_operand")
2208 (match_operand 2 "general_operand"))
2209 (match_operand 3 "reg_or_int_operand"))]
2210 "TARGET_ARM || arm_arch_thumb2"
2213 int start_bit = INTVAL (operands[2]);
2214 int width = INTVAL (operands[1]);
2215 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2216 rtx target, subtarget;
2218 if (arm_arch_thumb2)
2220 if (unaligned_access && MEM_P (operands[0])
2221 && s_register_operand (operands[3], GET_MODE (operands[3]))
2222 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2226 if (BYTES_BIG_ENDIAN)
2227 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2232 base_addr = adjust_address (operands[0], SImode,
2233 start_bit / BITS_PER_UNIT);
2234 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2238 rtx tmp = gen_reg_rtx (HImode);
2240 base_addr = adjust_address (operands[0], HImode,
2241 start_bit / BITS_PER_UNIT);
2242 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2243 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2247 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2249 bool use_bfi = TRUE;
2251 if (CONST_INT_P (operands[3]))
2253 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2257 emit_insn (gen_insv_zero (operands[0], operands[1],
2262 /* See if the set can be done with a single orr instruction. */
2263 if (val == mask && const_ok_for_arm (val << start_bit))
2269 if (!REG_P (operands[3]))
2270 operands[3] = force_reg (SImode, operands[3]);
2272 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2281 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2284 target = copy_rtx (operands[0]);
2285 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2286 subreg as the final target. */
2287 if (GET_CODE (target) == SUBREG)
2289 subtarget = gen_reg_rtx (SImode);
2290 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2291 < GET_MODE_SIZE (SImode))
2292 target = SUBREG_REG (target);
2297 if (CONST_INT_P (operands[3]))
2299 /* Since we are inserting a known constant, we may be able to
2300 reduce the number of bits that we have to clear so that
2301 the mask becomes simple. */
2302 /* ??? This code does not check to see if the new mask is actually
2303 simpler. It may not be. */
2304 rtx op1 = gen_reg_rtx (SImode);
2305 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2306 start of this pattern. */
2307 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2308 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2310 emit_insn (gen_andsi3 (op1, operands[0],
2311 gen_int_mode (~mask2, SImode)));
2312 emit_insn (gen_iorsi3 (subtarget, op1,
2313 gen_int_mode (op3_value << start_bit, SImode)));
2315 else if (start_bit == 0
2316 && !(const_ok_for_arm (mask)
2317 || const_ok_for_arm (~mask)))
2319 /* A Trick, since we are setting the bottom bits in the word,
2320 we can shift operand[3] up, operand[0] down, OR them together
2321 and rotate the result back again. This takes 3 insns, and
2322 the third might be mergeable into another op. */
2323 /* The shift up copes with the possibility that operand[3] is
2324 wider than the bitfield. */
2325 rtx op0 = gen_reg_rtx (SImode);
2326 rtx op1 = gen_reg_rtx (SImode);
2328 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2329 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2330 emit_insn (gen_iorsi3 (op1, op1, op0));
2331 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2333 else if ((width + start_bit == 32)
2334 && !(const_ok_for_arm (mask)
2335 || const_ok_for_arm (~mask)))
2337 /* Similar trick, but slightly less efficient. */
2339 rtx op0 = gen_reg_rtx (SImode);
2340 rtx op1 = gen_reg_rtx (SImode);
2342 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2343 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2344 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2345 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2349 rtx op0 = gen_int_mode (mask, SImode);
2350 rtx op1 = gen_reg_rtx (SImode);
2351 rtx op2 = gen_reg_rtx (SImode);
2353 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2355 rtx tmp = gen_reg_rtx (SImode);
2357 emit_insn (gen_movsi (tmp, op0));
2361 /* Mask out any bits in operand[3] that are not needed. */
2362 emit_insn (gen_andsi3 (op1, operands[3], op0));
2364 if (CONST_INT_P (op0)
2365 && (const_ok_for_arm (mask << start_bit)
2366 || const_ok_for_arm (~(mask << start_bit))))
2368 op0 = gen_int_mode (~(mask << start_bit), SImode);
2369 emit_insn (gen_andsi3 (op2, operands[0], op0));
2373 if (CONST_INT_P (op0))
2375 rtx tmp = gen_reg_rtx (SImode);
2377 emit_insn (gen_movsi (tmp, op0));
2382 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2384 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2388 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2390 emit_insn (gen_iorsi3 (subtarget, op1, op2))
2393 if (subtarget != target)
2395 /* If TARGET is still a SUBREG, then it must be wider than a word,
2396 so we must be careful only to set the subword we were asked to. */
2397 if (GET_CODE (target) == SUBREG)
2398 emit_move_insn (target, subtarget);
2400 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Insert zeros into a bit-field (Thumb-2 bfc-style); width/position are
;; constrained by const_int_M_operand.  NOTE(review): the assembler template
;; line is missing from this extraction.
2407 (define_insn "insv_zero"
2408 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2409 (match_operand:SI 1 "const_int_M_operand" "M")
2410 (match_operand:SI 2 "const_int_M_operand" "M"))
2414 [(set_attr "length" "4")
2415 (set_attr "predicable" "yes")
2416 (set_attr "type" "bfm")]
;; Bit-field insert of a register value: emits "bfi dst, src, lsb, width".
2419 (define_insn "insv_t2"
2420 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2421 (match_operand:SI 1 "const_int_M_operand" "M")
2422 (match_operand:SI 2 "const_int_M_operand" "M"))
2423 (match_operand:SI 3 "s_register_operand" "r"))]
2425 "bfi%?\t%0, %3, %2, %1"
2426 [(set_attr "length" "4")
2427 (set_attr "predicable" "yes")
2428 (set_attr "type" "bfm")]
;; op0 = op1 AND NOT op2, i.e. the BIC instruction.  Note the operand
;; numbering: operand 2 is the complemented input.
2431 (define_insn "andsi_notsi_si"
2432 [(set (match_operand:SI 0 "s_register_operand" "=r")
2433 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2434 (match_operand:SI 1 "s_register_operand" "r")))]
2436 "bic%?\\t%0, %1, %2"
2437 [(set_attr "predicable" "yes")
2438 (set_attr "type" "logic_reg")]
;; BIC with a shifted second operand ("bic dst, op1, op2 <shift> op3");
;; the type attribute distinguishes immediate vs register shift amounts.
2441 (define_insn "andsi_not_shiftsi_si"
2442 [(set (match_operand:SI 0 "s_register_operand" "=r")
2443 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2444 [(match_operand:SI 2 "s_register_operand" "r")
2445 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2446 (match_operand:SI 1 "s_register_operand" "r")))]
2448 "bic%?\\t%0, %1, %2%S4"
2449 [(set_attr "predicable" "yes")
2450 (set_attr "shift" "2")
2451 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2452 (const_string "logic_shift_imm")
2453 (const_string "logic_shift_reg")))]
2456 ;; Shifted bics pattern used to set up CC status register and not reusing
2457 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
2458 ;; does not support shift by register.
;; BICS with shifted operand, result discarded into a scratch; only the
;; condition codes are live afterwards.  Thumb-2 is restricted to an
;; immediate shift amount (see the insn condition).
2459 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2460 [(set (reg:CC_NOOV CC_REGNUM)
2462 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2463 [(match_operand:SI 1 "s_register_operand" "r")
2464 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2465 (match_operand:SI 3 "s_register_operand" "r"))
2467 (clobber (match_scratch:SI 4 "=r"))]
2468 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2469 "bics%?\\t%4, %3, %1%S0"
2470 [(set_attr "predicable" "yes")
2471 (set_attr "conds" "set")
2472 (set_attr "shift" "1")
2473 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2474 (const_string "logic_shift_imm")
2475 (const_string "logic_shift_reg")))]
2478 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2479 ;; getting reused later.
2480 (define_insn "andsi_not_shiftsi_si_scc"
2481 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2483 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2484 [(match_operand:SI 1 "s_register_operand" "r")
2485 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2486 (match_operand:SI 3 "s_register_operand" "r"))
2488 (set (match_operand:SI 4 "s_register_operand" "=r")
2489 (and:SI (not:SI (match_op_dup 0
2493 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2494 "bics%?\\t%4, %3, %1%S0"
2495 [(set_attr "predicable" "yes")
2496 (set_attr "conds" "set")
2497 (set_attr "shift" "1")
2498 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2499 (const_string "logic_shift_imm")
2500 (const_string "logic_shift_reg")))]
;; Flag-setting BIC (no shift) keeping the result in operand 0.
;; NOTE(review): the assembler template line is missing from this chunk.
2503 (define_insn "*andsi_notsi_si_compare0"
2504 [(set (reg:CC_NOOV CC_REGNUM)
2506 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2507 (match_operand:SI 1 "s_register_operand" "r"))
2509 (set (match_operand:SI 0 "s_register_operand" "=r")
2510 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2513 [(set_attr "conds" "set")
2514 (set_attr "type" "logics_shift_reg")]
;; As above but the result itself is dead: only the flags are used.
2517 (define_insn "*andsi_notsi_si_compare0_scratch"
2518 [(set (reg:CC_NOOV CC_REGNUM)
2520 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2521 (match_operand:SI 1 "s_register_operand" "r"))
2523 (clobber (match_scratch:SI 0 "=r"))]
2526 [(set_attr "conds" "set")
2527 (set_attr "type" "logics_shift_reg")]
;; SImode inclusive-OR expander.  For constant RHS it either forces the
;; constant to a register (DONT_EARLY_SPLIT_CONSTANT) or lets
;; arm_split_constant build the value; the Thumb-1 path forces a register
;; and handles the dst==src1 aliasing case.
2530 (define_expand "iorsi3"
2531 [(set (match_operand:SI 0 "s_register_operand")
2532 (ior:SI (match_operand:SI 1 "s_register_operand")
2533 (match_operand:SI 2 "reg_or_int_operand")))]
2536 if (CONST_INT_P (operands[2]))
2540 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2541 operands[2] = force_reg (SImode, operands[2]);
2544 arm_split_constant (IOR, SImode, NULL_RTX,
2545 INTVAL (operands[2]), operands[0],
2547 optimize && can_create_pseudo_p ());
2551 else /* TARGET_THUMB1 */
2553 rtx tmp = force_reg (SImode, operands[2]);
2554 if (rtx_equal_p (operands[0], operands[1]))
2558 operands[2] = operands[1];
;; ORR insn with a post-reload split for constants that are not encodable
;; as an immediate (alternative "?n"): arm_split_constant emits the
;; multi-instruction sequence, hence length 16 for that alternative.
2566 (define_insn_and_split "*iorsi3_insn"
2567 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2568 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2569 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2574 orn%?\\t%0, %1, #%B2
2578 && CONST_INT_P (operands[2])
2579 && !(const_ok_for_arm (INTVAL (operands[2]))
2580 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2581 [(clobber (const_int 0))]
2583 arm_split_constant (IOR, SImode, curr_insn,
2584 INTVAL (operands[2]), operands[0], operands[1], 0);
2587 [(set_attr "length" "4,4,4,4,16")
2588 (set_attr "arch" "32,t2,t2,32,32")
2589 (set_attr "predicable" "yes")
2590 (set_attr "predicable_short_it" "no,yes,no,no,no")
2591 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole-style split: when the OR constant is only encodable in
;; complemented form, load it into the scratch first.  NOTE(review): the
;; define_* header line is missing from this extraction.
2595 [(match_scratch:SI 3 "r")
2596 (set (match_operand:SI 0 "arm_general_register_operand" "")
2597 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2598 (match_operand:SI 2 "const_int_operand" "")))]
2600 && !const_ok_for_arm (INTVAL (operands[2]))
2601 && const_ok_for_arm (~INTVAL (operands[2]))"
2602 [(set (match_dup 3) (match_dup 2))
2603 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; Flag-setting ORR keeping the result ("orrs").
2607 (define_insn "*iorsi3_compare0"
2608 [(set (reg:CC_NOOV CC_REGNUM)
2610 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2611 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2613 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2614 (ior:SI (match_dup 1) (match_dup 2)))]
2616 "orrs%?\\t%0, %1, %2"
2617 [(set_attr "conds" "set")
2618 (set_attr "arch" "*,t2,*")
2619 (set_attr "length" "4,2,4")
2620 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; Flag-setting ORR whose arithmetic result is dead (scratch destination).
2623 (define_insn "*iorsi3_compare0_scratch"
2624 [(set (reg:CC_NOOV CC_REGNUM)
2626 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2627 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2629 (clobber (match_scratch:SI 0 "=r,l,r"))]
2631 "orrs%?\\t%0, %1, %2"
2632 [(set_attr "conds" "set")
2633 (set_attr "arch" "*,t2,*")
2634 (set_attr "length" "4,2,4")
2635 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; SImode exclusive-OR expander; mirrors the iorsi3 expander structure
;; (constant splitting via arm_split_constant, Thumb-1 register forcing).
2638 (define_expand "xorsi3"
2639 [(set (match_operand:SI 0 "s_register_operand")
2640 (xor:SI (match_operand:SI 1 "s_register_operand")
2641 (match_operand:SI 2 "reg_or_int_operand")))]
2643 "if (CONST_INT_P (operands[2]))
2647 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2648 operands[2] = force_reg (SImode, operands[2]);
2651 arm_split_constant (XOR, SImode, NULL_RTX,
2652 INTVAL (operands[2]), operands[0],
2654 optimize && can_create_pseudo_p ());
2658 else /* TARGET_THUMB1 */
2660 rtx tmp = force_reg (SImode, operands[2]);
2661 if (rtx_equal_p (operands[0], operands[1]))
2665 operands[2] = operands[1];
;; EOR insn with a post-reload split for non-encodable constants
;; (alternative "?n", length 16), expanded by arm_split_constant.
2672 (define_insn_and_split "*arm_xorsi3"
2673 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2674 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2675 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2683 && CONST_INT_P (operands[2])
2684 && !const_ok_for_arm (INTVAL (operands[2]))"
2685 [(clobber (const_int 0))]
2687 arm_split_constant (XOR, SImode, curr_insn,
2688 INTVAL (operands[2]), operands[0], operands[1], 0);
2691 [(set_attr "length" "4,4,4,16")
2692 (set_attr "predicable" "yes")
2693 (set_attr "predicable_short_it" "no,yes,no,no")
2694 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; Flag-setting EOR keeping the result ("eors").
2697 (define_insn "*xorsi3_compare0"
2698 [(set (reg:CC_NOOV CC_REGNUM)
2699 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2700 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2702 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2703 (xor:SI (match_dup 1) (match_dup 2)))]
2705 "eors%?\\t%0, %1, %2"
2706 [(set_attr "conds" "set")
2707 (set_attr "type" "logics_imm,logics_reg")]
;; Compare-only XOR (TEQ-style); NOTE(review): template line missing here.
2710 (define_insn "*xorsi3_compare0_scratch"
2711 [(set (reg:CC_NOOV CC_REGNUM)
2712 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2713 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2717 [(set_attr "conds" "set")
2718 (set_attr "type" "logics_imm,logics_reg")]
2721 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2722 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; NOTE(review): the define_split header line for the fragment below is
;; missing from this extraction; this is its match/replacement body,
;; implementing the De Morgan rewrite described in the comment above.
2726 [(set (match_operand:SI 0 "s_register_operand" "")
2727 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2728 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2729 (match_operand:SI 3 "arm_rhs_operand" "")))
2730 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2732 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2733 (not:SI (match_dup 3))))
2734 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C as ORR followed by BIC; split after reload.  The prep code
;; folds the NOT into operand 3 when it is a constant, so the replacement
;; never contains (not (const_int ...)).
2738 (define_insn_and_split "*andsi_iorsi3_notsi"
2739 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2740 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2741 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2742 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2744 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2745 "&& reload_completed"
2746 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2747 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2749 /* If operands[3] is a constant make sure to fold the NOT into it
2750 to avoid creating a NOT of a CONST_INT. */
2751 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2752 if (CONST_INT_P (not_rtx))
2754 operands[4] = operands[0];
2755 operands[5] = not_rtx;
2759 operands[5] = operands[0];
2760 operands[4] = not_rtx;
2763 [(set_attr "length" "8")
2764 (set_attr "ce_count" "2")
2765 (set_attr "predicable" "yes")
2766 (set_attr "type" "multiple")]
2769 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2770 ; insns are available?
;; NOTE(review): the four define_split header lines are missing from this
;; extraction; the four bodies below are otherwise parallel.  Each matches
;; a logical operator combining a bit-field extract with a shifted logical
;; expression (the two operand orders x zero/sign extract), requires the
;; outer and inner operators to match and the field width to equal
;; 32 - shift count, and rewrites the extract as ashift + l/ashiftrt via
;; the scratch (operand 8), recomputing the shift amount in the prep code.
2772 [(set (match_operand:SI 0 "s_register_operand" "")
2773 (match_operator:SI 1 "logical_binary_operator"
2774 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2775 (match_operand:SI 3 "const_int_operand" "")
2776 (match_operand:SI 4 "const_int_operand" ""))
2777 (match_operator:SI 9 "logical_binary_operator"
2778 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2779 (match_operand:SI 6 "const_int_operand" ""))
2780 (match_operand:SI 7 "s_register_operand" "")])]))
2781 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2783 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2784 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2787 [(ashift:SI (match_dup 2) (match_dup 4))
2791 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2794 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant: zero_extract as the second operand of the outer operator.
2798 [(set (match_operand:SI 0 "s_register_operand" "")
2799 (match_operator:SI 1 "logical_binary_operator"
2800 [(match_operator:SI 9 "logical_binary_operator"
2801 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2802 (match_operand:SI 6 "const_int_operand" ""))
2803 (match_operand:SI 7 "s_register_operand" "")])
2804 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2805 (match_operand:SI 3 "const_int_operand" "")
2806 (match_operand:SI 4 "const_int_operand" ""))]))
2807 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2809 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2810 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2813 [(ashift:SI (match_dup 2) (match_dup 4))
2817 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2820 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant: sign_extract first, using arithmetic right shifts.
2824 [(set (match_operand:SI 0 "s_register_operand" "")
2825 (match_operator:SI 1 "logical_binary_operator"
2826 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2827 (match_operand:SI 3 "const_int_operand" "")
2828 (match_operand:SI 4 "const_int_operand" ""))
2829 (match_operator:SI 9 "logical_binary_operator"
2830 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2831 (match_operand:SI 6 "const_int_operand" ""))
2832 (match_operand:SI 7 "s_register_operand" "")])]))
2833 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2835 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2836 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2839 [(ashift:SI (match_dup 2) (match_dup 4))
2843 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2846 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Variant: sign_extract as the second operand of the outer operator.
2850 [(set (match_operand:SI 0 "s_register_operand" "")
2851 (match_operator:SI 1 "logical_binary_operator"
2852 [(match_operator:SI 9 "logical_binary_operator"
2853 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2854 (match_operand:SI 6 "const_int_operand" ""))
2855 (match_operand:SI 7 "s_register_operand" "")])
2856 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2857 (match_operand:SI 3 "const_int_operand" "")
2858 (match_operand:SI 4 "const_int_operand" ""))]))
2859 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2861 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2862 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2865 [(ashift:SI (match_dup 2) (match_dup 4))
2869 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2872 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2876 ;; Minimum and maximum insns
;; Signed max expander.  For max with 0 or -1 the comparison collapses to a
;; shift-based idiom (see *smax_0 / *smax_m1 below), so no CC clobber is
;; emitted in that case.
2878 (define_expand "smaxsi3"
2880 (set (match_operand:SI 0 "s_register_operand")
2881 (smax:SI (match_operand:SI 1 "s_register_operand")
2882 (match_operand:SI 2 "arm_rhs_operand")))
2883 (clobber (reg:CC CC_REGNUM))])]
2886 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2888 /* No need for a clobber of the condition code register here. */
2889 emit_insn (gen_rtx_SET (operands[0],
2890 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0): clear all bits when x is negative via its own sign bits.
2896 (define_insn "*smax_0"
2897 [(set (match_operand:SI 0 "s_register_operand" "=r")
2898 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2901 "bic%?\\t%0, %1, %1, asr #31"
2902 [(set_attr "predicable" "yes")
2903 (set_attr "type" "logic_shift_reg")]
;; max(x, -1): set all bits when x is negative via its own sign bits.
2906 (define_insn "*smax_m1"
2907 [(set (match_operand:SI 0 "s_register_operand" "=r")
2908 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2911 "orr%?\\t%0, %1, %1, asr #31"
2912 [(set_attr "predicable" "yes")
2913 (set_attr "type" "logic_shift_reg")]
;; General signed max: cmp + conditional move(s), split into a compare and
;; an if_then_else on GE.
2916 (define_insn_and_split "*arm_smax_insn"
2917 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2918 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2919 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2920 (clobber (reg:CC CC_REGNUM))]
2923 ; cmp\\t%1, %2\;movlt\\t%0, %2
2924 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2926 [(set (reg:CC CC_REGNUM)
2927 (compare:CC (match_dup 1) (match_dup 2)))
2929 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2933 [(set_attr "conds" "clob")
2934 (set_attr "length" "8,12")
2935 (set_attr "type" "multiple")]
;; Signed min expander; min with 0 uses the *smin_0 shift idiom.
2938 (define_expand "sminsi3"
2940 (set (match_operand:SI 0 "s_register_operand")
2941 (smin:SI (match_operand:SI 1 "s_register_operand")
2942 (match_operand:SI 2 "arm_rhs_operand")))
2943 (clobber (reg:CC CC_REGNUM))])]
2946 if (operands[2] == const0_rtx)
2948 /* No need for a clobber of the condition code register here. */
2949 emit_insn (gen_rtx_SET (operands[0],
2950 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0): keep x only when negative, using its own sign bits.
2956 (define_insn "*smin_0"
2957 [(set (match_operand:SI 0 "s_register_operand" "=r")
2958 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2961 "and%?\\t%0, %1, %1, asr #31"
2962 [(set_attr "predicable" "yes")
2963 (set_attr "type" "logic_shift_reg")]
;; General signed min: cmp + conditional move(s), split on LT.
2966 (define_insn_and_split "*arm_smin_insn"
2967 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2968 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2969 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2970 (clobber (reg:CC CC_REGNUM))]
2973 ; cmp\\t%1, %2\;movge\\t%0, %2
2974 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2976 [(set (reg:CC CC_REGNUM)
2977 (compare:CC (match_dup 1) (match_dup 2)))
2979 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2983 [(set_attr "conds" "clob")
2984 (set_attr "length" "8,12")
2985 (set_attr "type" "multiple,multiple")]
;; Unsigned max expander; no special constant cases here.
2988 (define_expand "umaxsi3"
2990 (set (match_operand:SI 0 "s_register_operand")
2991 (umax:SI (match_operand:SI 1 "s_register_operand")
2992 (match_operand:SI 2 "arm_rhs_operand")))
2993 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: cmp + movcc/movcs, split on GEU.
;; NOTE(review): "type" "store_4" looks anomalous for a cmp/mov sequence
;; ("multiple" would be expected) — worth confirming against upstream.
2998 (define_insn_and_split "*arm_umaxsi3"
2999 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3000 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3001 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3002 (clobber (reg:CC CC_REGNUM))]
3005 ; cmp\\t%1, %2\;movcc\\t%0, %2
3006 ; cmp\\t%1, %2\;movcs\\t%0, %1
3007 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3009 [(set (reg:CC CC_REGNUM)
3010 (compare:CC (match_dup 1) (match_dup 2)))
3012 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3016 [(set_attr "conds" "clob")
3017 (set_attr "length" "8,8,12")
3018 (set_attr "type" "store_4")]
;; Unsigned min expander.
3021 (define_expand "uminsi3"
3023 (set (match_operand:SI 0 "s_register_operand")
3024 (umin:SI (match_operand:SI 1 "s_register_operand")
3025 (match_operand:SI 2 "arm_rhs_operand")))
3026 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: cmp + movcs/movcc, split on LTU.  Same "store_4" type
;; anomaly as *arm_umaxsi3 above.
3031 (define_insn_and_split "*arm_uminsi3"
3032 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3033 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3034 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3035 (clobber (reg:CC CC_REGNUM))]
3038 ; cmp\\t%1, %2\;movcs\\t%0, %2
3039 ; cmp\\t%1, %2\;movcc\\t%0, %1
3040 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3042 [(set (reg:CC CC_REGNUM)
3043 (compare:CC (match_dup 1) (match_dup 2)))
3045 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3049 [(set_attr "conds" "clob")
3050 (set_attr "length" "8,8,12")
3051 (set_attr "type" "store_4")]
;; Store min/max of two registers directly to memory: cmp then two
;; conditional stores (wrapped in an "ite" block for Thumb-2).  Only
;; enabled when optimizing for size and not under -mrestrict-it.
3054 (define_insn "*store_minmaxsi"
3055 [(set (match_operand:SI 0 "memory_operand" "=m")
3056 (match_operator:SI 3 "minmax_operator"
3057 [(match_operand:SI 1 "s_register_operand" "r")
3058 (match_operand:SI 2 "s_register_operand" "r")]))
3059 (clobber (reg:CC CC_REGNUM))]
3060 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3062 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3063 operands[1], operands[2]);
3064 output_asm_insn (\"cmp\\t%1, %2\", operands);
3066 output_asm_insn (\"ite\t%d3\", operands);
3067 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3068 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3071 [(set_attr "conds" "clob")
3072 (set (attr "length")
3073 (if_then_else (eq_attr "is_thumb" "yes")
3076 (set_attr "type" "store_4")]
3079 ; Reject the frame pointer in operand[1], since reloading this after
3080 ; it has been eliminated can cause carnage.
;; Shiftable operator applied to a min/max result: cmp then conditional
;; forms of the operator for each arm of the min/max.
3081 (define_insn "*minmax_arithsi"
3082 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3083 (match_operator:SI 4 "shiftable_operator"
3084 [(match_operator:SI 5 "minmax_operator"
3085 [(match_operand:SI 2 "s_register_operand" "r,r")
3086 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3087 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3088 (clobber (reg:CC CC_REGNUM))]
3089 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3092 enum rtx_code code = GET_CODE (operands[4]);
3095 if (which_alternative != 0 || operands[3] != const0_rtx
3096 || (code != PLUS && code != IOR && code != XOR))
3101 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3102 operands[2], operands[3]);
3103 output_asm_insn (\"cmp\\t%2, %3\", operands);
3107 output_asm_insn (\"ite\\t%d5\", operands);
3109 output_asm_insn (\"it\\t%d5\", operands);
3111 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3113 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3116 [(set_attr "conds" "clob")
3117 (set (attr "length")
3118 (if_then_else (eq_attr "is_thumb" "yes")
3121 (set_attr "type" "multiple")]
3124 ; Reject the frame pointer in operand[1], since reloading this after
3125 ; it has been eliminated can cause carnage.
;; Non-canonical operand order variant; splits after reload into a compare
;; plus cond_exec'd operations, with the reversed condition computed in the
;; preparation code (reverse_condition_maybe_unordered for FP CC modes).
3126 (define_insn_and_split "*minmax_arithsi_non_canon"
3127 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3129 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3130 (match_operator:SI 4 "minmax_operator"
3131 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3132 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3133 (clobber (reg:CC CC_REGNUM))]
3134 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3135 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3137 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3138 [(set (reg:CC CC_REGNUM)
3139 (compare:CC (match_dup 2) (match_dup 3)))
3141 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3143 (minus:SI (match_dup 1)
3145 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3149 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3150 operands[2], operands[3]);
3151 enum rtx_code rc = minmax_code (operands[4]);
3152 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3153 operands[2], operands[3]);
3155 if (mode == CCFPmode || mode == CCFPEmode)
3156 rc = reverse_condition_maybe_unordered (rc);
3158 rc = reverse_condition (rc);
3159 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3160 if (CONST_INT_P (operands[3]))
3161 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3163 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3165 [(set_attr "conds" "clob")
3166 (set (attr "length")
3167 (if_then_else (eq_attr "is_thumb" "yes")
3170 (set_attr "type" "multiple")]
;; Saturation patterns: SAT iterates over smin/smax, SATrev gives the
;; opposite code, SATlo/SAThi select which operand holds the low/high
;; bound for arm_sat_operator_match.
3173 (define_code_iterator SAT [smin smax])
3174 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3175 (define_code_attr SATlo [(smin "1") (smax "2")])
3176 (define_code_attr SAThi [(smin "2") (smax "1")])
;; smin(smax(x, lo), hi) / smax(smin(x, hi), lo) recognized as SSAT/USAT;
;; arm_sat_operator_match validates the bound pair and (in the output code)
;; rewrites operand 1 to the bit-width immediate the instruction takes.
3178 (define_insn "*satsi_<SAT:code>"
3179 [(set (match_operand:SI 0 "s_register_operand" "=r")
3180 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3181 (match_operand:SI 1 "const_int_operand" "i"))
3182 (match_operand:SI 2 "const_int_operand" "i")))]
3183 "TARGET_32BIT && arm_arch6
3184 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3188 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3189 &mask, &signed_sat))
3192 operands[1] = GEN_INT (mask);
3194 return "ssat%?\t%0, %1, %3";
3196 return "usat%?\t%0, %1, %3";
3198 [(set_attr "predicable" "yes")
3199 (set_attr "type" "alus_imm")]
;; Same as above, but the saturated value is itself a shifted register
;; (SSAT/USAT accept an optional LSL/ASR on the source operand).
3202 (define_insn "*satsi_<SAT:code>_shift"
3203 [(set (match_operand:SI 0 "s_register_operand" "=r")
3204 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3205 [(match_operand:SI 4 "s_register_operand" "r")
3206 (match_operand:SI 5 "const_int_operand" "i")])
3207 (match_operand:SI 1 "const_int_operand" "i"))
3208 (match_operand:SI 2 "const_int_operand" "i")))]
3209 "TARGET_32BIT && arm_arch6
3210 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3214 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3215 &mask, &signed_sat))
3218 operands[1] = GEN_INT (mask);
3220 return "ssat%?\t%0, %1, %4%S3";
3222 return "usat%?\t%0, %1, %4%S3";
3224 [(set_attr "predicable" "yes")
3225 (set_attr "shift" "3")
3226 (set_attr "type" "logic_shift_reg")])
3228 ;; Shift and rotation insns
3230 (define_expand "ashldi3"
3231 [(set (match_operand:DI 0 "s_register_operand")
3232 (ashift:DI (match_operand:DI 1 "s_register_operand")
3233 (match_operand:SI 2 "reg_or_int_operand")))]
3236 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3237 operands[2], gen_reg_rtx (SImode),
3238 gen_reg_rtx (SImode));
3242 (define_expand "ashlsi3"
3243 [(set (match_operand:SI 0 "s_register_operand")
3244 (ashift:SI (match_operand:SI 1 "s_register_operand")
3245 (match_operand:SI 2 "arm_rhs_operand")))]
3248 if (CONST_INT_P (operands[2])
3249 && (UINTVAL (operands[2])) > 31)
3251 emit_insn (gen_movsi (operands[0], const0_rtx));
3257 (define_expand "ashrdi3"
3258 [(set (match_operand:DI 0 "s_register_operand")
3259 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3260 (match_operand:SI 2 "reg_or_int_operand")))]
3263 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3264 operands[2], gen_reg_rtx (SImode),
3265 gen_reg_rtx (SImode));
;; ashrsi3: 32-bit arithmetic right shift.  Unlike the logical shifts, an
;; over-large constant count is clamped to 31 (shifting in copies of the
;; sign bit), not folded to zero.
3269 (define_expand "ashrsi3"
3270 [(set (match_operand:SI 0 "s_register_operand")
3271 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3272 (match_operand:SI 2 "arm_rhs_operand")))]
3275 if (CONST_INT_P (operands[2])
3276 && UINTVAL (operands[2]) > 31)
3277 operands[2] = GEN_INT (31);
;; lshrdi3: 64-bit logical right shift via arm_emit_coreregs_64bit_shift,
;; using two fresh SImode scratch registers (same scheme as ashldi3/ashrdi3).
3281 (define_expand "lshrdi3"
3282 [(set (match_operand:DI 0 "s_register_operand")
3283 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3284 (match_operand:SI 2 "reg_or_int_operand")))]
3287 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3288 operands[2], gen_reg_rtx (SImode),
3289 gen_reg_rtx (SImode));
;; lshrsi3: 32-bit logical right shift.  As with ashlsi3, a constant count
;; above 31 shifts out all bits, so the result is just a move of zero.
3293 (define_expand "lshrsi3"
3294 [(set (match_operand:SI 0 "s_register_operand")
3295 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3296 (match_operand:SI 2 "arm_rhs_operand")))]
3299 if (CONST_INT_P (operands[2])
3300 && (UINTVAL (operands[2])) > 31)
3302 emit_insn (gen_movsi (operands[0], const0_rtx));
;; rotlsi3: rotate left, implemented as a rotate right by (32 - n).  ARM has
;; only a rotate-right form, hence the rotatert RTL.  A constant count is
;; converted directly (mod 32); a register count needs a runtime 32 - n
;; computed into a fresh scratch register.
3308 (define_expand "rotlsi3"
3309 [(set (match_operand:SI 0 "s_register_operand")
3310 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3311 (match_operand:SI 2 "reg_or_int_operand")))]
3314 if (CONST_INT_P (operands[2]))
3315 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3318 rtx reg = gen_reg_rtx (SImode);
3319 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; rotrsi3: rotate right.  Constant counts are reduced mod 32; in the
;; Thumb-1 path a constant count must be forced into a register, since
;; that ISA only rotates by a register amount.
3325 (define_expand "rotrsi3"
3326 [(set (match_operand:SI 0 "s_register_operand")
3327 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3328 (match_operand:SI 2 "arm_rhs_operand")))]
3333 if (CONST_INT_P (operands[2])
3334 && UINTVAL (operands[2]) > 31)
3335 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3337 else /* TARGET_THUMB1 */
3339 if (CONST_INT_P (operands [2]))
3340 operands [2] = force_reg (SImode, operands[2]);
;; Catch-all 32-bit shift insn: matches any shift_operator applied to a
;; register with either an immediate (M) or register count.  The assembler
;; output is computed at final-scan time by arm_output_shift.  The first
;; two alternatives are narrow Thumb-2 encodings (see "arch" t2,t2,*,*).
3345 (define_insn "*arm_shiftsi3"
3346 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3347 (match_operator:SI 3 "shift_operator"
3348 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3349 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3351 "* return arm_output_shift(operands, 0);"
3352 [(set_attr "predicable" "yes")
3353 (set_attr "arch" "t2,t2,*,*")
3354 (set_attr "predicable_short_it" "yes,yes,no,no")
3355 (set_attr "length" "4")
3356 (set_attr "shift" "1")
3357 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
;; Flag-setting variant of the shift: performs the shift, writes the result
;; to operand 0, and sets the condition codes (CC_NOOV: overflow flag is
;; not meaningful after a shift).  arm_output_shift is called with 1 to
;; request the S-suffixed form.
3360 (define_insn "*shiftsi3_compare0"
3361 [(set (reg:CC_NOOV CC_REGNUM)
3362 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3363 [(match_operand:SI 1 "s_register_operand" "r,r")
3364 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3366 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3367 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3369 "* return arm_output_shift(operands, 1);"
3370 [(set_attr "conds" "set")
3371 (set_attr "shift" "1")
3372 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As *shiftsi3_compare0 but the shifted value itself is dead: only the
;; condition codes are wanted, so the destination is a clobbered scratch.
3375 (define_insn "*shiftsi3_compare0_scratch"
3376 [(set (reg:CC_NOOV CC_REGNUM)
3377 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3378 [(match_operand:SI 1 "s_register_operand" "r,r")
3379 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3381 (clobber (match_scratch:SI 0 "=r,r"))]
3383 "* return arm_output_shift(operands, 1);"
3384 [(set_attr "conds" "set")
3385 (set_attr "shift" "1")
3386 (set_attr "type" "shift_imm,shift_reg")]
;; MVN with a shifted operand: combines a NOT with any shift_operator in a
;; single insn, exploiting the ARM barrel shifter on the second operand.
3389 (define_insn "*not_shiftsi"
3390 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3391 (not:SI (match_operator:SI 3 "shift_operator"
3392 [(match_operand:SI 1 "s_register_operand" "r,r")
3393 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3396 [(set_attr "predicable" "yes")
3397 (set_attr "shift" "1")
3398 (set_attr "arch" "32,a")
3399 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS with a shifted operand: the NOT-of-shift result is written to
;; operand 0 and the condition codes are set (CC_NOOV) in one insn.
3401 (define_insn "*not_shiftsi_compare0"
3402 [(set (reg:CC_NOOV CC_REGNUM)
3404 (not:SI (match_operator:SI 3 "shift_operator"
3405 [(match_operand:SI 1 "s_register_operand" "r,r")
3406 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3408 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3409 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3411 "mvns%?\\t%0, %1%S3"
3412 [(set_attr "conds" "set")
3413 (set_attr "shift" "1")
3414 (set_attr "arch" "32,a")
3415 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; As *not_shiftsi_compare0, but only the flags are live; the data result
;; goes to a clobbered scratch register.
3417 (define_insn "*not_shiftsi_compare0_scratch"
3418 [(set (reg:CC_NOOV CC_REGNUM)
3420 (not:SI (match_operator:SI 3 "shift_operator"
3421 [(match_operand:SI 1 "s_register_operand" "r,r")
3422 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3424 (clobber (match_scratch:SI 0 "=r,r"))]
3426 "mvns%?\\t%0, %1%S3"
3427 [(set_attr "conds" "set")
3428 (set_attr "shift" "1")
3429 (set_attr "arch" "32,a")
3430 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3432 ;; We don't really have extzv, but defining this using shifts helps
3433 ;; to reduce register pressure later on.
;; extzv: zero-extract a bitfield.  Three strategies, in order:
;;  1. Thumb-2 with unaligned_access, a MEM source, and a byte-aligned
;;     16/32-bit field: emit an unaligned load (ldr/ldrh), adjusting the
;;     bit position for big-endian layouts and stripping any paradoxical
;;     HImode subreg off the destination first.
;;  2. Thumb-2 with a register source: use the ubfx-based extzv_t2 insn.
;;  3. Otherwise (Thumb-1 path): synthesise the extract with a left shift
;;     (lshift = 32 - width - pos) and a logical right shift
;;     (rshift = 32 - width) via extzv_t1; a pos-0 field needs only the
;;     right shift.
3435 (define_expand "extzv"
3436 [(set (match_operand 0 "s_register_operand")
3437 (zero_extract (match_operand 1 "nonimmediate_operand")
3438 (match_operand 2 "const_int_operand")
3439 (match_operand 3 "const_int_operand")))]
3440 "TARGET_THUMB1 || arm_arch_thumb2"
3443 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3444 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3446 if (arm_arch_thumb2)
3448 HOST_WIDE_INT width = INTVAL (operands[2]);
3449 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3451 if (unaligned_access && MEM_P (operands[1])
3452 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3456 if (BYTES_BIG_ENDIAN)
3457 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3462 base_addr = adjust_address (operands[1], SImode,
3463 bitpos / BITS_PER_UNIT);
3464 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3468 rtx dest = operands[0];
3469 rtx tmp = gen_reg_rtx (SImode);
3471 /* We may get a paradoxical subreg here.  Strip it off.  */
3472 if (GET_CODE (dest) == SUBREG
3473 && GET_MODE (dest) == SImode
3474 && GET_MODE (SUBREG_REG (dest)) == HImode)
3475 dest = SUBREG_REG (dest);
3477 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3480 base_addr = adjust_address (operands[1], HImode,
3481 bitpos / BITS_PER_UNIT);
3482 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3483 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3487 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3489 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3497 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3500 operands[3] = GEN_INT (rshift);
3504 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3508 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3509 operands[3], gen_reg_rtx (SImode)));
3514 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; extzv_t1: the shift-pair form of zero-extract used on Thumb-1.  Shift the
;; field up to the top of the word (operand 2 = left count) into scratch
;; operand 4, then logically shift it down (operand 3 = right count) into
;; the destination.
3516 (define_expand "extzv_t1"
3517 [(set (match_operand:SI 4 "s_register_operand")
3518 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3519 (match_operand:SI 2 "const_int_operand")))
3520 (set (match_operand:SI 0 "s_register_operand")
3521 (lshiftrt:SI (match_dup 4)
3522 (match_operand:SI 3 "const_int_operand")))]
;; extv: sign-extract a bitfield.  Parallel to extzv above:
;;  - byte-aligned 16/32-bit MEM fields with unaligned_access use the
;;    signed unaligned loads (ldr / ldrsh), with the same big-endian
;;    bit-position adjustment and paradoxical-subreg stripping;
;;  - SImode register operands go through the sbfx-based extv_regsi;
;;  - anything else falls through (non-register sources are rejected).
3526 (define_expand "extv"
3527 [(set (match_operand 0 "s_register_operand")
3528 (sign_extract (match_operand 1 "nonimmediate_operand")
3529 (match_operand 2 "const_int_operand")
3530 (match_operand 3 "const_int_operand")))]
3533 HOST_WIDE_INT width = INTVAL (operands[2]);
3534 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3536 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3537 && (bitpos % BITS_PER_UNIT) == 0)
3541 if (BYTES_BIG_ENDIAN)
3542 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3546 base_addr = adjust_address (operands[1], SImode,
3547 bitpos / BITS_PER_UNIT);
3548 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3552 rtx dest = operands[0];
3553 rtx tmp = gen_reg_rtx (SImode);
3555 /* We may get a paradoxical subreg here.  Strip it off.  */
3556 if (GET_CODE (dest) == SUBREG
3557 && GET_MODE (dest) == SImode
3558 && GET_MODE (SUBREG_REG (dest)) == HImode)
3559 dest = SUBREG_REG (dest);
3561 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3564 base_addr = adjust_address (operands[1], HImode,
3565 bitpos / BITS_PER_UNIT);
3566 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3567 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3572 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3574 else if (GET_MODE (operands[0]) == SImode
3575 && GET_MODE (operands[1]) == SImode)
3577 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3585 ; Helper to expand register forms of extv with the proper modes.
;; extv_regsi: register-to-register sign_extract with explicit SImode on
;; both operands, so the anonymous sbfx insn below can match it.
3587 (define_expand "extv_regsi"
3588 [(set (match_operand:SI 0 "s_register_operand")
3589 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3590 (match_operand 2 "const_int_operand")
3591 (match_operand 3 "const_int_operand")))]
3596 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; unaligned_loaddi: 64-bit unaligned load, wrapped in an unspec so the
;; compiler cannot tear it apart.  Assembly is produced by
;; output_move_double (hence the 8-byte length).  Requires LDRD support.
3598 (define_insn "unaligned_loaddi"
3599 [(set (match_operand:DI 0 "s_register_operand" "=r")
3600 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3601 UNSPEC_UNALIGNED_LOAD))]
3602 "TARGET_32BIT && TARGET_LDRD"
3604 return output_move_double (operands, true, NULL);
3606 [(set_attr "length" "8")
3607 (set_attr "type" "load_8")])
;; unaligned_loadsi: 32-bit unaligned ldr.  Three alternatives: 16-bit
;; Thumb-1 encoding, 16-bit Thumb-2 (Uw address), and the full 32-bit form;
;; only the latter two are predicable.
3609 (define_insn "unaligned_loadsi"
3610 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3611 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3612 UNSPEC_UNALIGNED_LOAD))]
3615 ldr\t%0, %1\t@ unaligned
3616 ldr%?\t%0, %1\t@ unaligned
3617 ldr%?\t%0, %1\t@ unaligned"
3618 [(set_attr "arch" "t1,t2,32")
3619 (set_attr "length" "2,2,4")
3620 (set_attr "predicable" "no,yes,yes")
3621 (set_attr "predicable_short_it" "no,yes,no")
3622 (set_attr "type" "load_4")])
3624 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3625 ;; address (there's no immediate format). That's tricky to support
3626 ;; here and we don't really need this pattern for that case, so only
3627 ;; enable for 32-bit ISAs.
;; unaligned_loadhis: sign-extending unaligned halfword load (ldrsh).
;; 32-bit ISAs only -- see the comment above about Thumb-1 ldrsh
;; addressing limitations.
3628 (define_insn "unaligned_loadhis"
3629 [(set (match_operand:SI 0 "s_register_operand" "=r")
3631 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3632 UNSPEC_UNALIGNED_LOAD)))]
3633 "unaligned_access && TARGET_32BIT"
3634 "ldrsh%?\t%0, %1\t@ unaligned"
3635 [(set_attr "predicable" "yes")
3636 (set_attr "type" "load_byte")])
;; unaligned_loadhiu: zero-extending unaligned halfword load (ldrh), with
;; the same three Thumb-1/Thumb-2/32-bit alternatives as unaligned_loadsi.
3638 (define_insn "unaligned_loadhiu"
3639 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3641 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3642 UNSPEC_UNALIGNED_LOAD)))]
3645 ldrh\t%0, %1\t@ unaligned
3646 ldrh%?\t%0, %1\t@ unaligned
3647 ldrh%?\t%0, %1\t@ unaligned"
3648 [(set_attr "arch" "t1,t2,32")
3649 (set_attr "length" "2,2,4")
3650 (set_attr "predicable" "no,yes,yes")
3651 (set_attr "predicable_short_it" "no,yes,no")
3652 (set_attr "type" "load_byte")])
;; unaligned_storedi: 64-bit unaligned store, the store counterpart of
;; unaligned_loaddi; also emitted via output_move_double.
3654 (define_insn "unaligned_storedi"
3655 [(set (match_operand:DI 0 "memory_operand" "=m")
3656 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3657 UNSPEC_UNALIGNED_STORE))]
3658 "TARGET_32BIT && TARGET_LDRD"
3660 return output_move_double (operands, true, NULL);
3662 [(set_attr "length" "8")
3663 (set_attr "type" "store_8")])
;; unaligned_storesi: 32-bit unaligned str, mirroring unaligned_loadsi's
;; three encoding alternatives.
3665 (define_insn "unaligned_storesi"
3666 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3667 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3668 UNSPEC_UNALIGNED_STORE))]
3671 str\t%1, %0\t@ unaligned
3672 str%?\t%1, %0\t@ unaligned
3673 str%?\t%1, %0\t@ unaligned"
3674 [(set_attr "arch" "t1,t2,32")
3675 (set_attr "length" "2,2,4")
3676 (set_attr "predicable" "no,yes,yes")
3677 (set_attr "predicable_short_it" "no,yes,no")
3678 (set_attr "type" "store_4")])
;; unaligned_storehi: unaligned halfword store (strh), same alternative
;; structure as unaligned_storesi.
3680 (define_insn "unaligned_storehi"
3681 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3682 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3683 UNSPEC_UNALIGNED_STORE))]
3686 strh\t%1, %0\t@ unaligned
3687 strh%?\t%1, %0\t@ unaligned
3688 strh%?\t%1, %0\t@ unaligned"
3689 [(set_attr "arch" "t1,t2,32")
3690 (set_attr "length" "2,2,4")
3691 (set_attr "predicable" "no,yes,yes")
3692 (set_attr "predicable_short_it" "no,yes,no")
3693 (set_attr "type" "store_4")])
;; sbfx: signed bitfield extract from a register.  The condition checks the
;; architectural encoding limits: lsb (operand 3) in [0,31] and width
;; (operand 2) in [1, 32-lsb] so the field fits in the word.
3696 (define_insn "*extv_reg"
3697 [(set (match_operand:SI 0 "s_register_operand" "=r")
3698 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3699 (match_operand:SI 2 "const_int_operand" "n")
3700 (match_operand:SI 3 "const_int_operand" "n")))]
3702 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3703 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3704 "sbfx%?\t%0, %1, %3, %2"
3705 [(set_attr "length" "4")
3706 (set_attr "predicable" "yes")
3707 (set_attr "type" "bfm")]
;; ubfx: unsigned bitfield extract from a register, the zero_extract
;; counterpart of *extv_reg with the same lsb/width range checks.
3710 (define_insn "extzv_t2"
3711 [(set (match_operand:SI 0 "s_register_operand" "=r")
3712 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3713 (match_operand:SI 2 "const_int_operand" "n")
3714 (match_operand:SI 3 "const_int_operand" "n")))]
3716 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3717 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3718 "ubfx%?\t%0, %1, %3, %2"
3719 [(set_attr "length" "4")
3720 (set_attr "predicable" "yes")
3721 (set_attr "type" "bfm")]
3725 ;; Division instructions
;; divsi3: hardware signed divide (sdiv).  Two alternatives distinguish the
;; 32-bit and ARMv8-M Baseline encodings (arch "32,v8mb").
3726 (define_insn "divsi3"
3727 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3728 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3729 (match_operand:SI 2 "s_register_operand" "r,r")))]
3734 [(set_attr "arch" "32,v8mb")
3735 (set_attr "predicable" "yes")
3736 (set_attr "type" "sdiv")]
;; udivsi3: hardware unsigned divide (udiv), structured like divsi3.
3739 (define_insn "udivsi3"
3740 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3741 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3742 (match_operand:SI 2 "s_register_operand" "r,r")))]
3747 [(set_attr "arch" "32,v8mb")
3748 (set_attr "predicable" "yes")
3749 (set_attr "type" "udiv")]
3753 ;; Unary arithmetic insns
;; negvsi3: negate with signed-overflow check.  Computes 0 - op1 with a
;; flag-setting subtract, then branches to label operand 2 if the V flag
;; indicates overflow (only INT_MIN overflows negation).
3755 (define_expand "negvsi3"
3756 [(match_operand:SI 0 "register_operand")
3757 (match_operand:SI 1 "register_operand")
3758 (match_operand 2 "")]
3761 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3762 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; negvdi3: 64-bit negate with overflow check, via the flag-setting
;; negdi2_compare pattern followed by a branch on the V flag.
3767 (define_expand "negvdi3"
3768 [(match_operand:DI 0 "s_register_operand")
3769 (match_operand:DI 1 "s_register_operand")
3770 (match_operand 2 "")]
3773 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3774 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; negdi2_compare: flag-setting 64-bit negation as an rsbs/rscs pair (ARM)
;; or rsbs/sbcs (Thumb-2, which has no rsc; the lsl #1 trick reconstructs
;; the high-word reverse-subtract).  Destination is early-clobbered so it
;; cannot overlap the source halves.
3780 (define_insn "negdi2_compare"
3781 [(set (reg:CC CC_REGNUM)
3784 (match_operand:DI 1 "register_operand" "r,r")))
3785 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3786 (minus:DI (const_int 0) (match_dup 1)))]
3789 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3790 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3791 [(set_attr "conds" "set")
3792 (set_attr "arch" "a,t2")
3793 (set_attr "length" "8")
3794 (set_attr "type" "multiple")]
;; negsi2: standard-name expander for 32-bit negation; matched by the
;; *arm_negsi2 insn below.
3797 (define_expand "negsi2"
3798 [(set (match_operand:SI 0 "s_register_operand")
3799 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; Negation as a reverse subtract from zero (rsb).  First alternative is
;; the narrow Thumb-2 form with low registers.
3804 (define_insn "*arm_negsi2"
3805 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3806 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3808 "rsb%?\\t%0, %1, #0"
3809 [(set_attr "predicable" "yes")
3810 (set_attr "predicable_short_it" "yes,no")
3811 (set_attr "arch" "t2,*")
3812 (set_attr "length" "4")
3813 (set_attr "type" "alu_imm")]
3816 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3817 ;; rather than (0 cmp reg). This gives the same results for unsigned
3818 ;; and equality compares which is what we mostly need here.
;; Flag-setting negation.  The comparison is written as (~op1 cmp ~0) in
;; CC_RSB mode (see the comment above) to keep the RTL canonical while
;; still modelling the flags of a reverse-subtract-from-zero.
3819 (define_insn "negsi2_0compare"
3820 [(set (reg:CC_RSB CC_REGNUM)
3821 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3823 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3824 (neg:SI (match_dup 1)))]
3829 [(set_attr "conds" "set")
3830 (set_attr "arch" "t2,*")
3831 (set_attr "length" "2,*")
3832 (set_attr "type" "alus_imm")]
;; Negate with borrow consumed: -op1 - borrow.  Used for the high word of
;; multi-word negations.  The Thumb-2 alternative uses the same
;; "sbc rd, rn, rn, lsl #1" trick as negdi2_compare, since rsc is
;; unavailable there.
3835 (define_insn "negsi2_carryin"
3836 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3837 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3838 (match_operand:SI 2 "arm_borrow_operation" "")))]
3842 sbc\\t%0, %1, %1, lsl #1"
3843 [(set_attr "conds" "use")
3844 (set_attr "arch" "a,t2")
3845 (set_attr "type" "adc_imm,adc_reg")]
;; Standard-name expanders for floating-point negation.  Hard-float only;
;; DFmode additionally requires double-precision VFP hardware.
3848 (define_expand "negsf2"
3849 [(set (match_operand:SF 0 "s_register_operand")
3850 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3851 "TARGET_32BIT && TARGET_HARD_FLOAT"
3855 (define_expand "negdf2"
3856 [(set (match_operand:DF 0 "s_register_operand")
3857 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3858 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3861 ;; abssi2 doesn't really clobber the condition codes if a different register
3862 ;; is being set. To keep things simple, assume during rtl manipulations that
3863 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2: expands with a clobber operand that is a plain SCRATCH in one
;; configuration and the CC register in the other (see the comment above:
;; we conservatively claim the flags are clobbered during RTL passes).
3866 (define_expand "abssi2"
3868 [(set (match_operand:SI 0 "s_register_operand")
3869 (abs:SI (match_operand:SI 1 "s_register_operand")))
3870 (clobber (match_dup 2))])]
3874 operands[2] = gen_rtx_SCRATCH (SImode);
3876 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; Integer absolute value, split after reload into one of two sequences:
;;  - dest == src (alternative 0): cmp #0 followed by a conditional
;;    reverse subtract (cmp ; rsblt), which clobbers the flags;
;;  - dest != src: the branch-free eor/sub pair using the sign mask
;;    (src asr #31), which leaves the flags alone and is predicable.
;; The emitted RTL is built by hand with gen_rtx_* so the split produces
;; exactly the patterns shown in the embedded comments.
3879 (define_insn_and_split "*arm_abssi2"
3880 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3881 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3882 (clobber (reg:CC CC_REGNUM))]
3885 "&& reload_completed"
3888 /* if (which_alternative == 0) */
3889 if (REGNO(operands[0]) == REGNO(operands[1]))
3891 /* Emit the pattern:
3892 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3893 [(set (reg:CC CC_REGNUM)
3894 (compare:CC (match_dup 0) (const_int 0)))
3895 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3896 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3898 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3899 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3900 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3901 (gen_rtx_LT (SImode,
3902 gen_rtx_REG (CCmode, CC_REGNUM),
3904 (gen_rtx_SET (operands[0],
3905 (gen_rtx_MINUS (SImode,
3912 /* Emit the pattern:
3913 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3915 (xor:SI (match_dup 1)
3916 (ashiftrt:SI (match_dup 1) (const_int 31))))
3918 (minus:SI (match_dup 0)
3919 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3921 emit_insn (gen_rtx_SET (operands[0],
3922 gen_rtx_XOR (SImode,
3923 gen_rtx_ASHIFTRT (SImode,
3927 emit_insn (gen_rtx_SET (operands[0],
3928 gen_rtx_MINUS (SImode,
3930 gen_rtx_ASHIFTRT (SImode,
3936 [(set_attr "conds" "clob,*")
3937 (set_attr "shift" "1")
3938 (set_attr "predicable" "no, yes")
3939 (set_attr "length" "8")
3940 (set_attr "type" "multiple")]
;; Negated absolute value (-|x|), the mirror of *arm_abssi2:
;;  - dest == src: cmp #0 then conditional rsb on GT (negate positives);
;;  - dest != src: eor with the sign mask then reverse subtract
;;    (eor ; rsb), again branch-free and flag-preserving.
3943 (define_insn_and_split "*arm_neg_abssi2"
3944 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3945 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3946 (clobber (reg:CC CC_REGNUM))]
3949 "&& reload_completed"
3952 /* if (which_alternative == 0) */
3953 if (REGNO (operands[0]) == REGNO (operands[1]))
3955 /* Emit the pattern:
3956 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3958 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3959 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3960 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3962 gen_rtx_REG (CCmode, CC_REGNUM),
3964 gen_rtx_SET (operands[0],
3965 (gen_rtx_MINUS (SImode,
3971 /* Emit the pattern:
3972 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
3974 emit_insn (gen_rtx_SET (operands[0],
3975 gen_rtx_XOR (SImode,
3976 gen_rtx_ASHIFTRT (SImode,
3980 emit_insn (gen_rtx_SET (operands[0],
3981 gen_rtx_MINUS (SImode,
3982 gen_rtx_ASHIFTRT (SImode,
3989 [(set_attr "conds" "clob,*")
3990 (set_attr "shift" "1")
3991 (set_attr "predicable" "no, yes")
3992 (set_attr "length" "8")
3993 (set_attr "type" "multiple")]
;; Standard-name expanders for FP absolute value and square root.  All need
;; hard float; the DFmode variants also need double-precision VFP
;; (expressed as !TARGET_VFP_SINGLE or TARGET_VFP_DOUBLE respectively).
3996 (define_expand "abssf2"
3997 [(set (match_operand:SF 0 "s_register_operand")
3998 (abs:SF (match_operand:SF 1 "s_register_operand")))]
3999 "TARGET_32BIT && TARGET_HARD_FLOAT"
4002 (define_expand "absdf2"
4003 [(set (match_operand:DF 0 "s_register_operand")
4004 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4005 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4008 (define_expand "sqrtsf2"
4009 [(set (match_operand:SF 0 "s_register_operand")
4010 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4011 "TARGET_32BIT && TARGET_HARD_FLOAT"
4014 (define_expand "sqrtdf2"
4015 [(set (match_operand:DF 0 "s_register_operand")
4016 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4017 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; one_cmplsi2: standard-name expander for bitwise NOT; matched by the
;; mvn insn below.
4020 (define_expand "one_cmplsi2"
4021 [(set (match_operand:SI 0 "s_register_operand")
4022 (not:SI (match_operand:SI 1 "s_register_operand")))]
;; Bitwise NOT via mvn; narrow Thumb-2 encoding in the first alternative.
4027 (define_insn "*arm_one_cmplsi2"
4028 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4029 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4032 [(set_attr "predicable" "yes")
4033 (set_attr "predicable_short_it" "yes,no")
4034 (set_attr "arch" "t2,*")
4035 (set_attr "length" "4")
4036 (set_attr "type" "mvn_reg")]
;; Flag-setting NOT: writes ~op1 to the destination and sets the condition
;; codes (mvns form).
4039 (define_insn "*notsi_compare0"
4040 [(set (reg:CC_NOOV CC_REGNUM)
4041 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4043 (set (match_operand:SI 0 "s_register_operand" "=r")
4044 (not:SI (match_dup 1)))]
4047 [(set_attr "conds" "set")
4048 (set_attr "type" "mvn_reg")]
;; As *notsi_compare0 but only the flags are wanted; the data result goes
;; to a clobbered scratch.
4051 (define_insn "*notsi_compare0_scratch"
4052 [(set (reg:CC_NOOV CC_REGNUM)
4053 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4055 (clobber (match_scratch:SI 0 "=r"))]
4058 [(set_attr "conds" "set")
4059 (set_attr "type" "mvn_reg")]
4062 ;; Fixed <--> Floating conversion insns
;; int->HFmode conversions have no direct path: convert to SFmode first
;; (expand_float), then narrow SF->HF and move into place.  Same scheme
;; for SImode and DImode sources.
4064 (define_expand "floatsihf2"
4065 [(set (match_operand:HF 0 "general_operand")
4066 (float:HF (match_operand:SI 1 "general_operand")))]
4070 rtx op1 = gen_reg_rtx (SFmode);
4071 expand_float (op1, operands[1], 0);
4072 op1 = convert_to_mode (HFmode, op1, 0);
4073 emit_move_insn (operands[0], op1);
4078 (define_expand "floatdihf2"
4079 [(set (match_operand:HF 0 "general_operand")
4080 (float:HF (match_operand:DI 1 "general_operand")))]
4084 rtx op1 = gen_reg_rtx (SFmode);
4085 expand_float (op1, operands[1], 0);
4086 op1 = convert_to_mode (HFmode, op1, 0);
4087 emit_move_insn (operands[0], op1);
;; int->SF/DF conversion expanders; hard-float only, DF additionally needs
;; double-precision hardware.
4092 (define_expand "floatsisf2"
4093 [(set (match_operand:SF 0 "s_register_operand")
4094 (float:SF (match_operand:SI 1 "s_register_operand")))]
4095 "TARGET_32BIT && TARGET_HARD_FLOAT"
4099 (define_expand "floatsidf2"
4100 [(set (match_operand:DF 0 "s_register_operand")
4101 (float:DF (match_operand:SI 1 "s_register_operand")))]
4102 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; HF->int truncation: widen HF to SF first, then use the generic
;; expand_fix on the SFmode value.  SImode and DImode destinations.
4106 (define_expand "fix_trunchfsi2"
4107 [(set (match_operand:SI 0 "general_operand")
4108 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4112 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4113 expand_fix (operands[0], op1, 0);
4118 (define_expand "fix_trunchfdi2"
4119 [(set (match_operand:DI 0 "general_operand")
4120 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4124 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4125 expand_fix (operands[0], op1, 0);
;; SF/DF -> SI truncation and DF -> SF narrowing expanders; hard-float
;; only, with the usual double-precision requirement for DFmode.
4130 (define_expand "fix_truncsfsi2"
4131 [(set (match_operand:SI 0 "s_register_operand")
4132 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4133 "TARGET_32BIT && TARGET_HARD_FLOAT"
4137 (define_expand "fix_truncdfsi2"
4138 [(set (match_operand:SI 0 "s_register_operand")
4139 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4140 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4146 (define_expand "truncdfsf2"
4147 [(set (match_operand:SF 0 "s_register_operand")
4149 (match_operand:DF 1 "s_register_operand")))]
4150 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4154 ;; DFmode to HFmode conversions on targets without a single-step hardware
4155 ;; instruction for it would have to go through SFmode. This is dangerous
4156 ;; as it introduces double rounding.
4158 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4159 ;; a single-step instruction.
;; truncdfhf2: DF->HF.  Only enabled with -funsafe-math-optimizations or a
;; single-step hardware instruction (TARGET_FP16_TO_DOUBLE), because going
;; via SFmode double-rounds (see the block comment above).  Without the
;; hardware instruction we convert DF->SF->HF explicitly; with it, the
;; pattern is left to match a single insn.
4161 (define_expand "truncdfhf2"
4162 [(set (match_operand:HF 0 "s_register_operand")
4164 (match_operand:DF 1 "s_register_operand")))]
4165 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4166 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4168 /* We don't have a direct instruction for this, so we must be in
4169 an unsafe math mode, and going via SFmode.  */
4171 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4174 op1 = convert_to_mode (SFmode, operands[1], 0);
4175 op1 = convert_to_mode (HFmode, op1, 0);
4176 emit_move_insn (operands[0], op1);
4179 /* Otherwise, we will pick this up as a single instruction with
4180 no intermediary rounding.  */
4184 ;; Zero and sign extension instructions.
;; zero_extend{qi,hi,si}di2: extend QI/HI/SI to DI by composing the low and
;; high SImode halves: zero-extend (or copy, for SImode sources) into the
;; low word, move zero into the high word.  Before register allocation
;; (can_create_pseudo_p) fresh pseudos are used and copied into the result
;; halves only if they differ; otherwise the result halves are written
;; directly.
4186 (define_expand "zero_extend<mode>di2"
4187 [(set (match_operand:DI 0 "s_register_operand" "")
4188 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4189 "TARGET_32BIT <qhs_zextenddi_cond>"
4191 rtx res_lo, res_hi, op0_lo, op0_hi;
4192 res_lo = gen_lowpart (SImode, operands[0]);
4193 res_hi = gen_highpart (SImode, operands[0]);
4194 if (can_create_pseudo_p ())
4196 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4197 op0_hi = gen_reg_rtx (SImode);
4201 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4204 if (<MODE>mode != SImode)
4205 emit_insn (gen_rtx_SET (op0_lo,
4206 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4207 emit_insn (gen_movsi (op0_hi, const0_rtx));
4208 if (res_lo != op0_lo)
4209 emit_move_insn (res_lo, op0_lo)
4210 if (res_hi != op0_hi)
4211 emit_move_insn (res_hi, op0_hi);
;; extend{qi,hi,si}di2: sign-extension counterpart of zero_extend<mode>di2.
;; Low word gets the sign-extended (or copied) value; high word is the sign
;; replicated by an arithmetic shift right of 31.
4216 (define_expand "extend<mode>di2"
4217 [(set (match_operand:DI 0 "s_register_operand" "")
4218 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4219 "TARGET_32BIT <qhs_sextenddi_cond>"
4221 rtx res_lo, res_hi, op0_lo, op0_hi;
4222 res_lo = gen_lowpart (SImode, operands[0]);
4223 res_hi = gen_highpart (SImode, operands[0]);
4224 if (can_create_pseudo_p ())
4226 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4227 op0_hi = gen_reg_rtx (SImode);
4231 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4234 if (<MODE>mode != SImode)
4235 emit_insn (gen_rtx_SET (op0_lo,
4236 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4237 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4238 if (res_lo != op0_lo)
4239 emit_move_insn (res_lo, op0_lo);
4240 if (res_hi != op0_hi)
4241 emit_move_insn (res_hi, op0_hi);
4246 ;; Splits for all extensions to DImode
;; Split a DImode zero_extend into two SImode ops: emit the low-part
;; move/extend here, then rewrite the pattern to set the high part to zero.
4248 [(set (match_operand:DI 0 "s_register_operand" "")
4249 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4251 [(set (match_dup 0) (match_dup 1))]
4253 rtx lo_part = gen_lowpart (SImode, operands[0]);
4254 machine_mode src_mode = GET_MODE (operands[1]);
4256 if (src_mode == SImode)
4257 emit_move_insn (lo_part, operands[1]);
4259 emit_insn (gen_rtx_SET (lo_part,
4260 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4261 operands[0] = gen_highpart (SImode, operands[0]);
4262 operands[1] = const0_rtx;
;; Split a DImode sign_extend: emit the low-part move/extend, then rewrite
;; the pattern to fill the high part with lo >> 31 (arithmetic).
4266 [(set (match_operand:DI 0 "s_register_operand" "")
4267 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4269 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4271 rtx lo_part = gen_lowpart (SImode, operands[0]);
4272 machine_mode src_mode = GET_MODE (operands[1]);
4274 if (src_mode == SImode)
4275 emit_move_insn (lo_part, operands[1]);
4277 emit_insn (gen_rtx_SET (lo_part,
4278 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4279 operands[1] = lo_part;
4280 operands[0] = gen_highpart (SImode, operands[0]);
;; zero_extendhisi2.  Pre-v4 ARM has no ldrh, so MEM sources go through
;; movhi_bytes; pre-v6 register sources (no uxth) use the shift-up/
;; shift-down pair (lsl #16 ; lsr #16).
4283 (define_expand "zero_extendhisi2"
4284 [(set (match_operand:SI 0 "s_register_operand")
4285 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4288 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4290 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4293 if (!arm_arch6 && !MEM_P (operands[1]))
4295 rtx t = gen_lowpart (SImode, operands[1]);
4296 rtx tmp = gen_reg_rtx (SImode);
4297 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4298 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split HI zero-extension on pre-v6 non-Thumb-2 into the lsl #16 / lsr #16
;; pair, operating on the SImode lowpart of the source.
4304 [(set (match_operand:SI 0 "s_register_operand" "")
4305 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4306 "!TARGET_THUMB2 && !arm_arch6"
4307 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4308 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4310 operands[2] = gen_lowpart (SImode, operands[1]);
;; HI zero-extension insn for v4..v5 (ldrh exists, uxth does not): register
;; or memory source.
4313 (define_insn "*arm_zero_extendhisi2"
4314 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4315 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4316 "TARGET_ARM && arm_arch4 && !arm_arch6"
4320 [(set_attr "type" "alu_shift_reg,load_byte")
4321 (set_attr "predicable" "yes")]
;; v6+ variant: a single extend instruction for register sources, ldrh
;; (Uh address) for memory.
4324 (define_insn "*arm_zero_extendhisi2_v6"
4325 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4326 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4327 "TARGET_ARM && arm_arch6"
4331 [(set_attr "predicable" "yes")
4332 (set_attr "type" "extend,load_byte")]
;; Fused zero-extend-halfword-and-add: uxtah rd, rn(addend), rm(extended).
4335 (define_insn "*arm_zero_extendhisi2addsi"
4336 [(set (match_operand:SI 0 "s_register_operand" "=r")
4337 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4338 (match_operand:SI 2 "s_register_operand" "r")))]
4340 "uxtah%?\\t%0, %2, %1"
4341 [(set_attr "type" "alu_shift_reg")
4342 (set_attr "predicable" "yes")]
;; zero_extendqisi2.  Pre-v6 ARM register sources become an AND with 0xff;
;; the remaining pre-v6 register path (Thumb) uses the lsl #24 / lsr #24
;; shift pair.  Memory sources fall through to the ldrb insns below.
4345 (define_expand "zero_extendqisi2"
4346 [(set (match_operand:SI 0 "s_register_operand")
4347 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4350 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4352 emit_insn (gen_andsi3 (operands[0],
4353 gen_lowpart (SImode, operands[1]),
4357 if (!arm_arch6 && !MEM_P (operands[1]))
4359 rtx t = gen_lowpart (SImode, operands[1]);
4360 rtx tmp = gen_reg_rtx (SImode);
4361 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4362 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split QI zero-extension of a register into the lsl #24 / lsr #24 pair
;; (with an AND-255 alternative emitted in another branch of the prep code).
4368 [(set (match_operand:SI 0 "s_register_operand" "")
4369 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4371 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4372 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4374 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4377 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Pre-v6 QI zero-extension insn: 8-byte register sequence or a single
;; ldrb for memory sources.
4382 (define_insn "*arm_zero_extendqisi2"
4383 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4384 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4385 "TARGET_ARM && !arm_arch6"
4388 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4389 [(set_attr "length" "8,4")
4390 (set_attr "type" "alu_shift_reg,load_byte")
4391 (set_attr "predicable" "yes")]
;; v6+ QI zero-extension: single extend instruction for registers, ldrb
;; for memory (Uh address).
4394 (define_insn "*arm_zero_extendqisi2_v6"
4395 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4396 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4397 "TARGET_ARM && arm_arch6"
4400 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4401 [(set_attr "type" "extend,load_byte")
4402 (set_attr "predicable" "yes")]
;; Fused zero-extend-byte-and-add: uxtab rd, rn(addend), rm(extended).
4405 (define_insn "*arm_zero_extendqisi2addsi"
4406 [(set (match_operand:SI 0 "s_register_operand" "=r")
4407 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4408 (match_operand:SI 2 "s_register_operand" "r")))]
4410 "uxtab%?\\t%0, %2, %1"
4411 [(set_attr "predicable" "yes")
4412 (set_attr "type" "alu_shift_reg")]
;; Two symmetric splits: zero_extend of the low byte of an SImode value,
;; taken as a QImode subreg.  The subreg byte offset differs with
;; endianness (0 little-endian, 3 big-endian); both become a copy into
;; the scratch followed by an AND with 255.
4416 [(set (match_operand:SI 0 "s_register_operand" "")
4417 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4418 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4419 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4420 [(set (match_dup 2) (match_dup 1))
4421 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4426 [(set (match_operand:SI 0 "s_register_operand" "")
4427 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4428 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4429 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4430 [(set (match_dup 2) (match_dup 1))
4431 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split (IOR/XOR (AND (ASHIFT x n) mask) (extend y)) when the mask is
;; exactly the narrow mode's mask shifted left by n: the AND is then
;; redundant with a final zero_extend, so rewrite as the IOR/XOR of the
;; plain shift followed by a zero_extend of the lowpart of the result.
4437 [(set (match_operand:SI 0 "s_register_operand" "")
4438 (IOR_XOR:SI (and:SI (ashift:SI
4439 (match_operand:SI 1 "s_register_operand" "")
4440 (match_operand:SI 2 "const_int_operand" ""))
4441 (match_operand:SI 3 "const_int_operand" ""))
4443 (match_operator 5 "subreg_lowpart_operator"
4444 [(match_operand:SI 4 "s_register_operand" "")]))))]
4446 && (UINTVAL (operands[3])
4447 == (GET_MODE_MASK (GET_MODE (operands[5]))
4448 & (GET_MODE_MASK (GET_MODE (operands[5]))
4449 << (INTVAL (operands[2])))))"
4450 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4452 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4453 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag
;; (CC_Z mode).  Output template is missing from this excerpt
;; (presumably a TST with an immediate mask — TODO confirm upstream,
;; which would match type "logic_imm").
4456 (define_insn "*compareqi_eq0"
4457 [(set (reg:CC_Z CC_REGNUM)
4458 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4462 [(set_attr "conds" "set")
4463 (set_attr "predicable" "yes")
4464 (set_attr "type" "logic_imm")]
;; Sign extension of HImode to SImode.
;; The expander dispatches on the target: Thumb-1 uses its own pattern,
;; pre-v4 ARM loads from memory via extendhisi2_mem, and pre-v6 register
;; sources fall back to a shift-left-16 / arithmetic-shift-right-16 pair.
4467 (define_expand "extendhisi2"
4468 [(set (match_operand:SI 0 "s_register_operand")
4469 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4474 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4477 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4479 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4483 if (!arm_arch6 && !MEM_P (operands[1]))
4485 rtx t = gen_lowpart (SImode, operands[1]);
4486 rtx tmp = gen_reg_rtx (SImode);
4487 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4488 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));

;; Split a register-to-register sign_extend (with a scratch) into the
;; shift-left-16 / ashiftrt-16 pair; op2 becomes an SImode view of op1.
4495 [(set (match_operand:SI 0 "register_operand" "")
4496 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4497 (clobber (match_scratch:SI 2 ""))])]
4499 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4500 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4502 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);

;; Sign-extending HImode load without LDRSH: load the two bytes
;; separately, shift the high byte up, and combine.  operands[4]/[5]
;; select which byte is "high" based on endianness (set up below).
4505 ;; This pattern will only be used when ldsh is not available
4506 (define_expand "extendhisi2_mem"
4507 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4509 (zero_extend:SI (match_dup 7)))
4510 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4511 (set (match_operand:SI 0 "" "")
4512 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4517 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4519 mem1 = change_address (operands[1], QImode, addr);
4520 mem2 = change_address (operands[1], QImode,
4521 plus_constant (Pmode, addr, 1));
4522 operands[0] = gen_lowpart (SImode, operands[0]);
4524 operands[2] = gen_reg_rtx (SImode);
4525 operands[3] = gen_reg_rtx (SImode);
4526 operands[6] = gen_reg_rtx (SImode);
;; Big-endian: the first byte loaded is the most significant one.
4529 if (BYTES_BIG_ENDIAN)
4531 operands[4] = operands[2];
4532 operands[5] = operands[3];
4536 operands[4] = operands[3];
4537 operands[5] = operands[2];

;; Scratch-less variant of the register-to-register split above.
4543 [(set (match_operand:SI 0 "register_operand" "")
4544 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4546 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4547 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4549 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; Sign-extending HI->SI insns.  Output templates are missing from this
;; excerpt (presumably a shift pair for the register alternative and
;; LDRSH for memory — TODO confirm upstream).

;; v4..v5: register source takes two insns (length 8); memory uses LDRSH.
4552 (define_insn "*arm_extendhisi2"
4553 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4554 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4555 "TARGET_ARM && arm_arch4 && !arm_arch6"
4559 [(set_attr "length" "8,4")
4560 (set_attr "type" "alu_shift_reg,load_byte")
4561 (set_attr "predicable" "yes")]

;; v6 and later (ARM or Thumb-2): single SXTH-style extend for registers.
4564 ;; ??? Check Thumb-2 pool range
4565 (define_insn "*arm_extendhisi2_v6"
4566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4567 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4568 "TARGET_32BIT && arm_arch6"
4572 [(set_attr "type" "extend,load_byte")
4573 (set_attr "predicable" "yes")]

;; Fused sign-extend-and-add: SXTAH computes op2 + sign_extend(op1).
;; Condition string not visible in this excerpt.
4576 (define_insn "*arm_extendhisi2addsi"
4577 [(set (match_operand:SI 0 "s_register_operand" "=r")
4578 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4579 (match_operand:SI 2 "s_register_operand" "r")))]
4581 "sxtah%?\\t%0, %2, %1"
4582 [(set_attr "type" "alu_shift_reg")]
;; Sign extension of QImode to HImode.  With v4 and a memory source this
;; is a single LDRSB (emitted directly as a SIGN_EXTEND set); otherwise
;; the operands are rewritten to SImode and the default shift-pair
;; pattern (ashift then ashiftrt, per the template) is used.
4585 (define_expand "extendqihi2"
4587 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4589 (set (match_operand:HI 0 "s_register_operand")
4590 (ashiftrt:SI (match_dup 2)
4595 if (arm_arch4 && MEM_P (operands[1]))
4597 emit_insn (gen_rtx_SET (operands[0],
4598 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
;; Non-register source must be forced into a QImode register first.
4601 if (!s_register_operand (operands[1], QImode))
4602 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4603 operands[0] = gen_lowpart (SImode, operands[0]);
4604 operands[1] = gen_lowpart (SImode, operands[1]);
4605 operands[2] = gen_reg_rtx (SImode);

;; LDRSB from memory directly into an HImode register (v4+); Uq accepts
;; the addressing modes valid for sign-extending byte loads.
4609 (define_insn "*arm_extendqihi_insn"
4610 [(set (match_operand:HI 0 "s_register_operand" "=r")
4611 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4612 "TARGET_ARM && arm_arch4"
4614 [(set_attr "type" "load_byte")
4615 (set_attr "predicable" "yes")]
;; Sign extension of QImode to SImode.  Pre-v4 memory sources are forced
;; into a register; pre-v6 register sources use the shift-left-24 /
;; arithmetic-shift-right-24 pair.
4618 (define_expand "extendqisi2"
4619 [(set (match_operand:SI 0 "s_register_operand")
4620 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4623 if (!arm_arch4 && MEM_P (operands[1]))
4624 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4626 if (!arm_arch6 && !MEM_P (operands[1]))
4628 rtx t = gen_lowpart (SImode, operands[1]);
4629 rtx tmp = gen_reg_rtx (SImode);
4630 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4631 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));

;; Split a register-to-register QI sign_extend into the shift pair.
4637 [(set (match_operand:SI 0 "register_operand" "")
4638 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4640 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4641 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4643 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);

;; v4..v5: shift pair for registers (length 8), LDRSB for memory.
;; Output templates are missing from this excerpt.
4646 (define_insn "*arm_extendqisi"
4647 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4648 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4649 "TARGET_ARM && arm_arch4 && !arm_arch6"
4653 [(set_attr "length" "8,4")
4654 (set_attr "type" "alu_shift_reg,load_byte")
4655 (set_attr "predicable" "yes")]

;; v6: single SXTB-style extend for registers, LDRSB for memory.
4658 (define_insn "*arm_extendqisi_v6"
4659 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4661 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4662 "TARGET_ARM && arm_arch6"
4666 [(set_attr "type" "extend,load_byte")
4667 (set_attr "predicable" "yes")]

;; Fused sign-extend-and-add: SXTAB computes op2 + sign_extend(op1).
;; Condition string not visible in this excerpt.
4670 (define_insn "*arm_extendqisi2addsi"
4671 [(set (match_operand:SI 0 "s_register_operand" "=r")
4672 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4673 (match_operand:SI 2 "s_register_operand" "r")))]
4675 "sxtab%?\\t%0, %2, %1"
4676 [(set_attr "type" "alu_shift_reg")
4677 (set_attr "predicable" "yes")]
;; ARM DSP/SIMD32 builtin insns, expressed as unspecs so the compiler
;; treats them as opaque operations; <sup>/<simd32_op> are iterator
;; attributes that expand to the concrete mnemonics.

;; UXTB16/SXTB16: dual byte-to-halfword extend (iterator selects u/s).
4680 (define_insn "arm_<sup>xtb16"
4681 [(set (match_operand:SI 0 "s_register_operand" "=r")
4683 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4685 "<sup>xtb16%?\\t%0, %1"
4686 [(set_attr "predicable" "yes")
4687 (set_attr "type" "alu_dsp_reg")])

;; Two-operand SIMD32 ops that do not set the GE flags.
4689 (define_insn "arm_<simd32_op>"
4690 [(set (match_operand:SI 0 "s_register_operand" "=r")
4692 [(match_operand:SI 1 "s_register_operand" "r")
4693 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4695 "<simd32_op>%?\\t%0, %1, %2"
4696 [(set_attr "predicable" "yes")
4697 (set_attr "type" "alu_dsp_reg")])

;; USADA8: sum of absolute byte differences, accumulated into op3.
4699 (define_insn "arm_usada8"
4700 [(set (match_operand:SI 0 "s_register_operand" "=r")
4702 [(match_operand:SI 1 "s_register_operand" "r")
4703 (match_operand:SI 2 "s_register_operand" "r")
4704 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4706 "usada8%?\\t%0, %1, %2, %3"
4707 [(set_attr "predicable" "yes")
4708 (set_attr "type" "alu_dsp_reg")])

;; SIMD32 ops with a 64-bit accumulator (e.g. SMLALD family); op3 is
;; tied to the output ("0") and %Q0/%R0 name its low/high halves.
4710 (define_insn "arm_<simd32_op>"
4711 [(set (match_operand:DI 0 "s_register_operand" "=r")
4713 [(match_operand:SI 1 "s_register_operand" "r")
4714 (match_operand:SI 2 "s_register_operand" "r")
4715 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4717 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4718 [(set_attr "predicable" "yes")
4719 (set_attr "type" "smlald")])
;; SFmode -> DFmode extension, only when double-precision VFP exists.
4721 (define_expand "extendsfdf2"
4722 [(set (match_operand:DF 0 "s_register_operand")
4723 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4724 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"

4728 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4729 ;; must go through SFmode.
4731 ;; This is always safe for an extend.
;; HF -> DF: use the direct single-instruction conversion when available
;; (TARGET_FP16_TO_DOUBLE); otherwise widen HF->SF->DF in two steps,
;; which is exact for an extend.
4733 (define_expand "extendhfdf2"
4734 [(set (match_operand:DF 0 "s_register_operand")
4735 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4738 /* We don't have a direct instruction for this, so go via SFmode.  */
4739 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4742 op1 = convert_to_mode (SFmode, operands[1], 0);
4743 op1 = convert_to_mode (DFmode, op1, 0);
4744 emit_insn (gen_movdf (operands[0], op1));
4747 /* Otherwise, we're done producing RTL and will pick up the correct
4748 pattern to do this with one rounding-step in a single instruction.  */
4752 ;; Move insns (including loads and stores)
4754 ;; XXX Just some ideas about movti.
4755 ;; I don't think these are a good idea on the arm, there just aren't enough
4757 ;;(define_expand "loadti"
4758 ;; [(set (match_operand:TI 0 "s_register_operand")
4759 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4762 ;;(define_expand "storeti"
4763 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4764 ;; (match_operand:TI 1 "s_register_operand"))]
4767 ;;(define_expand "movti"
4768 ;; [(set (match_operand:TI 0 "general_operand")
4769 ;; (match_operand:TI 1 "general_operand"))]
4775 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4776 ;; operands[1] = copy_to_reg (operands[1]);
4777 ;; if (MEM_P (operands[0]))
4778 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4779 ;; else if (MEM_P (operands[1]))
4780 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4784 ;; emit_insn (insn);
4788 ;; Recognize garbage generated above.
4791 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4792 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4796 ;; register mem = (which_alternative < 3);
4797 ;; register const char *template;
4799 ;; operands[mem] = XEXP (operands[mem], 0);
4800 ;; switch (which_alternative)
4802 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4803 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4804 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4805 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4806 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4807 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4809 ;; output_asm_insn (template, operands);
;; DImode move expander.  Besides forcing mem=mem / mem=const into legal
;; shape, it works around register pairs that cannot hold DImode (odd
;; starting register in ARM state) by splitting into two SImode moves,
;; going through a scratch DImode pseudo when a volatile memory operand
;; must not be accessed as two halves.
4813 (define_expand "movdi"
4814 [(set (match_operand:DI 0 "general_operand")
4815 (match_operand:DI 1 "general_operand"))]
4818 gcc_checking_assert (aligned_operand (operands[0], DImode));
4819 gcc_checking_assert (aligned_operand (operands[1], DImode));
4820 if (can_create_pseudo_p ())
4822 if (!REG_P (operands[0]))
4823 operands[1] = force_reg (DImode, operands[1]);
;; Destination is a hard core register pair that cannot hold DImode.
4825 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4826 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4828 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4829 when expanding function calls.  */
4830 gcc_assert (can_create_pseudo_p ());
;; Volatile source: must be read as one DImode access, so load into a
;; legal pseudo first, then move halves.
4831 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4833 /* Perform load into legal reg pair first, then move.  */
4834 rtx reg = gen_reg_rtx (DImode);
4835 emit_insn (gen_movdi (reg, operands[1]));
4838 emit_move_insn (gen_lowpart (SImode, operands[0]),
4839 gen_lowpart (SImode, operands[1]));
4840 emit_move_insn (gen_highpart (SImode, operands[0]),
4841 gen_highpart (SImode, operands[1]));
;; Mirror case: source is an illegal hard register pair (STRD hazard).
4844 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4845 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4847 /* Avoid STRD's from an odd-numbered register pair in ARM state
4848 when expanding function prologue.  */
4849 gcc_assert (can_create_pseudo_p ());
;; Volatile destination gets the halves assembled in a scratch pseudo,
;; then stored with one DImode move.
4850 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4851 ? gen_reg_rtx (DImode)
4853 emit_move_insn (gen_lowpart (SImode, split_dest),
4854 gen_lowpart (SImode, operands[1]));
4855 emit_move_insn (gen_highpart (SImode, split_dest),
4856 gen_highpart (SImode, operands[1]));
4857 if (split_dest != operands[0])
4858 emit_insn (gen_movdi (operands[0], split_dest));
;; Core-register DImode move for soft-float ARM.  Alternatives cover
;; reg<-reg and the Da/Db/Dc constant classes (splittable 64-bit
;; constants of increasing cost), plus load/store; most alternatives are
;; emitted via output_move_double.
4864 (define_insn "*arm_movdi"
4865 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4866 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4868 && !(TARGET_HARD_FLOAT)
4870 && ( register_operand (operands[0], DImode)
4871 || register_operand (operands[1], DImode))"
4873 switch (which_alternative)
;; With the literal pool disabled, constants cannot be loaded from
;; memory — force a split into MOV/MOVT sequences instead.
4880 /* Cannot load it directly, split to load it via MOV / MOVT.  */
4881 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4885 return output_move_double (operands, true, NULL);
4888 [(set_attr "length" "8,12,16,8,8")
4889 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4890 (set_attr "arm_pool_range" "*,*,*,1020,*")
4891 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4892 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4893 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split a 64-bit (DI or DF via the ANY64 iterator) immediate move into
;; two SImode constant builds, when inlining is cheap enough or the
;; literal pool is disabled.
;; NOTE(review): the define_split header lines are missing from this
;; excerpt; only the bodies are visible.
4897 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4898 (match_operand:ANY64 1 "immediate_operand" ""))]
4901 && (arm_disable_literal_pool
4902 || (arm_const_double_inline_cost (operands[1])
4903 <= arm_max_const_double_inline_cost ()))"
4906 arm_split_constant (SET, SImode, curr_insn,
4907 INTVAL (gen_lowpart (SImode, operands[1])),
4908 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4909 arm_split_constant (SET, SImode, curr_insn,
4910 INTVAL (gen_highpart_mode (SImode,
4911 GET_MODE (operands[0]),
4913 gen_highpart (SImode, operands[0]), NULL_RTX, 0);

4918 ; If optimizing for size, or if we have load delay slots, then
4919 ; we want to split the constant into two separate operations.
4920 ; In both cases this may split a trivial part into a single data op
4921 ; leaving a single complex constant to load.  We can also get longer
4922 ; offsets in a LDR which means we get better chances of sharing the pool
4923 ; entries.  Finally, we can normally do a better job of scheduling
4924 ; LDR instructions than we can with LDM.
4925 ; This pattern will only match if the one above did not.
;; Split a const_double move into two SImode sets (low then high half).
4927 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4928 (match_operand:ANY64 1 "const_double_operand" ""))]
4929 "TARGET_ARM && reload_completed
4930 && arm_const_double_by_parts (operands[1])"
4931 [(set (match_dup 0) (match_dup 1))
4932 (set (match_dup 2) (match_dup 3))]
4934 operands[2] = gen_highpart (SImode, operands[0]);
4935 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4937 operands[0] = gen_lowpart (SImode, operands[0]);
4938 operands[1] = gen_lowpart (SImode, operands[1]);

;; Split a reg-to-reg 64-bit move into two SImode moves after reload,
;; swapping the order when the halves partially overlap so the first
;; move does not clobber a source half still needed by the second.
4943 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4944 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4945 "TARGET_EITHER && reload_completed"
4946 [(set (match_dup 0) (match_dup 1))
4947 (set (match_dup 2) (match_dup 3))]
4949 operands[2] = gen_highpart (SImode, operands[0]);
4950 operands[3] = gen_highpart (SImode, operands[1]);
4951 operands[0] = gen_lowpart (SImode, operands[0]);
4952 operands[1] = gen_lowpart (SImode, operands[1]);
4954 /* Handle a partial overlap.  */
4955 if (rtx_equal_p (operands[0], operands[3]))
4957 rtx tmp0 = operands[0];
4958 rtx tmp1 = operands[1];
4960 operands[0] = operands[2];
4961 operands[1] = operands[3];

4968 ;; We can't actually do base+index doubleword loads if the index and
4969 ;; destination overlap.  Split here so that we at least have chance to
;; Split a base+index DImode load whose destination overlaps both the
;; base and index: compute the address into the low destination register
;; first, then load through it.
4972 [(set (match_operand:DI 0 "s_register_operand" "")
4973 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4974 (match_operand:SI 2 "s_register_operand" ""))))]
4976 && reg_overlap_mentioned_p (operands[0], operands[1])
4977 && reg_overlap_mentioned_p (operands[0], operands[2])"
4979 (plus:SI (match_dup 1)
4982 (mem:DI (match_dup 4)))]
4984 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
;; SImode move expander.  Handles: forcing mem=mem/mem=const into legal
;; shape; splitting constants that no single MOV/MVN can encode; symbol
;; plus offset that cannot go in the constant pool; TLS references; and
;; PIC address legitimization.
4988 (define_expand "movsi"
4989 [(set (match_operand:SI 0 "general_operand")
4990 (match_operand:SI 1 "general_operand"))]
4994 rtx base, offset, tmp;
4996 gcc_checking_assert (aligned_operand (operands[0], SImode));
4997 gcc_checking_assert (aligned_operand (operands[1], SImode));
4998 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5000 /* Everything except mem = const or mem = mem can be done easily.  */
5001 if (MEM_P (operands[0]))
5002 operands[1] = force_reg (SImode, operands[1]);
;; Constant not encodable as MOV or MVN immediate: either keep it whole
;; for a later split, or build it now with arm_split_constant.
5003 if (arm_general_register_operand (operands[0], SImode)
5004 && CONST_INT_P (operands[1])
5005 && !(const_ok_for_arm (INTVAL (operands[1]))
5006 || const_ok_for_arm (~INTVAL (operands[1]))))
5008 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5010 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5015 arm_split_constant (SET, SImode, NULL_RTX,
5016 INTVAL (operands[1]), operands[0], NULL_RTX,
5017 optimize && can_create_pseudo_p ());
5022 else /* Target doesn't have MOVT...  */
5024 if (can_create_pseudo_p ())
5026 if (!REG_P (operands[0]))
5027 operands[1] = force_reg (SImode, operands[1]);
;; (symbol + offset) that cannot be forced into the constant pool:
;; load the base, then add the offset separately.
5031 split_const (operands[1], &base, &offset);
5032 if (INTVAL (offset) != 0
5033 && targetm.cannot_force_const_mem (SImode, operands[1]))
5035 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5036 emit_move_insn (tmp, base);
5037 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5041 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5043 /* Recognize the case where operand[1] is a reference to thread-local
5044 data and load its address to a register.  Offsets have been split off
;; already (above), so only the bare TLS symbol reaches here.
5046 if (arm_tls_referenced_p (operands[1]))
5047 operands[1] = legitimize_tls_address (operands[1], tmp);
5049 && (CONSTANT_P (operands[1])
5050 || symbol_mentioned_p (operands[1])
5051 || label_mentioned_p (operands[1])))
5053 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5058 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5059 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5060 ;; so this does not matter.
;; MOVT: write the upper 16 bits of op0 (represented as lo_sum because
;; ARM's HIGH/LO_SUM convention is inverted — see comment above).  The
;; second alternative is the non-predicable ARMv8-M Baseline encoding.
5061 (define_insn "*arm_movt"
5062 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5063 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5064 (match_operand:SI 2 "general_operand" "i,i")))]
5065 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5067 movt%?\t%0, #:upper16:%c2
5068 movt\t%0, #:upper16:%c2"
5069 [(set_attr "arch" "32,v8mb")
5070 (set_attr "predicable" "yes")
5071 (set_attr "length" "4")
5072 (set_attr "type" "alu_sreg")]

;; Basic ARM-state SImode move: reg-reg, MOV/MVN-encodable immediates,
;; MOVW-style immediates (j, v6t2 only), and load/store.
5075 (define_insn "*arm_movsi_insn"
5076 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5077 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5078 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5079 && ( register_operand (operands[0], SImode)
5080 || register_operand (operands[1], SImode))"
5088 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5089 (set_attr "predicable" "yes")
5090 (set_attr "arch" "*,*,*,v6t2,*,*")
5091 (set_attr "pool_range" "*,*,*,*,4096,*")
5092 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]

;; Split a constant that neither MOV nor MVN can encode by rebuilding it
;; with arm_split_constant; the (clobber (const_int 0)) replacement
;; pattern means "emit nothing from the template, C code does the work".
5096 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5097 (match_operand:SI 1 "const_int_operand" ""))]
5098 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5099 && (!(const_ok_for_arm (INTVAL (operands[1]))
5100 || const_ok_for_arm (~INTVAL (operands[1]))))"
5101 [(clobber (const_int 0))]
5103 arm_split_constant (SET, SImode, NULL_RTX,
5104 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5109 ;; A normal way to do (symbol + offset) requires three instructions at least
5110 ;; (depends on how big the offset is) as below:
5111 ;; movw r0, #:lower16:g
5112 ;; movt r0, #:upper16:g
5115 ;; A better way would be:
5116 ;; movw r0, #:lower16:g+4
5117 ;; movt r0, #:upper16:g+4
5119 ;; The limitation of this way is that the length of offset should be a 16-bit
5120 ;; signed value, because current assembler only supports REL type relocation for
5121 ;; such case. If the more powerful RELA type is supported in future, we should
5122 ;; update this pattern to go with better way.
;; Split (const (symbol + offset)) for MOVW/MOVT targets: when the
;; offset fits in 16 bits signed, fold it into the movw/movt relocations
;; (the "better way" described above); otherwise load the symbol and add
;; the offset with a separate instruction.
5124 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5125 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5126 (match_operand:SI 2 "const_int_operand" ""))))]
5129 && arm_disable_literal_pool
5131 && GET_CODE (operands[1]) == SYMBOL_REF"
5132 [(clobber (const_int 0))]
5134 int offset = INTVAL (operands[2]);
5136 if (offset < -0x8000 || offset > 0x7fff)
5138 arm_emit_movpair (operands[0], operands[1]);
5139 emit_insn (gen_rtx_SET (operands[0],
5140 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5144 rtx op = gen_rtx_CONST (SImode,
5145 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5146 arm_emit_movpair (operands[0], op);
5151 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5152 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5153 ;; and lo_sum would be merged back into memory load at cprop. However,
5154 ;; if the default is to prefer movt/movw rather than a load from the constant
5155 ;; pool, the performance is better.
;; Late split of a bare SYMBOL_REF into a MOVW/MOVT pair (see the
;; rationale comment above: done after cprop so the high/lo_sum pair is
;; not re-merged into a constant-pool load).
5157 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5158 (match_operand:SI 1 "general_operand" ""))]
5159 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5160 && !target_word_relocations
5161 && !arm_tls_referenced_p (operands[1])"
5162 [(clobber (const_int 0))]
5164 arm_emit_movpair (operands[0], operands[1]);
5168 ;; When generating pic, we need to load the symbol offset into a register.
5169 ;; So that the optimizer does not confuse this with a normal symbol load
5170 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5171 ;; since that is the only type of relocation we can use.
5173 ;; Wrap calculation of the whole PIC address in a single pattern for the
5174 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5175 ;; a PIC address involves two loads from memory, so we want to CSE it
5176 ;; as often as possible.
5177 ;; This pattern will be split into one of the pic_load_addr_* patterns
5178 ;; and a move after GCSE optimizations.
5180 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
;; Whole PIC address computation kept as one pattern so PRE/HOIST can
;; CSE it (see the long comment above); later split into a
;; pic_load_addr_* load plus the GOT-relative memory access.
5181 (define_expand "calculate_pic_address"
5182 [(set (match_operand:SI 0 "register_operand")
5183 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5184 (unspec:SI [(match_operand:SI 2 "" "")]

5189 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5191 [(set (match_operand:SI 0 "register_operand" "")
5192 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5193 (unspec:SI [(match_operand:SI 2 "" "")]
5196 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5197 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
;; Before register allocation use a fresh pseudo for the intermediate;
;; afterwards reuse the destination register.
5198 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"

5201 ;; operand1 is the memory address to go into
5202 ;; pic_load_addr_32bit.
5203 ;; operand2 is the PIC label to be emitted
5204 ;; from pic_add_dot_plus_eight.
5205 ;; We do this to allow hoisting of the entire insn.
;; Unified load-PIC-address insn, split after reload into the
;; pool load (UNSPEC_PIC_SYM) and the pc-relative add (UNSPEC_PIC_BASE);
;; the pc offset is 4 for Thumb and 8 for ARM.
5206 (define_insn_and_split "pic_load_addr_unified"
5207 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5208 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5209 (match_operand:SI 2 "" "")]
5210 UNSPEC_PIC_UNIFIED))]
5213 "&& reload_completed"
5214 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5215 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5216 (match_dup 2)] UNSPEC_PIC_BASE))]
5217 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5218 [(set_attr "type" "load_4,load_4,load_4")
5219 (set_attr "pool_range" "4096,4094,1022")
5220 (set_attr "neg_pool_range" "4084,0,0")
5221 (set_attr "arch" "a,t2,t1")
5222 (set_attr "length" "8,6,4")]
5225 ;; The rather odd constraints on the following are to force reload to leave
5226 ;; the insn alone, and to force the minipool generation pass to then move
5227 ;; the GOT symbol to memory.
;; Load a GOT symbol address from the literal pool (32-bit ISAs).  The
;; "mX" constraint forces the minipool pass to place the symbol in
;; memory (see comment above); pool ranges differ for ARM vs Thumb-2.
5229 (define_insn "pic_load_addr_32bit"
5230 [(set (match_operand:SI 0 "s_register_operand" "=r")
5231 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5232 "TARGET_32BIT && flag_pic"
5234 [(set_attr "type" "load_4")
5235 (set (attr "pool_range")
5236 (if_then_else (eq_attr "is_thumb" "no")
5239 (set (attr "neg_pool_range")
5240 (if_then_else (eq_attr "is_thumb" "no")

;; Thumb-1 equivalent; low registers only, short positive pool range.
5245 (define_insn "pic_load_addr_thumb1"
5246 [(set (match_operand:SI 0 "s_register_operand" "=l")
5247 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5248 "TARGET_THUMB1 && flag_pic"
5250 [(set_attr "type" "load_4")
5251 (set (attr "pool_range") (const_int 1018))]

;; PC-relative fixup for Thumb (pc reads as . + 4): emit the LPICn label
;; named by op2, then add pc into the register.
5254 (define_insn "pic_add_dot_plus_four"
5255 [(set (match_operand:SI 0 "register_operand" "=r")
5256 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5258 (match_operand 2 "" "")]
5262 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5263 INTVAL (operands[2]));
5264 return \"add\\t%0, %|pc\";
5266 [(set_attr "length" "2")
5267 (set_attr "type" "alu_sreg")]

;; Same fixup for ARM state (pc reads as . + 8).
5270 (define_insn "pic_add_dot_plus_eight"
5271 [(set (match_operand:SI 0 "register_operand" "=r")
5272 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5274 (match_operand 2 "" "")]
5278 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5279 INTVAL (operands[2]));
5280 return \"add%?\\t%0, %|pc, %1\";
5282 [(set_attr "predicable" "yes")
5283 (set_attr "type" "alu_sreg")]

;; Fused form: pc-relative add combined with the subsequent load, used
;; for TLS address references (created by the peephole below).
5286 (define_insn "tls_load_dot_plus_eight"
5287 [(set (match_operand:SI 0 "register_operand" "=r")
5288 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5290 (match_operand 2 "" "")]
5294 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5295 INTVAL (operands[2]));
5296 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5298 [(set_attr "predicable" "yes")
5299 (set_attr "type" "load_4")]
5302 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5303 ;; followed by a load. These sequences can be crunched down to
5304 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole: crunch pic_add_dot_plus_eight followed by a load through
;; the result into a single tls_load_dot_plus_eight, when the
;; intermediate register dies (see comment above).
;; NOTE(review): the define_peephole2 header line is missing from this
;; excerpt.
5307 [(set (match_operand:SI 0 "register_operand" "")
5308 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5310 (match_operand 1 "" "")]
5312 (set (match_operand:SI 2 "arm_general_register_operand" "")
5313 (mem:SI (match_dup 0)))]
5314 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5316 (mem:SI (unspec:SI [(match_dup 3)

;; VxWorks RTP PIC: load through the base register plus an
;; UNSPEC_PIC_OFFSET relocation in a single LDR.
5323 (define_insn "pic_offset_arm"
5324 [(set (match_operand:SI 0 "register_operand" "=r")
5325 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5326 (unspec:SI [(match_operand:SI 2 "" "X")]
5327 UNSPEC_PIC_OFFSET))))]
5328 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5329 "ldr%?\\t%0, [%1,%2]"
5330 [(set_attr "type" "load_4")]

;; After a longjmp the PIC register may be clobbered; reload it at the
;; setjmp receiver label, using r3 as scratch (mask 1UL << 3).
5333 (define_expand "builtin_setjmp_receiver"
5334 [(label_ref (match_operand 0 "" ""))]
5338 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5340 if (arm_pic_register != INVALID_REGNUM)
5341 arm_load_pic_register (1UL << 3, NULL_RTX);
5345 ;; If copying one reg to another we can set the condition codes according to
5346 ;; its value. Such a move is common after a return from subroutine and the
5347 ;; result is being tested against zero.
;; Move a register while setting the condition codes from its value
;; (compare against zero fused with the move); common after a call whose
;; result is immediately tested.  Template appears truncated here — only
;; the SUBS alternative is visible (presumably CMP for the tied
;; alternative — TODO confirm upstream).
5349 (define_insn "*movsi_compare0"
5350 [(set (reg:CC CC_REGNUM)
5351 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5353 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5358 subs%?\\t%0, %1, #0"
5359 [(set_attr "conds" "set")
5360 (set_attr "type" "alus_imm,alus_imm")]
5363 ;; Subroutine to store a half word from a register into memory.
5364 ;; Operand 0 is the source register (HImode)
5365 ;; Operand 1 is the destination address in a register (SImode)
5367 ;; In both this routine and the next, we must be careful not to spill
5368 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5369 ;; can generate unrecognizable rtl.
;; Store a halfword as two byte stores (no STRH available).  Low byte
;; first, then the high byte (value >> 8) at address + 1.  Complex
;; addresses are forced into a register to avoid spilling reg+large
;; constant into an unrecognizable PLUS (see comment above).
5371 (define_expand "storehi"
5372 [;; store the low byte
5373 (set (match_operand 1 "" "") (match_dup 3))
5374 ;; extract the high byte
5376 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5377 ;; store the high byte
5378 (set (match_dup 4) (match_dup 5))]
5382 rtx op1 = operands[1];
5383 rtx addr = XEXP (op1, 0);
5384 enum rtx_code code = GET_CODE (addr);
5386 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5388 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5390 operands[4] = adjust_address (op1, QImode, 1);
5391 operands[1] = adjust_address (operands[1], QImode, 0);
5392 operands[3] = gen_lowpart (QImode, operands[0]);
5393 operands[0] = gen_lowpart (SImode, operands[0]);
5394 operands[2] = gen_reg_rtx (SImode);
5395 operands[5] = gen_lowpart (QImode, operands[2]);

;; Big-endian variant: byte order of the two stores is swapped.
5399 (define_expand "storehi_bigend"
5400 [(set (match_dup 4) (match_dup 3))
5402 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5403 (set (match_operand 1 "" "") (match_dup 5))]
5407 rtx op1 = operands[1];
5408 rtx addr = XEXP (op1, 0);
5409 enum rtx_code code = GET_CODE (addr);
5411 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5413 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5415 operands[4] = adjust_address (op1, QImode, 1);
5416 operands[1] = adjust_address (operands[1], QImode, 0);
5417 operands[3] = gen_lowpart (QImode, operands[0]);
5418 operands[0] = gen_lowpart (SImode, operands[0]);
5419 operands[2] = gen_reg_rtx (SImode);
5420 operands[5] = gen_lowpart (QImode, operands[2]);

5424 ;; Subroutine to store a half word integer constant into memory.
;; Store a constant halfword as two byte stores, materializing each byte
;; in a register; when both bytes are equal the same register is reused.
;; Endianness decides which byte goes at the base address.
5425 (define_expand "storeinthi"
5426 [(set (match_operand 0 "" "")
5427 (match_operand 1 "" ""))
5428 (set (match_dup 3) (match_dup 2))]
5432 HOST_WIDE_INT value = INTVAL (operands[1]);
5433 rtx addr = XEXP (operands[0], 0);
5434 rtx op0 = operands[0];
5435 enum rtx_code code = GET_CODE (addr);
5437 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5439 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5441 operands[1] = gen_reg_rtx (SImode);
5442 if (BYTES_BIG_ENDIAN)
5444 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5445 if ((value & 255) == ((value >> 8) & 255))
5446 operands[2] = operands[1];
5449 operands[2] = gen_reg_rtx (SImode);
5450 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5455 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5456 if ((value & 255) == ((value >> 8) & 255))
5457 operands[2] = operands[1];
5460 operands[2] = gen_reg_rtx (SImode);
5461 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5465 operands[3] = adjust_address (op0, QImode, 1);
5466 operands[0] = adjust_address (operands[0], QImode, 0);
5467 operands[2] = gen_lowpart (QImode, operands[2]);
5468 operands[1] = gen_lowpart (QImode, operands[1]);

;; Single STRH store (v4+ has halfword stores); source is forced into a
;; register first.
5472 (define_expand "storehi_single_op"
5473 [(set (match_operand:HI 0 "memory_operand")
5474 (match_operand:HI 1 "general_operand"))]
5475 "TARGET_32BIT && arm_arch4"
5477 if (!s_register_operand (operands[1], HImode))
5478 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; movhi expander: HImode moves for all three target variants.
;; Dispatches on target: 32-bit ARM (stores via storehi_single_op /
;; storeinthi / storehi_bigend, constants sign-extended into an SImode
;; pseudo), Thumb-2 (mem=mem and mem=const forced through registers),
;; and Thumb-1 (invalid addresses copied to a register, loads widened
;; to zero_extendhisi2 when optimizing).  During reload, a large
;; integer destined for a register is moved as SImode via a subreg.
;; NOTE(review): many interior lines (else branches, closing braces)
;; are missing from this fragment — verify control flow upstream.
5482 (define_expand "movhi"
5483 [(set (match_operand:HI 0 "general_operand")
5484 (match_operand:HI 1 "general_operand"))]
5487 gcc_checking_assert (aligned_operand (operands[0], HImode));
5488 gcc_checking_assert (aligned_operand (operands[1], HImode));
5491 if (can_create_pseudo_p ())
5493 if (MEM_P (operands[0]))
5497 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5500 if (CONST_INT_P (operands[1]))
5501 emit_insn (gen_storeinthi (operands[0], operands[1]));
5504 if (MEM_P (operands[1]))
5505 operands[1] = force_reg (HImode, operands[1]);
5506 if (BYTES_BIG_ENDIAN)
5507 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5509 emit_insn (gen_storehi (operands[1], operands[0]));
5513 /* Sign extend a constant, and keep it in an SImode reg. */
5514 else if (CONST_INT_P (operands[1]))
5516 rtx reg = gen_reg_rtx (SImode);
5517 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5519 /* If the constant is already valid, leave it alone. */
5520 if (!const_ok_for_arm (val))
5522 /* If setting all the top bits will make the constant
5523 loadable in a single instruction, then set them.
5524 Otherwise, sign extend the number. */
5526 if (const_ok_for_arm (~(val | ~0xffff)))
5528 else if (val & 0x8000)
5532 emit_insn (gen_movsi (reg, GEN_INT (val)));
5533 operands[1] = gen_lowpart (HImode, reg);
5535 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5536 && MEM_P (operands[1]))
5538 rtx reg = gen_reg_rtx (SImode);
5540 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5541 operands[1] = gen_lowpart (HImode, reg);
5543 else if (!arm_arch4)
5545 if (MEM_P (operands[1]))
5548 rtx offset = const0_rtx;
5549 rtx reg = gen_reg_rtx (SImode);
/* Pre-ARMv4 has no ldrh: if the address is provably 32-bit aligned
   (reg or reg+even-const with REGNO_POINTER_ALIGN >= 32), widen the
   access to SImode and shift the halfword into place; otherwise fall
   back to the two-byte-load movhi_bytes sequence.  */
5551 if ((REG_P (base = XEXP (operands[1], 0))
5552 || (GET_CODE (base) == PLUS
5553 && (CONST_INT_P (offset = XEXP (base, 1)))
5554 && ((INTVAL(offset) & 1) != 1)
5555 && REG_P (base = XEXP (base, 0))))
5556 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5560 new_rtx = widen_memory_access (operands[1], SImode,
5561 ((INTVAL (offset) & ~3)
5562 - INTVAL (offset)));
5563 emit_insn (gen_movsi (reg, new_rtx));
5564 if (((INTVAL (offset) & 2) != 0)
5565 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5567 rtx reg2 = gen_reg_rtx (SImode);
5569 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5574 emit_insn (gen_movhi_bytes (reg, operands[1]));
5576 operands[1] = gen_lowpart (HImode, reg);
5580 /* Handle loading a large integer during reload. */
5581 else if (CONST_INT_P (operands[1])
5582 && !const_ok_for_arm (INTVAL (operands[1]))
5583 && !const_ok_for_arm (~INTVAL (operands[1])))
5585 /* Writing a constant to memory needs a scratch, which should
5586 be handled with SECONDARY_RELOADs. */
5587 gcc_assert (REG_P (operands[0]));
5589 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5590 emit_insn (gen_movsi (operands[0], operands[1]));
5594 else if (TARGET_THUMB2)
5596 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5597 if (can_create_pseudo_p ())
5599 if (!REG_P (operands[0]))
5600 operands[1] = force_reg (HImode, operands[1]);
5601 /* Zero extend a constant, and keep it in an SImode reg. */
5602 else if (CONST_INT_P (operands[1]))
5604 rtx reg = gen_reg_rtx (SImode);
5605 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5607 emit_insn (gen_movsi (reg, GEN_INT (val)));
5608 operands[1] = gen_lowpart (HImode, reg);
5612 else /* TARGET_THUMB1 */
5614 if (can_create_pseudo_p ())
5616 if (CONST_INT_P (operands[1]))
5618 rtx reg = gen_reg_rtx (SImode);
5620 emit_insn (gen_movsi (reg, operands[1]));
5621 operands[1] = gen_lowpart (HImode, reg);
5624 /* ??? We shouldn't really get invalid addresses here, but this can
5625 happen if we are passed a SP (never OK for HImode/QImode) or
5626 virtual register (also rejected as illegitimate for HImode/QImode)
5627 relative address. */
5628 /* ??? This should perhaps be fixed elsewhere, for instance, in
5629 fixup_stack_1, by checking for other kinds of invalid addresses,
5630 e.g. a bare reference to a virtual register. This may confuse the
5631 alpha though, which must handle this case differently. */
5632 if (MEM_P (operands[0])
5633 && !memory_address_p (GET_MODE (operands[0]),
5634 XEXP (operands[0], 0)))
5636 = replace_equiv_address (operands[0],
5637 copy_to_reg (XEXP (operands[0], 0)));
5639 if (MEM_P (operands[1])
5640 && !memory_address_p (GET_MODE (operands[1]),
5641 XEXP (operands[1], 0)))
5643 = replace_equiv_address (operands[1],
5644 copy_to_reg (XEXP (operands[1], 0)));
5646 if (MEM_P (operands[1]) && optimize > 0)
5648 rtx reg = gen_reg_rtx (SImode);
5650 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5651 operands[1] = gen_lowpart (HImode, reg);
5654 if (MEM_P (operands[0]))
5655 operands[1] = force_reg (HImode, operands[1]);
5657 else if (CONST_INT_P (operands[1])
5658 && !satisfies_constraint_I (operands[1]))
5660 /* Handle loading a large integer during reload. */
5662 /* Writing a constant to memory needs a scratch, which should
5663 be handled with SECONDARY_RELOADs. */
5664 gcc_assert (REG_P (operands[0]))
5666 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5667 emit_insn (gen_movsi (operands[0], operands[1]));
;; movhi_bytes: load an HImode value as two separate QImode loads
;; (for targets without ldrh), zero-extending each byte into an SImode
;; pseudo and combining with (high << 8) | low.  operands[4]/[5] pick
;; which byte is the high part according to BYTES_BIG_ENDIAN.
5674 (define_expand "movhi_bytes"
5675 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5677 (zero_extend:SI (match_dup 6)))
5678 (set (match_operand:SI 0 "" "")
5679 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5684 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5686 mem1 = change_address (operands[1], QImode, addr);
5687 mem2 = change_address (operands[1], QImode,
5688 plus_constant (Pmode, addr, 1));
5689 operands[0] = gen_lowpart (SImode, operands[0]);
5691 operands[2] = gen_reg_rtx (SImode);
5692 operands[3] = gen_reg_rtx (SImode);
5695 if (BYTES_BIG_ENDIAN)
5697 operands[4] = operands[2];
5698 operands[5] = operands[3];
5702 operands[4] = operands[3];
5703 operands[5] = operands[2];
;; movhi_bigend: big-endian HImode load — load the containing word,
;; rotate, then arithmetic-shift right by 16 so the halfword lands
;; sign-correct in the low half; the result is taken as a lowpart.
5708 (define_expand "movhi_bigend"
5710 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5713 (ashiftrt:SI (match_dup 2) (const_int 16)))
5714 (set (match_operand:HI 0 "s_register_operand")
5718 operands[2] = gen_reg_rtx (SImode);
5719 operands[3] = gen_reg_rtx (SImode);
5720 operands[4] = gen_lowpart (HImode, operands[3]);
5724 ;; Pattern to recognize insn generated default case above
;; *movhi_insn_arch4: HImode move for ARMv4+ soft-float.  Alternatives:
;; reg<-reg (mov), reg<-~imm (mvn), reg<-imm16 (movw, v6t2 only),
;; mem<-reg (strh), reg<-mem (ldrh).  Pool ranges apply only to the
;; literal-load alternative.
5725 (define_insn "*movhi_insn_arch4"
5726 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5727 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5729 && arm_arch4 && !TARGET_HARD_FLOAT
5730 && (register_operand (operands[0], HImode)
5731 || register_operand (operands[1], HImode))"
5733 mov%?\\t%0, %1\\t%@ movhi
5734 mvn%?\\t%0, #%B1\\t%@ movhi
5735 movw%?\\t%0, %L1\\t%@ movhi
5736 strh%?\\t%1, %0\\t%@ movhi
5737 ldrh%?\\t%0, %1\\t%@ movhi"
5738 [(set_attr "predicable" "yes")
5739 (set_attr "pool_range" "*,*,*,*,256")
5740 (set_attr "neg_pool_range" "*,*,*,*,244")
5741 (set_attr "arch" "*,*,v6t2,*,*")
5742 (set_attr_alternative "type"
5743 [(if_then_else (match_operand 1 "const_int_operand" "")
5744 (const_string "mov_imm" )
5745 (const_string "mov_reg"))
5746 (const_string "mvn_imm")
5747 (const_string "mov_imm")
5748 (const_string "store_4")
5749 (const_string "load_4")])]
;; *movhi_bytes: register-only HImode move for pre-v4 ARM soft-float;
;; immediate (I), register, or inverted-immediate (K via mvn) sources.
5752 (define_insn "*movhi_bytes"
5753 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5754 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5755 "TARGET_ARM && !TARGET_HARD_FLOAT"
5757 mov%?\\t%0, %1\\t%@ movhi
5758 mov%?\\t%0, %1\\t%@ movhi
5759 mvn%?\\t%0, #%B1\\t%@ movhi"
5760 [(set_attr "predicable" "yes")
5761 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5764 ;; We use a DImode scratch because we may occasionally need an additional
5765 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5766 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5767 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5768 ;; to be correctly handled in default_secondary_reload function.
;; reload_outhi: secondary-reload HI store; delegates to
;; arm_reload_out_hi (ARM) or thumb_reload_out_hi (Thumb).
5769 (define_expand "reload_outhi"
5770 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5771 (match_operand:HI 1 "s_register_operand" "r")
5772 (match_operand:DI 2 "s_register_operand" "=&l")])]
5775 arm_reload_out_hi (operands);
5777 thumb_reload_out_hi (operands);
;; reload_inhi: secondary-reload HI load.  NOTE(review): the Thumb arm
;; calls thumb_reload_out_hi here, same as the store pattern — this
;; matches upstream arm.md, where the helper handles both directions.
5782 (define_expand "reload_inhi"
5783 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5784 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5785 (match_operand:DI 2 "s_register_operand" "=&r")])]
5789 arm_reload_in_hi (operands);
5791 thumb_reload_out_hi (operands);
;; movqi expander: QImode moves.  Before reload, constants are loaded
;; through an SImode pseudo (masked to 0..255 for Thumb so a movs can
;; be used); invalid SP/virtual-reg-relative addresses are rewritten
;; through a register; optimized loads go via zero_extendqisi2.
;; During reload, a large Thumb constant into a register is handled
;; as an SImode move through a subreg.
5795 (define_expand "movqi"
5796 [(set (match_operand:QI 0 "general_operand")
5797 (match_operand:QI 1 "general_operand"))]
5800 /* Everything except mem = const or mem = mem can be done easily */
5802 if (can_create_pseudo_p ())
5804 if (CONST_INT_P (operands[1]))
5806 rtx reg = gen_reg_rtx (SImode);
5808 /* For thumb we want an unsigned immediate, then we are more likely
5809 to be able to use a movs insn. */
5811 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5813 emit_insn (gen_movsi (reg, operands[1]));
5814 operands[1] = gen_lowpart (QImode, reg);
5819 /* ??? We shouldn't really get invalid addresses here, but this can
5820 happen if we are passed a SP (never OK for HImode/QImode) or
5821 virtual register (also rejected as illegitimate for HImode/QImode)
5822 relative address. */
5823 /* ??? This should perhaps be fixed elsewhere, for instance, in
5824 fixup_stack_1, by checking for other kinds of invalid addresses,
5825 e.g. a bare reference to a virtual register. This may confuse the
5826 alpha though, which must handle this case differently. */
5827 if (MEM_P (operands[0])
5828 && !memory_address_p (GET_MODE (operands[0]),
5829 XEXP (operands[0], 0)))
5831 = replace_equiv_address (operands[0],
5832 copy_to_reg (XEXP (operands[0], 0)));
5833 if (MEM_P (operands[1])
5834 && !memory_address_p (GET_MODE (operands[1]),
5835 XEXP (operands[1], 0)))
5837 = replace_equiv_address (operands[1],
5838 copy_to_reg (XEXP (operands[1], 0)));
5841 if (MEM_P (operands[1]) && optimize > 0)
5843 rtx reg = gen_reg_rtx (SImode);
5845 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5846 operands[1] = gen_lowpart (QImode, reg);
5849 if (MEM_P (operands[0]))
5850 operands[1] = force_reg (QImode, operands[1]);
5852 else if (TARGET_THUMB
5853 && CONST_INT_P (operands[1])
5854 && !satisfies_constraint_I (operands[1]))
5856 /* Handle loading a large integer during reload. */
5858 /* Writing a constant to memory needs a scratch, which should
5859 be handled with SECONDARY_RELOADs. */
5860 gcc_assert (REG_P (operands[0]));
5862 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5863 emit_insn (gen_movsi (operands[0], operands[1]));
;; *arm_movqi_insn: QImode move insn with ARM/Thumb-2 alternatives
;; (mov/mvn/ldrb/strb); arch/length/predicability vary per alternative.
;; NOTE(review): the output template lines (5875-5884) are missing from
;; this fragment.
5869 (define_insn "*arm_movqi_insn"
5870 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5871 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5873 && ( register_operand (operands[0], QImode)
5874 || register_operand (operands[1], QImode))"
5885 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5886 (set_attr "predicable" "yes")
5887 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5888 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5889 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; movhf expander: HFmode (__fp16) moves.  Stores force the source
;; into a register; Thumb-1 additionally forces non-register
;; destinations' sources into registers when pseudos are available.
5893 (define_expand "movhf"
5894 [(set (match_operand:HF 0 "general_operand")
5895 (match_operand:HF 1 "general_operand"))]
5898 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5899 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5902 if (MEM_P (operands[0]))
5903 operands[1] = force_reg (HFmode, operands[1]);
5905 else /* TARGET_THUMB1 */
5907 if (can_create_pseudo_p ())
5909 if (!REG_P (operands[0]))
5910 operands[1] = force_reg (HFmode, operands[1]);
;; *arm32_movhf: soft-float __fp16 move.  Load/store via ldrh/strh,
;; reg-reg via mov, and constants materialized either with a single
;; movw (Thumb-2-capable cores) or a mov/orr pair of byte immediates.
5916 (define_insn "*arm32_movhf"
5917 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5918 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5919 "TARGET_32BIT && !TARGET_HARD_FLOAT
5920 && ( s_register_operand (operands[0], HFmode)
5921 || s_register_operand (operands[1], HFmode))"
5923 switch (which_alternative)
5925 case 0: /* ARM register from memory */
5926 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5927 case 1: /* memory from ARM register */
5928 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5929 case 2: /* ARM register from ARM register */
5930 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5931 case 3: /* ARM register from constant */
5936 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5938 ops[0] = operands[0];
5939 ops[1] = GEN_INT (bits);
5940 ops[2] = GEN_INT (bits & 0xff00);
5941 ops[3] = GEN_INT (bits & 0x00ff);
5943 if (arm_arch_thumb2)
5944 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5946 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5953 [(set_attr "conds" "unconditional")
5954 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5955 (set_attr "length" "4,4,4,8")
5956 (set_attr "predicable" "yes")]
;; movsf expander: SFmode moves.  Stores force the source into a
;; register.  When the literal pool is disabled and the constant is
;; not directly encodable by VFP, emit a clobbered load so it can be
;; materialized through a GPR with MOV/MOVT.
5959 (define_expand "movsf"
5960 [(set (match_operand:SF 0 "general_operand")
5961 (match_operand:SF 1 "general_operand"))]
5964 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5965 gcc_checking_assert (aligned_operand (operands[1], SFmode));
5968 if (MEM_P (operands[0]))
5969 operands[1] = force_reg (SFmode, operands[1]);
5971 else /* TARGET_THUMB1 */
5973 if (can_create_pseudo_p ())
5975 if (!REG_P (operands[0]))
5976 operands[1] = force_reg (SFmode, operands[1]);
5980 /* Cannot load it directly, generate a load with clobber so that it can be
5981 loaded via GPR with MOV / MOVT. */
5982 if (arm_disable_literal_pool
5983 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
5984 && CONST_DOUBLE_P (operands[1])
5985 && TARGET_HARD_FLOAT
5986 && !vfp3_const_double_rtx (operands[1]))
5988 rtx clobreg = gen_reg_rtx (SFmode);
5989 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
5996 ;; Transform a floating-point move of a constant into a core register into
5997 ;; an SImode operation.
;; (anonymous define_split): rewrite SF-const -> core reg as the
;; equivalent SImode lowpart move; FAIL if a lowpart cannot be taken.
5999 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6000 (match_operand:SF 1 "immediate_operand" ""))]
6003 && CONST_DOUBLE_P (operands[1])"
6004 [(set (match_dup 2) (match_dup 3))]
6006 operands[2] = gen_lowpart (SImode, operands[0]);
6007 operands[3] = gen_lowpart (SImode, operands[1]);
6008 if (operands[2] == 0 || operands[3] == 0)
;; *arm_movsf_soft_insn: soft-float SF move — mov / ldr / str.  The
;; load alternative FAILs (returns via split) when the literal pool
;; is disabled and the operand is not a real memory reference.
6013 (define_insn "*arm_movsf_soft_insn"
6014 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6015 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6017 && TARGET_SOFT_FLOAT
6018 && (!MEM_P (operands[0])
6019 || register_operand (operands[1], SFmode))"
6021 switch (which_alternative)
6023 case 0: return \"mov%?\\t%0, %1\";
6025 /* Cannot load it directly, split to load it via MOV / MOVT. */
6026 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6028 return \"ldr%?\\t%0, %1\\t%@ float\";
6029 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6030 default: gcc_unreachable ();
6033 [(set_attr "predicable" "yes")
6034 (set_attr "type" "mov_reg,load_4,store_4")
6035 (set_attr "arm_pool_range" "*,4096,*")
6036 (set_attr "thumb2_pool_range" "*,4094,*")
6037 (set_attr "arm_neg_pool_range" "*,4084,*")
6038 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6041 ;; Splitter for the above.
;; Converts the SF constant to its 32-bit target image with
;; real_to_target and moves it as an SImode immediate.
6043 [(set (match_operand:SF 0 "s_register_operand")
6044 (match_operand:SF 1 "const_double_operand"))]
6045 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6049 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6050 rtx cst = gen_int_mode (buf, SImode);
6051 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; movdf expander: DFmode moves, mirroring movsf: force stores through
;; registers; with the literal pool disabled and a constant that
;; neither arm_const_double_rtx nor (double-precision) VFP can encode,
;; emit a clobbered load for MOV/MOVT materialization.
6056 (define_expand "movdf"
6057 [(set (match_operand:DF 0 "general_operand")
6058 (match_operand:DF 1 "general_operand"))]
6061 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6062 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6065 if (MEM_P (operands[0]))
6066 operands[1] = force_reg (DFmode, operands[1]);
6068 else /* TARGET_THUMB */
6070 if (can_create_pseudo_p ())
6072 if (!REG_P (operands[0]))
6073 operands[1] = force_reg (DFmode, operands[1]);
6077 /* Cannot load it directly, generate a load with clobber so that it can be
6078 loaded via GPR with MOV / MOVT. */
6079 if (arm_disable_literal_pool
6080 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6081 && CONSTANT_P (operands[1])
6082 && TARGET_HARD_FLOAT
6083 && !arm_const_double_rtx (operands[1])
6084 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6086 rtx clobreg = gen_reg_rtx (DFmode);
6087 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6094 ;; Reloading a df mode value stored in integer regs to memory can require a
6096 ;; Another reload_out<m> pattern that requires special constraints.
;; reload_outdf: secondary-reload DF store.  Handles pre/post
;; inc/dec addressing by either delegating to movdi (POST_INC /
;; PRE_DEC), adjusting the base register, or computing the address
;; into the SImode scratch (operands[2]).
6097 (define_expand "reload_outdf"
6098 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6099 (match_operand:DF 1 "s_register_operand" "r")
6100 (match_operand:SI 2 "s_register_operand" "=&r")]
6104 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6107 operands[2] = XEXP (operands[0], 0);
6108 else if (code == POST_INC || code == PRE_DEC)
6110 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6111 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6112 emit_insn (gen_movdi (operands[0], operands[1]));
6115 else if (code == PRE_INC)
6117 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6119 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6122 else if (code == POST_DEC)
6123 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6125 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6126 XEXP (XEXP (operands[0], 0), 1)));
6128 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6131 if (code == POST_DEC)
6132 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; *movdf_soft_insn: soft-float DF move in core registers; reg-reg
;; variants expand to multiple insns (8-16 bytes), plus ldm/stm-style
;; load_8/store_8 alternatives via output_move_double.
6138 (define_insn "*movdf_soft_insn"
6139 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6140 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6141 "TARGET_32BIT && TARGET_SOFT_FLOAT
6142 && ( register_operand (operands[0], DFmode)
6143 || register_operand (operands[1], DFmode))"
6145 switch (which_alternative)
6152 /* Cannot load it directly, split to load it via MOV / MOVT. */
6153 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6157 return output_move_double (operands, true, NULL);
6160 [(set_attr "length" "8,12,16,8,8")
6161 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6162 (set_attr "arm_pool_range" "*,*,*,1020,*")
6163 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6164 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6165 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6168 ;; Splitter for the above.
;; Builds the 64-bit image of the DF constant (word order chosen by
;; BYTES_BIG_ENDIAN) and moves it as a DImode immediate.
6170 [(set (match_operand:DF 0 "s_register_operand")
6171 (match_operand:DF 1 "const_double_operand"))]
6172 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6176 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6177 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6178 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6179 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6180 rtx cst = gen_int_mode (ival, DImode);
6181 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6187 ;; load- and store-multiple insns
6188 ;; The arm can load/store any set of registers, provided that they are in
6189 ;; ascending order, but these expanders assume a contiguous set.
;; load_multiple: expand to an ldm-style parallel.  FAILs (validation
;; falls through) unless the count is a constant in [2, MAX_LDM_STM_OPS]
;; and the contiguous register run stays below LAST_ARM_REGNUM (i.e.
;; never includes the PC).
6191 (define_expand "load_multiple"
6192 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6193 (match_operand:SI 1 "" ""))
6194 (use (match_operand:SI 2 "" ""))])]
6197 HOST_WIDE_INT offset = 0;
6199 /* Support only fixed point registers. */
6200 if (!CONST_INT_P (operands[2])
6201 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6202 || INTVAL (operands[2]) < 2
6203 || !MEM_P (operands[1])
6204 || !REG_P (operands[0])
6205 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6206 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6210 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6211 INTVAL (operands[2]),
6212 force_reg (SImode, XEXP (operands[1], 0)),
6213 FALSE, operands[1], &offset);
;; store_multiple: stm-style counterpart with the same validation,
;; operands 0/1 swapped (memory destination, register source).
6216 (define_expand "store_multiple"
6217 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6218 (match_operand:SI 1 "" ""))
6219 (use (match_operand:SI 2 "" ""))])]
6222 HOST_WIDE_INT offset = 0;
6224 /* Support only fixed point registers. */
6225 if (!CONST_INT_P (operands[2])
6226 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6227 || INTVAL (operands[2]) < 2
6228 || !REG_P (operands[1])
6229 || !MEM_P (operands[0])
6230 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6231 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6235 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6236 INTVAL (operands[2]),
6237 force_reg (SImode, XEXP (operands[0], 0)),
6238 FALSE, operands[0], &offset);
;; setmemsi: memset-style block set; succeeds only when
;; arm_gen_setmem can emit an inline sequence.
6242 (define_expand "setmemsi"
6243 [(match_operand:BLK 0 "general_operand")
6244 (match_operand:SI 1 "const_int_operand")
6245 (match_operand:SI 2 "const_int_operand")
6246 (match_operand:SI 3 "const_int_operand")]
6249 if (arm_gen_setmem (operands))
6256 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6257 ;; We could let this apply for blocks of less than this, but it clobbers so
6258 ;; many registers that there is then probably a better way.
;; cpymemqi: inline memcpy.  Prefers ldrd/strd sequences on cores
;; tuned for them (unless optimizing for size); Thumb-1 only handles
;; word-aligned blocks of at most 48 bytes.
6260 (define_expand "cpymemqi"
6261 [(match_operand:BLK 0 "general_operand")
6262 (match_operand:BLK 1 "general_operand")
6263 (match_operand:SI 2 "const_int_operand")
6264 (match_operand:SI 3 "const_int_operand")]
6269 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6270 && !optimize_function_for_size_p (cfun))
6272 if (gen_cpymem_ldrd_strd (operands))
6277 if (arm_gen_cpymemqi (operands))
6281 else /* TARGET_THUMB1 */
6283 if ( INTVAL (operands[3]) != 4
6284 || INTVAL (operands[2]) > 48)
6287 thumb_expand_cpymemqi (operands);
6294 ;; Compare & branch insns
6295 ;; The range calculations are based as follows:
6296 ;; For forward branches, the address calculation returns the address of
6297 ;; the next instruction. This is 2 beyond the branch instruction.
6298 ;; For backward branches, the address calculation returns the address of
6299 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6300 ;; instruction for the shortest sequence, and 4 before the branch instruction
6301 ;; if we have to jump around an unconditional branch.
6302 ;; To the basic branch range the PC offset must be added (this is +4).
6303 ;; So for forward branches we have
6304 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6305 ;; And for backward branches we have
6306 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6308 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6309 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; cbranchsi4: SImode compare-and-branch.  32-bit targets validate the
;; comparison and emit cbranch_cc; Thumb-1 uses cbranchsi4_scratch for
;; negatable immediates and otherwise forces a valid cmp operand.
6311 (define_expand "cbranchsi4"
6312 [(set (pc) (if_then_else
6313 (match_operator 0 "expandable_comparison_operator"
6314 [(match_operand:SI 1 "s_register_operand")
6315 (match_operand:SI 2 "nonmemory_operand")])
6316 (label_ref (match_operand 3 "" ""))
6322 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6324 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6328 if (thumb1_cmpneg_operand (operands[2], SImode))
6330 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6331 operands[3], operands[0]));
6334 if (!thumb1_cmp_operand (operands[2], SImode))
6335 operands[2] = force_reg (SImode, operands[2]);
;; cbranchsf4: SF compare-and-branch for hard-float; delegates
;; straight to cbranch_cc.
6338 (define_expand "cbranchsf4"
6339 [(set (pc) (if_then_else
6340 (match_operator 0 "expandable_comparison_operator"
6341 [(match_operand:SF 1 "s_register_operand")
6342 (match_operand:SF 2 "vfp_compare_operand")])
6343 (label_ref (match_operand 3 "" ""))
6345 "TARGET_32BIT && TARGET_HARD_FLOAT"
6346 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6347 operands[3])); DONE;"
;; cbranchdf4: DF variant; requires double-precision VFP
;; (!TARGET_VFP_SINGLE).
6350 (define_expand "cbranchdf4"
6351 [(set (pc) (if_then_else
6352 (match_operator 0 "expandable_comparison_operator"
6353 [(match_operand:DF 1 "s_register_operand")
6354 (match_operand:DF 2 "vfp_compare_operand")])
6355 (label_ref (match_operand 3 "" ""))
6357 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6358 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6359 operands[3])); DONE;"
;; cbranchdi4: DImode compare-and-branch; validates, then emits
;; cbranch_cc.
6362 (define_expand "cbranchdi4"
6363 [(set (pc) (if_then_else
6364 (match_operator 0 "expandable_comparison_operator"
6365 [(match_operand:DI 1 "s_register_operand")
6366 (match_operand:DI 2 "cmpdi_operand")])
6367 (label_ref (match_operand 3 "" ""))
6371 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6373 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6379 ;; Comparison and test insns
;; *arm_cmpsi_insn: cmp/cmn of a register against a register or
;; add-operand immediate; 16-bit Thumb-2 encodings for the first two
;; alternatives.  NOTE(review): output templates (6385-6391) are not
;; visible in this fragment.
6381 (define_insn "*arm_cmpsi_insn"
6382 [(set (reg:CC CC_REGNUM)
6383 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6384 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6392 [(set_attr "conds" "set")
6393 (set_attr "arch" "t2,t2,any,any,any")
6394 (set_attr "length" "2,2,4,4,4")
6395 (set_attr "predicable" "yes")
6396 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6397 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; *cmpsi_shiftsi: cmp of a register against a shifted register
;; (immediate or register shift amount).
6400 (define_insn "*cmpsi_shiftsi"
6401 [(set (reg:CC CC_REGNUM)
6402 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6403 (match_operator:SI 3 "shift_operator"
6404 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6405 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6408 [(set_attr "conds" "set")
6409 (set_attr "shift" "1")
6410 (set_attr "arch" "32,a,a")
6411 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; *cmpsi_shiftsi_swp: same comparison with operands swapped, hence
;; the CC_SWP mode on the condition register.
6413 (define_insn "*cmpsi_shiftsi_swp"
6414 [(set (reg:CC_SWP CC_REGNUM)
6415 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6416 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6417 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6418 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6421 [(set_attr "conds" "set")
6422 (set_attr "shift" "1")
6423 (set_attr "arch" "32,a,a")
6424 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; *arm_cmpsi_negshiftsi_si: compare against a negated shifted
;; register (cmn with shift); only the Z flag result is valid (CC_Z).
6426 (define_insn "*arm_cmpsi_negshiftsi_si"
6427 [(set (reg:CC_Z CC_REGNUM)
6429 (neg:SI (match_operator:SI 1 "shift_operator"
6430 [(match_operand:SI 2 "s_register_operand" "r")
6431 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6432 (match_operand:SI 0 "s_register_operand" "r")))]
6435 [(set_attr "conds" "set")
6436 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6437 (const_string "alus_shift_imm")
6438 (const_string "alus_shift_reg")))
6439 (set_attr "predicable" "yes")]
6442 ;; DImode comparisons. The generic code generates branches that
6443 ;; if-conversion cannot reduce to a conditional compare, so we do
;; *arm_cmpdi_insn: signed DImode compare as cmp on the low words
;; followed by sbcs into a scratch — leaves N, C and V usable (CC_NCV).
6446 (define_insn "*arm_cmpdi_insn"
6447 [(set (reg:CC_NCV CC_REGNUM)
6448 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6449 (match_operand:DI 1 "arm_di_operand" "rDi")))
6450 (clobber (match_scratch:SI 2 "=r"))]
6452 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6453 [(set_attr "conds" "set")
6454 (set_attr "length" "8")
6455 (set_attr "type" "multiple")]
;; *arm_cmpdi_unsigned: unsigned DImode compare (C and Z valid).
;; Split after reload into a compare of the high words and a
;; conditionally-executed compare of the low words on equality.
6458 (define_insn_and_split "*arm_cmpdi_unsigned"
6459 [(set (reg:CC_CZ CC_REGNUM)
6460 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6461 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6464 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6465 "&& reload_completed"
6466 [(set (reg:CC CC_REGNUM)
6467 (compare:CC (match_dup 2) (match_dup 3)))
6468 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6469 (set (reg:CC CC_REGNUM)
6470 (compare:CC (match_dup 0) (match_dup 1))))]
6472 operands[2] = gen_highpart (SImode, operands[0]);
6473 operands[0] = gen_lowpart (SImode, operands[0]);
6474 if (CONST_INT_P (operands[1]))
6475 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6477 operands[3] = gen_highpart (SImode, operands[1]);
6478 operands[1] = gen_lowpart (SImode, operands[1]);
6480 [(set_attr "conds" "set")
6481 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6482 (set_attr "arch" "t2,t2,t2,a")
6483 (set_attr "length" "6,6,10,8")
6484 (set_attr "type" "multiple")]
;; *arm_cmpdi_zero: test a DImode value against zero by ORing its
;; halves into a scratch (orrs) — only Z is meaningful (CC_Z).
6487 (define_insn "*arm_cmpdi_zero"
6488 [(set (reg:CC_Z CC_REGNUM)
6489 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6491 (clobber (match_scratch:SI 1 "=r"))]
6493 "orrs%?\\t%1, %Q0, %R0"
6494 [(set_attr "conds" "set")
6495 (set_attr "type" "logics_reg")]
6498 ; This insn allows redundant compares to be removed by cse, nothing should
6499 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6500 ; is deleted later on. The match_dup will match the mode here, so that
6501 ; mode changes of the condition codes aren't lost by this even though we don't
6502 ; specify what they are.
6504 (define_insn "*deleted_compare"
6505 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6507 "\\t%@ deleted compare"
6508 [(set_attr "conds" "set")
6509 (set_attr "length" "0")
6510 (set_attr "type" "no_insn")]
6514 ;; Conditional branch insns
;; cbranch_cc: generate the compare (arm_gen_compare_reg) and rewrite
;; the branch to test the resulting CC register against zero.
6516 (define_expand "cbranch_cc"
6518 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6519 (match_operand 2 "" "")])
6520 (label_ref (match_operand 3 "" ""))
6523 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6524 operands[1], operands[2], NULL_RTX);
6525 operands[2] = const0_rtx;"
6529 ;; Patterns to match conditional branch insns.
;; arm_cond_branch: b<cond> on a CC register.  The arm_ccfsm_state
;; bookkeeping supports the conditional-execution state machine in
;; final output; length attr picks the 2-byte Thumb-2 encoding when
;; the target label is within b<cond> short range.
6532 (define_insn "arm_cond_branch"
6534 (if_then_else (match_operator 1 "arm_comparison_operator"
6535 [(match_operand 2 "cc_register" "") (const_int 0)])
6536 (label_ref (match_operand 0 "" ""))
6540 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6542 arm_ccfsm_state += 2;
6545 return \"b%d1\\t%l0\";
6547 [(set_attr "conds" "use")
6548 (set_attr "type" "branch")
6549 (set (attr "length")
6551 (and (match_test "TARGET_THUMB2")
6552 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6553 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; *arm_cond_branch_reversed: same as above with the branch taken on
;; the inverse condition (%D1 instead of %d1).
6558 (define_insn "*arm_cond_branch_reversed"
6560 (if_then_else (match_operator 1 "arm_comparison_operator"
6561 [(match_operand 2 "cc_register" "") (const_int 0)])
6563 (label_ref (match_operand 0 "" ""))))]
6566 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6568 arm_ccfsm_state += 2;
6571 return \"b%D1\\t%l0\";
6573 [(set_attr "conds" "use")
6574 (set_attr "type" "branch")
6575 (set (attr "length")
6577 (and (match_test "TARGET_THUMB2")
6578 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6579 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; cstore_cc: emit the compare and rewrite the store-flag operation
;; to test the CC register against zero.
6588 (define_expand "cstore_cc"
6589 [(set (match_operand:SI 0 "s_register_operand")
6590 (match_operator:SI 1 "" [(match_operand 2 "" "")
6591 (match_operand 3 "" "")]))]
6593 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6594 operands[2], operands[3], NULL_RTX);
6595 operands[3] = const0_rtx;"
;; *mov_scc: store-flag — split into a conditional move yielding
;; 1 (condition true) / 0 (false); emitted as mov%D1 #0 / mov%d1 #1.
6598 (define_insn_and_split "*mov_scc"
6599 [(set (match_operand:SI 0 "s_register_operand" "=r")
6600 (match_operator:SI 1 "arm_comparison_operator_mode"
6601 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6603 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6606 (if_then_else:SI (match_dup 1)
6610 [(set_attr "conds" "use")
6611 (set_attr "length" "8")
6612 (set_attr "type" "multiple")]
;; *negscc_borrow: negated store-flag computed directly from the
;; borrow (carry) flag — single adc-class instruction.
6615 (define_insn "*negscc_borrow"
6616 [(set (match_operand:SI 0 "s_register_operand" "=r")
6617 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
6620 [(set_attr "conds" "use")
6621 (set_attr "length" "4")
6622 (set_attr "type" "adc_reg")]
;; *mov_negscc: -1/0 store-flag (mov #0 / mvn #0), excluded when the
;; borrow pattern above applies.
6625 (define_insn_and_split "*mov_negscc"
6626 [(set (match_operand:SI 0 "s_register_operand" "=r")
6627 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6628 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6629 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
6630 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6633 (if_then_else:SI (match_dup 1)
6637 operands[3] = GEN_INT (~0);
6639 [(set_attr "conds" "use")
6640 (set_attr "length" "8")
6641 (set_attr "type" "multiple")]
;; *mov_notscc: bitwise-not of the store-flag result: ~1 = -2 when
;; true, ~0 = -1 when false (mvn pair).
6644 (define_insn_and_split "*mov_notscc"
6645 [(set (match_operand:SI 0 "s_register_operand" "=r")
6646 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6647 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6649 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6652 (if_then_else:SI (match_dup 1)
6656 operands[3] = GEN_INT (~1);
6657 operands[4] = GEN_INT (~0);
6659 [(set_attr "conds" "use")
6660 (set_attr "length" "8")
6661 (set_attr "type" "multiple")]
6664 (define_expand "cstoresi4"
6665 [(set (match_operand:SI 0 "s_register_operand")
6666 (match_operator:SI 1 "expandable_comparison_operator"
6667 [(match_operand:SI 2 "s_register_operand")
6668 (match_operand:SI 3 "reg_or_int_operand")]))]
6669 "TARGET_32BIT || TARGET_THUMB1"
6671 rtx op3, scratch, scratch2;
6675 if (!arm_add_operand (operands[3], SImode))
6676 operands[3] = force_reg (SImode, operands[3]);
6677 emit_insn (gen_cstore_cc (operands[0], operands[1],
6678 operands[2], operands[3]));
6682 if (operands[3] == const0_rtx)
6684 switch (GET_CODE (operands[1]))
6687 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6691 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6695 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6696 NULL_RTX, 0, OPTAB_WIDEN);
6697 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6698 NULL_RTX, 0, OPTAB_WIDEN);
6699 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6700 operands[0], 1, OPTAB_WIDEN);
6704 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6706 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6707 NULL_RTX, 1, OPTAB_WIDEN);
6711 scratch = expand_binop (SImode, ashr_optab, operands[2],
6712 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6713 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6714 NULL_RTX, 0, OPTAB_WIDEN);
6715 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6719 /* LT is handled by generic code. No need for unsigned with 0. */
6726 switch (GET_CODE (operands[1]))
6729 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6730 NULL_RTX, 0, OPTAB_WIDEN);
6731 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6735 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6736 NULL_RTX, 0, OPTAB_WIDEN);
6737 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6741 op3 = force_reg (SImode, operands[3]);
6743 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6744 NULL_RTX, 1, OPTAB_WIDEN);
6745 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6746 NULL_RTX, 0, OPTAB_WIDEN);
6747 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6753 if (!thumb1_cmp_operand (op3, SImode))
6754 op3 = force_reg (SImode, op3);
6755 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6756 NULL_RTX, 0, OPTAB_WIDEN);
6757 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6758 NULL_RTX, 1, OPTAB_WIDEN);
6759 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6764 op3 = force_reg (SImode, operands[3]);
6765 scratch = force_reg (SImode, const0_rtx);
6766 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6772 if (!thumb1_cmp_operand (op3, SImode))
6773 op3 = force_reg (SImode, op3);
6774 scratch = force_reg (SImode, const0_rtx);
6775 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6781 if (!thumb1_cmp_operand (op3, SImode))
6782 op3 = force_reg (SImode, op3);
6783 scratch = gen_reg_rtx (SImode);
6784 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6788 op3 = force_reg (SImode, operands[3]);
6789 scratch = gen_reg_rtx (SImode);
6790 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6793 /* No good sequences for GT, LT. */
6800 (define_expand "cstorehf4"
6801 [(set (match_operand:SI 0 "s_register_operand")
6802 (match_operator:SI 1 "expandable_comparison_operator"
6803 [(match_operand:HF 2 "s_register_operand")
6804 (match_operand:HF 3 "vfp_compare_operand")]))]
6805 "TARGET_VFP_FP16INST"
6807 if (!arm_validize_comparison (&operands[1],
6812 emit_insn (gen_cstore_cc (operands[0], operands[1],
6813 operands[2], operands[3]));
6818 (define_expand "cstoresf4"
6819 [(set (match_operand:SI 0 "s_register_operand")
6820 (match_operator:SI 1 "expandable_comparison_operator"
6821 [(match_operand:SF 2 "s_register_operand")
6822 (match_operand:SF 3 "vfp_compare_operand")]))]
6823 "TARGET_32BIT && TARGET_HARD_FLOAT"
6824 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6825 operands[2], operands[3])); DONE;"
6828 (define_expand "cstoredf4"
6829 [(set (match_operand:SI 0 "s_register_operand")
6830 (match_operator:SI 1 "expandable_comparison_operator"
6831 [(match_operand:DF 2 "s_register_operand")
6832 (match_operand:DF 3 "vfp_compare_operand")]))]
6833 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6834 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6835 operands[2], operands[3])); DONE;"
6838 (define_expand "cstoredi4"
6839 [(set (match_operand:SI 0 "s_register_operand")
6840 (match_operator:SI 1 "expandable_comparison_operator"
6841 [(match_operand:DI 2 "s_register_operand")
6842 (match_operand:DI 3 "cmpdi_operand")]))]
6845 if (!arm_validize_comparison (&operands[1],
6849 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6856 ;; Conditional move insns
6858 (define_expand "movsicc"
6859 [(set (match_operand:SI 0 "s_register_operand")
6860 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6861 (match_operand:SI 2 "arm_not_operand")
6862 (match_operand:SI 3 "arm_not_operand")))]
6869 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6870 &XEXP (operands[1], 1)))
6873 code = GET_CODE (operands[1]);
6874 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6875 XEXP (operands[1], 1), NULL_RTX);
6876 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6880 (define_expand "movhfcc"
6881 [(set (match_operand:HF 0 "s_register_operand")
6882 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6883 (match_operand:HF 2 "s_register_operand")
6884 (match_operand:HF 3 "s_register_operand")))]
6885 "TARGET_VFP_FP16INST"
6888 enum rtx_code code = GET_CODE (operands[1]);
6891 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6892 &XEXP (operands[1], 1)))
6895 code = GET_CODE (operands[1]);
6896 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6897 XEXP (operands[1], 1), NULL_RTX);
6898 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6902 (define_expand "movsfcc"
6903 [(set (match_operand:SF 0 "s_register_operand")
6904 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6905 (match_operand:SF 2 "s_register_operand")
6906 (match_operand:SF 3 "s_register_operand")))]
6907 "TARGET_32BIT && TARGET_HARD_FLOAT"
6910 enum rtx_code code = GET_CODE (operands[1]);
6913 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6914 &XEXP (operands[1], 1)))
6917 code = GET_CODE (operands[1]);
6918 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6919 XEXP (operands[1], 1), NULL_RTX);
6920 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6924 (define_expand "movdfcc"
6925 [(set (match_operand:DF 0 "s_register_operand")
6926 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6927 (match_operand:DF 2 "s_register_operand")
6928 (match_operand:DF 3 "s_register_operand")))]
6929 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6932 enum rtx_code code = GET_CODE (operands[1]);
6935 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6936 &XEXP (operands[1], 1)))
6938 code = GET_CODE (operands[1]);
6939 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6940 XEXP (operands[1], 1), NULL_RTX);
6941 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6945 (define_insn "*cmov<mode>"
6946 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6947 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6948 [(match_operand 2 "cc_register" "") (const_int 0)])
6949 (match_operand:SDF 3 "s_register_operand"
6951 (match_operand:SDF 4 "s_register_operand"
6952 "<F_constraint>")))]
6953 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6956 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6963 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6968 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6974 [(set_attr "conds" "use")
6975 (set_attr "type" "fcsel")]
6978 (define_insn "*cmovhf"
6979 [(set (match_operand:HF 0 "s_register_operand" "=t")
6980 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
6981 [(match_operand 2 "cc_register" "") (const_int 0)])
6982 (match_operand:HF 3 "s_register_operand" "t")
6983 (match_operand:HF 4 "s_register_operand" "t")))]
6984 "TARGET_VFP_FP16INST"
6987 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6994 return \"vsel%d1.f16\\t%0, %3, %4\";
6999 return \"vsel%D1.f16\\t%0, %4, %3\";
7005 [(set_attr "conds" "use")
7006 (set_attr "type" "fcsel")]
7009 (define_insn_and_split "*movsicc_insn"
7010 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7012 (match_operator 3 "arm_comparison_operator"
7013 [(match_operand 4 "cc_register" "") (const_int 0)])
7014 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7015 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7026 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7027 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7028 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7029 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7030 "&& reload_completed"
7033 enum rtx_code rev_code;
7037 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7039 gen_rtx_SET (operands[0], operands[1])));
7041 rev_code = GET_CODE (operands[3]);
7042 mode = GET_MODE (operands[4]);
7043 if (mode == CCFPmode || mode == CCFPEmode)
7044 rev_code = reverse_condition_maybe_unordered (rev_code);
7046 rev_code = reverse_condition (rev_code);
7048 rev_cond = gen_rtx_fmt_ee (rev_code,
7052 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7054 gen_rtx_SET (operands[0], operands[2])));
7057 [(set_attr "length" "4,4,4,4,8,8,8,8")
7058 (set_attr "conds" "use")
7059 (set_attr_alternative "type"
7060 [(if_then_else (match_operand 2 "const_int_operand" "")
7061 (const_string "mov_imm")
7062 (const_string "mov_reg"))
7063 (const_string "mvn_imm")
7064 (if_then_else (match_operand 1 "const_int_operand" "")
7065 (const_string "mov_imm")
7066 (const_string "mov_reg"))
7067 (const_string "mvn_imm")
7068 (const_string "multiple")
7069 (const_string "multiple")
7070 (const_string "multiple")
7071 (const_string "multiple")])]
7074 (define_insn "*movsfcc_soft_insn"
7075 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7076 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7077 [(match_operand 4 "cc_register" "") (const_int 0)])
7078 (match_operand:SF 1 "s_register_operand" "0,r")
7079 (match_operand:SF 2 "s_register_operand" "r,0")))]
7080 "TARGET_ARM && TARGET_SOFT_FLOAT"
7084 [(set_attr "conds" "use")
7085 (set_attr "type" "mov_reg")]
7089 ;; Jump and linkage insns
7091 (define_expand "jump"
7093 (label_ref (match_operand 0 "" "")))]
7098 (define_insn "*arm_jump"
7100 (label_ref (match_operand 0 "" "")))]
7104 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7106 arm_ccfsm_state += 2;
7109 return \"b%?\\t%l0\";
7112 [(set_attr "predicable" "yes")
7113 (set (attr "length")
7115 (and (match_test "TARGET_THUMB2")
7116 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7117 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7120 (set_attr "type" "branch")]
7123 (define_expand "call"
7124 [(parallel [(call (match_operand 0 "memory_operand")
7125 (match_operand 1 "general_operand"))
7126 (use (match_operand 2 "" ""))
7127 (clobber (reg:SI LR_REGNUM))])]
7132 tree addr = MEM_EXPR (operands[0]);
7134 /* In an untyped call, we can get NULL for operand 2. */
7135 if (operands[2] == NULL_RTX)
7136 operands[2] = const0_rtx;
7138 /* Decide if we should generate indirect calls by loading the
7139 32-bit address of the callee into a register before performing the
7141 callee = XEXP (operands[0], 0);
7142 if (GET_CODE (callee) == SYMBOL_REF
7143 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7145 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7147 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7148 /* Indirect call: set r9 with FDPIC value of callee. */
7149 XEXP (operands[0], 0)
7150 = arm_load_function_descriptor (XEXP (operands[0], 0));
7152 if (detect_cmse_nonsecure_call (addr))
7154 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7156 emit_call_insn (pat);
7160 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7161 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7164 /* Restore FDPIC register (r9) after call. */
7167 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7168 rtx initial_fdpic_reg
7169 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7171 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7172 initial_fdpic_reg));
7179 (define_insn "restore_pic_register_after_call"
7180 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7181 (unspec:SI [(match_dup 0)
7182 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7183 UNSPEC_PIC_RESTORE))]
;; Expander behind the "call" named pattern: emits a plain (non-CMSE,
;; non-secure) call.  Operand 0 is the MEM naming the callee, operand 1
;; the stack-argument size, operand 2 the extra usage operand (forced to
;; const0_rtx by the "call" expander when the front end passes NULL).
;; LR is clobbered because it receives the return address (LR_REGNUM,
;; r14).  No C expansion code: the template is used as-is.
7190 (define_expand "call_internal"
7191 [(parallel [(call (match_operand 0 "memory_operand")
7192 (match_operand 1 "general_operand"))
7193 (use (match_operand 2 "" ""))
7194 (clobber (reg:SI LR_REGNUM))])])
7196 (define_expand "nonsecure_call_internal"
7197 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7198 UNSPEC_NONSECURE_MEM)
7199 (match_operand 1 "general_operand"))
7200 (use (match_operand 2 "" ""))
7201 (clobber (reg:SI LR_REGNUM))])]
7206 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7207 gen_rtx_REG (SImode, R4_REGNUM),
7210 operands[0] = replace_equiv_address (operands[0], tmp);
7213 (define_insn "*call_reg_armv5"
7214 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7215 (match_operand 1 "" ""))
7216 (use (match_operand 2 "" ""))
7217 (clobber (reg:SI LR_REGNUM))]
7218 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7220 [(set_attr "type" "call")]
7223 (define_insn "*call_reg_arm"
7224 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7225 (match_operand 1 "" ""))
7226 (use (match_operand 2 "" ""))
7227 (clobber (reg:SI LR_REGNUM))]
7228 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7230 return output_call (operands);
7232 ;; length is worst case, normally it is only two
7233 [(set_attr "length" "12")
7234 (set_attr "type" "call")]
7238 (define_expand "call_value"
7239 [(parallel [(set (match_operand 0 "" "")
7240 (call (match_operand 1 "memory_operand")
7241 (match_operand 2 "general_operand")))
7242 (use (match_operand 3 "" ""))
7243 (clobber (reg:SI LR_REGNUM))])]
7248 tree addr = MEM_EXPR (operands[1]);
7250 /* In an untyped call, we can get NULL for operand 2. */
7251 if (operands[3] == 0)
7252 operands[3] = const0_rtx;
7254 /* Decide if we should generate indirect calls by loading the
7255 32-bit address of the callee into a register before performing the
7257 callee = XEXP (operands[1], 0);
7258 if (GET_CODE (callee) == SYMBOL_REF
7259 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7261 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7263 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7264 /* Indirect call: set r9 with FDPIC value of callee. */
7265 XEXP (operands[1], 0)
7266 = arm_load_function_descriptor (XEXP (operands[1], 0));
7268 if (detect_cmse_nonsecure_call (addr))
7270 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7271 operands[2], operands[3]);
7272 emit_call_insn (pat);
7276 pat = gen_call_value_internal (operands[0], operands[1],
7277 operands[2], operands[3]);
7278 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7281 /* Restore FDPIC register (r9) after call. */
7284 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7285 rtx initial_fdpic_reg
7286 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7288 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7289 initial_fdpic_reg));
;; Value-returning counterpart of call_internal, used by the
;; "call_value" named pattern.  Operand 0 receives the function result,
;; operand 1 is the MEM naming the callee, operand 2 the stack-argument
;; size, operand 3 the extra usage operand (forced to const0_rtx by the
;; "call_value" expander when NULL).  LR (r14) is clobbered as the
;; return-address register.  No C expansion code: used as-is.
7296 (define_expand "call_value_internal"
7297 [(parallel [(set (match_operand 0 "" "")
7298 (call (match_operand 1 "memory_operand")
7299 (match_operand 2 "general_operand")))
7300 (use (match_operand 3 "" ""))
7301 (clobber (reg:SI LR_REGNUM))])])
7303 (define_expand "nonsecure_call_value_internal"
7304 [(parallel [(set (match_operand 0 "" "")
7305 (call (unspec:SI [(match_operand 1 "memory_operand")]
7306 UNSPEC_NONSECURE_MEM)
7307 (match_operand 2 "general_operand")))
7308 (use (match_operand 3 "" ""))
7309 (clobber (reg:SI LR_REGNUM))])]
7314 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7315 gen_rtx_REG (SImode, R4_REGNUM),
7318 operands[1] = replace_equiv_address (operands[1], tmp);
7321 (define_insn "*call_value_reg_armv5"
7322 [(set (match_operand 0 "" "")
7323 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7324 (match_operand 2 "" "")))
7325 (use (match_operand 3 "" ""))
7326 (clobber (reg:SI LR_REGNUM))]
7327 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7329 [(set_attr "type" "call")]
7332 (define_insn "*call_value_reg_arm"
7333 [(set (match_operand 0 "" "")
7334 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7335 (match_operand 2 "" "")))
7336 (use (match_operand 3 "" ""))
7337 (clobber (reg:SI LR_REGNUM))]
7338 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7340 return output_call (&operands[1]);
7342 [(set_attr "length" "12")
7343 (set_attr "type" "call")]
7346 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7347 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7349 (define_insn "*call_symbol"
7350 [(call (mem:SI (match_operand:SI 0 "" ""))
7351 (match_operand 1 "" ""))
7352 (use (match_operand 2 "" ""))
7353 (clobber (reg:SI LR_REGNUM))]
7355 && !SIBLING_CALL_P (insn)
7356 && (GET_CODE (operands[0]) == SYMBOL_REF)
7357 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7360 rtx op = operands[0];
7362 /* Switch mode now when possible. */
7363 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7364 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7365 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7367 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7369 [(set_attr "type" "call")]
7372 (define_insn "*call_value_symbol"
7373 [(set (match_operand 0 "" "")
7374 (call (mem:SI (match_operand:SI 1 "" ""))
7375 (match_operand:SI 2 "" "")))
7376 (use (match_operand 3 "" ""))
7377 (clobber (reg:SI LR_REGNUM))]
7379 && !SIBLING_CALL_P (insn)
7380 && (GET_CODE (operands[1]) == SYMBOL_REF)
7381 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7384 rtx op = operands[1];
7386 /* Switch mode now when possible. */
7387 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7388 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7389 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7391 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7393 [(set_attr "type" "call")]
7396 (define_expand "sibcall_internal"
7397 [(parallel [(call (match_operand 0 "memory_operand")
7398 (match_operand 1 "general_operand"))
7400 (use (match_operand 2 "" ""))])])
7402 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7403 (define_expand "sibcall"
7404 [(parallel [(call (match_operand 0 "memory_operand")
7405 (match_operand 1 "general_operand"))
7407 (use (match_operand 2 "" ""))])]
7413 if ((!REG_P (XEXP (operands[0], 0))
7414 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7415 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7416 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7417 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7419 if (operands[2] == NULL_RTX)
7420 operands[2] = const0_rtx;
7422 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7423 arm_emit_call_insn (pat, operands[0], true);
7428 (define_expand "sibcall_value_internal"
7429 [(parallel [(set (match_operand 0 "" "")
7430 (call (match_operand 1 "memory_operand")
7431 (match_operand 2 "general_operand")))
7433 (use (match_operand 3 "" ""))])])
7435 (define_expand "sibcall_value"
7436 [(parallel [(set (match_operand 0 "" "")
7437 (call (match_operand 1 "memory_operand")
7438 (match_operand 2 "general_operand")))
7440 (use (match_operand 3 "" ""))])]
7446 if ((!REG_P (XEXP (operands[1], 0))
7447 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7448 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7449 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7450 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7452 if (operands[3] == NULL_RTX)
7453 operands[3] = const0_rtx;
7455 pat = gen_sibcall_value_internal (operands[0], operands[1],
7456 operands[2], operands[3]);
7457 arm_emit_call_insn (pat, operands[1], true);
7462 (define_insn "*sibcall_insn"
7463 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7464 (match_operand 1 "" ""))
7466 (use (match_operand 2 "" ""))]
7467 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7469 if (which_alternative == 1)
7470 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7473 if (arm_arch5t || arm_arch4t)
7474 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7476 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7479 [(set_attr "type" "call")]
7482 (define_insn "*sibcall_value_insn"
7483 [(set (match_operand 0 "" "")
7484 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7485 (match_operand 2 "" "")))
7487 (use (match_operand 3 "" ""))]
7488 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7490 if (which_alternative == 1)
7491 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7494 if (arm_arch5t || arm_arch4t)
7495 return \"bx%?\\t%1\";
7497 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7500 [(set_attr "type" "call")]
7503 (define_expand "<return_str>return"
7505 "(TARGET_ARM || (TARGET_THUMB2
7506 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7507 && !IS_STACKALIGN (arm_current_func_type ())))
7508 <return_cond_false>"
7513 thumb2_expand_return (<return_simple_p>);
7520 ;; Often the return insn will be the same as loading from memory, so set attr
7521 (define_insn "*arm_return"
7523 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7526 if (arm_ccfsm_state == 2)
7528 arm_ccfsm_state += 2;
7531 return output_return_instruction (const_true_rtx, true, false, false);
7533 [(set_attr "type" "load_4")
7534 (set_attr "length" "12")
7535 (set_attr "predicable" "yes")]
7538 (define_insn "*cond_<return_str>return"
7540 (if_then_else (match_operator 0 "arm_comparison_operator"
7541 [(match_operand 1 "cc_register" "") (const_int 0)])
7544 "TARGET_ARM <return_cond_true>"
7547 if (arm_ccfsm_state == 2)
7549 arm_ccfsm_state += 2;
7552 return output_return_instruction (operands[0], true, false,
7555 [(set_attr "conds" "use")
7556 (set_attr "length" "12")
7557 (set_attr "type" "load_4")]
7560 (define_insn "*cond_<return_str>return_inverted"
7562 (if_then_else (match_operator 0 "arm_comparison_operator"
7563 [(match_operand 1 "cc_register" "") (const_int 0)])
7566 "TARGET_ARM <return_cond_true>"
7569 if (arm_ccfsm_state == 2)
7571 arm_ccfsm_state += 2;
7574 return output_return_instruction (operands[0], true, true,
7577 [(set_attr "conds" "use")
7578 (set_attr "length" "12")
7579 (set_attr "type" "load_4")]
7582 (define_insn "*arm_simple_return"
7587 if (arm_ccfsm_state == 2)
7589 arm_ccfsm_state += 2;
7592 return output_return_instruction (const_true_rtx, true, false, true);
7594 [(set_attr "type" "branch")
7595 (set_attr "length" "4")
7596 (set_attr "predicable" "yes")]
7599 ;; Generate a sequence of instructions to determine if the processor is
7600 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7603 (define_expand "return_addr_mask"
7605 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7607 (set (match_operand:SI 0 "s_register_operand")
7608 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7610 (const_int 67108860)))] ; 0x03fffffc
7613 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7616 (define_insn "*check_arch2"
7617 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7618 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7621 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7622 [(set_attr "length" "8")
7623 (set_attr "conds" "set")
7624 (set_attr "type" "multiple")]
7627 ;; Call subroutine returning any type.
7629 (define_expand "untyped_call"
7630 [(parallel [(call (match_operand 0 "" "")
7632 (match_operand 1 "" "")
7633 (match_operand 2 "" "")])]
7634 "TARGET_EITHER && !TARGET_FDPIC"
7638 rtx par = gen_rtx_PARALLEL (VOIDmode,
7639 rtvec_alloc (XVECLEN (operands[2], 0)));
7640 rtx addr = gen_reg_rtx (Pmode);
7644 emit_move_insn (addr, XEXP (operands[1], 0));
7645 mem = change_address (operands[1], BLKmode, addr);
7647 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7649 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7651 /* Default code only uses r0 as a return value, but we could
7652 be using anything up to 4 registers. */
7653 if (REGNO (src) == R0_REGNUM)
7654 src = gen_rtx_REG (TImode, R0_REGNUM);
7656 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7658 size += GET_MODE_SIZE (GET_MODE (src));
7661 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7665 for (i = 0; i < XVECLEN (par, 0); i++)
7667 HOST_WIDE_INT offset = 0;
7668 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7671 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7673 mem = change_address (mem, GET_MODE (reg), NULL);
7674 if (REGNO (reg) == R0_REGNUM)
7676 /* On thumb we have to use a write-back instruction. */
7677 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7678 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7679 size = TARGET_ARM ? 16 : 0;
7683 emit_move_insn (mem, reg);
7684 size = GET_MODE_SIZE (GET_MODE (reg));
7688 /* The optimizer does not know that the call sets the function value
7689 registers we stored in the result block. We avoid problems by
7690 claiming that all hard registers are used and clobbered at this
7692 emit_insn (gen_blockage ());
7698 (define_expand "untyped_return"
7699 [(match_operand:BLK 0 "memory_operand")
7700 (match_operand 1 "" "")]
7701 "TARGET_EITHER && !TARGET_FDPIC"
7705 rtx addr = gen_reg_rtx (Pmode);
7709 emit_move_insn (addr, XEXP (operands[0], 0));
7710 mem = change_address (operands[0], BLKmode, addr);
7712 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7714 HOST_WIDE_INT offset = 0;
7715 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7718 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7720 mem = change_address (mem, GET_MODE (reg), NULL);
7721 if (REGNO (reg) == R0_REGNUM)
7723 /* On thumb we have to use a write-back instruction. */
7724 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7725 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7726 size = TARGET_ARM ? 16 : 0;
7730 emit_move_insn (reg, mem);
7731 size = GET_MODE_SIZE (GET_MODE (reg));
7735 /* Emit USE insns before the return. */
7736 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7737 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7739 /* Construct the return. */
7740 expand_naked_return ();
7746 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7747 ;; all of memory. This blocks insns from being moved across this point.
7749 (define_insn "blockage"
7750 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7753 [(set_attr "length" "0")
7754 (set_attr "type" "block")]
7757 ;; Since we hard code r0 here use the 'o' constraint to prevent
7758 ;; provoking undefined behaviour in the hardware with putting out
7759 ;; auto-increment operations with potentially r0 as the base register.
7760 (define_insn "probe_stack"
7761 [(set (match_operand:SI 0 "memory_operand" "=o")
7762 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7765 [(set_attr "type" "store_4")
7766 (set_attr "predicable" "yes")]
7769 (define_insn "probe_stack_range"
7770 [(set (match_operand:SI 0 "register_operand" "=r")
7771 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7772 (match_operand:SI 2 "register_operand" "r")]
7773 VUNSPEC_PROBE_STACK_RANGE))]
7776 return output_probe_stack_range (operands[0], operands[2]);
7778 [(set_attr "type" "multiple")
7779 (set_attr "conds" "clob")]
7782 ;; Named patterns for stack smashing protection.
7783 (define_expand "stack_protect_combined_set"
7785 [(set (match_operand:SI 0 "memory_operand")
7786 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7788 (clobber (match_scratch:SI 2 ""))
7789 (clobber (match_scratch:SI 3 ""))])]
7794 ;; Use a separate insn from the above expand to be able to have the mem outside
7795 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7796 ;; try to reload the guard since we need to control how PIC access is done in
7797 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7798 ;; legitimize_pic_address ()).
7799 (define_insn_and_split "*stack_protect_combined_set_insn"
7800 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7801 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7803 (clobber (match_scratch:SI 2 "=&l,&r"))
7804 (clobber (match_scratch:SI 3 "=&l,&r"))]
7808 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7810 (clobber (match_dup 2))])]
7818 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7820 pic_reg = operands[3];
7822 /* Forces recomputing of GOT base now. */
7823 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7824 true /*compute_now*/);
7828 if (address_operand (operands[1], SImode))
7829 operands[2] = operands[1];
7832 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7833 emit_move_insn (operands[2], mem);
7837 [(set_attr "arch" "t1,32")]
7840 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7841 ;; canary value does not live beyond the life of this sequence.
7842 (define_insn "*stack_protect_set_insn"
7843 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7844 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7846 (clobber (match_dup 1))]
7849 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7850 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7851 [(set_attr "length" "8,12")
7852 (set_attr "conds" "clob,nocond")
7853 (set_attr "type" "multiple")
7854 (set_attr "arch" "t1,32")]
7857 (define_expand "stack_protect_combined_test"
7861 (eq (match_operand:SI 0 "memory_operand")
7862 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7864 (label_ref (match_operand 2))
7866 (clobber (match_scratch:SI 3 ""))
7867 (clobber (match_scratch:SI 4 ""))
7868 (clobber (reg:CC CC_REGNUM))])]
7873 ;; Use a separate insn from the above expand to be able to have the mem outside
7874 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7875 ;; try to reload the guard since we need to control how PIC access is done in
7876 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7877 ;; legitimize_pic_address ()).
7878 (define_insn_and_split "*stack_protect_combined_test_insn"
7881 (eq (match_operand:SI 0 "memory_operand" "m,m")
7882 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7884 (label_ref (match_operand 2))
7886 (clobber (match_scratch:SI 3 "=&l,&r"))
7887 (clobber (match_scratch:SI 4 "=&l,&r"))
7888 (clobber (reg:CC CC_REGNUM))]
7901 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7903 pic_reg = operands[4];
7905 /* Forces recomputing of GOT base now. */
7906 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7907 true /*compute_now*/);
7911 if (address_operand (operands[1], SImode))
7912 operands[3] = operands[1];
7915 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7916 emit_move_insn (operands[3], mem);
7921 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7923 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7924 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7925 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7929 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7931 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7932 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7937 [(set_attr "arch" "t1,32")]
7940 (define_insn "arm_stack_protect_test_insn"
7941 [(set (reg:CC_Z CC_REGNUM)
7942 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7943 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7946 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7947 (clobber (match_dup 2))]
7949 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7950 [(set_attr "length" "8,12")
7951 (set_attr "conds" "set")
7952 (set_attr "type" "multiple")
7953 (set_attr "arch" "t,32")]
;; Switch-statement dispatch.  The casesi expander rebases the index to
;; zero if needed, then picks one of several internal casesi patterns
;; depending on the target (ARM, Thumb-1 PIC, Thumb-2 PIC, Thumb-2).
7956 (define_expand "casesi"
7957 [(match_operand:SI 0 "s_register_operand")	; index to jump on
7958 (match_operand:SI 1 "const_int_operand")	; lower bound
7959 (match_operand:SI 2 "const_int_operand")	; total range
7960 (match_operand:SI 3 "" "")			; table label
7961 (match_operand:SI 4 "" "")]			; Out of range label
7962 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7965 enum insn_code code;
;; Rebase the index so the table is indexed from zero.
7966 if (operands[1] != const0_rtx)
7968 rtx reg = gen_reg_rtx (SImode);
7970 emit_insn (gen_addsi3 (reg, operands[0],
7971 gen_int_mode (-INTVAL (operands[1]),
7977 code = CODE_FOR_arm_casesi_internal;
7978 else if (TARGET_THUMB1)
7979 code = CODE_FOR_thumb1_casesi_internal_pic;
7981 code = CODE_FOR_thumb2_casesi_internal_pic;
7983 code = CODE_FOR_thumb2_casesi_internal;
;; Force the range bound into a register if the chosen pattern's
;; operand-1 predicate rejects the immediate.
7985 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
7986 operands[2] = force_reg (SImode, operands[2]);
7988 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
7989 operands[3], operands[4]));
7994 ;; The USE in this pattern is needed to tell flow analysis that this is
7995 ;; a CASESI insn.  It has no other purpose.
;; Builds the dispatch-table memory reference (table base + index * 4)
;; and marks it read-only and non-trapping.
7996 (define_expand "arm_casesi_internal"
7997 [(parallel [(set (pc)
7999 (leu (match_operand:SI 0 "s_register_operand")
8000 (match_operand:SI 1 "arm_rhs_operand"))
8002 (label_ref:SI (match_operand 3 ""))))
8003 (clobber (reg:CC CC_REGNUM))
8004 (use (label_ref:SI (match_operand 2 "")))])]
8007 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8008 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8009 gen_rtx_LABEL_REF (SImode, operands[2]));
8010 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8011 MEM_READONLY_P (operands[4]) = 1;
8012 MEM_NOTRAP_P (operands[4]) = 1;
;; ARM-mode table jump: compare the index against the bound, then either
;; add into PC (addls) or load PC from the table (ldrls); falls through
;; to a branch to the out-of-range label otherwise.
8015 (define_insn "*arm_casesi_internal"
8016 [(parallel [(set (pc)
8018 (leu (match_operand:SI 0 "s_register_operand" "r")
8019 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8020 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8021 (label_ref:SI (match_operand 2 "" ""))))
8022 (label_ref:SI (match_operand 3 "" ""))))
8023 (clobber (reg:CC CC_REGNUM))
8024 (use (label_ref:SI (match_dup 2)))])]
8028 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8029 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8031 [(set_attr "conds" "clob")
8032 (set_attr "length" "12")
8033 (set_attr "type" "multiple")]
;; Indirect jumps.  Thumb-2 has no "mov pc, reg", so the expander sets the
;; low (Thumb state) bit of the target and uses BX; the ARM-mode insns
;; below write PC directly and deliberately never use BX.
8036 (define_expand "indirect_jump"
8038 (match_operand:SI 0 "s_register_operand"))]
8041 /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
8042 address and use bx.  */
8046 tmp = gen_reg_rtx (SImode);
8047 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8053 ;; NB Never uses BX.
8054 (define_insn "*arm_indirect_jump"
8056 (match_operand:SI 0 "s_register_operand" "r"))]
8058 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8059 [(set_attr "predicable" "yes")
8060 (set_attr "type" "branch")]
;; Jump through a memory operand by loading PC directly.
8063 (define_insn "*load_indirect_jump"
8065 (match_operand:SI 0 "memory_operand" "m"))]
8067 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8068 [(set_attr "type" "load_4")
8069 (set_attr "pool_range" "4096")
8070 (set_attr "neg_pool_range" "4084")
8071 (set_attr "predicable" "yes")]
;; NOTE(review): attribute fragment of an elided pattern (length varies
;; with Thumb vs. ARM encoding).
8081 [(set (attr "length")
8082 (if_then_else (eq_attr "is_thumb" "yes")
8085 (set_attr "type" "mov_reg")]
;; Unconditional trap: emits a raw instruction word via .inst —
;; 0xe7f000f0 in ARM state, 0xdeff in Thumb state (the encodings GCC
;; uses as permanently-undefined/trap instructions).
8089 [(trap_if (const_int 1) (const_int 0))]
8093 return \".inst\\t0xe7f000f0\";
8095 return \".inst\\t0xdeff\";
8097 [(set (attr "length")
8098 (if_then_else (eq_attr "is_thumb" "yes")
8101 (set_attr "type" "trap")
8102 (set_attr "conds" "unconditional")]
8106 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; Combiner patterns folding a shifted operand into an ALU instruction
;; ("<op> Rd, Rn, Rm, lsl #k" etc.).  The first handles multiplication by
;; a power of two, printed as a left shift via the %b modifier.
8108 (define_insn "*<arith_shift_insn>_multsi"
8109 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8111 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8112 (match_operand:SI 3 "power_of_two_operand" ""))
8113 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8115 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8116 [(set_attr "predicable" "yes")
8117 (set_attr "shift" "2")
8118 (set_attr "arch" "a,t2")
8119 (set_attr "type" "alu_shift_imm")])
;; General shifted-operand form; MULT is excluded because the pattern
;; above handles it.  Third alternative allows a register shift amount
;; (ARM mode only, per the "arch" attribute).
8121 (define_insn "*<arith_shift_insn>_shiftsi"
8122 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8124 (match_operator:SI 2 "shift_nomul_operator"
8125 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8126 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8127 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8128 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8129 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8130 [(set_attr "predicable" "yes")
8131 (set_attr "shift" "3")
8132 (set_attr "arch" "a,t2,a")
8133 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
;; Split for a nested (op (op (shift ...) reg) rhs) tree: compute the
;; inner shifted operation into the scratch (operand 8), then apply the
;; outer operator.  (The define_split header line is elided here.)
8136 [(set (match_operand:SI 0 "s_register_operand" "")
8137 (match_operator:SI 1 "shiftable_operator"
8138 [(match_operator:SI 2 "shiftable_operator"
8139 [(match_operator:SI 3 "shift_operator"
8140 [(match_operand:SI 4 "s_register_operand" "")
8141 (match_operand:SI 5 "reg_or_int_operand" "")])
8142 (match_operand:SI 6 "s_register_operand" "")])
8143 (match_operand:SI 7 "arm_rhs_operand" "")]))
8144 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8147 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8150 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variants of the shifted-operand ALU patterns: each sets
;; CC_NOOV (comparison against zero where overflow is irrelevant) and
;; either also writes the arithmetic result or discards it into a
;; scratch ("_scratch" forms).
8153 (define_insn "*arith_shiftsi_compare0"
8154 [(set (reg:CC_NOOV CC_REGNUM)
8156 (match_operator:SI 1 "shiftable_operator"
8157 [(match_operator:SI 3 "shift_operator"
8158 [(match_operand:SI 4 "s_register_operand" "r,r")
8159 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8160 (match_operand:SI 2 "s_register_operand" "r,r")])
8162 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8163 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8166 "%i1s%?\\t%0, %2, %4%S3"
8167 [(set_attr "conds" "set")
8168 (set_attr "shift" "4")
8169 (set_attr "arch" "32,a")
8170 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As above but only the flags are wanted; the result goes to a scratch.
8172 (define_insn "*arith_shiftsi_compare0_scratch"
8173 [(set (reg:CC_NOOV CC_REGNUM)
8175 (match_operator:SI 1 "shiftable_operator"
8176 [(match_operator:SI 3 "shift_operator"
8177 [(match_operand:SI 4 "s_register_operand" "r,r")
8178 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8179 (match_operand:SI 2 "s_register_operand" "r,r")])
8181 (clobber (match_scratch:SI 0 "=r,r"))]
8183 "%i1s%?\\t%0, %2, %4%S3"
8184 [(set_attr "conds" "set")
8185 (set_attr "shift" "4")
8186 (set_attr "arch" "32,a")
8187 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Subtract with a shifted second operand ("sub Rd, Rn, Rm, <shift>").
8189 (define_insn "*sub_shiftsi"
8190 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8191 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8192 (match_operator:SI 2 "shift_operator"
8193 [(match_operand:SI 3 "s_register_operand" "r,r")
8194 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8196 "sub%?\\t%0, %1, %3%S2"
8197 [(set_attr "predicable" "yes")
8198 (set_attr "predicable_short_it" "no")
8199 (set_attr "shift" "3")
8200 (set_attr "arch" "32,a")
8201 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting subtract-with-shift, keeping the result.
8203 (define_insn "*sub_shiftsi_compare0"
8204 [(set (reg:CC_NOOV CC_REGNUM)
8206 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8207 (match_operator:SI 2 "shift_operator"
8208 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8209 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8211 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8212 (minus:SI (match_dup 1)
8213 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8215 "subs%?\\t%0, %1, %3%S2"
8216 [(set_attr "conds" "set")
8217 (set_attr "shift" "3")
8218 (set_attr "arch" "32,a,a")
8219 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Flag-setting subtract-with-shift, result discarded into a scratch.
8221 (define_insn "*sub_shiftsi_compare0_scratch"
8222 [(set (reg:CC_NOOV CC_REGNUM)
8224 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8225 (match_operator:SI 2 "shift_operator"
8226 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8227 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8229 (clobber (match_scratch:SI 0 "=r,r,r"))]
8231 "subs%?\\t%0, %1, %3%S2"
8232 [(set_attr "conds" "set")
8233 (set_attr "shift" "3")
8234 (set_attr "arch" "32,a,a")
8235 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; AND/IOR of a store-flag value with a register, split after reload into
;; a pair of conditionally-executed instructions.  Operand 4 is rebuilt as
;; the original comparison (VOIDmode result) and operand 5 as its reverse;
;; FP compares use reverse_condition_maybe_unordered so NaNs reverse
;; correctly.
8238 (define_insn_and_split "*and_scc"
8239 [(set (match_operand:SI 0 "s_register_operand" "=r")
8240 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8241 [(match_operand 2 "cc_register" "") (const_int 0)])
8242 (match_operand:SI 3 "s_register_operand" "r")))]
8244 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8245 "&& reload_completed"
8246 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8247 (cond_exec (match_dup 4) (set (match_dup 0)
8248 (and:SI (match_dup 3) (const_int 1))))]
8250 machine_mode mode = GET_MODE (operands[2]);
8251 enum rtx_code rc = GET_CODE (operands[1]);
8253 /* Note that operands[4] is the same as operands[1],
8254 but with VOIDmode as the result. */
8255 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8256 if (mode == CCFPmode || mode == CCFPEmode)
8257 rc = reverse_condition_maybe_unordered (rc);
8259 rc = reverse_condition (rc);
8260 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8262 [(set_attr "conds" "use")
8263 (set_attr "type" "multiple")
8264 (set_attr "length" "8")]
;; IOR variant: second alternative needs the copy only when the
;; destination differs from operand 3 (hence the REGNO split condition).
8267 (define_insn_and_split "*ior_scc"
8268 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8269 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8270 [(match_operand 2 "cc_register" "") (const_int 0)])
8271 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8276 "&& reload_completed
8277 && REGNO (operands [0]) != REGNO (operands[3])"
8278 ;; && which_alternative == 1
8279 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8280 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8281 (cond_exec (match_dup 4) (set (match_dup 0)
8282 (ior:SI (match_dup 3) (const_int 1))))]
8284 machine_mode mode = GET_MODE (operands[2]);
8285 enum rtx_code rc = GET_CODE (operands[1]);
8287 /* Note that operands[4] is the same as operands[1],
8288 but with VOIDmode as the result. */
8289 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8290 if (mode == CCFPmode || mode == CCFPEmode)
8291 rc = reverse_condition_maybe_unordered (rc);
8293 rc = reverse_condition (rc);
8294 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8296 [(set_attr "conds" "use")
8297 (set_attr "length" "4,8")
8298 (set_attr "type" "logic_imm,multiple")]
8301 ; A series of splitters for the compare_scc pattern below.  Note that
8302 ; order is important.
;; (lt x 0) -> logical shift right by 31 (sign bit becomes the result).
8304 [(set (match_operand:SI 0 "s_register_operand" "")
8305 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8307 (clobber (reg:CC CC_REGNUM))]
8308 "TARGET_32BIT && reload_completed"
8309 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; (ge x 0) -> bitwise NOT, then shift the (inverted) sign bit down.
8312 [(set (match_operand:SI 0 "s_register_operand" "")
8313 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8315 (clobber (reg:CC CC_REGNUM))]
8316 "TARGET_32BIT && reload_completed"
8317 [(set (match_dup 0) (not:SI (match_dup 1)))
8318 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; (eq x 0) on ARMv5T+: clz gives 32 only for zero, so clz >> 5 is the
;; 0/1 result without touching the flags.
8321 [(set (match_operand:SI 0 "s_register_operand" "")
8322 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8324 (clobber (reg:CC CC_REGNUM))]
8325 "arm_arch5t && TARGET_32BIT"
8326 [(set (match_dup 0) (clz:SI (match_dup 1)))
8327 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; (eq x 0) fallback: rsbs/cond-exec sequence using borrow from 1 - x.
8331 [(set (match_operand:SI 0 "s_register_operand" "")
8332 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8334 (clobber (reg:CC CC_REGNUM))]
8335 "TARGET_32BIT && reload_completed"
8337 [(set (reg:CC CC_REGNUM)
8338 (compare:CC (const_int 1) (match_dup 1)))
8340 (minus:SI (const_int 1) (match_dup 1)))])
8341 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8342 (set (match_dup 0) (const_int 0)))])
;; (ne x const) -> subs with the negated constant, then conditionally
;; set 1 when the flags say not-equal.
8345 [(set (match_operand:SI 0 "s_register_operand" "")
8346 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8347 (match_operand:SI 2 "const_int_operand" "")))
8348 (clobber (reg:CC CC_REGNUM))]
8349 "TARGET_32BIT && reload_completed"
8351 [(set (reg:CC CC_REGNUM)
8352 (compare:CC (match_dup 1) (match_dup 2)))
8353 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8354 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8355 (set (match_dup 0) (const_int 1)))]
8357 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
;; (ne x y) general form: subs into the destination, then conditionally
;; overwrite with 1 if the subtraction was non-zero.
8361 [(set (match_operand:SI 0 "s_register_operand" "")
8362 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8363 (match_operand:SI 2 "arm_add_operand" "")))
8364 (clobber (reg:CC CC_REGNUM))]
8365 "TARGET_32BIT && reload_completed"
8367 [(set (reg:CC_NOOV CC_REGNUM)
8368 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8370 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8371 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8372 (set (match_dup 0) (const_int 1)))])
;; Generic store-flag: after reload, split into a real compare followed
;; by two conditionally-executed moves of 0 (reversed condition, operand
;; 4) and 1 (original condition, operand 5).  FP compares reverse via
;; reverse_condition_maybe_unordered to stay NaN-correct.
8374 (define_insn_and_split "*compare_scc"
8375 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8376 (match_operator:SI 1 "arm_comparison_operator"
8377 [(match_operand:SI 2 "s_register_operand" "r,r")
8378 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8379 (clobber (reg:CC CC_REGNUM))]
8382 "&& reload_completed"
8383 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8384 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8385 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8388 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8389 operands[2], operands[3]);
8390 enum rtx_code rc = GET_CODE (operands[1]);
8392 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8394 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8395 if (mode == CCFPmode || mode == CCFPEmode)
8396 rc = reverse_condition_maybe_unordered (rc);
8398 rc = reverse_condition (rc);
8399 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8401 [(set_attr "type" "multiple")]
8404 ;; Attempt to improve the sequence generated by the compare_scc splitters
8405 ;; not to use conditional execution.
8407 ;; Rd = (eq (reg1) (const_int0))		// ARMv5
;; Peephole: compare-with-zero plus cond-exec 0/1 moves becomes
;; clz + lsr #5 when the flags die (CC dead at insn 3).
8411 [(set (reg:CC CC_REGNUM)
8412 (compare:CC (match_operand:SI 1 "register_operand" "")
8414 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8415 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8416 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8417 (set (match_dup 0) (const_int 1)))]
8418 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8419 [(set (match_dup 0) (clz:SI (match_dup 1)))
8420 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8423 ;; Rd = (eq (reg1) (const_int0))		// !ARMv5
;; Pre-ARMv5 fallback: rsbs into a scratch then adc, using the carry
;; from 0 - reg1 to form the 0/1 result.
8427 [(set (reg:CC CC_REGNUM)
8428 (compare:CC (match_operand:SI 1 "register_operand" "")
8430 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8431 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8432 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8433 (set (match_dup 0) (const_int 1)))
8434 (match_scratch:SI 2 "r")]
8435 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8437 [(set (reg:CC CC_REGNUM)
8438 (compare:CC (const_int 0) (match_dup 1)))
8439 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8441 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8442 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8445 ;; Rd = (eq (reg1) (reg2/imm))	// ARMv5 and optimising for speed.
8446 ;;   sub  Rd, Reg1, reg2
;; ARMv5 speed variant: sub/clz/lsr; skipped for Thumb-2 when
;; optimising for size.
8450 [(set (reg:CC CC_REGNUM)
8451 (compare:CC (match_operand:SI 1 "register_operand" "")
8452 (match_operand:SI 2 "arm_rhs_operand" "")))
8453 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8454 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8455 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8456 (set (match_dup 0) (const_int 1)))]
8457 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8458 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8459 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8460 (set (match_dup 0) (clz:SI (match_dup 0)))
8461 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8465 ;; Rd = (eq (reg1) (reg2))	// ! ARMv5 or optimising for size.
8466 ;;   sub  T1, Reg1, reg2
;; General fallback: materialise reg1 - reg2 in a scratch (operand 4 is
;; a PLUS of the negated constant, or a MINUS, built below), then the
;; rsbs/adc carry trick as above.
8470 [(set (reg:CC CC_REGNUM)
8471 (compare:CC (match_operand:SI 1 "register_operand" "")
8472 (match_operand:SI 2 "arm_rhs_operand" "")))
8473 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8474 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8475 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8476 (set (match_dup 0) (const_int 1)))
8477 (match_scratch:SI 3 "r")]
8478 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8479 [(set (match_dup 3) (match_dup 4))
8481 [(set (reg:CC CC_REGNUM)
8482 (compare:CC (const_int 0) (match_dup 3)))
8483 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8485 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8486 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8488 if (CONST_INT_P (operands[2]))
8489 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8491 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move driven by an existing CC value: emits one or two
;; predicated MOVs, choosing %d4/%D4 (condition / reversed condition)
;; according to whether the equality operator (operand 3) is NE or EQ and
;; which alternative ties an input to the destination.
8494 (define_insn "*cond_move"
8495 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8496 (if_then_else:SI (match_operator 3 "equality_operator"
8497 [(match_operator 4 "arm_comparison_operator"
8498 [(match_operand 5 "cc_register" "") (const_int 0)])
8500 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8501 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8504 if (GET_CODE (operands[3]) == NE)
8506 if (which_alternative != 1)
8507 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8508 if (which_alternative != 0)
8509 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8512 if (which_alternative != 0)
8513 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8514 if (which_alternative != 1)
8515 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8518 [(set_attr "conds" "use")
8519 (set_attr_alternative "type"
8520 [(if_then_else (match_operand 2 "const_int_operand" "")
8521 (const_string "mov_imm")
8522 (const_string "mov_reg"))
8523 (if_then_else (match_operand 1 "const_int_operand" "")
8524 (const_string "mov_imm")
8525 (const_string "mov_reg"))
8526 (const_string "multiple")])
8527 (set_attr "length" "4,4,8")]
;; Apply a shiftable operator to a store-flag value and a register.
;; Special-cases (lt x 0) as "op Rd, Rn, x, lsr #31"; otherwise emits a
;; cmp followed by predicated instructions depending on the operator
;; (AND zeroes on false, MINUS negates, else a conditional move).
8530 (define_insn "*cond_arith"
8531 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8532 (match_operator:SI 5 "shiftable_operator"
8533 [(match_operator:SI 4 "arm_comparison_operator"
8534 [(match_operand:SI 2 "s_register_operand" "r,r")
8535 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8536 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8537 (clobber (reg:CC CC_REGNUM))]
8540 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8541 return \"%i5\\t%0, %1, %2, lsr #31\";
8543 output_asm_insn (\"cmp\\t%2, %3\", operands);
8544 if (GET_CODE (operands[5]) == AND)
8545 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8546 else if (GET_CODE (operands[5]) == MINUS)
8547 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8548 else if (which_alternative != 0)
8549 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8550 return \"%i5%d4\\t%0, %1, #1\";
8552 [(set_attr "conds" "clob")
8553 (set_attr "length" "12")
8554 (set_attr "type" "multiple")]
;; Subtract a store-flag value: cmp, optional copy, then predicated
;; "sub Rd, Rn, #1" when the comparison holds.
8557 (define_insn "*cond_sub"
8558 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8559 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8560 (match_operator:SI 4 "arm_comparison_operator"
8561 [(match_operand:SI 2 "s_register_operand" "r,r")
8562 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8563 (clobber (reg:CC CC_REGNUM))]
8566 output_asm_insn (\"cmp\\t%2, %3\", operands);
8567 if (which_alternative != 0)
8568 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8569 return \"sub%d4\\t%0, %1, #1\";
8571 [(set_attr "conds" "clob")
8572 (set_attr "length" "8,12")
8573 (set_attr "type" "multiple")]
;; Conditional-compare patterns: fuse two comparisons into a dominance CC
;; register.  Output is table-driven — cmp vs. cmn (negated-immediate)
;; variants are indexed per alternative by cmp_idx, and 'swap' selects
;; operand order when the second comparison dominates the first.  On
;; Thumb-2 an IT instruction is emitted between the two compares.
8576 (define_insn "*cmp_ite0"
8577 [(set (match_operand 6 "dominant_cc_register" "")
8580 (match_operator 4 "arm_comparison_operator"
8581 [(match_operand:SI 0 "s_register_operand"
8582 "l,l,l,r,r,r,r,r,r")
8583 (match_operand:SI 1 "arm_add_operand"
8584 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8585 (match_operator:SI 5 "arm_comparison_operator"
8586 [(match_operand:SI 2 "s_register_operand"
8587 "l,r,r,l,l,r,r,r,r")
8588 (match_operand:SI 3 "arm_add_operand"
8589 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8595 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8597 {\"cmp%d5\\t%0, %1\",
8598 \"cmp%d4\\t%2, %3\"},
8599 {\"cmn%d5\\t%0, #%n1\",
8600 \"cmp%d4\\t%2, %3\"},
8601 {\"cmp%d5\\t%0, %1\",
8602 \"cmn%d4\\t%2, #%n3\"},
8603 {\"cmn%d5\\t%0, #%n1\",
8604 \"cmn%d4\\t%2, #%n3\"}
8606 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8611 \"cmn\\t%0, #%n1\"},
8612 {\"cmn\\t%2, #%n3\",
8614 {\"cmn\\t%2, #%n3\",
8617 static const char * const ite[2] =
8622 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8623 CMP_CMP, CMN_CMP, CMP_CMP,
8624 CMN_CMP, CMP_CMN, CMN_CMN};
8626 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8628 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8629 if (TARGET_THUMB2) {
8630 output_asm_insn (ite[swap], operands);
8632 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8635 [(set_attr "conds" "set")
8636 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8637 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8638 (set_attr "type" "multiple")
8639 (set_attr_alternative "length"
8645 (if_then_else (eq_attr "is_thumb" "no")
8648 (if_then_else (eq_attr "is_thumb" "no")
8651 (if_then_else (eq_attr "is_thumb" "no")
8654 (if_then_else (eq_attr "is_thumb" "no")
;; As *cmp_ite0, but the dominance test uses the reversed first
;; condition, and the second compare uses %D5 (reversed condition).
8659 (define_insn "*cmp_ite1"
8660 [(set (match_operand 6 "dominant_cc_register" "")
8663 (match_operator 4 "arm_comparison_operator"
8664 [(match_operand:SI 0 "s_register_operand"
8665 "l,l,l,r,r,r,r,r,r")
8666 (match_operand:SI 1 "arm_add_operand"
8667 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8668 (match_operator:SI 5 "arm_comparison_operator"
8669 [(match_operand:SI 2 "s_register_operand"
8670 "l,r,r,l,l,r,r,r,r")
8671 (match_operand:SI 3 "arm_add_operand"
8672 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8678 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8682 {\"cmn\\t%0, #%n1\",
8685 \"cmn\\t%2, #%n3\"},
8686 {\"cmn\\t%0, #%n1\",
8689 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8691 {\"cmp%d4\\t%2, %3\",
8692 \"cmp%D5\\t%0, %1\"},
8693 {\"cmp%d4\\t%2, %3\",
8694 \"cmn%D5\\t%0, #%n1\"},
8695 {\"cmn%d4\\t%2, #%n3\",
8696 \"cmp%D5\\t%0, %1\"},
8697 {\"cmn%d4\\t%2, #%n3\",
8698 \"cmn%D5\\t%0, #%n1\"}
8700 static const char * const ite[2] =
8705 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8706 CMP_CMP, CMN_CMP, CMP_CMP,
8707 CMN_CMP, CMP_CMN, CMN_CMN};
8709 comparison_dominates_p (GET_CODE (operands[5]),
8710 reverse_condition (GET_CODE (operands[4])));
8712 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8713 if (TARGET_THUMB2) {
8714 output_asm_insn (ite[swap], operands);
8716 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8719 [(set_attr "conds" "set")
8720 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8721 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8722 (set_attr_alternative "length"
8728 (if_then_else (eq_attr "is_thumb" "no")
8731 (if_then_else (eq_attr "is_thumb" "no")
8734 (if_then_else (eq_attr "is_thumb" "no")
8737 (if_then_else (eq_attr "is_thumb" "no")
8740 (set_attr "type" "multiple")]
;; Conditional compare for AND of two comparisons: first compare, then a
;; predicated second compare (cmp/cmn per cmp_idx), with an IT block on
;; Thumb-2.  'swap' picks which comparison goes first based on dominance.
8743 (define_insn "*cmp_and"
8744 [(set (match_operand 6 "dominant_cc_register" "")
8747 (match_operator 4 "arm_comparison_operator"
8748 [(match_operand:SI 0 "s_register_operand"
8749 "l,l,l,r,r,r,r,r,r,r")
8750 (match_operand:SI 1 "arm_add_operand"
8751 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8752 (match_operator:SI 5 "arm_comparison_operator"
8753 [(match_operand:SI 2 "s_register_operand"
8754 "l,r,r,l,l,r,r,r,r,r")
8755 (match_operand:SI 3 "arm_add_operand"
8756 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8761 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8763 {\"cmp%d5\\t%0, %1\",
8764 \"cmp%d4\\t%2, %3\"},
8765 {\"cmn%d5\\t%0, #%n1\",
8766 \"cmp%d4\\t%2, %3\"},
8767 {\"cmp%d5\\t%0, %1\",
8768 \"cmn%d4\\t%2, #%n3\"},
8769 {\"cmn%d5\\t%0, #%n1\",
8770 \"cmn%d4\\t%2, #%n3\"}
8772 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8777 \"cmn\\t%0, #%n1\"},
8778 {\"cmn\\t%2, #%n3\",
8780 {\"cmn\\t%2, #%n3\",
8783 static const char *const ite[2] =
8788 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8789 CMP_CMP, CMN_CMP, CMP_CMP,
8790 CMP_CMP, CMN_CMP, CMP_CMN,
8793 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8795 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8796 if (TARGET_THUMB2) {
8797 output_asm_insn (ite[swap], operands);
8799 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8802 [(set_attr "conds" "set")
8803 (set_attr "predicable" "no")
8804 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8805 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8806 (set_attr_alternative "length"
8813 (if_then_else (eq_attr "is_thumb" "no")
8816 (if_then_else (eq_attr "is_thumb" "no")
8819 (if_then_else (eq_attr "is_thumb" "no")
8822 (if_then_else (eq_attr "is_thumb" "no")
8825 (set_attr "type" "multiple")]
;; IOR variant: the second compare is predicated on the *failure* of the
;; first (%D4/%D5 reversed-condition suffixes in cmp2).
8828 (define_insn "*cmp_ior"
8829 [(set (match_operand 6 "dominant_cc_register" "")
8832 (match_operator 4 "arm_comparison_operator"
8833 [(match_operand:SI 0 "s_register_operand"
8834 "l,l,l,r,r,r,r,r,r,r")
8835 (match_operand:SI 1 "arm_add_operand"
8836 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8837 (match_operator:SI 5 "arm_comparison_operator"
8838 [(match_operand:SI 2 "s_register_operand"
8839 "l,r,r,l,l,r,r,r,r,r")
8840 (match_operand:SI 3 "arm_add_operand"
8841 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8846 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8850 {\"cmn\\t%0, #%n1\",
8853 \"cmn\\t%2, #%n3\"},
8854 {\"cmn\\t%0, #%n1\",
8857 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8859 {\"cmp%D4\\t%2, %3\",
8860 \"cmp%D5\\t%0, %1\"},
8861 {\"cmp%D4\\t%2, %3\",
8862 \"cmn%D5\\t%0, #%n1\"},
8863 {\"cmn%D4\\t%2, #%n3\",
8864 \"cmp%D5\\t%0, %1\"},
8865 {\"cmn%D4\\t%2, #%n3\",
8866 \"cmn%D5\\t%0, #%n1\"}
8868 static const char *const ite[2] =
8873 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8874 CMP_CMP, CMN_CMP, CMP_CMP,
8875 CMP_CMP, CMN_CMP, CMP_CMN,
8878 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8880 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8881 if (TARGET_THUMB2) {
8882 output_asm_insn (ite[swap], operands);
8884 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8888 [(set_attr "conds" "set")
8889 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8890 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8891 (set_attr_alternative "length"
8898 (if_then_else (eq_attr "is_thumb" "no")
8901 (if_then_else (eq_attr "is_thumb" "no")
8904 (if_then_else (eq_attr "is_thumb" "no")
8907 (if_then_else (eq_attr "is_thumb" "no")
8910 (set_attr "type" "multiple")]
;; IOR/AND of two store-flag values.  After reload these split into a
;; combined conditional compare (setting a dominance CC mode, operand 7)
;; followed by a single NE store-flag on the combined flags.  The "_cmp"
;; variants match the same thing already feeding a compare, so the
;; redundant final compare is absorbed.
8913 (define_insn_and_split "*ior_scc_scc"
8914 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8915 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8916 [(match_operand:SI 1 "s_register_operand" "l,r")
8917 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8918 (match_operator:SI 6 "arm_comparison_operator"
8919 [(match_operand:SI 4 "s_register_operand" "l,r")
8920 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8921 (clobber (reg:CC CC_REGNUM))]
8923 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8926 "TARGET_32BIT && reload_completed"
8930 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8931 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8933 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8935 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8938 [(set_attr "conds" "clob")
8939 (set_attr "enabled_for_short_it" "yes,no")
8940 (set_attr "length" "16")
8941 (set_attr "type" "multiple")]
8944 ; If the above pattern is followed by a CMP insn, then the compare is
8945 ; redundant, since we can rework the conditional instruction that follows.
8946 (define_insn_and_split "*ior_scc_scc_cmp"
8947 [(set (match_operand 0 "dominant_cc_register" "")
8948 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8949 [(match_operand:SI 1 "s_register_operand" "l,r")
8950 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8951 (match_operator:SI 6 "arm_comparison_operator"
8952 [(match_operand:SI 4 "s_register_operand" "l,r")
8953 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8955 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8956 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8957 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8960 "TARGET_32BIT && reload_completed"
8964 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8965 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8967 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8969 [(set_attr "conds" "set")
8970 (set_attr "enabled_for_short_it" "yes,no")
8971 (set_attr "length" "16")
8972 (set_attr "type" "multiple")]
;; AND counterpart of *ior_scc_scc, using DOM_CC_X_AND_Y dominance.
8975 (define_insn_and_split "*and_scc_scc"
8976 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8977 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8978 [(match_operand:SI 1 "s_register_operand" "l,r")
8979 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8980 (match_operator:SI 6 "arm_comparison_operator"
8981 [(match_operand:SI 4 "s_register_operand" "l,r")
8982 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8983 (clobber (reg:CC CC_REGNUM))]
8985 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8988 "TARGET_32BIT && reload_completed
8989 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8994 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8995 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8997 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8999 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9002 [(set_attr "conds" "clob")
9003 (set_attr "enabled_for_short_it" "yes,no")
9004 (set_attr "length" "16")
9005 (set_attr "type" "multiple")]
9008 ; If the above pattern is followed by a CMP insn, then the compare is
9009 ; redundant, since we can rework the conditional instruction that follows.
9010 (define_insn_and_split "*and_scc_scc_cmp"
9011 [(set (match_operand 0 "dominant_cc_register" "")
9012 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9013 [(match_operand:SI 1 "s_register_operand" "l,r")
9014 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9015 (match_operator:SI 6 "arm_comparison_operator"
9016 [(match_operand:SI 4 "s_register_operand" "l,r")
9017 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9019 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9020 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9021 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9024 "TARGET_32BIT && reload_completed"
9028 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9029 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9031 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9033 [(set_attr "conds" "set")
9034 (set_attr "enabled_for_short_it" "yes,no")
9035 (set_attr "length" "16")
9036 (set_attr "type" "multiple")]
9039 ;; If there is no dominance in the comparison, then we can still save an
9040 ;; instruction in the AND case, since we can know that the second compare
9041 ;; need only zero the value if false (if true, then the value is already
;; Split: compute the first store-flag, perform the second compare, then
;; conditionally zero the result when the second comparison is false.
;; Operand 7 is built as the CC register in the mode SELECT_CC_MODE picks
;; for the second comparison; operand 8 is the matching COMPARE rtx.
9043 (define_insn_and_split "*and_scc_scc_nodom"
9044 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9045 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9046 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9047 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9048 (match_operator:SI 6 "arm_comparison_operator"
9049 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9050 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9051 (clobber (reg:CC CC_REGNUM))]
9053 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9056 "TARGET_32BIT && reload_completed"
9057 [(parallel [(set (match_dup 0)
9058 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9059 (clobber (reg:CC CC_REGNUM))])
9060 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9062 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9065 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9066 operands[4], operands[5]),
9068 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9070 [(set_attr "conds" "clob")
9071 (set_attr "length" "20")
9072 (set_attr "type" "multiple")]
;; Split: (ior (and x ...) (comparison ...)) compared against zero —
;; rewrite so the comparison result lands in the scratch (operand 4) and
;; only its low bit is tested.  (The define_split header is elided.)
9076 [(set (reg:CC_NOOV CC_REGNUM)
9077 (compare:CC_NOOV (ior:SI
9078 (and:SI (match_operand:SI 0 "s_register_operand" "")
9080 (match_operator:SI 1 "arm_comparison_operator"
9081 [(match_operand:SI 2 "s_register_operand" "")
9082 (match_operand:SI 3 "arm_add_operand" "")]))
9084 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9087 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9089 (set (reg:CC_NOOV CC_REGNUM)
9090 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Same split with the IOR operands commuted.
9095 [(set (reg:CC_NOOV CC_REGNUM)
9096 (compare:CC_NOOV (ior:SI
9097 (match_operator:SI 1 "arm_comparison_operator"
9098 [(match_operand:SI 2 "s_register_operand" "")
9099 (match_operand:SI 3 "arm_add_operand" "")])
9100 (and:SI (match_operand:SI 0 "s_register_operand" "")
9103 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9106 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9108 (set (reg:CC_NOOV CC_REGNUM)
9109 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9112 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9114 (define_insn_and_split "*negscc"
9115 [(set (match_operand:SI 0 "s_register_operand" "=r")
9116 (neg:SI (match_operator 3 "arm_comparison_operator"
9117 [(match_operand:SI 1 "s_register_operand" "r")
9118 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9119 (clobber (reg:CC CC_REGNUM))]
9122 "&& reload_completed"
9125 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9127 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9129 /* Emit mov\\t%0, %1, asr #31 */
9130 emit_insn (gen_rtx_SET (operands[0],
9131 gen_rtx_ASHIFTRT (SImode,
9136 else if (GET_CODE (operands[3]) == NE)
9138 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9139 if (CONST_INT_P (operands[2]))
9140 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9141 gen_int_mode (-INTVAL (operands[2]),
9144 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9146 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9150 gen_rtx_SET (operands[0],
9156 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9157 emit_insn (gen_rtx_SET (cc_reg,
9158 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9159 enum rtx_code rc = GET_CODE (operands[3]);
9161 rc = reverse_condition (rc);
9162 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9167 gen_rtx_SET (operands[0], const0_rtx)));
9168 rc = GET_CODE (operands[3]);
9169 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9174 gen_rtx_SET (operands[0],
9180 [(set_attr "conds" "clob")
9181 (set_attr "length" "12")
9182 (set_attr "type" "multiple")]
9185 (define_insn_and_split "movcond_addsi"
9186 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9188 (match_operator 5 "comparison_operator"
9189 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9190 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9192 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9193 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9194 (clobber (reg:CC CC_REGNUM))]
9197 "&& reload_completed"
9198 [(set (reg:CC_NOOV CC_REGNUM)
9200 (plus:SI (match_dup 3)
9203 (set (match_dup 0) (match_dup 1))
9204 (cond_exec (match_dup 6)
9205 (set (match_dup 0) (match_dup 2)))]
9208 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9209 operands[3], operands[4]);
9210 enum rtx_code rc = GET_CODE (operands[5]);
9211 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9212 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9213 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9214 rc = reverse_condition (rc);
9216 std::swap (operands[1], operands[2]);
9218 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9221 [(set_attr "conds" "clob")
9222 (set_attr "enabled_for_short_it" "no,yes,yes")
9223 (set_attr "type" "multiple")]
9226 (define_insn "movcond"
9227 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9229 (match_operator 5 "arm_comparison_operator"
9230 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9231 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9232 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9233 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9234 (clobber (reg:CC CC_REGNUM))]
9237 if (GET_CODE (operands[5]) == LT
9238 && (operands[4] == const0_rtx))
9240 if (which_alternative != 1 && REG_P (operands[1]))
9242 if (operands[2] == const0_rtx)
9243 return \"and\\t%0, %1, %3, asr #31\";
9244 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9246 else if (which_alternative != 0 && REG_P (operands[2]))
9248 if (operands[1] == const0_rtx)
9249 return \"bic\\t%0, %2, %3, asr #31\";
9250 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9252 /* The only case that falls through to here is when both ops 1 & 2
9256 if (GET_CODE (operands[5]) == GE
9257 && (operands[4] == const0_rtx))
9259 if (which_alternative != 1 && REG_P (operands[1]))
9261 if (operands[2] == const0_rtx)
9262 return \"bic\\t%0, %1, %3, asr #31\";
9263 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9265 else if (which_alternative != 0 && REG_P (operands[2]))
9267 if (operands[1] == const0_rtx)
9268 return \"and\\t%0, %2, %3, asr #31\";
9269 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9271 /* The only case that falls through to here is when both ops 1 & 2
9274 if (CONST_INT_P (operands[4])
9275 && !const_ok_for_arm (INTVAL (operands[4])))
9276 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9278 output_asm_insn (\"cmp\\t%3, %4\", operands);
9279 if (which_alternative != 0)
9280 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9281 if (which_alternative != 1)
9282 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9285 [(set_attr "conds" "clob")
9286 (set_attr "length" "8,8,12")
9287 (set_attr "type" "multiple")]
9290 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9292 (define_insn "*ifcompare_plus_move"
9293 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9294 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9295 [(match_operand:SI 4 "s_register_operand" "r,r")
9296 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9298 (match_operand:SI 2 "s_register_operand" "r,r")
9299 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9300 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9301 (clobber (reg:CC CC_REGNUM))]
9304 [(set_attr "conds" "clob")
9305 (set_attr "length" "8,12")
9306 (set_attr "type" "multiple")]
9309 (define_insn "*if_plus_move"
9310 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9312 (match_operator 4 "arm_comparison_operator"
9313 [(match_operand 5 "cc_register" "") (const_int 0)])
9315 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9316 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9317 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9321 sub%d4\\t%0, %2, #%n3
9322 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9323 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9324 [(set_attr "conds" "use")
9325 (set_attr "length" "4,4,8,8")
9326 (set_attr_alternative "type"
9327 [(if_then_else (match_operand 3 "const_int_operand" "")
9328 (const_string "alu_imm" )
9329 (const_string "alu_sreg"))
9330 (const_string "alu_imm")
9331 (const_string "multiple")
9332 (const_string "multiple")])]
9335 (define_insn "*ifcompare_move_plus"
9336 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9337 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9338 [(match_operand:SI 4 "s_register_operand" "r,r")
9339 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9340 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9342 (match_operand:SI 2 "s_register_operand" "r,r")
9343 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9344 (clobber (reg:CC CC_REGNUM))]
9347 [(set_attr "conds" "clob")
9348 (set_attr "length" "8,12")
9349 (set_attr "type" "multiple")]
9352 (define_insn "*if_move_plus"
9353 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9355 (match_operator 4 "arm_comparison_operator"
9356 [(match_operand 5 "cc_register" "") (const_int 0)])
9357 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9359 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9360 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9364 sub%D4\\t%0, %2, #%n3
9365 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9366 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9367 [(set_attr "conds" "use")
9368 (set_attr "length" "4,4,8,8")
9369 (set_attr_alternative "type"
9370 [(if_then_else (match_operand 3 "const_int_operand" "")
9371 (const_string "alu_imm" )
9372 (const_string "alu_sreg"))
9373 (const_string "alu_imm")
9374 (const_string "multiple")
9375 (const_string "multiple")])]
9378 (define_insn "*ifcompare_arith_arith"
9379 [(set (match_operand:SI 0 "s_register_operand" "=r")
9380 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9381 [(match_operand:SI 5 "s_register_operand" "r")
9382 (match_operand:SI 6 "arm_add_operand" "rIL")])
9383 (match_operator:SI 8 "shiftable_operator"
9384 [(match_operand:SI 1 "s_register_operand" "r")
9385 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9386 (match_operator:SI 7 "shiftable_operator"
9387 [(match_operand:SI 3 "s_register_operand" "r")
9388 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9389 (clobber (reg:CC CC_REGNUM))]
9392 [(set_attr "conds" "clob")
9393 (set_attr "length" "12")
9394 (set_attr "type" "multiple")]
;; Conditional select between two shiftable-operator results, predicated on
;; a condition already present in the CC register ("conds" is "use": this
;; pattern consumes the flags, it does not set them).  Emits a two-insn
;; conditionally-executed sequence: the %d5 form on the true arm and the
;; complementary %D5 form on the false arm, hence length 8.
9397 (define_insn "*if_arith_arith"
9398 [(set (match_operand:SI 0 "s_register_operand" "=r")
9399 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9400 [(match_operand 8 "cc_register" "") (const_int 0)])
9401 (match_operator:SI 6 "shiftable_operator"
9402 [(match_operand:SI 1 "s_register_operand" "r")
9403 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9404 (match_operator:SI 7 "shiftable_operator"
9405 [(match_operand:SI 3 "s_register_operand" "r")
9406 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9408 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9409 [(set_attr "conds" "use")
9410 (set_attr "length" "8")
9411 (set_attr "type" "multiple")]
9414 (define_insn "*ifcompare_arith_move"
9415 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9416 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9417 [(match_operand:SI 2 "s_register_operand" "r,r")
9418 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9419 (match_operator:SI 7 "shiftable_operator"
9420 [(match_operand:SI 4 "s_register_operand" "r,r")
9421 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9422 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9423 (clobber (reg:CC CC_REGNUM))]
9426 /* If we have an operation where (op x 0) is the identity operation and
9427 the conditional operator is LT or GE and we are comparing against zero and
9428 everything is in registers then we can do this in two instructions. */
9429 if (operands[3] == const0_rtx
9430 && GET_CODE (operands[7]) != AND
9431 && REG_P (operands[5])
9432 && REG_P (operands[1])
9433 && REGNO (operands[1]) == REGNO (operands[4])
9434 && REGNO (operands[4]) != REGNO (operands[0]))
9436 if (GET_CODE (operands[6]) == LT)
9437 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9438 else if (GET_CODE (operands[6]) == GE)
9439 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9441 if (CONST_INT_P (operands[3])
9442 && !const_ok_for_arm (INTVAL (operands[3])))
9443 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9445 output_asm_insn (\"cmp\\t%2, %3\", operands);
9446 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9447 if (which_alternative != 0)
9448 return \"mov%D6\\t%0, %1\";
9451 [(set_attr "conds" "clob")
9452 (set_attr "length" "8,12")
9453 (set_attr "type" "multiple")]
9456 (define_insn "*if_arith_move"
9457 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9458 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9459 [(match_operand 6 "cc_register" "") (const_int 0)])
9460 (match_operator:SI 5 "shiftable_operator"
9461 [(match_operand:SI 2 "s_register_operand" "r,r")
9462 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9463 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9467 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9468 [(set_attr "conds" "use")
9469 (set_attr "length" "4,8")
9470 (set_attr_alternative "type"
9471 [(if_then_else (match_operand 3 "const_int_operand" "")
9472 (const_string "alu_shift_imm" )
9473 (const_string "alu_shift_reg"))
9474 (const_string "multiple")])]
9477 (define_insn "*ifcompare_move_arith"
9478 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9479 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9480 [(match_operand:SI 4 "s_register_operand" "r,r")
9481 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9482 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9483 (match_operator:SI 7 "shiftable_operator"
9484 [(match_operand:SI 2 "s_register_operand" "r,r")
9485 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9486 (clobber (reg:CC CC_REGNUM))]
9489 /* If we have an operation where (op x 0) is the identity operation and
9490 the conditional operator is LT or GE and we are comparing against zero and
9491 everything is in registers then we can do this in two instructions */
9492 if (operands[5] == const0_rtx
9493 && GET_CODE (operands[7]) != AND
9494 && REG_P (operands[3])
9495 && REG_P (operands[1])
9496 && REGNO (operands[1]) == REGNO (operands[2])
9497 && REGNO (operands[2]) != REGNO (operands[0]))
9499 if (GET_CODE (operands[6]) == GE)
9500 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9501 else if (GET_CODE (operands[6]) == LT)
9502 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9505 if (CONST_INT_P (operands[5])
9506 && !const_ok_for_arm (INTVAL (operands[5])))
9507 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9509 output_asm_insn (\"cmp\\t%4, %5\", operands);
9511 if (which_alternative != 0)
9512 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9513 return \"%I7%D6\\t%0, %2, %3\";
9515 [(set_attr "conds" "clob")
9516 (set_attr "length" "8,12")
9517 (set_attr "type" "multiple")]
9520 (define_insn "*if_move_arith"
9521 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9523 (match_operator 4 "arm_comparison_operator"
9524 [(match_operand 6 "cc_register" "") (const_int 0)])
9525 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9526 (match_operator:SI 5 "shiftable_operator"
9527 [(match_operand:SI 2 "s_register_operand" "r,r")
9528 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9532 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9533 [(set_attr "conds" "use")
9534 (set_attr "length" "4,8")
9535 (set_attr_alternative "type"
9536 [(if_then_else (match_operand 3 "const_int_operand" "")
9537 (const_string "alu_shift_imm" )
9538 (const_string "alu_shift_reg"))
9539 (const_string "multiple")])]
9542 (define_insn "*ifcompare_move_not"
9543 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9545 (match_operator 5 "arm_comparison_operator"
9546 [(match_operand:SI 3 "s_register_operand" "r,r")
9547 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9548 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9550 (match_operand:SI 2 "s_register_operand" "r,r"))))
9551 (clobber (reg:CC CC_REGNUM))]
9554 [(set_attr "conds" "clob")
9555 (set_attr "length" "8,12")
9556 (set_attr "type" "multiple")]
;; Conditional select between a plain value (operand 1: register, ARM
;; immediate, or inverted immediate "K") and the bitwise NOT of a register
;; (operand 2), predicated on a condition-code value that is already set
;; ("conds" is "use").
;; Fix: the "type" attribute was defined twice -- once as a scalar
;; "mvn_reg" and once as a per-alternative list.  An attribute may only be
;; set once per define_insn; keep the per-alternative list, which matches
;; the three alternatives here and the sibling pattern *if_not_move.
9559 (define_insn "*if_move_not"
9560 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9562 (match_operator 4 "arm_comparison_operator"
9563 [(match_operand 3 "cc_register" "") (const_int 0)])
9564 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9565 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9569 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9570 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9571 [(set_attr "conds" "use")
9573 (set_attr "length" "4,8,8")
9574 (set_attr "type" "mvn_reg,multiple,multiple")]
9577 (define_insn "*ifcompare_not_move"
9578 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9580 (match_operator 5 "arm_comparison_operator"
9581 [(match_operand:SI 3 "s_register_operand" "r,r")
9582 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9584 (match_operand:SI 2 "s_register_operand" "r,r"))
9585 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9586 (clobber (reg:CC CC_REGNUM))]
9589 [(set_attr "conds" "clob")
9590 (set_attr "length" "8,12")
9591 (set_attr "type" "multiple")]
;; Mirror of *if_move_not: select the NOT of a register on the true arm and
;; a plain value on the false arm, consuming flags that are already set
;; ("conds" is "use").  Alternative 1 needs only a single conditional MVN
;; (dest tied to operand 1, length 4); the others need a two-insn
;; conditional pair (length 8), using MVN of %B1 when operand 1 is an
;; inverted-immediate ("K") constant.
9594 (define_insn "*if_not_move"
9595 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9597 (match_operator 4 "arm_comparison_operator"
9598 [(match_operand 3 "cc_register" "") (const_int 0)])
9599 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9600 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9604 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9605 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9606 [(set_attr "conds" "use")
9607 (set_attr "type" "mvn_reg,multiple,multiple")
9608 (set_attr "length" "4,8,8")]
9611 (define_insn "*ifcompare_shift_move"
9612 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9614 (match_operator 6 "arm_comparison_operator"
9615 [(match_operand:SI 4 "s_register_operand" "r,r")
9616 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9617 (match_operator:SI 7 "shift_operator"
9618 [(match_operand:SI 2 "s_register_operand" "r,r")
9619 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9620 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9621 (clobber (reg:CC CC_REGNUM))]
9624 [(set_attr "conds" "clob")
9625 (set_attr "length" "8,12")
9626 (set_attr "type" "multiple")]
9629 (define_insn "*if_shift_move"
9630 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9632 (match_operator 5 "arm_comparison_operator"
9633 [(match_operand 6 "cc_register" "") (const_int 0)])
9634 (match_operator:SI 4 "shift_operator"
9635 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9636 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9637 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9641 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9642 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9643 [(set_attr "conds" "use")
9644 (set_attr "shift" "2")
9645 (set_attr "length" "4,8,8")
9646 (set_attr_alternative "type"
9647 [(if_then_else (match_operand 3 "const_int_operand" "")
9648 (const_string "mov_shift" )
9649 (const_string "mov_shift_reg"))
9650 (const_string "multiple")
9651 (const_string "multiple")])]
9654 (define_insn "*ifcompare_move_shift"
9655 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9657 (match_operator 6 "arm_comparison_operator"
9658 [(match_operand:SI 4 "s_register_operand" "r,r")
9659 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9660 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9661 (match_operator:SI 7 "shift_operator"
9662 [(match_operand:SI 2 "s_register_operand" "r,r")
9663 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9664 (clobber (reg:CC CC_REGNUM))]
9667 [(set_attr "conds" "clob")
9668 (set_attr "length" "8,12")
9669 (set_attr "type" "multiple")]
9672 (define_insn "*if_move_shift"
9673 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9675 (match_operator 5 "arm_comparison_operator"
9676 [(match_operand 6 "cc_register" "") (const_int 0)])
9677 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9678 (match_operator:SI 4 "shift_operator"
9679 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9680 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9684 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9685 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9686 [(set_attr "conds" "use")
9687 (set_attr "shift" "2")
9688 (set_attr "length" "4,8,8")
9689 (set_attr_alternative "type"
9690 [(if_then_else (match_operand 3 "const_int_operand" "")
9691 (const_string "mov_shift" )
9692 (const_string "mov_shift_reg"))
9693 (const_string "multiple")
9694 (const_string "multiple")])]
9697 (define_insn "*ifcompare_shift_shift"
9698 [(set (match_operand:SI 0 "s_register_operand" "=r")
9700 (match_operator 7 "arm_comparison_operator"
9701 [(match_operand:SI 5 "s_register_operand" "r")
9702 (match_operand:SI 6 "arm_add_operand" "rIL")])
9703 (match_operator:SI 8 "shift_operator"
9704 [(match_operand:SI 1 "s_register_operand" "r")
9705 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9706 (match_operator:SI 9 "shift_operator"
9707 [(match_operand:SI 3 "s_register_operand" "r")
9708 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9709 (clobber (reg:CC CC_REGNUM))]
9712 [(set_attr "conds" "clob")
9713 (set_attr "length" "12")
9714 (set_attr "type" "multiple")]
;; Conditional select between two shifted registers, flags already set
;; ("conds" is "use"): a conditional MOV with shift on each arm (%S6/%S7
;; print the shift of the matched operators).  The "type" attribute is
;; computed: mov_shift when both shift amounts are immediates, otherwise
;; mov_shift_reg (register-specified shift).
9717 (define_insn "*if_shift_shift"
9718 [(set (match_operand:SI 0 "s_register_operand" "=r")
9720 (match_operator 5 "arm_comparison_operator"
9721 [(match_operand 8 "cc_register" "") (const_int 0)])
9722 (match_operator:SI 6 "shift_operator"
9723 [(match_operand:SI 1 "s_register_operand" "r")
9724 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9725 (match_operator:SI 7 "shift_operator"
9726 [(match_operand:SI 3 "s_register_operand" "r")
9727 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9729 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9730 [(set_attr "conds" "use")
9731 (set_attr "shift" "1")
9732 (set_attr "length" "8")
9733 (set (attr "type") (if_then_else
9734 (and (match_operand 2 "const_int_operand" "")
9735 (match_operand 4 "const_int_operand" ""))
9736 (const_string "mov_shift")
9737 (const_string "mov_shift_reg")))]
9740 (define_insn "*ifcompare_not_arith"
9741 [(set (match_operand:SI 0 "s_register_operand" "=r")
9743 (match_operator 6 "arm_comparison_operator"
9744 [(match_operand:SI 4 "s_register_operand" "r")
9745 (match_operand:SI 5 "arm_add_operand" "rIL")])
9746 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9747 (match_operator:SI 7 "shiftable_operator"
9748 [(match_operand:SI 2 "s_register_operand" "r")
9749 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9750 (clobber (reg:CC CC_REGNUM))]
9753 [(set_attr "conds" "clob")
9754 (set_attr "length" "12")
9755 (set_attr "type" "multiple")]
;; Conditional select: NOT of a register on the true arm, a shiftable
;; operation on the false arm, with flags already set ("conds" is "use").
;; Emits a conditional MVN followed by the complementary conditional
;; data-processing insn (%I6 prints the operator of match_operator 6).
9758 (define_insn "*if_not_arith"
9759 [(set (match_operand:SI 0 "s_register_operand" "=r")
9761 (match_operator 5 "arm_comparison_operator"
9762 [(match_operand 4 "cc_register" "") (const_int 0)])
9763 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9764 (match_operator:SI 6 "shiftable_operator"
9765 [(match_operand:SI 2 "s_register_operand" "r")
9766 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9768 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9769 [(set_attr "conds" "use")
9770 (set_attr "type" "mvn_reg")
9771 (set_attr "length" "8")]
9774 (define_insn "*ifcompare_arith_not"
9775 [(set (match_operand:SI 0 "s_register_operand" "=r")
9777 (match_operator 6 "arm_comparison_operator"
9778 [(match_operand:SI 4 "s_register_operand" "r")
9779 (match_operand:SI 5 "arm_add_operand" "rIL")])
9780 (match_operator:SI 7 "shiftable_operator"
9781 [(match_operand:SI 2 "s_register_operand" "r")
9782 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9783 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9784 (clobber (reg:CC CC_REGNUM))]
9787 [(set_attr "conds" "clob")
9788 (set_attr "length" "12")
9789 (set_attr "type" "multiple")]
;; Mirror of *if_not_arith with the arms swapped: the shiftable operation
;; is on the true arm, the NOT on the false arm, so the MVN takes the
;; inverted condition (%D5) and the data-processing insn the direct one
;; (%d5).  Flags are consumed, not set ("conds" is "use").
9792 (define_insn "*if_arith_not"
9793 [(set (match_operand:SI 0 "s_register_operand" "=r")
9795 (match_operator 5 "arm_comparison_operator"
9796 [(match_operand 4 "cc_register" "") (const_int 0)])
9797 (match_operator:SI 6 "shiftable_operator"
9798 [(match_operand:SI 2 "s_register_operand" "r")
9799 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9800 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9802 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9803 [(set_attr "conds" "use")
9804 (set_attr "type" "multiple")
9805 (set_attr "length" "8")]
9808 (define_insn "*ifcompare_neg_move"
9809 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9811 (match_operator 5 "arm_comparison_operator"
9812 [(match_operand:SI 3 "s_register_operand" "r,r")
9813 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9814 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9815 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9816 (clobber (reg:CC CC_REGNUM))]
9819 [(set_attr "conds" "clob")
9820 (set_attr "length" "8,12")
9821 (set_attr "type" "multiple")]
;; Conditional negate: if the (already computed) condition holds, replace
;; the destination with the negation of operand 2; otherwise keep operand 1
;; (tied to the destination, constraint "0").  After reload this splits
;; into a single cond_exec'd NEG, hence length 4.  The "l"/"t2" alternative
;; allows a 16-bit encoding inside a Thumb-2 IT block
;; (enabled_for_short_it "yes"); the "r"/"32" alternative is the general
;; 32-bit form.
9824 (define_insn_and_split "*if_neg_move"
9825 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9827 (match_operator 4 "arm_comparison_operator"
9828 [(match_operand 3 "cc_register" "") (const_int 0)])
9829 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9830 (match_operand:SI 1 "s_register_operand" "0,0")))]
9833 "&& reload_completed"
9834 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9835 (set (match_dup 0) (neg:SI (match_dup 2))))]
9837 [(set_attr "conds" "use")
9838 (set_attr "length" "4")
9839 (set_attr "arch" "t2,32")
9840 (set_attr "enabled_for_short_it" "yes,no")
9841 (set_attr "type" "logic_shift_imm")]
9844 (define_insn "*ifcompare_move_neg"
9845 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9847 (match_operator 5 "arm_comparison_operator"
9848 [(match_operand:SI 3 "s_register_operand" "r,r")
9849 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9850 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9851 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9852 (clobber (reg:CC CC_REGNUM))]
9855 [(set_attr "conds" "clob")
9856 (set_attr "length" "8,12")
9857 (set_attr "type" "multiple")]
9860 (define_insn_and_split "*if_move_neg"
9861 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9863 (match_operator 4 "arm_comparison_operator"
9864 [(match_operand 3 "cc_register" "") (const_int 0)])
9865 (match_operand:SI 1 "s_register_operand" "0,0")
9866 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9869 "&& reload_completed"
9870 [(cond_exec (match_dup 5)
9871 (set (match_dup 0) (neg:SI (match_dup 2))))]
9873 machine_mode mode = GET_MODE (operands[3]);
9874 rtx_code rc = GET_CODE (operands[4]);
9876 if (mode == CCFPmode || mode == CCFPEmode)
9877 rc = reverse_condition_maybe_unordered (rc);
9879 rc = reverse_condition (rc);
9881 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9883 [(set_attr "conds" "use")
9884 (set_attr "length" "4")
9885 (set_attr "arch" "t2,32")
9886 (set_attr "enabled_for_short_it" "yes,no")
9887 (set_attr "type" "logic_shift_imm")]
9890 (define_insn "*arith_adjacentmem"
9891 [(set (match_operand:SI 0 "s_register_operand" "=r")
9892 (match_operator:SI 1 "shiftable_operator"
9893 [(match_operand:SI 2 "memory_operand" "m")
9894 (match_operand:SI 3 "memory_operand" "m")]))
9895 (clobber (match_scratch:SI 4 "=r"))]
9896 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9902 HOST_WIDE_INT val1 = 0, val2 = 0;
9904 if (REGNO (operands[0]) > REGNO (operands[4]))
9906 ldm[1] = operands[4];
9907 ldm[2] = operands[0];
9911 ldm[1] = operands[0];
9912 ldm[2] = operands[4];
9915 base_reg = XEXP (operands[2], 0);
9917 if (!REG_P (base_reg))
9919 val1 = INTVAL (XEXP (base_reg, 1));
9920 base_reg = XEXP (base_reg, 0);
9923 if (!REG_P (XEXP (operands[3], 0)))
9924 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9926 arith[0] = operands[0];
9927 arith[3] = operands[1];
9941 if (val1 !=0 && val2 != 0)
9945 if (val1 == 4 || val2 == 4)
9946 /* Other val must be 8, since we know they are adjacent and neither
9948 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9949 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9951 ldm[0] = ops[0] = operands[4];
9953 ops[2] = GEN_INT (val1);
9954 output_add_immediate (ops);
9956 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9958 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9962 /* Offset is out of range for a single add, so use two ldr. */
9965 ops[2] = GEN_INT (val1);
9966 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9968 ops[2] = GEN_INT (val2);
9969 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9975 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9977 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9982 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9984 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9986 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9989 [(set_attr "length" "12")
9990 (set_attr "predicable" "yes")
9991 (set_attr "type" "load_4")]
9994 ; This pattern is never tried by combine, so do it as a peephole
9997 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9998 (match_operand:SI 1 "arm_general_register_operand" ""))
9999 (set (reg:CC CC_REGNUM)
10000 (compare:CC (match_dup 1) (const_int 0)))]
10002 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10003 (set (match_dup 0) (match_dup 1))])]
10008 [(set (match_operand:SI 0 "s_register_operand" "")
10009 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10011 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10012 [(match_operand:SI 3 "s_register_operand" "")
10013 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10014 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10016 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10017 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10022 ;; This split can be used because CC_Z mode implies that the following
10023 ;; branch will be an equality, or an unsigned inequality, so the sign
10024 ;; extension is not needed.
10027 [(set (reg:CC_Z CC_REGNUM)
10029 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10031 (match_operand 1 "const_int_operand" "")))
10032 (clobber (match_scratch:SI 2 ""))]
10034 && ((UINTVAL (operands[1]))
10035 == ((UINTVAL (operands[1])) >> 24) << 24)"
10036 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10037 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10039 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10042 ;; ??? Check the patterns above for Thumb-2 usefulness
10044 (define_expand "prologue"
10045 [(clobber (const_int 0))]
10048 arm_expand_prologue ();
10050 thumb1_expand_prologue ();
10055 (define_expand "epilogue"
10056 [(clobber (const_int 0))]
10059 if (crtl->calls_eh_return)
10060 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10063 thumb1_expand_epilogue ();
10064 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10065 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10067 else if (HAVE_return)
10069 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10070 no need for explicit testing again. */
10071 emit_jump_insn (gen_return ());
10073 else if (TARGET_32BIT)
10075 arm_expand_epilogue (true);
10081 ;; Note - although unspec_volatile's USE all hard registers,
10082 ;; USEs are ignored after reload has completed. Thus we need
10083 ;; to add an unspec of the link register to ensure that flow
10084 ;; does not think that it is unused by the sibcall branch that
10085 ;; will replace the standard function epilogue.
10086 (define_expand "sibcall_epilogue"
10087 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10088 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10091 arm_expand_epilogue (false);
10096 (define_expand "eh_epilogue"
10097 [(use (match_operand:SI 0 "register_operand"))
10098 (use (match_operand:SI 1 "register_operand"))
10099 (use (match_operand:SI 2 "register_operand"))]
10103 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10104 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10106 rtx ra = gen_rtx_REG (Pmode, 2);
10108 emit_move_insn (ra, operands[2]);
10111 /* This is a hack -- we may have crystalized the function type too
10113 cfun->machine->func_type = 0;
10117 ;; This split is only used during output to reduce the number of patterns
10118 ;; that need assembler instructions adding to them.  We allowed the setting
10119 ;; of the conditions to be implicit during rtl generation so that
10120 ;; the conditional compare patterns would work.  However this conflicts to
10121 ;; some extent with the conditional data operations, so we have to split them
10124 ;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
10125 ;; conditional execution sufficient?

;; Split #1: if_then_else with one conditional move arm.  Emits the
;; compare into the CC register (operands 5/6) and a cond_exec move
;; guarded by the reversed condition (operand 7); FP modes use
;; reverse_condition_maybe_unordered to stay NaN-correct.
10128 [(set (match_operand:SI 0 "s_register_operand" "")
10129 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10130 [(match_operand 2 "" "") (match_operand 3 "" "")])
10132 (match_operand 4 "" "")))
10133 (clobber (reg:CC CC_REGNUM))]
10134 "TARGET_ARM && reload_completed"
10135 [(set (match_dup 5) (match_dup 6))
10136 (cond_exec (match_dup 7)
10137 (set (match_dup 0) (match_dup 4)))]
10140 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10141 operands[2], operands[3]);
10142 enum rtx_code rc = GET_CODE (operands[1]);
10144 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10145 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10146 if (mode == CCFPmode || mode == CCFPEmode)
10147 rc = reverse_condition_maybe_unordered (rc);
10149 rc = reverse_condition (rc);
10151 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);

;; Split #2: as above but the move is guarded by the original
;; (un-reversed) comparison, reusing match_op_dup on the CC register.
10156 [(set (match_operand:SI 0 "s_register_operand" "")
10157 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10158 [(match_operand 2 "" "") (match_operand 3 "" "")])
10159 (match_operand 4 "" "")
10161 (clobber (reg:CC CC_REGNUM))]
10162 "TARGET_ARM && reload_completed"
10163 [(set (match_dup 5) (match_dup 6))
10164 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10165 (set (match_dup 0) (match_dup 4)))]
10168 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10169 operands[2], operands[3]);
10171 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10172 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);

;; Split #3: both arms are moves — one cond_exec per arm, the second
;; guarded by the reversed condition built in operand 8.
10177 [(set (match_operand:SI 0 "s_register_operand" "")
10178 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10179 [(match_operand 2 "" "") (match_operand 3 "" "")])
10180 (match_operand 4 "" "")
10181 (match_operand 5 "" "")))
10182 (clobber (reg:CC CC_REGNUM))]
10183 "TARGET_ARM && reload_completed"
10184 [(set (match_dup 6) (match_dup 7))
10185 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10186 (set (match_dup 0) (match_dup 4)))
10187 (cond_exec (match_dup 8)
10188 (set (match_dup 0) (match_dup 5)))]
10191 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10192 operands[2], operands[3]);
10193 enum rtx_code rc = GET_CODE (operands[1]);
10195 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10196 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10197 if (mode == CCFPmode || mode == CCFPEmode)
10198 rc = reverse_condition_maybe_unordered (rc);
10200 rc = reverse_condition (rc);
10202 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);

;; Split #4: one arm is a plain value, the other a bitwise NOT of a
;; register — the else-arm becomes a cond_exec'd MVN.
10207 [(set (match_operand:SI 0 "s_register_operand" "")
10208 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10209 [(match_operand:SI 2 "s_register_operand" "")
10210 (match_operand:SI 3 "arm_add_operand" "")])
10211 (match_operand:SI 4 "arm_rhs_operand" "")
10213 (match_operand:SI 5 "s_register_operand" ""))))
10214 (clobber (reg:CC CC_REGNUM))]
10215 "TARGET_ARM && reload_completed"
10216 [(set (match_dup 6) (match_dup 7))
10217 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10218 (set (match_dup 0) (match_dup 4)))
10219 (cond_exec (match_dup 8)
10220 (set (match_dup 0) (not:SI (match_dup 5))))]
10223 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10224 operands[2], operands[3]);
10225 enum rtx_code rc = GET_CODE (operands[1]);
10227 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10228 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10229 if (mode == CCFPmode || mode == CCFPEmode)
10230 rc = reverse_condition_maybe_unordered (rc);
10232 rc = reverse_condition (rc);
10234 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional move where the else-arm is a bitwise NOT: emitted as a
;; predicated MOV plus a complementary-predicated MVN (alternative 2
;; costs 8 bytes; alternative 1's source matches the destination).
10238 (define_insn "*cond_move_not"
10239 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10240 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10241 [(match_operand 3 "cc_register" "") (const_int 0)])
10242 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10244 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10248 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10249 [(set_attr "conds" "use")
10250 (set_attr "type" "mvn_reg,multiple")
10251 (set_attr "length" "4,8")]

10254 ;; The next two patterns occur when an AND operation is followed by a
10255 ;; scc insn sequence

;; Sign-extract of a single bit: ANDS against the 1<<n mask sets the
;; flags, then MVNNE writes -1 when the bit was set.  Clobbers CC.
10257 (define_insn "*sign_extract_onebit"
10258 [(set (match_operand:SI 0 "s_register_operand" "=r")
10259 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10261 (match_operand:SI 2 "const_int_operand" "n")))
10262 (clobber (reg:CC CC_REGNUM))]
10265 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10266 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10267 return \"mvnne\\t%0, #0\";
10269 [(set_attr "conds" "clob")
10270 (set_attr "length" "8")
10271 (set_attr "type" "multiple")]

;; Negated single-bit sign-extract: TST + MVNEQ/MOVNE, 12 bytes.
10274 (define_insn "*not_signextract_onebit"
10275 [(set (match_operand:SI 0 "s_register_operand" "=r")
10277 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10279 (match_operand:SI 2 "const_int_operand" "n"))))
10280 (clobber (reg:CC CC_REGNUM))]
10283 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10284 output_asm_insn (\"tst\\t%1, %2\", operands);
10285 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10286 return \"movne\\t%0, #0\";
10288 [(set_attr "conds" "clob")
10289 (set_attr "length" "12")
10290 (set_attr "type" "multiple")]
10292 ;; ??? The above patterns need auditing for Thumb-2

10294 ;; Push multiple registers to the stack.  Registers are in parallel (use ...)
10295 ;; expressions.  For simplicity, the first register is also in the unspec
10297 ;; To avoid the usage of GNU extension, the length attribute is computed
10298 ;; in a C function arm_attr_length_push_multi.

;; Store-multiple push.  For a single register in ARM mode a plain
;; pre-decrement STR is faster (comment below cites StrongARM); the
;; register list is otherwise assembled into a "push {...}" string.
10299 (define_insn "*push_multi"
10300 [(match_parallel 2 "multi_register_push"
10301 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10302 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10303 UNSPEC_PUSH_MULT))])]
10307 int num_saves = XVECLEN (operands[2], 0);
10309 /* For the StrongARM at least it is faster to
10310 use STR to store only a single register.
10311 In Thumb mode always use push, and the assembler will pick
10312 something appropriate.  */
10313 if (num_saves == 1 && TARGET_ARM)
10314 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10321 strcpy (pattern, \"push%?\\t{%1\");
10323 strcpy (pattern, \"push\\t{%1\");
10325 for (i = 1; i < num_saves; i++)
10327 strcat (pattern, \", %|\");
10329 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10332 strcat (pattern, \"}\");
10333 output_asm_insn (pattern, operands);
10338 [(set_attr "type" "store_16")
10339 (set (attr "length")
10340 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]

;; Zero-length scheduling barrier tying stack memory to the two given
;; registers so stack accesses are not moved across frame adjustments.
10343 (define_insn "stack_tie"
10344 [(set (mem:BLK (scratch))
10345 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10346 (match_operand:SI 1 "s_register_operand" "rk")]
10350 [(set_attr "length" "0")
10351 (set_attr "type" "block")]
10354 ;; Pop (as used in epilogue RTL)

;; Load-multiple with base-register writeback; output is produced by
;; arm_output_multireg_pop and length by arm_attr_length_pop_multi.
10356 (define_insn "*load_multiple_with_writeback"
10357 [(match_parallel 0 "load_multiple_operation"
10358 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10359 (plus:SI (match_dup 1)
10360 (match_operand:SI 2 "const_int_I_operand" "I")))
10361 (set (match_operand:SI 3 "s_register_operand" "=rk")
10362 (mem:SI (match_dup 1)))
10364 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10367 arm_output_multireg_pop (operands, /*return_pc=*/false,
10368 /*cond=*/const_true_rtx,
10374 [(set_attr "type" "load_16")
10375 (set_attr "predicable" "yes")
10376 (set (attr "length")
10377 (symbol_ref "arm_attr_length_pop_multi (operands,
10378 /*return_pc=*/false,
10379 /*write_back_p=*/true)"))]

10382 ;; Pop with return (as used in epilogue RTL)

10384 ;; This instruction is generated when the registers are popped at the end of
10385 ;; epilogue.  Here, instead of popping the value into LR and then generating
10386 ;; jump to LR, value is popped into PC directly.  Hence, the pattern is combined

;; Pop-with-writeback that also loads PC, i.e. pop {...} ending in a
;; return (return_pc=true when emitting).
10388 (define_insn "*pop_multiple_with_writeback_and_return"
10389 [(match_parallel 0 "pop_multiple_return"
10391 (set (match_operand:SI 1 "s_register_operand" "+rk")
10392 (plus:SI (match_dup 1)
10393 (match_operand:SI 2 "const_int_I_operand" "I")))
10394 (set (match_operand:SI 3 "s_register_operand" "=rk")
10395 (mem:SI (match_dup 1)))
10397 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10400 arm_output_multireg_pop (operands, /*return_pc=*/true,
10401 /*cond=*/const_true_rtx,
10407 [(set_attr "type" "load_16")
10408 (set_attr "predicable" "yes")
10409 (set (attr "length")
10410 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10411 /*write_back_p=*/true)"))]

;; As above but without base-register writeback.
10414 (define_insn "*pop_multiple_with_return"
10415 [(match_parallel 0 "pop_multiple_return"
10417 (set (match_operand:SI 2 "s_register_operand" "=rk")
10418 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10420 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10423 arm_output_multireg_pop (operands, /*return_pc=*/true,
10424 /*cond=*/const_true_rtx,
10430 [(set_attr "type" "load_16")
10431 (set_attr "predicable" "yes")
10432 (set (attr "length")
10433 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10434 /*write_back_p=*/false)"))]

10437 ;; Load into PC and return

;; Single-register return: post-increment load straight into PC.
10438 (define_insn "*ldr_with_return"
10440 (set (reg:SI PC_REGNUM)
10441 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10442 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10443 "ldr%?\t%|pc, [%0], #4"
10444 [(set_attr "type" "load_4")
10445 (set_attr "predicable" "yes")]
10447 ;; Pop for floating point registers (as used in epilogue RTL)

;; Pop a range of VFP double registers with writeback: builds a
;; "vldm <base>!, {%P0[-%P1]}" string from the first and last DF
;; registers in the parallel.  Unconditional and not predicable.
10448 (define_insn "*vfp_pop_multiple_with_writeback"
10449 [(match_parallel 0 "pop_multiple_fp"
10450 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10451 (plus:SI (match_dup 1)
10452 (match_operand:SI 2 "const_int_I_operand" "I")))
10453 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10454 (mem:DF (match_dup 1)))])]
10455 "TARGET_32BIT && TARGET_HARD_FLOAT"
10458 int num_regs = XVECLEN (operands[0], 0);
10461 strcpy (pattern, \"vldm\\t\");
10462 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10463 strcat (pattern, \"!, {\");
10464 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10465 strcat (pattern, \"%P0\");
10466 if ((num_regs - 1) > 1)
10468 strcat (pattern, \"-%P1\");
10469 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10472 strcat (pattern, \"}\");
10473 output_asm_insn (pattern, op_list);
10477 [(set_attr "type" "load_16")
10478 (set_attr "conds" "unconditional")
10479 (set_attr "predicable" "no")]
10482 ;; Special patterns for dealing with the constant pool

;; Emit a 32-bit (4-byte) alignment directive for the pool.
10484 (define_insn "align_4"
10485 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10488 assemble_align (32);
10491 [(set_attr "type" "no_insn")]

;; Emit a 64-bit (8-byte) alignment directive for the pool.
10494 (define_insn "align_8"
10495 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10498 assemble_align (64);
10501 [(set_attr "type" "no_insn")]

;; Marks the end of a constant-table region.
10504 (define_insn "consttable_end"
10505 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10508 making_const_table = FALSE;
10511 [(set_attr "type" "no_insn")]

;; 1-byte pool entry, padded to 4 bytes with zeros.
10514 (define_insn "consttable_1"
10515 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10518 making_const_table = TRUE;
10519 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10520 assemble_zeros (3);
10523 [(set_attr "length" "4")
10524 (set_attr "type" "no_insn")]

;; 2-byte pool entry: FP16 constants go through arm_emit_fp16_const,
;; integers are emitted and padded to 4 bytes.
10527 (define_insn "consttable_2"
10528 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10532 rtx x = operands[0];
10533 making_const_table = TRUE;
10534 switch (GET_MODE_CLASS (GET_MODE (x)))
10537 arm_emit_fp16_const (x);
10540 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10541 assemble_zeros (2);
10546 [(set_attr "length" "4")
10547 (set_attr "type" "no_insn")]

;; 4-byte pool entry: floats via assemble_real; a stray HIGH rtx is
;; stripped (see the in-line comment) before assemble_integer.
10550 (define_insn "consttable_4"
10551 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10555 rtx x = operands[0];
10556 making_const_table = TRUE;
10557 scalar_float_mode float_mode;
10558 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10559 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10562 /* XXX: Sometimes gcc does something really dumb and ends up with
10563 a HIGH in a constant pool entry, usually because it's trying to
10564 load into a VFP register.  We know this will always be used in
10565 combination with a LO_SUM which ignores the high bits, so just
10566 strip off the HIGH.  */
10567 if (GET_CODE (x) == HIGH)
10569 assemble_integer (x, 4, BITS_PER_WORD, 1);
10570 mark_symbol_refs_as_used (x);
10574 [(set_attr "length" "4")
10575 (set_attr "type" "no_insn")]

;; 8-byte pool entry (double or 64-bit integer).
10578 (define_insn "consttable_8"
10579 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10583 making_const_table = TRUE;
10584 scalar_float_mode float_mode;
10585 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10586 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10587 float_mode, BITS_PER_WORD);
10589 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10592 [(set_attr "length" "8")
10593 (set_attr "type" "no_insn")]

;; 16-byte pool entry (e.g. vector constants).
10596 (define_insn "consttable_16"
10597 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10601 making_const_table = TRUE;
10602 scalar_float_mode float_mode;
10603 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10604 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10605 float_mode, BITS_PER_WORD);
10607 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10610 [(set_attr "length" "16")
10611 (set_attr "type" "no_insn")]
10614 ;; V5 Instructions,

;; Count leading zeros (CLZ), available from ARMv5T.
10616 (define_insn "clzsi2"
10617 [(set (match_operand:SI 0 "s_register_operand" "=r")
10618 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10619 "TARGET_32BIT && arm_arch5t"
10621 [(set_attr "predicable" "yes")
10622 (set_attr "type" "clz")])

;; Bit reversal (RBIT), modelled as an unspec; Thumb-2 architectures.
10624 (define_insn "rbitsi2"
10625 [(set (match_operand:SI 0 "s_register_operand" "=r")
10626 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10627 "TARGET_32BIT && arm_arch_thumb2"
10629 [(set_attr "predicable" "yes")
10630 (set_attr "type" "clz")])

10632 ;; Keep this as a CTZ expression until after reload and then split
10633 ;; into RBIT + CLZ.  Since RBIT is represented as an UNSPEC it is unlikely
10634 ;; to fold with any other expression.

;; Count trailing zeros: split after reload into RBIT then CLZ,
;; reusing operand 0 as the intermediate.
10636 (define_insn_and_split "ctzsi2"
10637 [(set (match_operand:SI 0 "s_register_operand" "=r")
10638 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10639 "TARGET_32BIT && arm_arch_thumb2"
10641 "&& reload_completed"
10644 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10645 emit_insn (gen_clzsi2 (operands[0], operands[0]));

10649 ;; V5E instructions.

;; Data prefetch (PLD); operands 1/2 are the standard rw/locality
;; arguments of the prefetch rtx.
10651 (define_insn "prefetch"
10652 [(prefetch (match_operand:SI 0 "address_operand" "p")
10653 (match_operand:SI 1 "" "")
10654 (match_operand:SI 2 "" ""))]
10655 "TARGET_32BIT && arm_arch5te"
10657 [(set_attr "type" "load_4")]

10660 ;; General predication pattern

;; Condition for conditional execution (cond_exec); volatile-touching
;; insns are excluded when TARGET_NO_VOLATILE_CE is set.
10663 [(match_operator 0 "arm_comparison_operator"
10664 [(match_operand 1 "cc_register" "")
10667 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10669 [(set_attr "predicated" "yes")]

;; Zero-length marker keeping a register live (see sibcall_epilogue
;; and the eh_return handling in "epilogue" above).
10672 (define_insn "force_register_use"
10673 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10676 [(set_attr "length" "0")
10677 (set_attr "type" "no_insn")]
10681 ;; Patterns for exception handling

;; __builtin_eh_return: dispatch to the ARM or Thumb variant.
10683 (define_expand "eh_return"
10684 [(use (match_operand 0 "general_operand"))]
10689 emit_insn (gen_arm_eh_return (operands[0]));
10691 emit_insn (gen_thumb_eh_return (operands[0]));

10696 ;; We can't expand this before we know where the link register is stored.

;; Deferred store of the EH return address: split after reload and
;; resolved by arm_set_return_address using the scratch in operand 1.
10697 (define_insn_and_split "arm_eh_return"
10698 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10700 (clobber (match_scratch:SI 1 "=&r"))]
10703 "&& reload_completed"
10707 arm_set_return_address (operands[0], operands[1]);

;; Read the thread pointer from CP15 (hardware TLS register).
10715 (define_insn "load_tp_hard"
10716 [(set (match_operand:SI 0 "register_operand" "=r")
10717 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10719 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10720 [(set_attr "predicable" "yes")
10721 (set_attr "type" "mrs")]

10724 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.

;; Software thread-pointer read via __aeabi_read_tp, FDPIC variant
;; (additionally clobbers the FDPIC register, r9).
10725 (define_insn "load_tp_soft_fdpic"
10726 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10727 (clobber (reg:SI FDPIC_REGNUM))
10728 (clobber (reg:SI LR_REGNUM))
10729 (clobber (reg:SI IP_REGNUM))
10730 (clobber (reg:CC CC_REGNUM))]
10731 "TARGET_SOFT_TP && TARGET_FDPIC"
10732 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10733 [(set_attr "conds" "clob")
10734 (set_attr "type" "branch")]

10737 ;; Doesn't clobber R1-R3.  Must use r0 for the first operand.

;; Software thread-pointer read via __aeabi_read_tp (non-FDPIC).
10738 (define_insn "load_tp_soft"
10739 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10740 (clobber (reg:SI LR_REGNUM))
10741 (clobber (reg:SI IP_REGNUM))
10742 (clobber (reg:CC CC_REGNUM))]
10743 "TARGET_SOFT_TP && !TARGET_FDPIC"
10744 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10745 [(set_attr "conds" "clob")
10746 (set_attr "type" "branch")]

10749 ;; tls descriptor call

;; TLS descriptor call sequence: emits the local PIC label numbered by
;; operand 1, then "bl ...(tlscall)"; result lands in r0.
10750 (define_insn "tlscall"
10751 [(set (reg:SI R0_REGNUM)
10752 (unspec:SI [(reg:SI R0_REGNUM)
10753 (match_operand:SI 0 "" "X")
10754 (match_operand 1 "" "")] UNSPEC_TLS))
10755 (clobber (reg:SI R1_REGNUM))
10756 (clobber (reg:SI LR_REGNUM))
10757 (clobber (reg:SI CC_REGNUM))]
10760 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10761 INTVAL (operands[1]));
10762 return "bl\\t%c0(tlscall)";
10764 [(set_attr "conds" "clob")
10765 (set_attr "length" "4")
10766 (set_attr "type" "branch")]

10769 ;; For thread pointer builtin

;; __builtin_thread_pointer: delegate to arm_load_tp.
10770 (define_expand "get_thread_pointersi"
10771 [(match_operand:SI 0 "s_register_operand")]
10775 arm_load_tp (operands[0]);
10781 ;; We only care about the lower 16 bits of the constant
10782 ;; being inserted into the upper 16 bits of the register.

;; MOVT-style insert of an immediate into the top halfword
;; (zero_extract destination); 32-bit and ARMv8-M baseline variants.
10783 (define_insn "*arm_movtas_ze"
10784 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10787 (match_operand:SI 1 "const_int_operand" ""))]
10792 [(set_attr "arch" "32,v8mb")
10793 (set_attr "predicable" "yes")
10794 (set_attr "length" "4")
10795 (set_attr "type" "alu_sreg")]

;; 32-bit byte swap via the REV instruction (Thumb-1/Thumb-2/ARM
;; alternatives).
10798 (define_insn "*arm_rev"
10799 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10800 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10806 [(set_attr "arch" "t1,t2,32")
10807 (set_attr "length" "2,2,4")
10808 (set_attr "predicable" "no,yes,yes")
10809 (set_attr "type" "rev")]

;; Byte-swap sequence for pre-REV (legacy) ARM: XOR/rotate/AND/XOR
;; using operands 2 and 3 as temporaries.
10812 (define_expand "arm_legacy_rev"
10813 [(set (match_operand:SI 2 "s_register_operand")
10814 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10818 (lshiftrt:SI (match_dup 2)
10820 (set (match_operand:SI 3 "s_register_operand")
10821 (rotatert:SI (match_dup 1)
10824 (and:SI (match_dup 2)
10825 (const_int -65281)))
10826 (set (match_operand:SI 0 "s_register_operand")
10827 (xor:SI (match_dup 3)

10833 ;; Reuse temporaries to keep register pressure down.

;; Byte-swap sequence for pre-REV Thumb, using temporaries 2-5.
10834 (define_expand "thumb_legacy_rev"
10835 [(set (match_operand:SI 2 "s_register_operand")
10836 (ashift:SI (match_operand:SI 1 "s_register_operand")
10838 (set (match_operand:SI 3 "s_register_operand")
10839 (lshiftrt:SI (match_dup 1)
10842 (ior:SI (match_dup 3)
10844 (set (match_operand:SI 4 "s_register_operand")
10846 (set (match_operand:SI 5 "s_register_operand")
10847 (rotatert:SI (match_dup 1)
10850 (ashift:SI (match_dup 5)
10853 (lshiftrt:SI (match_dup 5)
10856 (ior:SI (match_dup 5)
10859 (rotatert:SI (match_dup 5)
10861 (set (match_operand:SI 0 "s_register_operand")
10862 (ior:SI (match_dup 5)
10868 ;; ARM-specific expansion of signed mod by power of 2
10869 ;; using conditional negate.
10870 ;; For r0 % n where n is a power of 2 produce:
10872 ;; and r0, r0, #(n - 1)
10873 ;; and r1, r1, #(n - 1)
10874 ;; rsbpl r0, r1, #0

;; modsi3: only handles a positive power-of-2 divisor (FAIL otherwise,
;; per the exact_log2 guard); builds mask = n-1 and emits AND plus a
;; conditional negate expressed as IF_THEN_ELSE (COND_EXEC is not
;; reliable this early — see the in-line comment).
10876 (define_expand "modsi3"
10877 [(match_operand:SI 0 "register_operand")
10878 (match_operand:SI 1 "register_operand")
10879 (match_operand:SI 2 "const_int_operand")]
10882 HOST_WIDE_INT val = INTVAL (operands[2]);
10885 || exact_log2 (val) <= 0)
10888 rtx mask = GEN_INT (val - 1);
10890 /* In the special case of x0 % 2 we can do the even shorter:
10893 rsblt r0, r0, #0.  */
10897 rtx cc_reg = arm_gen_compare_reg (LT,
10898 operands[1], const0_rtx, NULL_RTX);
10899 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10900 rtx masked = gen_reg_rtx (SImode);
10902 emit_insn (gen_andsi3 (masked, operands[1], mask));
10903 emit_move_insn (operands[0],
10904 gen_rtx_IF_THEN_ELSE (SImode, cond,
10905 gen_rtx_NEG (SImode,
10911 rtx neg_op = gen_reg_rtx (SImode);
10912 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10915 /* Extract the condition register and mode.  */
10916 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10917 rtx cc_reg = SET_DEST (cmp);
10918 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10920 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10922 rtx masked_neg = gen_reg_rtx (SImode);
10923 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10925 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10926 during expand does not always work.  Do an IF_THEN_ELSE instead.  */
10927 emit_move_insn (operands[0],
10928 gen_rtx_IF_THEN_ELSE (SImode, cond,
10929 gen_rtx_NEG (SImode, masked_neg),

;; bswapsi2: on pre-ARMv6 (no REV) fall back to the legacy multi-insn
;; sequences above, allocating the needed temporaries here.
10937 (define_expand "bswapsi2"
10938 [(set (match_operand:SI 0 "s_register_operand")
10939 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10940 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10944 rtx op2 = gen_reg_rtx (SImode);
10945 rtx op3 = gen_reg_rtx (SImode);
10949 rtx op4 = gen_reg_rtx (SImode);
10950 rtx op5 = gen_reg_rtx (SImode);
10952 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10953 op2, op3, op4, op5));
10957 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10966 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10967 ;; and unsigned variants, respectively.  For rev16, expose
10968 ;; byte-swapping in the lower 16 bits only.

;; Sign-extending halfword byte swap (REVSH).
10969 (define_insn "*arm_revsh"
10970 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10971 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
10977 [(set_attr "arch" "t1,t2,32")
10978 (set_attr "length" "2,2,4")
10979 (set_attr "type" "rev")]

;; Halfword byte swap (REV16) on HImode values.
10982 (define_insn "*arm_rev16"
10983 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
10984 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
10990 [(set_attr "arch" "t1,t2,32")
10991 (set_attr "length" "2,2,4")
10992 (set_attr "type" "rev")]

10995 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
10996 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
10997 ;; each valid permutation.

;; REV16 on a full SImode value, ashift-first permutation; the mask
;; immediates are validated by the aarch_rev16_* predicates.
10999 (define_insn "arm_rev16si2"
11000 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11001 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11003 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11004 (and:SI (lshiftrt:SI (match_dup 1)
11006 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11008 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11009 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11011 [(set_attr "arch" "t1,t2,32")
11012 (set_attr "length" "2,2,4")
11013 (set_attr "type" "rev")]

;; Same as arm_rev16si2 with the lshiftrt term first.
11016 (define_insn "arm_rev16si2_alt"
11017 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11018 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11020 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11021 (and:SI (ashift:SI (match_dup 1)
11023 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11025 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11026 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11028 [(set_attr "arch" "t1,t2,32")
11029 (set_attr "length" "2,2,4")
11030 (set_attr "type" "rev")]

;; Expander for 16-bit byte swap (body elided in this extraction).
11033 (define_expand "bswaphi2"
11034 [(set (match_operand:HI 0 "s_register_operand")
11035 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11040 ;; Patterns for LDRD/STRD in Thumb2 mode

;; LDRD with immediate offset: two adjacent SImode loads (offsets
;; differing by exactly 4) fused into one ldrd; register pairing is
;; checked by operands_ok_ldrd_strd.
11042 (define_insn "*thumb2_ldrd"
11043 [(set (match_operand:SI 0 "s_register_operand" "=r")
11044 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11045 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11046 (set (match_operand:SI 3 "s_register_operand" "=r")
11047 (mem:SI (plus:SI (match_dup 1)
11048 (match_operand:SI 4 "const_int_operand" ""))))]
11049 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11050 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11051 && (operands_ok_ldrd_strd (operands[0], operands[3],
11052 operands[1], INTVAL (operands[2]),
11054 "ldrd%?\t%0, %3, [%1, %2]"
11055 [(set_attr "type" "load_8")
11056 (set_attr "predicable" "yes")])

;; LDRD at the base register (offsets 0 and +4).
11058 (define_insn "*thumb2_ldrd_base"
11059 [(set (match_operand:SI 0 "s_register_operand" "=r")
11060 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11061 (set (match_operand:SI 2 "s_register_operand" "=r")
11062 (mem:SI (plus:SI (match_dup 1)
11064 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11065 && (operands_ok_ldrd_strd (operands[0], operands[2],
11066 operands[1], 0, false, true))"
11067 "ldrd%?\t%0, %2, [%1]"
11068 [(set_attr "type" "load_8")
11069 (set_attr "predicable" "yes")])

;; LDRD at offsets -4 and 0.
11071 (define_insn "*thumb2_ldrd_base_neg"
11072 [(set (match_operand:SI 0 "s_register_operand" "=r")
11073 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11075 (set (match_operand:SI 2 "s_register_operand" "=r")
11076 (mem:SI (match_dup 1)))]
11077 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11078 && (operands_ok_ldrd_strd (operands[0], operands[2],
11079 operands[1], -4, false, true))"
11080 "ldrd%?\t%0, %2, [%1, #-4]"
11081 [(set_attr "type" "load_8")
11082 (set_attr "predicable" "yes")])

;; STRD with immediate offset — store counterpart of *thumb2_ldrd.
11084 (define_insn "*thumb2_strd"
11085 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11086 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11087 (match_operand:SI 2 "s_register_operand" "r"))
11088 (set (mem:SI (plus:SI (match_dup 0)
11089 (match_operand:SI 3 "const_int_operand" "")))
11090 (match_operand:SI 4 "s_register_operand" "r"))]
11091 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11092 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11093 && (operands_ok_ldrd_strd (operands[2], operands[4],
11094 operands[0], INTVAL (operands[1]),
11096 "strd%?\t%2, %4, [%0, %1]"
11097 [(set_attr "type" "store_8")
11098 (set_attr "predicable" "yes")])

;; STRD at the base register (offsets 0 and +4).
11100 (define_insn "*thumb2_strd_base"
11101 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11102 (match_operand:SI 1 "s_register_operand" "r"))
11103 (set (mem:SI (plus:SI (match_dup 0)
11105 (match_operand:SI 2 "s_register_operand" "r"))]
11106 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11107 && (operands_ok_ldrd_strd (operands[1], operands[2],
11108 operands[0], 0, false, false))"
11109 "strd%?\t%1, %2, [%0]"
11110 [(set_attr "type" "store_8")
11111 (set_attr "predicable" "yes")])

;; STRD at offsets -4 and 0.
11113 (define_insn "*thumb2_strd_base_neg"
11114 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11116 (match_operand:SI 1 "s_register_operand" "r"))
11117 (set (mem:SI (match_dup 0))
11118 (match_operand:SI 2 "s_register_operand" "r"))]
11119 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11120 && (operands_ok_ldrd_strd (operands[1], operands[2],
11121 operands[0], -4, false, false))"
11122 "strd%?\t%1, %2, [%0, #-4]"
11123 [(set_attr "type" "store_8")
11124 (set_attr "predicable" "yes")])
11126 ;; ARMv8 CRC32 instructions.

;; Iterator-generated CRC32/CRC32C insns; <crc_variant> and <crc_mode>
;; come from mode/code attributes defined elsewhere in the port.
11127 (define_insn "arm_<crc_variant>"
11128 [(set (match_operand:SI 0 "s_register_operand" "=r")
11129 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11130 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11133 "<crc_variant>\\t%0, %1, %2"
11134 [(set_attr "type" "crc")
11135 (set_attr "conds" "unconditional")]

11138 ;; Load the load/store double peephole optimizations.
11139 (include "ldrdstrd.md")

11141 ;; Load the load/store multiple patterns
11142 (include "ldmstm.md")

11144 ;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern covers
11145 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11146 ;; The operands are validated through the load_multiple_operation
11147 ;; match_parallel predicate rather than through constraints so enable it only

;; Large load-multiple without writeback, output via
;; arm_output_multireg_pop (return_pc=false).
11149 (define_insn "*load_multiple"
11150 [(match_parallel 0 "load_multiple_operation"
11151 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11152 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11154 "TARGET_32BIT && reload_completed"
11157 arm_output_multireg_pop (operands, /*return_pc=*/false,
11158 /*cond=*/const_true_rtx,
11164 [(set_attr "predicable" "yes")]
;; Soft-float copysignf: copy operand 2, then overwrite bit 31 with
;; the sign bit of operand 1 via the Thumb-2 bitfield-insert pattern.
11167 (define_expand "copysignsf3"
11168 [(match_operand:SF 0 "register_operand")
11169 (match_operand:SF 1 "register_operand")
11170 (match_operand:SF 2 "register_operand")]
11171 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11173 emit_move_insn (operands[0], operands[2]);
11174 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11175 GEN_INT (31), GEN_INT (0),
11176 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));

;; Soft-float copysign for DFmode: operate on the high SImode word
;; only (sign lives there); the low word is copied unchanged.
11181 (define_expand "copysigndf3"
11182 [(match_operand:DF 0 "register_operand")
11183 (match_operand:DF 1 "register_operand")
11184 (match_operand:DF 2 "register_operand")]
11185 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11187 rtx op0_low = gen_lowpart (SImode, operands[0]);
11188 rtx op0_high = gen_highpart (SImode, operands[0]);
11189 rtx op1_low = gen_lowpart (SImode, operands[1]);
11190 rtx op1_high = gen_highpart (SImode, operands[1]);
11191 rtx op2_high = gen_highpart (SImode, operands[2]);
11193 rtx scratch1 = gen_reg_rtx (SImode);
11194 rtx scratch2 = gen_reg_rtx (SImode);
11195 emit_move_insn (scratch1, op2_high);
11196 emit_move_insn (scratch2, op1_high);
11198 emit_insn(gen_rtx_SET(scratch1,
11199 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11200 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11201 emit_move_insn (op0_low, op1_low);
11202 emit_move_insn (op0_high, scratch2);

11208 ;; movmisalign patterns for HImode and SImode.

;; Misaligned move: must not FAIL, so a non-register source is forced
;; into a register first; HImode loads go through an SImode temporary
;; (unaligned_loadhiu zero-extends) and are narrowed afterwards.
11209 (define_expand "movmisalign<mode>"
11210 [(match_operand:HSI 0 "general_operand")
11211 (match_operand:HSI 1 "general_operand")]
11214 /* This pattern is not permitted to fail during expansion: if both arguments
11215 are non-registers (e.g. memory := constant), force operand 1 into a
11217 rtx (* gen_unaligned_load)(rtx, rtx);
11218 rtx tmp_dest = operands[0];
11219 if (!s_register_operand (operands[0], <MODE>mode)
11220 && !s_register_operand (operands[1], <MODE>mode))
11221 operands[1] = force_reg (<MODE>mode, operands[1]);
11223 if (<MODE>mode == HImode)
11225 gen_unaligned_load = gen_unaligned_loadhiu;
11226 tmp_dest = gen_reg_rtx (SImode);
11229 gen_unaligned_load = gen_unaligned_loadsi;
11231 if (MEM_P (operands[1]))
11233 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11234 if (<MODE>mode == HImode)
11235 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11238 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor data-processing instruction (iterated over CDPI, i.e.
;; the CDP/CDP2 variants).  All six operands are immediates; per the
;; output template the assembly shape is
;;   <cdp> p<op0>, <op1>, CR<op2>, CR<op3>, CR<op4>, <op5>
;; arm_const_bounds diagnoses out-of-range immediates at output time:
;; ops 0-1 in [0,16), ops 2-4 in [0,32), op 5 in [0,8).
11243 (define_insn "arm_<cdp>"
11244 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11245 (match_operand:SI 1 "immediate_operand" "n")
11246 (match_operand:SI 2 "immediate_operand" "n")
11247 (match_operand:SI 3 "immediate_operand" "n")
11248 (match_operand:SI 4 "immediate_operand" "n")
11249 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11250 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11252 arm_const_bounds (operands[0], 0, 16);
11253 arm_const_bounds (operands[1], 0, 16);
11254 arm_const_bounds (operands[2], 0, (1 << 5));
11255 arm_const_bounds (operands[3], 0, (1 << 5));
11256 arm_const_bounds (operands[4], 0, (1 << 5));
11257 arm_const_bounds (operands[5], 0, 8);
11258 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11260 [(set_attr "length" "4")
11261 (set_attr "type" "coproc")])
;; Coprocessor load (iterated over LDCI): op0 = coprocessor number
;; (bounds-checked to [0,16)), op1 = coprocessor register number
;; ([0,32)), op2 = memory source restricted by the "Uz" constraint
;; (a coprocessor-valid addressing mode).  Matched via the arm_<ldc>
;; expander below rather than generated by name (leading '*').
11263 (define_insn "*ldc"
11264 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11265 (match_operand:SI 1 "immediate_operand" "n")
11266 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11267 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11269 arm_const_bounds (operands[0], 0, 16);
11270 arm_const_bounds (operands[1], 0, (1 << 5));
11271 return "<ldc>\\tp%c0, CR%c1, %2";
11273 [(set_attr "length" "4")
11274 (set_attr "type" "coproc")])
;; Coprocessor store (iterated over STCI): mirror image of *ldc above.
;; op2 is the memory destination ("=Uz" marks it as written).  Matched
;; via the arm_<stc> expander below (leading '*').
11276 (define_insn "*stc"
11277 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11278 (match_operand:SI 1 "immediate_operand" "n")
11279 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11280 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11282 arm_const_bounds (operands[0], 0, 16);
11283 arm_const_bounds (operands[1], 0, (1 << 5));
11284 return "<stc>\\tp%c0, CR%c1, %2";
11286 [(set_attr "length" "4")
11287 (set_attr "type" "coproc")])
;; Named expander for the LDC family: takes a raw register address in
;; operand 2 and wraps it in a (mem:SI ...) so the *ldc insn above can
;; match.  Presumably the entry point for the corresponding coprocessor
;; builtin (guarded by arm_coproc_builtin_available) — no C body needed.
11289 (define_expand "arm_<ldc>"
11290 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11291 (match_operand:SI 1 "immediate_operand")
11292 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11293 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Named expander for the STC family: identical in structure to
;; arm_<ldc> above, feeding the *stc insn.
11295 (define_expand "arm_<stc>"
11296 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11297 (match_operand:SI 1 "immediate_operand")
11298 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11299 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Move core register to coprocessor (iterated over MCRI):
;;   <mcr> p<op0>, <op1>, <reg op2>, CR<op3>, CR<op4>, <op5>
;; Operand 2 is the SImode source register; the remaining operands are
;; bounds-checked immediates.  The (use (match_dup 2)) repeats the
;; source register outside the unspec_volatile.
11301 (define_insn "arm_<mcr>"
11302 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11303 (match_operand:SI 1 "immediate_operand" "n")
11304 (match_operand:SI 2 "s_register_operand" "r")
11305 (match_operand:SI 3 "immediate_operand" "n")
11306 (match_operand:SI 4 "immediate_operand" "n")
11307 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11308 (use (match_dup 2))]
11309 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11311 arm_const_bounds (operands[0], 0, 16);
11312 arm_const_bounds (operands[1], 0, 8);
11313 arm_const_bounds (operands[3], 0, (1 << 5));
11314 arm_const_bounds (operands[4], 0, (1 << 5));
11315 arm_const_bounds (operands[5], 0, 8);
11316 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11318 [(set_attr "length" "4")
11319 (set_attr "type" "coproc")])
;; Move coprocessor register to core register (iterated over MRCI):
;;   <mrc> p<op1>, <op2>, <dest op0>, CR<op3>, CR<op4>, <op5>
;; Operand 0 is the SImode destination register written by the set;
;; operands 1-5 are bounds-checked immediates.
11321 (define_insn "arm_<mrc>"
11322 [(set (match_operand:SI 0 "s_register_operand" "=r")
11323 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11324 (match_operand:SI 2 "immediate_operand" "n")
11325 (match_operand:SI 3 "immediate_operand" "n")
11326 (match_operand:SI 4 "immediate_operand" "n")
11327 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11328 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11330 arm_const_bounds (operands[1], 0, 16);
11331 arm_const_bounds (operands[2], 0, 8);
11332 arm_const_bounds (operands[3], 0, (1 << 5));
11333 arm_const_bounds (operands[4], 0, (1 << 5));
11334 arm_const_bounds (operands[5], 0, 8);
11335 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11337 [(set_attr "length" "4")
11338 (set_attr "type" "coproc")])
;; Move a 64-bit core register pair to a coprocessor (iterated over
;; MCRRI).  Operand 2 is a DImode source; %Q2/%R2 print its two SImode
;; halves (low/high word respectively in the ARM backend's %Q/%R
;; convention).  The (use (match_dup 2)) repeats the source register
;; pair outside the unspec_volatile.
11340 (define_insn "arm_<mcrr>"
11341 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11342 (match_operand:SI 1 "immediate_operand" "n")
11343 (match_operand:DI 2 "s_register_operand" "r")
11344 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11345 (use (match_dup 2))]
11346 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11348 arm_const_bounds (operands[0], 0, 16);
11349 arm_const_bounds (operands[1], 0, 8);
11350 arm_const_bounds (operands[3], 0, (1 << 5));
11351 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11353 [(set_attr "length" "4")
11354 (set_attr "type" "coproc")])
;; Move a coprocessor value into a 64-bit core register pair (iterated
;; over MRRCI): mirror image of arm_<mcrr> above.  Operand 0 is the
;; DImode destination written by the set; %Q0/%R0 print its two halves.
11356 (define_insn "arm_<mrrc>"
11357 [(set (match_operand:DI 0 "s_register_operand" "=r")
11358 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11359 (match_operand:SI 2 "immediate_operand" "n")
11360 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11361 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11363 arm_const_bounds (operands[1], 0, 16);
11364 arm_const_bounds (operands[2], 0, 8);
11365 arm_const_bounds (operands[3], 0, (1 << 5));
11366 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11368 [(set_attr "length" "4")
11369 (set_attr "type" "coproc")])
;; Expander for the generic speculation_barrier pattern.  On targets
;; with a usable barrier (Armv7/Armv8, see *speculation_barrier_insn
;; below) the unspec_volatile is emitted directly; otherwise a libgcc
;; helper is called instead.
11371 (define_expand "speculation_barrier"
11372 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11375 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11376 have a usable barrier (and probably don't need one in practice).
11377 But to be safe if such code is run on later architectures, call a
11378 helper function in libgcc that will do the thing for the active
11380 if (!(arm_arch7 || arm_arch8))
11382 arm_emit_speculation_barrier_function ();
11388 ;; Generate a hard speculation barrier when we have not enabled speculation
;; Hardware speculation barrier for Armv7/Armv8.  Length 8 implies a
;; two-instruction (4+4 byte) sequence; the output template itself is
;; not visible in this chunk — NOTE(review): confirm the emitted
;; sequence (conventionally ISB followed by DSB SY) against the
;; original file.
11390 (define_insn "*speculation_barrier_insn"
11391 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11392 "arm_arch7 || arm_arch8"
11394 [(set_attr "type" "block")
11395 (set_attr "length" "8")]
;; End-of-file includes: pull in the remaining pattern files that make
;; up the ARM backend's machine description.
11398 ;; Vector bits common to IWMMXT and Neon
11399 (include "vec-common.md")
11400 ;; Load the Intel Wireless Multimedia Extension patterns
11401 (include "iwmmxt.md")
11402 ;; Load the VFP co-processor patterns
;; NOTE(review): the comment above announces the VFP patterns but no
;; matching (include "vfp.md") line is visible here — confirm the
;; include was not accidentally dropped.
11404 ;; Thumb-1 patterns
11405 (include "thumb1.md")
11406 ;; Thumb-2 patterns
11407 (include "thumb2.md")
;; Neon and crypto extension patterns.
11409 (include "neon.md")
11411 (include "crypto.md")
11412 ;; Synchronization Primitives
11413 (include "sync.md")
11414 ;; Fixed-point patterns
11415 (include "arm-fixed.md")