1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
; Wrapped in (const ...): the value depends only on the global TARGET_THUMB
; flag, not on the individual insn, so it is uniform within a function.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
; (Contrast with is_thumb above, which tests TARGET_THUMB and so covers
; both Thumb-1 and Thumb-2.)
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
; Defaults to "no"; qualifying patterns override it per alternative.
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
; Defaults to "yes": only the exceptional patterns described above set "no".
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
;; (The empty value list "" makes this a numeric attribute.)
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
;; registers (NOTE(review): sentence continuation inferred — confirm).
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
; The value is read from the backend variable arm_fpu_attr; (const ...)
; marks it as independent of the individual insn.
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
; Distinct from "predicable": predicable marks forms that *may* be made
; conditional; predicated marks a form that *is* conditionally executed.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
; Values not covered by the comment above: "iwmmxt"/"iwmmxt2" for the
; Intel Wireless MMX targets, "armv6_or_vfpv3" for arm_arch6 || TARGET_VFP3,
; and "neon" for TARGET_NEON (tested by the arch_enabled attribute below).
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
; Optimization goal an alternative is intended for ("speed" or "size");
; evaluated by opt_enabled below via optimize_function_for_{speed,size}_p.
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
; 'yes' for FP load alternatives (type f_loads/f_loadd) whose source
; operand is a constant, i.e. one that must come from the literal pool.
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
; All four attributes default to zero ("never references the pool");
; individual load patterns override them with their real ranges.
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Select the Thumb-2 or ARM variant of the (neg_)pool range based on the
; is_thumb attribute defined earlier in this file.
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
; Default attribute values applied to inline asm statements; 250 is the
; conservative pool_range referred to above.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
; (const ...): fixed for the whole compilation, not per insn.
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
; See also "predicated" (the form *is* conditional) and
; "predicable_short_it" (restriction for Thumb-2 short IT blocks).
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
; 'yes' only when tuning for a core with the arm_tune_wbuf flag set.
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
; Any type value not in the list below falls through to "multi".
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
;; Defaults to "no"; relevant Thumb branch patterns override it.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
;; Numeric attribute; the default of 1 covers single-instruction patterns.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 if (hi_op2 == const0_rtx)
475 emit_insn (gen_add0si3_carryin_ltu (hi_dest, hi_op1));
477 emit_insn (gen_addsi3_carryin_ltu (hi_dest, hi_op1, hi_op2));
480 if (lo_result != lo_dest)
481 emit_move_insn (lo_result, lo_dest);
482 if (hi_result != hi_dest)
483 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
489 (define_expand "addv<mode>4"
490 [(match_operand:SIDI 0 "register_operand")
491 (match_operand:SIDI 1 "register_operand")
492 (match_operand:SIDI 2 "register_operand")
493 (match_operand 3 "")]
496 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
497 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
502 (define_expand "uaddv<mode>4"
503 [(match_operand:SIDI 0 "register_operand")
504 (match_operand:SIDI 1 "register_operand")
505 (match_operand:SIDI 2 "register_operand")
506 (match_operand 3 "")]
509 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
510 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
515 (define_expand "addsi3"
516 [(set (match_operand:SI 0 "s_register_operand")
517 (plus:SI (match_operand:SI 1 "s_register_operand")
518 (match_operand:SI 2 "reg_or_int_operand")))]
521 if (TARGET_32BIT && CONST_INT_P (operands[2]))
523 arm_split_constant (PLUS, SImode, NULL_RTX,
524 INTVAL (operands[2]), operands[0], operands[1],
525 optimize && can_create_pseudo_p ());
531 ; If there is a scratch available, this will be faster than synthesizing the
534 [(match_scratch:SI 3 "r")
535 (set (match_operand:SI 0 "arm_general_register_operand" "")
536 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
537 (match_operand:SI 2 "const_int_operand" "")))]
539 !(const_ok_for_arm (INTVAL (operands[2]))
540 || const_ok_for_arm (-INTVAL (operands[2])))
541 && const_ok_for_arm (~INTVAL (operands[2]))"
542 [(set (match_dup 3) (match_dup 2))
543 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
547 ;; The r/r/k alternative is required when reloading the address
548 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
549 ;; put the duplicated register first, and not try the commutative version.
550 (define_insn_and_split "*arm_addsi3"
551 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
552 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
553 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
569 subw%?\\t%0, %1, #%n2
570 subw%?\\t%0, %1, #%n2
573 && CONST_INT_P (operands[2])
574 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
575 && (reload_completed || !arm_eliminable_register (operands[1]))"
576 [(clobber (const_int 0))]
578 arm_split_constant (PLUS, SImode, curr_insn,
579 INTVAL (operands[2]), operands[0],
583 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
584 (set_attr "predicable" "yes")
585 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
586 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
587 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
588 (const_string "alu_imm")
589 (const_string "alu_sreg")))
593 (define_insn "adddi3_compareV"
594 [(set (reg:CC_V CC_REGNUM)
597 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
598 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
599 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
600 (set (match_operand:DI 0 "s_register_operand" "=&r")
601 (plus:DI (match_dup 1) (match_dup 2)))]
603 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
604 [(set_attr "conds" "set")
605 (set_attr "length" "8")
606 (set_attr "type" "multiple")]
609 (define_insn "addsi3_compareV"
610 [(set (reg:CC_V CC_REGNUM)
613 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
614 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
615 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
616 (set (match_operand:SI 0 "register_operand" "=r")
617 (plus:SI (match_dup 1) (match_dup 2)))]
619 "adds%?\\t%0, %1, %2"
620 [(set_attr "conds" "set")
621 (set_attr "type" "alus_sreg")]
624 (define_insn "adddi3_compareC"
625 [(set (reg:CC_C CC_REGNUM)
628 (match_operand:DI 1 "register_operand" "r")
629 (match_operand:DI 2 "register_operand" "r"))
631 (set (match_operand:DI 0 "register_operand" "=&r")
632 (plus:DI (match_dup 1) (match_dup 2)))]
634 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
635 [(set_attr "conds" "set")
636 (set_attr "length" "8")
637 (set_attr "type" "multiple")]
640 (define_insn "addsi3_compareC"
641 [(set (reg:CC_C CC_REGNUM)
642 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
643 (match_operand:SI 2 "register_operand" "r"))
645 (set (match_operand:SI 0 "register_operand" "=r")
646 (plus:SI (match_dup 1) (match_dup 2)))]
648 "adds%?\\t%0, %1, %2"
649 [(set_attr "conds" "set")
650 (set_attr "type" "alus_sreg")]
653 (define_insn "addsi3_compare0"
654 [(set (reg:CC_NOOV CC_REGNUM)
656 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
657 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
659 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
660 (plus:SI (match_dup 1) (match_dup 2)))]
664 subs%?\\t%0, %1, #%n2
666 [(set_attr "conds" "set")
667 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
670 (define_insn "*addsi3_compare0_scratch"
671 [(set (reg:CC_NOOV CC_REGNUM)
673 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
674 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
681 [(set_attr "conds" "set")
682 (set_attr "predicable" "yes")
683 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
686 (define_insn "*compare_negsi_si"
687 [(set (reg:CC_Z CC_REGNUM)
689 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
690 (match_operand:SI 1 "s_register_operand" "l,r")))]
693 [(set_attr "conds" "set")
694 (set_attr "predicable" "yes")
695 (set_attr "arch" "t2,*")
696 (set_attr "length" "2,4")
697 (set_attr "predicable_short_it" "yes,no")
698 (set_attr "type" "alus_sreg")]
701 ;; This is the canonicalization of subsi3_compare when the
702 ;; addend is a constant.
703 (define_insn "cmpsi2_addneg"
704 [(set (reg:CC CC_REGNUM)
706 (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1)
710 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
712 && (INTVAL (operands[2])
713 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
715 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
716 in different condition codes (like cmn rather than like cmp), so that
717 alternative comes first. Both alternatives can match for any 0x??000000
718 where except for 0 and INT_MIN it doesn't matter what we choose, and also
719 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
721 if (which_alternative == 0 && operands[3] != const1_rtx)
722 return "subs%?\\t%0, %1, #%n3";
724 return "adds%?\\t%0, %1, %3";
726 [(set_attr "conds" "set")
727 (set_attr "type" "alus_sreg")]
730 ;; Convert the sequence
732 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
736 ;; bcs dest ((unsigned)rn >= 1)
737 ;; similarly for the beq variant using bcc.
738 ;; This is a common looping idiom (while (n--))
740 [(set (match_operand:SI 0 "arm_general_register_operand" "")
741 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
743 (set (match_operand 2 "cc_register" "")
744 (compare (match_dup 0) (const_int -1)))
746 (if_then_else (match_operator 3 "equality_operator"
747 [(match_dup 2) (const_int 0)])
748 (match_operand 4 "" "")
749 (match_operand 5 "" "")))]
750 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
754 (match_dup 1) (const_int 1)))
755 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
757 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
760 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
761 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
764 operands[2], const0_rtx);"
767 ;; The next four insns work because they compare the result with one of
768 ;; the operands, and we know that the use of the condition code is
769 ;; either GEU or LTU, so we can use the carry flag from the addition
770 ;; instead of doing the compare a second time.
771 (define_insn "*addsi3_compare_op1"
772 [(set (reg:CC_C CC_REGNUM)
774 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
775 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
777 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
778 (plus:SI (match_dup 1) (match_dup 2)))]
783 subs%?\\t%0, %1, #%n2
784 subs%?\\t%0, %0, #%n2
786 subs%?\\t%0, %1, #%n2
788 [(set_attr "conds" "set")
789 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
790 (set_attr "length" "2,2,2,2,4,4,4")
792 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
795 (define_insn "*addsi3_compare_op2"
796 [(set (reg:CC_C CC_REGNUM)
798 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
799 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
801 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
807 subs%?\\t%0, %1, #%n2
808 subs%?\\t%0, %0, #%n2
810 subs%?\\t%0, %1, #%n2
812 [(set_attr "conds" "set")
813 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
814 (set_attr "length" "2,2,2,2,4,4,4")
816 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
819 (define_insn "*compare_addsi2_op0"
820 [(set (reg:CC_C CC_REGNUM)
822 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
823 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
832 [(set_attr "conds" "set")
833 (set_attr "predicable" "yes")
834 (set_attr "arch" "t2,t2,*,*,*")
835 (set_attr "predicable_short_it" "yes,yes,no,no,no")
836 (set_attr "length" "2,2,4,4,4")
837 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
840 (define_insn "*compare_addsi2_op1"
841 [(set (reg:CC_C CC_REGNUM)
843 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
844 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
853 [(set_attr "conds" "set")
854 (set_attr "predicable" "yes")
855 (set_attr "arch" "t2,t2,*,*,*")
856 (set_attr "predicable_short_it" "yes,yes,no,no,no")
857 (set_attr "length" "2,2,4,4,4")
858 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
861 (define_insn "addsi3_carryin_<optab>"
862 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
863 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
864 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
865 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
870 sbc%?\\t%0, %1, #%B2"
871 [(set_attr "conds" "use")
872 (set_attr "predicable" "yes")
873 (set_attr "arch" "t2,*,*")
874 (set_attr "length" "4")
875 (set_attr "predicable_short_it" "yes,no,no")
876 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
879 ;; Canonicalization of the above when the immediate is zero.
880 (define_insn "add0si3_carryin_<optab>"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
883 (match_operand:SI 1 "arm_not_operand" "r")))]
886 [(set_attr "conds" "use")
887 (set_attr "predicable" "yes")
888 (set_attr "length" "4")
889 (set_attr "type" "adc_imm")]
892 (define_insn "*addsi3_carryin_alt2_<optab>"
893 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
894 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
895 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
896 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
901 sbc%?\\t%0, %1, #%B2"
902 [(set_attr "conds" "use")
903 (set_attr "predicable" "yes")
904 (set_attr "arch" "t2,*,*")
905 (set_attr "length" "4")
906 (set_attr "predicable_short_it" "yes,no,no")
907 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
910 (define_insn "*addsi3_carryin_shift_<optab>"
911 [(set (match_operand:SI 0 "s_register_operand" "=r")
913 (match_operator:SI 2 "shift_operator"
914 [(match_operand:SI 3 "s_register_operand" "r")
915 (match_operand:SI 4 "reg_or_int_operand" "rM")])
916 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0)))
917 (match_operand:SI 1 "s_register_operand" "r")))]
919 "adc%?\\t%0, %1, %3%S2"
920 [(set_attr "conds" "use")
921 (set_attr "predicable" "yes")
922 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
923 (const_string "alu_shift_imm")
924 (const_string "alu_shift_reg")))]
927 (define_insn "*addsi3_carryin_clobercc_<optab>"
928 [(set (match_operand:SI 0 "s_register_operand" "=r")
929 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
930 (match_operand:SI 2 "arm_rhs_operand" "rI"))
931 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
932 (clobber (reg:CC CC_REGNUM))]
934 "adcs%?\\t%0, %1, %2"
935 [(set_attr "conds" "set")
936 (set_attr "type" "adcs_reg")]
939 (define_expand "subv<mode>4"
940 [(match_operand:SIDI 0 "register_operand")
941 (match_operand:SIDI 1 "register_operand")
942 (match_operand:SIDI 2 "register_operand")
943 (match_operand 3 "")]
946 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
947 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
952 (define_expand "usubv<mode>4"
953 [(match_operand:SIDI 0 "register_operand")
954 (match_operand:SIDI 1 "register_operand")
955 (match_operand:SIDI 2 "register_operand")
956 (match_operand 3 "")]
959 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
960 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
965 (define_insn "subdi3_compare1"
966 [(set (reg:CC CC_REGNUM)
968 (match_operand:DI 1 "s_register_operand" "r")
969 (match_operand:DI 2 "s_register_operand" "r")))
970 (set (match_operand:DI 0 "s_register_operand" "=&r")
971 (minus:DI (match_dup 1) (match_dup 2)))]
973 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
974 [(set_attr "conds" "set")
975 (set_attr "length" "8")
976 (set_attr "type" "multiple")]
979 (define_insn "subsi3_compare1"
980 [(set (reg:CC CC_REGNUM)
982 (match_operand:SI 1 "register_operand" "r")
983 (match_operand:SI 2 "register_operand" "r")))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (minus:SI (match_dup 1) (match_dup 2)))]
987 "subs%?\\t%0, %1, %2"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_sreg")]
992 (define_insn "subsi3_carryin"
993 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
994 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
995 (match_operand:SI 2 "s_register_operand" "r,r,r"))
996 (match_operand:SI 3 "arm_borrow_operation" "")))]
1001 sbc%?\\t%0, %2, %2, lsl #1"
1002 [(set_attr "conds" "use")
1003 (set_attr "arch" "*,a,t2")
1004 (set_attr "predicable" "yes")
1005 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1008 (define_insn "*subsi3_carryin_const"
1009 [(set (match_operand:SI 0 "s_register_operand" "=r")
1011 (match_operand:SI 1 "s_register_operand" "r")
1012 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1013 (match_operand:SI 3 "arm_borrow_operation" "")))]
1015 "sbc\\t%0, %1, #%n2"
1016 [(set_attr "conds" "use")
1017 (set_attr "type" "adc_imm")]
1020 (define_insn "*subsi3_carryin_const0"
1021 [(set (match_operand:SI 0 "s_register_operand" "=r")
1022 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1023 (match_operand:SI 2 "arm_borrow_operation" "")))]
1026 [(set_attr "conds" "use")
1027 (set_attr "type" "adc_imm")]
;; *subsi3_carryin_shift: SBC with a shifted-register subtrahend,
;; op0 = op1 - (op3 <shift-op2> op4) - borrow.  The "type" attribute
;; is computed: immediate shift amount vs. register shift amount.
1030 (define_insn "*subsi3_carryin_shift"
1031 [(set (match_operand:SI 0 "s_register_operand" "=r")
1033 (match_operand:SI 1 "s_register_operand" "r")
1034 (match_operator:SI 2 "shift_operator"
1035 [(match_operand:SI 3 "s_register_operand" "r")
1036 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1037 (match_operand:SI 5 "arm_borrow_operation" "")))]
1039 "sbc%?\\t%0, %1, %3%S2"
1040 [(set_attr "conds" "use")
1041 (set_attr "predicable" "yes")
1042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1043 (const_string "alu_shift_imm")
1044 (const_string "alu_shift_reg")))]
;; *rsbsi3_carryin_shift: reverse subtract with carry (RSC) taking a
;; shifted register as the minuend:
;; op0 = (op3 <shift-op2> op4) - op1 - borrow.  Mirror image of
;; *subsi3_carryin_shift above.
1047 (define_insn "*rsbsi3_carryin_shift"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r")
1050 (match_operator:SI 2 "shift_operator"
1051 [(match_operand:SI 3 "s_register_operand" "r")
1052 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1053 (match_operand:SI 1 "s_register_operand" "r"))
1054 (match_operand:SI 5 "arm_borrow_operation" "")))]
1056 "rsc%?\\t%0, %1, %3%S2"
1057 [(set_attr "conds" "use")
1058 (set_attr "predicable" "yes")
1059 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1060 (const_string "alu_shift_imm")
1061 (const_string "alu_shift_reg")))]
1064 ; transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
1066 [(set (match_operand:SI 0 "s_register_operand" "")
1067 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1068 (match_operand:SI 2 "s_register_operand" ""))
1070 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1072 [(set (match_dup 3) (match_dup 1))
1073 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1075 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1078 (define_expand "addsf3"
1079 [(set (match_operand:SF 0 "s_register_operand")
1080 (plus:SF (match_operand:SF 1 "s_register_operand")
1081 (match_operand:SF 2 "s_register_operand")))]
1082 "TARGET_32BIT && TARGET_HARD_FLOAT"
1086 (define_expand "adddf3"
1087 [(set (match_operand:DF 0 "s_register_operand")
1088 (plus:DF (match_operand:DF 1 "s_register_operand")
1089 (match_operand:DF 2 "s_register_operand")))]
1090 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; subdi3: expand a 64-bit subtraction as two 32-bit operations — a
;; flag-setting low-word subtract followed by a borrow-consuming
;; high-word subtract.
;; NOTE(review): the expander condition and several brace/else lines
;; are missing from this extract; visible code is preserved unchanged.
1094 (define_expand "subdi3"
1096 [(set (match_operand:DI 0 "s_register_operand")
1097 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1098 (match_operand:DI 2 "s_register_operand")))
1099 (clobber (reg:CC CC_REGNUM))])]
/* First path (guard lines not visible here — presumably the
   non-TARGET_32BIT case): just force a constant op1 into a reg.  */
1104 if (!REG_P (operands[1]))
1105 operands[1] = force_reg (DImode, operands[1]);
1109 rtx lo_result, hi_result, lo_dest, hi_dest;
1110 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1113 /* Since operands[1] may be an integer, pass it second, so that
1114 any necessary simplifications will be done on the decomposed
1116 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1118 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1119 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
/* Legitimize both halves of operand 1 for the SI insns below.  */
1121 if (!arm_rhs_operand (lo_op1, SImode))
1122 lo_op1 = force_reg (SImode, lo_op1);
1124 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1125 || !arm_rhs_operand (hi_op1, SImode))
1126 hi_op1 = force_reg (SImode, hi_op1);
/* Low word: choose the cheapest flag-setting form — NEGS for
   0 - x, RSBS for const - x (CC_RSB mode), otherwise SUBS.  */
1129 if (lo_op1 == const0_rtx)
1131 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1132 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1134 else if (CONST_INT_P (lo_op1))
1136 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1137 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1138 GEN_INT (~UINTVAL (lo_op1))));
1142 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1143 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
/* The borrow out of the low word is LTU on the flags just set.  */
1146 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1148 if (hi_op1 == const0_rtx)
1149 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1151 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
/* Copy back if a different destination temporary was used.  */
1153 if (lo_result != lo_dest)
1154 emit_move_insn (lo_result, lo_dest);
1156 if (hi_result != hi_dest)
1157 emit_move_insn (hi_result, hi_dest);
;; *arm_subdi3: DImode subtract kept as one insn; the template emits
;; a SUBS/SBC pair over the two word halves (hence length 8) and
;; clobbers the flags ("conds" "clob").  Tying constraints ("0")
;; allow either input to share the output register.
;; NOTE(review): the insn condition line is missing from this extract.
1164 (define_insn "*arm_subdi3"
1165 [(set (match_operand:DI 0 "arm_general_register_operand" "=&r,&r,&r")
1166 (minus:DI (match_operand:DI 1 "arm_general_register_operand" "0,r,0")
1167 (match_operand:DI 2 "arm_general_register_operand" "r,0,0")))
1168 (clobber (reg:CC CC_REGNUM))]
1170 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1171 [(set_attr "conds" "clob")
1172 (set_attr "length" "8")
1173 (set_attr "type" "multiple")]
;; subsi3: SImode subtraction.  A constant minuend is either forced
;; into a register (when early splitting is undesirable) or expanded
;; into an insn sequence by arm_split_constant; Thumb-1 always needs
;; the constant in a register.
;; NOTE(review): the expander condition and some brace/DONE lines are
;; missing from this extract.
1176 (define_expand "subsi3"
1177 [(set (match_operand:SI 0 "s_register_operand")
1178 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1179 (match_operand:SI 2 "s_register_operand")))]
1182 if (CONST_INT_P (operands[1]))
1186 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1187 operands[1] = force_reg (SImode, operands[1]);
1190 arm_split_constant (MINUS, SImode, NULL_RTX,
1191 INTVAL (operands[1]), operands[0],
1193 optimize && can_create_pseudo_p ());
1197 else /* TARGET_THUMB1 */
1198 operands[1] = force_reg (SImode, operands[1]);
1203 ; ??? Check Thumb-2 split length
1204 (define_insn_and_split "*arm_subsi3_insn"
1205 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1206 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1207 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1219 "&& (CONST_INT_P (operands[1])
1220 && !const_ok_for_arm (INTVAL (operands[1])))"
1221 [(clobber (const_int 0))]
1223 arm_split_constant (MINUS, SImode, curr_insn,
1224 INTVAL (operands[1]), operands[0], operands[2], 0);
1227 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1228 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1229 (set_attr "predicable" "yes")
1230 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1231 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1235 [(match_scratch:SI 3 "r")
1236 (set (match_operand:SI 0 "arm_general_register_operand" "")
1237 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1238 (match_operand:SI 2 "arm_general_register_operand" "")))]
1240 && !const_ok_for_arm (INTVAL (operands[1]))
1241 && const_ok_for_arm (~INTVAL (operands[1]))"
1242 [(set (match_dup 3) (match_dup 1))
1243 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; subsi3_compare0: flag-setting subtract in CC_NOOV mode (valid only
;; for comparisons that ignore overflow).  The visible final
;; alternative uses RSBS when the minuend is the immediate ("I").
;; NOTE(review): the compare rtx, insn condition and the first two
;; template alternatives are missing from this extract.
1247 (define_insn "subsi3_compare0"
1248 [(set (reg:CC_NOOV CC_REGNUM)
1250 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1251 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1253 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1254 (minus:SI (match_dup 1) (match_dup 2)))]
1259 rsbs%?\\t%0, %2, %1"
1260 [(set_attr "conds" "set")
1261 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;; subsi3_compare: as subsi3_compare0 but sets the flags in full CC
;; mode, so all condition codes (including overflow-based ones) are
;; valid afterwards.  Final alternative is RSBS with the immediate
;; minuend.
;; NOTE(review): the insn condition and first two template
;; alternatives are missing from this extract.
1264 (define_insn "subsi3_compare"
1265 [(set (reg:CC CC_REGNUM)
1266 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1267 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1268 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1269 (minus:SI (match_dup 1) (match_dup 2)))]
1274 rsbs%?\\t%0, %2, %1"
1275 [(set_attr "conds" "set")
1276 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1279 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1280 ;; rather than (0 cmp reg). This gives the same results for unsigned
1281 ;; and equality compares which is what we mostly need here.
;; rsb_imm_compare: reverse subtract from an immediate that also sets
;; the flags in CC_RSB mode.  Per the comment above, the comparison
;; is kept canonical as (~reg cmp ~imm); the insn condition enforces
;; that operand 3 is exactly ~operand 1.
;; NOTE(review): the second minus operand and the assembler template
;; line are missing from this extract.
1282 (define_insn "rsb_imm_compare"
1283 [(set (reg:CC_RSB CC_REGNUM)
1284 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1285 (match_operand 3 "const_int_operand" "")))
1286 (set (match_operand:SI 0 "s_register_operand" "=r")
1287 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1289 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1291 [(set_attr "conds" "set")
1292 (set_attr "type" "alus_imm")]
1295 (define_expand "subsf3"
1296 [(set (match_operand:SF 0 "s_register_operand")
1297 (minus:SF (match_operand:SF 1 "s_register_operand")
1298 (match_operand:SF 2 "s_register_operand")))]
1299 "TARGET_32BIT && TARGET_HARD_FLOAT"
1303 (define_expand "subdf3"
1304 [(set (match_operand:DF 0 "s_register_operand")
1305 (minus:DF (match_operand:DF 1 "s_register_operand")
1306 (match_operand:DF 2 "s_register_operand")))]
1307 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1312 ;; Multiplication insns
;; mulhi3: HImode multiply, available with the DSP multiply extension.
;; Expanded by performing the 16x16->32 multiply (mulhisi3, i.e.
;; SMULBB) into an SI temporary and copying out its low 16 bits.
;; NOTE(review): the brace/DONE lines of the preparation statement
;; are missing from this extract.
1314 (define_expand "mulhi3"
1315 [(set (match_operand:HI 0 "s_register_operand")
1316 (mult:HI (match_operand:HI 1 "s_register_operand")
1317 (match_operand:HI 2 "s_register_operand")))]
1318 "TARGET_DSP_MULTIPLY"
1321 rtx result = gen_reg_rtx (SImode);
1322 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1323 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1328 (define_expand "mulsi3"
1329 [(set (match_operand:SI 0 "s_register_operand")
1330 (mult:SI (match_operand:SI 2 "s_register_operand")
1331 (match_operand:SI 1 "s_register_operand")))]
1336 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1338 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1339 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1340 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1342 "mul%?\\t%0, %2, %1"
1343 [(set_attr "type" "mul")
1344 (set_attr "predicable" "yes")
1345 (set_attr "arch" "t2,v6,nov6,nov6")
1346 (set_attr "length" "4")
1347 (set_attr "predicable_short_it" "yes,no,*,*")]
1350 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
1351 ;; reusing the same register.
1354 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1356 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1357 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1358 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1360 "mla%?\\t%0, %3, %2, %1"
1361 [(set_attr "type" "mla")
1362 (set_attr "predicable" "yes")
1363 (set_attr "arch" "v6,nov6,nov6,nov6")]
1367 [(set (match_operand:SI 0 "s_register_operand" "=r")
1369 (match_operand:SI 1 "s_register_operand" "r")
1370 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1371 (match_operand:SI 2 "s_register_operand" "r"))))]
1372 "TARGET_32BIT && arm_arch_thumb2"
1373 "mls%?\\t%0, %3, %2, %1"
1374 [(set_attr "type" "mla")
1375 (set_attr "predicable" "yes")]
1378 (define_insn "*mulsi3_compare0"
1379 [(set (reg:CC_NOOV CC_REGNUM)
1380 (compare:CC_NOOV (mult:SI
1381 (match_operand:SI 2 "s_register_operand" "r,r")
1382 (match_operand:SI 1 "s_register_operand" "%0,r"))
1384 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1385 (mult:SI (match_dup 2) (match_dup 1)))]
1386 "TARGET_ARM && !arm_arch6"
1387 "muls%?\\t%0, %2, %1"
1388 [(set_attr "conds" "set")
1389 (set_attr "type" "muls")]
1392 (define_insn "*mulsi3_compare0_v6"
1393 [(set (reg:CC_NOOV CC_REGNUM)
1394 (compare:CC_NOOV (mult:SI
1395 (match_operand:SI 2 "s_register_operand" "r")
1396 (match_operand:SI 1 "s_register_operand" "r"))
1398 (set (match_operand:SI 0 "s_register_operand" "=r")
1399 (mult:SI (match_dup 2) (match_dup 1)))]
1400 "TARGET_ARM && arm_arch6 && optimize_size"
1401 "muls%?\\t%0, %2, %1"
1402 [(set_attr "conds" "set")
1403 (set_attr "type" "muls")]
1406 (define_insn "*mulsi_compare0_scratch"
1407 [(set (reg:CC_NOOV CC_REGNUM)
1408 (compare:CC_NOOV (mult:SI
1409 (match_operand:SI 2 "s_register_operand" "r,r")
1410 (match_operand:SI 1 "s_register_operand" "%0,r"))
1412 (clobber (match_scratch:SI 0 "=&r,&r"))]
1413 "TARGET_ARM && !arm_arch6"
1414 "muls%?\\t%0, %2, %1"
1415 [(set_attr "conds" "set")
1416 (set_attr "type" "muls")]
1419 (define_insn "*mulsi_compare0_scratch_v6"
1420 [(set (reg:CC_NOOV CC_REGNUM)
1421 (compare:CC_NOOV (mult:SI
1422 (match_operand:SI 2 "s_register_operand" "r")
1423 (match_operand:SI 1 "s_register_operand" "r"))
1425 (clobber (match_scratch:SI 0 "=r"))]
1426 "TARGET_ARM && arm_arch6 && optimize_size"
1427 "muls%?\\t%0, %2, %1"
1428 [(set_attr "conds" "set")
1429 (set_attr "type" "muls")]
1432 (define_insn "*mulsi3addsi_compare0"
1433 [(set (reg:CC_NOOV CC_REGNUM)
1436 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1437 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1438 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1440 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1441 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1443 "TARGET_ARM && arm_arch6"
1444 "mlas%?\\t%0, %2, %1, %3"
1445 [(set_attr "conds" "set")
1446 (set_attr "type" "mlas")]
1449 (define_insn "*mulsi3addsi_compare0_v6"
1450 [(set (reg:CC_NOOV CC_REGNUM)
1453 (match_operand:SI 2 "s_register_operand" "r")
1454 (match_operand:SI 1 "s_register_operand" "r"))
1455 (match_operand:SI 3 "s_register_operand" "r"))
1457 (set (match_operand:SI 0 "s_register_operand" "=r")
1458 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1460 "TARGET_ARM && arm_arch6 && optimize_size"
1461 "mlas%?\\t%0, %2, %1, %3"
1462 [(set_attr "conds" "set")
1463 (set_attr "type" "mlas")]
1466 (define_insn "*mulsi3addsi_compare0_scratch"
1467 [(set (reg:CC_NOOV CC_REGNUM)
1470 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1471 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1472 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1474 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1475 "TARGET_ARM && !arm_arch6"
1476 "mlas%?\\t%0, %2, %1, %3"
1477 [(set_attr "conds" "set")
1478 (set_attr "type" "mlas")]
1481 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1482 [(set (reg:CC_NOOV CC_REGNUM)
1485 (match_operand:SI 2 "s_register_operand" "r")
1486 (match_operand:SI 1 "s_register_operand" "r"))
1487 (match_operand:SI 3 "s_register_operand" "r"))
1489 (clobber (match_scratch:SI 0 "=r"))]
1490 "TARGET_ARM && arm_arch6 && optimize_size"
1491 "mlas%?\\t%0, %2, %1, %3"
1492 [(set_attr "conds" "set")
1493 (set_attr "type" "mlas")]
1496 ;; 32x32->64 widening multiply.
1497 ;; The only difference between the v3-5 and v6+ versions is the requirement
1498 ;; that the output does not overlap with either input.
1500 (define_expand "<Us>mulsidi3"
1501 [(set (match_operand:DI 0 "s_register_operand")
1503 (SE:DI (match_operand:SI 1 "s_register_operand"))
1504 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1507 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1508 gen_highpart (SImode, operands[0]),
1509 operands[1], operands[2]));
;; <US>mull: 32x32->64 widening multiply (UMULL/SMULL via the SE/US
;; iterators).  Operand 0 receives the low word and operand 1 the
;; high word (see the <Us>mulsidi3 expander above).  Per the comment
;; preceding these patterns, the pre-v6 alternative early-clobbers
;; both outputs so they cannot overlap the inputs.
;; NOTE(review): interior rtx lines and the insn condition are
;; missing from this extract.
1514 (define_insn "<US>mull"
1515 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1517 (match_operand:SI 2 "s_register_operand" "%r,r")
1518 (match_operand:SI 3 "s_register_operand" "r,r")))
1519 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1522 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1525 "<US>mull%?\\t%0, %1, %2, %3"
1526 [(set_attr "type" "umull")
1527 (set_attr "predicable" "yes")
1528 (set_attr "arch" "v6,nov6")]
1531 (define_expand "<Us>maddsidi4"
1532 [(set (match_operand:DI 0 "s_register_operand")
1535 (SE:DI (match_operand:SI 1 "s_register_operand"))
1536 (SE:DI (match_operand:SI 2 "s_register_operand")))
1537 (match_operand:DI 3 "s_register_operand")))]
1540 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1541 gen_lowpart (SImode, operands[3]),
1542 gen_highpart (SImode, operands[0]),
1543 gen_highpart (SImode, operands[3]),
1544 operands[1], operands[2]));
1549 (define_insn "<US>mlal"
1550 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1553 (match_operand:SI 4 "s_register_operand" "%r,r")
1554 (match_operand:SI 5 "s_register_operand" "r,r"))
1555 (match_operand:SI 1 "s_register_operand" "0,0")))
1556 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1561 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1562 (zero_extend:DI (match_dup 1)))
1564 (match_operand:SI 3 "s_register_operand" "2,2")))]
1566 "<US>mlal%?\\t%0, %2, %4, %5"
1567 [(set_attr "type" "umlal")
1568 (set_attr "predicable" "yes")
1569 (set_attr "arch" "v6,nov6")]
1572 (define_expand "<US>mulsi3_highpart"
1574 [(set (match_operand:SI 0 "s_register_operand")
1578 (SE:DI (match_operand:SI 1 "s_register_operand"))
1579 (SE:DI (match_operand:SI 2 "s_register_operand")))
1581 (clobber (match_scratch:SI 3 ""))])]
1586 (define_insn "*<US>mull_high"
1587 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1591 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1592 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1594 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1596 "<US>mull%?\\t%3, %0, %2, %1"
1597 [(set_attr "type" "umull")
1598 (set_attr "predicable" "yes")
1599 (set_attr "arch" "v6,nov6,nov6")]
;; mulhisi3: signed 16x16->32 multiply of the bottom halfwords
;; (SMULBB).  Requires the DSP multiply extension.
;; NOTE(review): the second sign_extend line is missing from this
;; extract.
1602 (define_insn "mulhisi3"
1603 [(set (match_operand:SI 0 "s_register_operand" "=r")
1604 (mult:SI (sign_extend:SI
1605 (match_operand:HI 1 "s_register_operand" "%r"))
1607 (match_operand:HI 2 "s_register_operand" "r"))))]
1608 "TARGET_DSP_MULTIPLY"
1609 "smulbb%?\\t%0, %1, %2"
1610 [(set_attr "type" "smulxy")
1611 (set_attr "predicable" "yes")]
1614 (define_insn "*mulhisi3tb"
1615 [(set (match_operand:SI 0 "s_register_operand" "=r")
1616 (mult:SI (ashiftrt:SI
1617 (match_operand:SI 1 "s_register_operand" "r")
1620 (match_operand:HI 2 "s_register_operand" "r"))))]
1621 "TARGET_DSP_MULTIPLY"
1622 "smultb%?\\t%0, %1, %2"
1623 [(set_attr "type" "smulxy")
1624 (set_attr "predicable" "yes")]
1627 (define_insn "*mulhisi3bt"
1628 [(set (match_operand:SI 0 "s_register_operand" "=r")
1629 (mult:SI (sign_extend:SI
1630 (match_operand:HI 1 "s_register_operand" "r"))
1632 (match_operand:SI 2 "s_register_operand" "r")
1634 "TARGET_DSP_MULTIPLY"
1635 "smulbt%?\\t%0, %1, %2"
1636 [(set_attr "type" "smulxy")
1637 (set_attr "predicable" "yes")]
1640 (define_insn "*mulhisi3tt"
1641 [(set (match_operand:SI 0 "s_register_operand" "=r")
1642 (mult:SI (ashiftrt:SI
1643 (match_operand:SI 1 "s_register_operand" "r")
1646 (match_operand:SI 2 "s_register_operand" "r")
1648 "TARGET_DSP_MULTIPLY"
1649 "smultt%?\\t%0, %1, %2"
1650 [(set_attr "type" "smulxy")
1651 (set_attr "predicable" "yes")]
1654 (define_insn "maddhisi4"
1655 [(set (match_operand:SI 0 "s_register_operand" "=r")
1656 (plus:SI (mult:SI (sign_extend:SI
1657 (match_operand:HI 1 "s_register_operand" "r"))
1659 (match_operand:HI 2 "s_register_operand" "r")))
1660 (match_operand:SI 3 "s_register_operand" "r")))]
1661 "TARGET_DSP_MULTIPLY"
1662 "smlabb%?\\t%0, %1, %2, %3"
1663 [(set_attr "type" "smlaxy")
1664 (set_attr "predicable" "yes")]
1667 ;; Note: there is no maddhisi4ibt because this one is canonical form
1668 (define_insn "*maddhisi4tb"
1669 [(set (match_operand:SI 0 "s_register_operand" "=r")
1670 (plus:SI (mult:SI (ashiftrt:SI
1671 (match_operand:SI 1 "s_register_operand" "r")
1674 (match_operand:HI 2 "s_register_operand" "r")))
1675 (match_operand:SI 3 "s_register_operand" "r")))]
1676 "TARGET_DSP_MULTIPLY"
1677 "smlatb%?\\t%0, %1, %2, %3"
1678 [(set_attr "type" "smlaxy")
1679 (set_attr "predicable" "yes")]
1682 (define_insn "*maddhisi4tt"
1683 [(set (match_operand:SI 0 "s_register_operand" "=r")
1684 (plus:SI (mult:SI (ashiftrt:SI
1685 (match_operand:SI 1 "s_register_operand" "r")
1688 (match_operand:SI 2 "s_register_operand" "r")
1690 (match_operand:SI 3 "s_register_operand" "r")))]
1691 "TARGET_DSP_MULTIPLY"
1692 "smlatt%?\\t%0, %1, %2, %3"
1693 [(set_attr "type" "smlaxy")
1694 (set_attr "predicable" "yes")]
;; maddhidi4: signed 16x16->32 multiply accumulated into a 64-bit
;; value (SMLALBB).  Operand 3 is tied to the output (constraint "0"),
;; so the accumulator is updated in place; %Q0/%R0 print the low and
;; high words.
;; NOTE(review): interior rtx lines are missing from this extract.
1697 (define_insn "maddhidi4"
1698 [(set (match_operand:DI 0 "s_register_operand" "=r")
1700 (mult:DI (sign_extend:DI
1701 (match_operand:HI 1 "s_register_operand" "r"))
1703 (match_operand:HI 2 "s_register_operand" "r")))
1704 (match_operand:DI 3 "s_register_operand" "0")))]
1705 "TARGET_DSP_MULTIPLY"
1706 "smlalbb%?\\t%Q0, %R0, %1, %2"
1707 [(set_attr "type" "smlalxy")
1708 (set_attr "predicable" "yes")])
1710 ;; Note: there is no maddhidi4ibt because this one is canonical form
1711 (define_insn "*maddhidi4tb"
1712 [(set (match_operand:DI 0 "s_register_operand" "=r")
1714 (mult:DI (sign_extend:DI
1716 (match_operand:SI 1 "s_register_operand" "r")
1719 (match_operand:HI 2 "s_register_operand" "r")))
1720 (match_operand:DI 3 "s_register_operand" "0")))]
1721 "TARGET_DSP_MULTIPLY"
1722 "smlaltb%?\\t%Q0, %R0, %1, %2"
1723 [(set_attr "type" "smlalxy")
1724 (set_attr "predicable" "yes")])
1726 (define_insn "*maddhidi4tt"
1727 [(set (match_operand:DI 0 "s_register_operand" "=r")
1729 (mult:DI (sign_extend:DI
1731 (match_operand:SI 1 "s_register_operand" "r")
1735 (match_operand:SI 2 "s_register_operand" "r")
1737 (match_operand:DI 3 "s_register_operand" "0")))]
1738 "TARGET_DSP_MULTIPLY"
1739 "smlaltt%?\\t%Q0, %R0, %1, %2"
1740 [(set_attr "type" "smlalxy")
1741 (set_attr "predicable" "yes")])
1743 (define_expand "mulsf3"
1744 [(set (match_operand:SF 0 "s_register_operand")
1745 (mult:SF (match_operand:SF 1 "s_register_operand")
1746 (match_operand:SF 2 "s_register_operand")))]
1747 "TARGET_32BIT && TARGET_HARD_FLOAT"
1751 (define_expand "muldf3"
1752 [(set (match_operand:DF 0 "s_register_operand")
1753 (mult:DF (match_operand:DF 1 "s_register_operand")
1754 (match_operand:DF 2 "s_register_operand")))]
1755 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1761 (define_expand "divsf3"
1762 [(set (match_operand:SF 0 "s_register_operand")
1763 (div:SF (match_operand:SF 1 "s_register_operand")
1764 (match_operand:SF 2 "s_register_operand")))]
1765 "TARGET_32BIT && TARGET_HARD_FLOAT"
1768 (define_expand "divdf3"
1769 [(set (match_operand:DF 0 "s_register_operand")
1770 (div:DF (match_operand:DF 1 "s_register_operand")
1771 (match_operand:DF 2 "s_register_operand")))]
1772 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1776 ; Expand logical operations. The mid-end expander does not split off memory
1777 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1778 ; So an explicit expander is needed to generate better code.
;; <LOGICAL:optab>di3: expand the DImode logical operations (the
;; LOGICAL iterator — and/ior/xor per the comment at the top of this
;; section) by applying the SImode operation to the low and high
;; words separately; see the comment above about getting better
;; LDRD/STRD code than the generic mid-end expansion.
;; NOTE(review): the expander condition and some closing lines are
;; missing from this extract.
1780 (define_expand "<LOGICAL:optab>di3"
1781 [(set (match_operand:DI 0 "s_register_operand")
1782 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1783 (match_operand:DI 2 "arm_<optab>di_operand")))]
1786 rtx low = simplify_gen_binary (<CODE>, SImode,
1787 gen_lowpart (SImode, operands[1]),
1788 gen_lowpart (SImode, operands[2]));
1789 rtx high = simplify_gen_binary (<CODE>, SImode,
1790 gen_highpart (SImode, operands[1]),
1791 gen_highpart_mode (SImode, DImode,
1794 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1795 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
;; one_cmpldi2: DImode bitwise NOT, expanded word-by-word just like
;; the DI logical expander above — simplify each half and emit two
;; SImode sets.
;; NOTE(review): the expander condition and some argument lines are
;; missing from this extract.
1800 (define_expand "one_cmpldi2"
1801 [(set (match_operand:DI 0 "s_register_operand")
1802 (not:DI (match_operand:DI 1 "s_register_operand")))]
1805 rtx low = simplify_gen_unary (NOT, SImode,
1806 gen_lowpart (SImode, operands[1]),
1808 rtx high = simplify_gen_unary (NOT, SImode,
1809 gen_highpart_mode (SImode, DImode,
1813 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1814 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1819 ;; Split DImode and, ior, xor operations. Simply perform the logical
1820 ;; operation on the upper and lower halves of the registers.
1821 ;; This is needed for atomic operations in arm_split_atomic_op.
1822 ;; Avoid splitting IWMMXT instructions.
;; Post-reload split of a DImode and/ior/xor into two SImode
;; operations on the word halves (see the section comment above);
;; skipped when the destination lives in an IWMMXT register.
;; The preparation code remaps operands 0/1/2 to the low words and
;; introduces 3/4/5 as the corresponding high words.
;; NOTE(review): the "(define_split" header line itself is not
;; visible in this extract.
1824 [(set (match_operand:DI 0 "s_register_operand" "")
1825 (match_operator:DI 6 "logical_binary_operator"
1826 [(match_operand:DI 1 "s_register_operand" "")
1827 (match_operand:DI 2 "s_register_operand" "")]))]
1828 "TARGET_32BIT && reload_completed
1829 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1830 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1831 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1834 operands[3] = gen_highpart (SImode, operands[0]);
1835 operands[0] = gen_lowpart (SImode, operands[0]);
1836 operands[4] = gen_highpart (SImode, operands[1]);
1837 operands[1] = gen_lowpart (SImode, operands[1]);
1838 operands[5] = gen_highpart (SImode, operands[2]);
1839 operands[2] = gen_lowpart (SImode, operands[2]);
1843 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1844 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
;; Split of DImode NOT into two SImode NOTs (needed for atomic ops,
;; per the comment above; split unconditionally since there is no
;; SIMD DImode NOT pattern).  Operands 0/1 become the low words,
;; 2/3 the high words.
;; NOTE(review): the "(define_split" header line and the split
;; condition are not visible in this extract.
1846 [(set (match_operand:DI 0 "s_register_operand")
1847 (not:DI (match_operand:DI 1 "s_register_operand")))]
1849 [(set (match_dup 0) (not:SI (match_dup 1)))
1850 (set (match_dup 2) (not:SI (match_dup 3)))]
1853 operands[2] = gen_highpart (SImode, operands[0]);
1854 operands[0] = gen_lowpart (SImode, operands[0]);
1855 operands[3] = gen_highpart (SImode, operands[1]);
1856 operands[1] = gen_lowpart (SImode, operands[1]);
;; andsi3: SImode bitwise AND.  Special-cases a constant mask:
;;  - x & 255 on v6+ becomes a QImode zero-extend;
;;  - otherwise the constant is forced to a register or split by
;;    arm_split_constant;
;;  - Thumb-1 additionally tries BIC with the inverted constant
;;    (when ~c fits a byte), extzv for a low mask (1<<i)-1, or a
;;    LSR/LSL pair for the complement of such a mask.
;; NOTE(review): many brace/else/DONE lines are missing from this
;; extract; visible code is preserved unchanged.
1860 (define_expand "andsi3"
1861 [(set (match_operand:SI 0 "s_register_operand")
1862 (and:SI (match_operand:SI 1 "s_register_operand")
1863 (match_operand:SI 2 "reg_or_int_operand")))]
1868 if (CONST_INT_P (operands[2]))
/* v6+: AND with 255 is just a byte zero-extend.  */
1870 if (INTVAL (operands[2]) == 255 && arm_arch6)
1872 operands[1] = convert_to_mode (QImode, operands[1], 1);
1873 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1877 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1878 operands[2] = force_reg (SImode, operands[2]);
1881 arm_split_constant (AND, SImode, NULL_RTX,
1882 INTVAL (operands[2]), operands[0],
1884 optimize && can_create_pseudo_p ());
1890 else /* TARGET_THUMB1 */
1892 if (!CONST_INT_P (operands[2]))
1894 rtx tmp = force_reg (SImode, operands[2]);
1895 if (rtx_equal_p (operands[0], operands[1]))
1899 operands[2] = operands[1];
/* Thumb-1 with a constant: BIC when the inverted mask fits a byte.  */
1907 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1909 operands[2] = force_reg (SImode,
1910 GEN_INT (~INTVAL (operands[2])));
1912 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
/* Try contiguous masks: (1<<i)-1 -> extzv; its complement -> LSR;LSL.  */
1917 for (i = 9; i <= 31; i++)
1919 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1921 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1925 else if ((HOST_WIDE_INT_1 << i) - 1
1926 == ~INTVAL (operands[2]))
1928 rtx shift = GEN_INT (i);
1929 rtx reg = gen_reg_rtx (SImode);
1931 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1932 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
/* Fallback: no special form matched — use a register operand.  */
1938 operands[2] = force_reg (SImode, operands[2]);
1944 ; ??? Check split length for Thumb-2
1945 (define_insn_and_split "*arm_andsi3_insn"
1946 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1947 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1948 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1953 bic%?\\t%0, %1, #%B2
1957 && CONST_INT_P (operands[2])
1958 && !(const_ok_for_arm (INTVAL (operands[2]))
1959 || const_ok_for_arm (~INTVAL (operands[2])))"
1960 [(clobber (const_int 0))]
1962 arm_split_constant (AND, SImode, curr_insn,
1963 INTVAL (operands[2]), operands[0], operands[1], 0);
1966 [(set_attr "length" "4,4,4,4,16")
1967 (set_attr "predicable" "yes")
1968 (set_attr "predicable_short_it" "no,yes,no,no,no")
1969 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1972 (define_insn "*andsi3_compare0"
1973 [(set (reg:CC_NOOV CC_REGNUM)
1975 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1976 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1978 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1979 (and:SI (match_dup 1) (match_dup 2)))]
1983 bics%?\\t%0, %1, #%B2
1984 ands%?\\t%0, %1, %2"
1985 [(set_attr "conds" "set")
1986 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1989 (define_insn "*andsi3_compare0_scratch"
1990 [(set (reg:CC_NOOV CC_REGNUM)
1992 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1993 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1995 (clobber (match_scratch:SI 2 "=X,r,X"))]
1999 bics%?\\t%2, %0, #%B1
2001 [(set_attr "conds" "set")
2002 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; *zeroextractsi_compare0_scratch: test a contiguous bit-field of
;; operand 0 against zero via TST.  The output routine builds the
;; immediate mask ((1 << width) - 1) << pos from the field width
;; (operand 1) and position (operand 2); the visible condition limits
;; width + low-bit parity to 8 and keeps the field inside the word,
;; so the mask is encodable as an immediate.
;; NOTE(review): the compare's second arm, the insn condition prefix
;; and the closing/return lines are missing from this extract.
2005 (define_insn "*zeroextractsi_compare0_scratch"
2006 [(set (reg:CC_NOOV CC_REGNUM)
2007 (compare:CC_NOOV (zero_extract:SI
2008 (match_operand:SI 0 "s_register_operand" "r")
2009 (match_operand 1 "const_int_operand" "n")
2010 (match_operand 2 "const_int_operand" "n"))
2013 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2014 && INTVAL (operands[1]) > 0
2015 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2016 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2018 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2019 << INTVAL (operands[2]));
2020 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2023 [(set_attr "conds" "set")
2024 (set_attr "predicable" "yes")
2025 (set_attr "type" "logics_imm")]
2028 (define_insn_and_split "*ne_zeroextractsi"
2029 [(set (match_operand:SI 0 "s_register_operand" "=r")
2030 (ne:SI (zero_extract:SI
2031 (match_operand:SI 1 "s_register_operand" "r")
2032 (match_operand:SI 2 "const_int_operand" "n")
2033 (match_operand:SI 3 "const_int_operand" "n"))
2035 (clobber (reg:CC CC_REGNUM))]
2037 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2038 && INTVAL (operands[2]) > 0
2039 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2040 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2043 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2044 && INTVAL (operands[2]) > 0
2045 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2046 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2047 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2048 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2050 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2052 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2053 (match_dup 0) (const_int 1)))]
2055 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2056 << INTVAL (operands[3]));
2058 [(set_attr "conds" "clob")
2059 (set (attr "length")
2060 (if_then_else (eq_attr "is_thumb" "yes")
2063 (set_attr "type" "multiple")]
2066 (define_insn_and_split "*ne_zeroextractsi_shifted"
2067 [(set (match_operand:SI 0 "s_register_operand" "=r")
2068 (ne:SI (zero_extract:SI
2069 (match_operand:SI 1 "s_register_operand" "r")
2070 (match_operand:SI 2 "const_int_operand" "n")
2073 (clobber (reg:CC CC_REGNUM))]
2077 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2078 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2080 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2082 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2083 (match_dup 0) (const_int 1)))]
2085 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2087 [(set_attr "conds" "clob")
2088 (set_attr "length" "8")
2089 (set_attr "type" "multiple")]
2092 (define_insn_and_split "*ite_ne_zeroextractsi"
2093 [(set (match_operand:SI 0 "s_register_operand" "=r")
2094 (if_then_else:SI (ne (zero_extract:SI
2095 (match_operand:SI 1 "s_register_operand" "r")
2096 (match_operand:SI 2 "const_int_operand" "n")
2097 (match_operand:SI 3 "const_int_operand" "n"))
2099 (match_operand:SI 4 "arm_not_operand" "rIK")
2101 (clobber (reg:CC CC_REGNUM))]
2103 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2104 && INTVAL (operands[2]) > 0
2105 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2106 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2107 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2110 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2111 && INTVAL (operands[2]) > 0
2112 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2113 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2114 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2115 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2116 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2118 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2120 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2121 (match_dup 0) (match_dup 4)))]
2123 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2124 << INTVAL (operands[3]));
2126 [(set_attr "conds" "clob")
2127 (set_attr "length" "8")
2128 (set_attr "type" "multiple")]
2131 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2132 [(set (match_operand:SI 0 "s_register_operand" "=r")
2133 (if_then_else:SI (ne (zero_extract:SI
2134 (match_operand:SI 1 "s_register_operand" "r")
2135 (match_operand:SI 2 "const_int_operand" "n")
2138 (match_operand:SI 3 "arm_not_operand" "rIK")
2140 (clobber (reg:CC CC_REGNUM))]
2141 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2143 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2144 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2145 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2147 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2149 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2150 (match_dup 0) (match_dup 3)))]
2152 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2154 [(set_attr "conds" "clob")
2155 (set_attr "length" "8")
2156 (set_attr "type" "multiple")]
2159 ;; ??? Use the Thumb-2 bitfield insert/extract instructions here.
;; Split a shiftable op combined with a zero_extract into two shifts through
;; the scratch register: shift the field up to bit 31, then logical-shift it
;; back down so the extracted bits land zero-extended at bit 0.
2161 [(set (match_operand:SI 0 "s_register_operand" "")
2162 (match_operator:SI 1 "shiftable_operator"
2163 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2164 (match_operand:SI 3 "const_int_operand" "")
2165 (match_operand:SI 4 "const_int_operand" ""))
2166 (match_operand:SI 5 "s_register_operand" "")]))
2167 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2169 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2172 [(lshiftrt:SI (match_dup 6) (match_dup 4))
;; Compute the up-shift (32 - width - pos) and the down-shift (32 - width).
2175 HOST_WIDE_INT temp = INTVAL (operands[3]);
2177 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2178 operands[4] = GEN_INT (32 - temp);
;; As the zero_extract split above, but for sign_extract: the second shift is
;; arithmetic (ashiftrt) so the field comes back sign-extended.
2183 [(set (match_operand:SI 0 "s_register_operand" "")
2184 (match_operator:SI 1 "shiftable_operator"
2185 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2186 (match_operand:SI 3 "const_int_operand" "")
2187 (match_operand:SI 4 "const_int_operand" ""))
2188 (match_operand:SI 5 "s_register_operand" "")]))
2189 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2191 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2194 [(ashiftrt:SI (match_dup 6) (match_dup 4))
;; Same shift-amount computation as the zero_extract variant.
2197 HOST_WIDE_INT temp = INTVAL (operands[3]);
2199 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2200 operands[4] = GEN_INT (32 - temp);
2204 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2205 ;;; represented by the bitfield, then this will produce incorrect results.
2206 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2207 ;;; which have a real bit-field insert instruction, the truncation happens
2208 ;;; in the bit-field insert instruction itself. Since arm does not have a
2209 ;;; bit-field insert instruction, we would have to emit code here to truncate
2210 ;;; the value before we insert. This loses some of the advantage of having
2211 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Strategy, in order:
;;  * Thumb-2 with unaligned access: byte-aligned 16/32-bit MEM inserts become
;;    unaligned str/strh.
;;  * Thumb-2 register destination: insv_zero (BFC) for an all-zero constant,
;;    a single ORR when all field bits are set and the shifted constant is a
;;    valid immediate, otherwise insv_t2 (BFI).
;;  * Generic ARM fallback: mask-and-or sequences, with special shortcuts for
;;    a known-constant source, a field at bit 0, and a field ending at bit 31
;;    (the shift/rotate tricks commented inline below).
;; See the ??? note above: operand 3 is not truncated to the field width here,
;; which the inline comments acknowledge as a known wart.
2213 (define_expand "insv"
2214 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2215 (match_operand 1 "general_operand")
2216 (match_operand 2 "general_operand"))
2217 (match_operand 3 "reg_or_int_operand"))]
2218 "TARGET_ARM || arm_arch_thumb2"
2221 int start_bit = INTVAL (operands[2]);
2222 int width = INTVAL (operands[1]);
2223 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2224 rtx target, subtarget;
2226 if (arm_arch_thumb2)
2228 if (unaligned_access && MEM_P (operands[0])
2229 && s_register_operand (operands[3], GET_MODE (operands[3]))
2230 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2234 if (BYTES_BIG_ENDIAN)
2235 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2240 base_addr = adjust_address (operands[0], SImode,
2241 start_bit / BITS_PER_UNIT);
2242 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2246 rtx tmp = gen_reg_rtx (HImode);
2248 base_addr = adjust_address (operands[0], HImode,
2249 start_bit / BITS_PER_UNIT);
2250 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2251 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2255 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2257 bool use_bfi = TRUE;
2259 if (CONST_INT_P (operands[3]))
2261 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2265 emit_insn (gen_insv_zero (operands[0], operands[1],
2270 /* See if the set can be done with a single orr instruction.  */
2271 if (val == mask && const_ok_for_arm (val << start_bit))
2277 if (!REG_P (operands[3]))
2278 operands[3] = force_reg (SImode, operands[3]);
2280 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2289 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2292 target = copy_rtx (operands[0]);
2293 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2294 subreg as the final target.  */
2295 if (GET_CODE (target) == SUBREG)
2297 subtarget = gen_reg_rtx (SImode);
2298 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2299 < GET_MODE_SIZE (SImode))
2300 target = SUBREG_REG (target);
2305 if (CONST_INT_P (operands[3]))
2307 /* Since we are inserting a known constant, we may be able to
2308 reduce the number of bits that we have to clear so that
2309 the mask becomes simple.  */
2310 /* ??? This code does not check to see if the new mask is actually
2311 simpler.  It may not be.  */
2312 rtx op1 = gen_reg_rtx (SImode);
2313 /* ??? Truncate operand3 to fit in the bitfield.  See comment before
2314 start of this pattern.  */
2315 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2316 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2318 emit_insn (gen_andsi3 (op1, operands[0],
2319 gen_int_mode (~mask2, SImode)));
2320 emit_insn (gen_iorsi3 (subtarget, op1,
2321 gen_int_mode (op3_value << start_bit, SImode)));
2323 else if (start_bit == 0
2324 && !(const_ok_for_arm (mask)
2325 || const_ok_for_arm (~mask)))
2327 /* A Trick, since we are setting the bottom bits in the word,
2328 we can shift operand[3] up, operand[0] down, OR them together
2329 and rotate the result back again.  This takes 3 insns, and
2330 the third might be mergeable into another op.  */
2331 /* The shift up copes with the possibility that operand[3] is
2332 wider than the bitfield.  */
2333 rtx op0 = gen_reg_rtx (SImode);
2334 rtx op1 = gen_reg_rtx (SImode);
2336 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2337 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2338 emit_insn (gen_iorsi3 (op1, op1, op0));
2339 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2341 else if ((width + start_bit == 32)
2342 && !(const_ok_for_arm (mask)
2343 || const_ok_for_arm (~mask)))
2345 /* Similar trick, but slightly less efficient.  */
2347 rtx op0 = gen_reg_rtx (SImode);
2348 rtx op1 = gen_reg_rtx (SImode);
2350 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2351 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2352 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2353 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2357 rtx op0 = gen_int_mode (mask, SImode);
2358 rtx op1 = gen_reg_rtx (SImode);
2359 rtx op2 = gen_reg_rtx (SImode);
2361 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2363 rtx tmp = gen_reg_rtx (SImode);
2365 emit_insn (gen_movsi (tmp, op0));
2369 /* Mask out any bits in operand[3] that are not needed.  */
2370 emit_insn (gen_andsi3 (op1, operands[3], op0))
2372 if (CONST_INT_P (op0)
2373 && (const_ok_for_arm (mask << start_bit)
2374 || const_ok_for_arm (~(mask << start_bit))))
2376 op0 = gen_int_mode (~(mask << start_bit), SImode);
2377 emit_insn (gen_andsi3 (op2, operands[0], op0));
2381 if (CONST_INT_P (op0))
2383 rtx tmp = gen_reg_rtx (SImode);
2385 emit_insn (gen_movsi (tmp, op0));
2390 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2392 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2396 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2398 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2401 if (subtarget != target)
2403 /* If TARGET is still a SUBREG, then it must be wider than a word,
2404 so we must be careful only to set the subword we were asked to.  */
2405 if (GET_CODE (target) == SUBREG)
2406 emit_move_insn (target, subtarget)
2408 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bit-field to zero; position and width are constant M-class
;; immediates.  Matches the Thumb-2 BFC instruction (single 4-byte insn).
2415 (define_insn "insv_zero"
2416 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2417 (match_operand:SI 1 "const_int_M_operand" "M")
2418 (match_operand:SI 2 "const_int_M_operand" "M"))
2422 [(set_attr "length" "4")
2423 (set_attr "predicable" "yes")
2424 (set_attr "type" "bfm")]
;; Bit-field insert from a register source: Thumb-2 BFI.  Operand 0 is
;; read-modify-write ("+r"); operands 1/2 are the constant width and position.
2427 (define_insn "insv_t2"
2428 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2429 (match_operand:SI 1 "const_int_M_operand" "M")
2430 (match_operand:SI 2 "const_int_M_operand" "M"))
2431 (match_operand:SI 3 "s_register_operand" "r"))]
2433 "bfi%?\t%0, %3, %2, %1"
2434 [(set_attr "length" "4")
2435 (set_attr "predicable" "yes")
2436 (set_attr "type" "bfm")]
;; AND with complemented operand: maps directly onto the BIC instruction
;; (op0 = op1 & ~op2).  Note operand numbering: %2 is the complemented input.
2439 (define_insn "andsi_notsi_si"
2440 [(set (match_operand:SI 0 "s_register_operand" "=r")
2441 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2442 (match_operand:SI 1 "s_register_operand" "r")))]
2444 "bic%?\\t%0, %1, %2"
2445 [(set_attr "predicable" "yes")
2446 (set_attr "type" "logic_reg")]
;; BIC with a shifted second operand: op0 = op1 & ~(op2 SHIFT op3).
;; The shift operator is printed via %S4; the type attribute distinguishes
;; immediate vs. register shift amounts for scheduling.
2449 (define_insn "andsi_not_shiftsi_si"
2450 [(set (match_operand:SI 0 "s_register_operand" "=r")
2451 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2452 [(match_operand:SI 2 "s_register_operand" "r")
2453 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2454 (match_operand:SI 1 "s_register_operand" "r")))]
2456 "bic%?\\t%0, %1, %2%S4"
2457 [(set_attr "predicable" "yes")
2458 (set_attr "shift" "2")
2459 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2460 (const_string "logic_shift_imm")
2461 (const_string "logic_shift_reg")))]
2464 ;; Shifted bics pattern used to set up CC status register and not reusing
2465 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
2466 ;; does not support shift by register.
;; Flag-setting BICS with shifted operand whose arithmetic result is only a
;; scratch (operand 4) -- only the condition codes are live afterwards.
;; Thumb-2 is restricted to constant shift amounts (no register-shift BICS).
2467 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2468 [(set (reg:CC_NOOV CC_REGNUM)
2470 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2471 [(match_operand:SI 1 "s_register_operand" "r")
2472 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2473 (match_operand:SI 3 "s_register_operand" "r"))
2475 (clobber (match_scratch:SI 4 "=r"))]
2476 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2477 "bics%?\\t%4, %3, %1%S0"
2478 [(set_attr "predicable" "yes")
2479 (set_attr "conds" "set")
2480 (set_attr "shift" "1")
2481 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2482 (const_string "logic_shift_imm")
2483 (const_string "logic_shift_reg")))]
2486 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2487 ;; getting reused later.
;; As the _no_reuse variant above, but the BICS result is written to a real
;; destination (operand 4) and reused later, so the arithmetic set is part of
;; the parallel instead of a clobbered scratch.
2488 (define_insn "andsi_not_shiftsi_si_scc"
2489 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2491 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2492 [(match_operand:SI 1 "s_register_operand" "r")
2493 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2494 (match_operand:SI 3 "s_register_operand" "r"))
2496 (set (match_operand:SI 4 "s_register_operand" "=r")
2497 (and:SI (not:SI (match_op_dup 0
2501 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2502 "bics%?\\t%4, %3, %1%S0"
2503 [(set_attr "predicable" "yes")
2504 (set_attr "conds" "set")
2505 (set_attr "shift" "1")
2506 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2507 (const_string "logic_shift_imm")
2508 (const_string "logic_shift_reg")))]
;; BICS (unshifted): computes op1 & ~op2 into operand 0 and sets the flags
;; from the result in one instruction.
2511 (define_insn "*andsi_notsi_si_compare0"
2512 [(set (reg:CC_NOOV CC_REGNUM)
2514 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2515 (match_operand:SI 1 "s_register_operand" "r"))
2517 (set (match_operand:SI 0 "s_register_operand" "=r")
2518 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2521 [(set_attr "conds" "set")
2522 (set_attr "type" "logics_shift_reg")]
;; As above, but only the flags are needed: the BICS destination is a
;; clobbered scratch register.
2525 (define_insn "*andsi_notsi_si_compare0_scratch"
2526 [(set (reg:CC_NOOV CC_REGNUM)
2528 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2529 (match_operand:SI 1 "s_register_operand" "r"))
2531 (clobber (match_scratch:SI 0 "=r"))]
2534 [(set_attr "conds" "set")
2535 (set_attr "type" "logics_shift_reg")]
;; SImode inclusive-or expander.  Constant operands that cannot be handled
;; directly are either forced to a register (when early splitting is
;; undesirable) or synthesized via arm_split_constant.  Thumb-1 has only
;; two-operand ORR, hence the operand-commuting fallback below.
2538 (define_expand "iorsi3"
2539 [(set (match_operand:SI 0 "s_register_operand")
2540 (ior:SI (match_operand:SI 1 "s_register_operand")
2541 (match_operand:SI 2 "reg_or_int_operand")))]
2544 if (CONST_INT_P (operands[2]))
2548 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2549 operands[2] = force_reg (SImode, operands[2]);
2552 arm_split_constant (IOR, SImode, NULL_RTX,
2553 INTVAL (operands[2]), operands[0],
2555 optimize && can_create_pseudo_p ());
2559 else /* TARGET_THUMB1 */
2561 rtx tmp = force_reg (SImode, operands[2]);
2562 if (rtx_equal_p (operands[0], operands[1]))
2566 operands[2] = operands[1];
;; ORR insn with five alternatives (immediate, Thumb-2 short form, Thumb-2
;; ORN of an inverted immediate, register, arbitrary constant).  Constants
;; valid for neither ORR nor ORN are split post-hoc via arm_split_constant
;; (up to 16 bytes of code, last alternative).
2574 (define_insn_and_split "*iorsi3_insn"
2575 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2576 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2577 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2582 orn%?\\t%0, %1, #%B2
2586 && CONST_INT_P (operands[2])
2587 && !(const_ok_for_arm (INTVAL (operands[2]))
2588 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2589 [(clobber (const_int 0))]
2591 arm_split_constant (IOR, SImode, curr_insn,
2592 INTVAL (operands[2]), operands[0], operands[1], 0);
2595 [(set_attr "length" "4,4,4,4,16")
2596 (set_attr "arch" "32,t2,t2,32,32")
2597 (set_attr "predicable" "yes")
2598 (set_attr "predicable_short_it" "no,yes,no,no,no")
2599 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Peephole: an IOR with a constant that is not a valid ARM immediate but
;; whose complement is -- load the constant into a scratch with MVN-style
;; synthesis, then use the register form of ORR.
2603 [(match_scratch:SI 3 "r")
2604 (set (match_operand:SI 0 "arm_general_register_operand" "")
2605 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2606 (match_operand:SI 2 "const_int_operand" "")))]
2608 && !const_ok_for_arm (INTVAL (operands[2]))
2609 && const_ok_for_arm (~INTVAL (operands[2]))"
2610 [(set (match_dup 3) (match_dup 2))
2611 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: inclusive-or that also sets the flags; middle alternative is the
;; 2-byte Thumb-2 short encoding (operand 1 tied to operand 0).
2615 (define_insn "*iorsi3_compare0"
2616 [(set (reg:CC_NOOV CC_REGNUM)
2618 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2619 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2621 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2622 (ior:SI (match_dup 1) (match_dup 2)))]
2624 "orrs%?\\t%0, %1, %2"
2625 [(set_attr "conds" "set")
2626 (set_attr "arch" "*,t2,*")
2627 (set_attr "length" "4,2,4")
2628 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; ORRS used purely for its flag result; the or'ed value itself goes to a
;; clobbered scratch register.
2631 (define_insn "*iorsi3_compare0_scratch"
2632 [(set (reg:CC_NOOV CC_REGNUM)
2634 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2635 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2637 (clobber (match_scratch:SI 0 "=r,l,r"))]
2639 "orrs%?\\t%0, %1, %2"
2640 [(set_attr "conds" "set")
2641 (set_attr "arch" "*,t2,*")
2642 (set_attr "length" "4,2,4")
2643 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; SImode exclusive-or expander; mirrors iorsi3 above: constants are either
;; kept in a register or synthesized with arm_split_constant, and Thumb-1
;; commutes operands for its two-operand EOR.
2646 (define_expand "xorsi3"
2647 [(set (match_operand:SI 0 "s_register_operand")
2648 (xor:SI (match_operand:SI 1 "s_register_operand")
2649 (match_operand:SI 2 "reg_or_int_operand")))]
2651 "if (CONST_INT_P (operands[2]))
2655 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2656 operands[2] = force_reg (SImode, operands[2]);
2659 arm_split_constant (XOR, SImode, NULL_RTX,
2660 INTVAL (operands[2]), operands[0],
2662 optimize && can_create_pseudo_p ());
2666 else /* TARGET_THUMB1 */
2668 rtx tmp = force_reg (SImode, operands[2]);
2669 if (rtx_equal_p (operands[0], operands[1]))
2673 operands[2] = operands[1];
;; EOR insn; constants that are not valid immediates are split late with
;; arm_split_constant (final, up-to-16-byte alternative).  Unlike IOR there
;; is no inverted-immediate form, so only const_ok_for_arm is checked.
2680 (define_insn_and_split "*arm_xorsi3"
2681 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2682 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2683 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2691 && CONST_INT_P (operands[2])
2692 && !const_ok_for_arm (INTVAL (operands[2]))"
2693 [(clobber (const_int 0))]
2695 arm_split_constant (XOR, SImode, curr_insn,
2696 INTVAL (operands[2]), operands[0], operands[1], 0);
2699 [(set_attr "length" "4,4,4,16")
2700 (set_attr "predicable" "yes")
2701 (set_attr "predicable_short_it" "no,yes,no,no")
2702 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; EORS: exclusive-or that also sets the flags from its result.
2705 (define_insn "*xorsi3_compare0"
2706 [(set (reg:CC_NOOV CC_REGNUM)
2707 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2708 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2710 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2711 (xor:SI (match_dup 1) (match_dup 2)))]
2713 "eors%?\\t%0, %1, %2"
2714 [(set_attr "conds" "set")
2715 (set_attr "type" "logics_imm,logics_reg")]
;; Flag-only exclusive-or compare (TEQ-style): no destination register is
;; written, only the condition codes.
2718 (define_insn "*xorsi3_compare0_scratch"
2719 [(set (reg:CC_NOOV CC_REGNUM)
2720 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2721 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2725 [(set_attr "conds" "set")
2726 (set_attr "type" "logics_imm,logics_reg")]
2729 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2730 ; (NOT D) we can sometimes merge the final NOT into one of the following insns.
;; Split (IOR (AND (NOT A) (NOT B)) C) into D = (IOR A B) & ~C followed by
;; NOT D (De Morgan), so the trailing NOT can often fuse with a later insn.
2734 [(set (match_operand:SI 0 "s_register_operand" "")
2735 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2736 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2737 (match_operand:SI 3 "arm_rhs_operand" "")))
2738 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2740 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2741 (not:SI (match_dup 3))))
2742 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (A | B) & ~C as a two-insn ORR + BIC sequence; split after reload.  The
;; prep statement folds the NOT into operand 3 when it is a constant, so the
;; split never creates (not (const_int)), swapping operands 4/5 accordingly.
2746 (define_insn_and_split "*andsi_iorsi3_notsi"
2747 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2748 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2749 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2750 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2752 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2753 "&& reload_completed"
2754 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2755 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2757 /* If operands[3] is a constant make sure to fold the NOT into it
2758 to avoid creating a NOT of a CONST_INT.  */
2759 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2760 if (CONST_INT_P (not_rtx))
2762 operands[4] = operands[0];
2763 operands[5] = not_rtx;
2767 operands[5] = operands[0];
2768 operands[4] = not_rtx;
2771 [(set_attr "length" "8")
2772 (set_attr "ce_count" "2")
2773 (set_attr "predicable" "yes")
2774 (set_attr "type" "multiple")]
2777 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2778 ; insns are available?
;; Splitter 1 of 4: logical op combining a zero_extract with a matching
;; logical op of an lshiftrt.  Requires both operators be the same code and
;; the extract width complement the shift (width == 32 - shift).  Rewrites
;; the extract as an ashift into the scratch, then an lshiftrt to realign.
2780 [(set (match_operand:SI 0 "s_register_operand" "")
2781 (match_operator:SI 1 "logical_binary_operator"
2782 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2783 (match_operand:SI 3 "const_int_operand" "")
2784 (match_operand:SI 4 "const_int_operand" ""))
2785 (match_operator:SI 9 "logical_binary_operator"
2786 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2787 (match_operand:SI 6 "const_int_operand" ""))
2788 (match_operand:SI 7 "s_register_operand" "")])]))
2789 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2791 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2792 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2795 [(ashift:SI (match_dup 2) (match_dup 4))
2799 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2802 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 2 of 4: same as the previous splitter but with the operand order
;; reversed (shifted sub-expression first, zero_extract second).
2806 [(set (match_operand:SI 0 "s_register_operand" "")
2807 (match_operator:SI 1 "logical_binary_operator"
2808 [(match_operator:SI 9 "logical_binary_operator"
2809 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2810 (match_operand:SI 6 "const_int_operand" ""))
2811 (match_operand:SI 7 "s_register_operand" "")])
2812 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2813 (match_operand:SI 3 "const_int_operand" "")
2814 (match_operand:SI 4 "const_int_operand" ""))]))
2815 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2817 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2818 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2821 [(ashift:SI (match_dup 2) (match_dup 4))
2825 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2828 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 3 of 4: signed counterpart of splitter 1 -- sign_extract paired
;; with an ashiftrt, so the realignment shift is arithmetic.
2832 [(set (match_operand:SI 0 "s_register_operand" "")
2833 (match_operator:SI 1 "logical_binary_operator"
2834 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2835 (match_operand:SI 3 "const_int_operand" "")
2836 (match_operand:SI 4 "const_int_operand" ""))
2837 (match_operator:SI 9 "logical_binary_operator"
2838 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2839 (match_operand:SI 6 "const_int_operand" ""))
2840 (match_operand:SI 7 "s_register_operand" "")])]))
2841 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2843 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2844 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2847 [(ashift:SI (match_dup 2) (match_dup 4))
2851 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2854 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Splitter 4 of 4: signed counterpart of splitter 2 -- reversed operand
;; order, sign_extract second, with arithmetic realignment shift.
2858 [(set (match_operand:SI 0 "s_register_operand" "")
2859 (match_operator:SI 1 "logical_binary_operator"
2860 [(match_operator:SI 9 "logical_binary_operator"
2861 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2862 (match_operand:SI 6 "const_int_operand" ""))
2863 (match_operand:SI 7 "s_register_operand" "")])
2864 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2865 (match_operand:SI 3 "const_int_operand" "")
2866 (match_operand:SI 4 "const_int_operand" ""))]))
2867 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2869 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2870 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2873 [(ashift:SI (match_dup 2) (match_dup 4))
2877 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2880 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2884 ;; Minimum and maximum insns
;; Signed max expander.  smax against 0 or -1 has single-insn forms (see
;; *smax_0 / *smax_m1 below) that need no flags, so emit a bare SET without
;; the CC clobber in those cases.
2886 (define_expand "smaxsi3"
2888 (set (match_operand:SI 0 "s_register_operand")
2889 (smax:SI (match_operand:SI 1 "s_register_operand")
2890 (match_operand:SI 2 "arm_rhs_operand")))
2891 (clobber (reg:CC CC_REGNUM))])]
2894 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2896 /* No need for a clobber of the condition code register here.  */
2897 emit_insn (gen_rtx_SET (operands[0],
2898 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0) in one insn: BIC with x's own sign bits (x asr 31 is all-ones
;; exactly when x is negative, clearing x to 0).
2904 (define_insn "*smax_0"
2905 [(set (match_operand:SI 0 "s_register_operand" "=r")
2906 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2909 "bic%?\\t%0, %1, %1, asr #31"
2910 [(set_attr "predicable" "yes")
2911 (set_attr "type" "logic_shift_reg")]
;; max(x, -1) in one insn: ORR with x's sign bits -- a negative x becomes -1,
;; a non-negative x is unchanged.
2914 (define_insn "*smax_m1"
2915 [(set (match_operand:SI 0 "s_register_operand" "=r")
2916 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2919 "orr%?\\t%0, %1, %1, asr #31"
2920 [(set_attr "predicable" "yes")
2921 (set_attr "type" "logic_shift_reg")]
;; General signed max: split into a compare plus conditional move(s).
;; 8 bytes when operand 1 is tied to the destination (one conditional move),
;; 12 bytes otherwise (both arms conditionally moved).
2924 (define_insn_and_split "*arm_smax_insn"
2925 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2926 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2927 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2928 (clobber (reg:CC CC_REGNUM))]
2931 ; cmp\\t%1, %2\;movlt\\t%0, %2
2932 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2934 [(set (reg:CC CC_REGNUM)
2935 (compare:CC (match_dup 1) (match_dup 2)))
2937 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2941 [(set_attr "conds" "clob")
2942 (set_attr "length" "8,12")
2943 (set_attr "type" "multiple")]
;; Signed min expander; min(x, 0) has a flag-free single-insn form
;; (*smin_0 below), so skip the CC clobber for that case.
2946 (define_expand "sminsi3"
2948 (set (match_operand:SI 0 "s_register_operand")
2949 (smin:SI (match_operand:SI 1 "s_register_operand")
2950 (match_operand:SI 2 "arm_rhs_operand")))
2951 (clobber (reg:CC CC_REGNUM))])]
2954 if (operands[2] == const0_rtx)
2956 /* No need for a clobber of the condition code register here.  */
2957 emit_insn (gen_rtx_SET (operands[0],
2958 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0) in one insn: AND with x's sign bits -- non-negative x yields 0,
;; negative x is unchanged.
2964 (define_insn "*smin_0"
2965 [(set (match_operand:SI 0 "s_register_operand" "=r")
2966 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2969 "and%?\\t%0, %1, %1, asr #31"
2970 [(set_attr "predicable" "yes")
2971 (set_attr "type" "logic_shift_reg")]
;; General signed min: compare plus conditional move(s), mirroring
;; *arm_smax_insn with the condition sense inverted (lt keeps operand 1).
2974 (define_insn_and_split "*arm_smin_insn"
2975 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2976 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2977 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2978 (clobber (reg:CC CC_REGNUM))]
2981 ; cmp\\t%1, %2\;movge\\t%0, %2
2982 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2984 [(set (reg:CC CC_REGNUM)
2985 (compare:CC (match_dup 1) (match_dup 2)))
2987 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2991 [(set_attr "conds" "clob")
2992 (set_attr "length" "8,12")
2993 (set_attr "type" "multiple,multiple")]
;; Unsigned max expander: no special cases, just the parallel with the CC
;; clobber for the insn pattern below.
2996 (define_expand "umaxsi3"
2998 (set (match_operand:SI 0 "s_register_operand")
2999 (umax:SI (match_operand:SI 1 "s_register_operand")
3000 (match_operand:SI 2 "arm_rhs_operand")))
3001 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned max: compare plus cc/cs conditional moves; 8 bytes when either
;; input is tied to the destination, 12 otherwise.
;; NOTE(review): "type" "store_4" looks like a copy-paste from a store
;; pattern -- this emits cmp/mov, so "multiple" seems intended; confirm.
3006 (define_insn_and_split "*arm_umaxsi3"
3007 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3008 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3009 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3010 (clobber (reg:CC CC_REGNUM))]
3013 ; cmp\\t%1, %2\;movcc\\t%0, %2
3014 ; cmp\\t%1, %2\;movcs\\t%0, %1
3015 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3017 [(set (reg:CC CC_REGNUM)
3018 (compare:CC (match_dup 1) (match_dup 2)))
3020 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3024 [(set_attr "conds" "clob")
3025 (set_attr "length" "8,8,12")
3026 (set_attr "type" "store_4")]
;; Unsigned min expander; parallel with CC clobber, matched by
;; *arm_uminsi3 below.
3029 (define_expand "uminsi3"
3031 (set (match_operand:SI 0 "s_register_operand")
3032 (umin:SI (match_operand:SI 1 "s_register_operand")
3033 (match_operand:SI 2 "arm_rhs_operand")))
3034 (clobber (reg:CC CC_REGNUM))])]
;; Unsigned min: mirror of *arm_umaxsi3 with the carry condition inverted
;; (ltu keeps operand 1).
;; NOTE(review): same "type" "store_4" oddity as *arm_umaxsi3 -- this is a
;; cmp/mov sequence, not a store; confirm "multiple" was intended.
3039 (define_insn_and_split "*arm_uminsi3"
3040 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3041 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3042 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3043 (clobber (reg:CC CC_REGNUM))]
3046 ; cmp\\t%1, %2\;movcs\\t%0, %2
3047 ; cmp\\t%1, %2\;movcc\\t%0, %1
3048 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3050 [(set (reg:CC CC_REGNUM)
3051 (compare:CC (match_dup 1) (match_dup 2)))
3053 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3057 [(set_attr "conds" "clob")
3058 (set_attr "length" "8,8,12")
3059 (set_attr "type" "store_4")]
;; Store min/max directly to memory: cmp then two conditional stores of the
;; winning register (IT block on Thumb-2).  Size optimization only, and
;; disabled under -mrestrict-it.
3062 (define_insn "*store_minmaxsi"
3063 [(set (match_operand:SI 0 "memory_operand" "=m")
3064 (match_operator:SI 3 "minmax_operator"
3065 [(match_operand:SI 1 "s_register_operand" "r")
3066 (match_operand:SI 2 "s_register_operand" "r")]))
3067 (clobber (reg:CC CC_REGNUM))]
3068 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3070 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3071 operands[1], operands[2]);
3072 output_asm_insn (\"cmp\\t%1, %2\", operands);
3074 output_asm_insn (\"ite\t%d3\", operands);
3075 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3076 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3079 [(set_attr "conds" "clob")
3080 (set (attr "length")
3081 (if_then_else (eq_attr "is_thumb" "yes")
3084 (set_attr "type" "store_4")]
3087 ; Reject the frame pointer in operand[1], since reloading this after
3088 ; it has been eliminated can cause carnage.
;; Shiftable op applied to a min/max result: cmp then two predicated forms of
;; the arithmetic op (%i4), one per comparison outcome.  The output code can
;; drop one arm when the op is PLUS/IOR/XOR with a zero operand (identity).
;; Operand 1 must not be an eliminable (frame) register -- reloading it after
;; elimination causes carnage, per the comment above.
3089 (define_insn "*minmax_arithsi"
3090 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3091 (match_operator:SI 4 "shiftable_operator"
3092 [(match_operator:SI 5 "minmax_operator"
3093 [(match_operand:SI 2 "s_register_operand" "r,r")
3094 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3095 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3096 (clobber (reg:CC CC_REGNUM))]
3097 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3100 enum rtx_code code = GET_CODE (operands[4]);
3103 if (which_alternative != 0 || operands[3] != const0_rtx
3104 || (code != PLUS && code != IOR && code != XOR))
3109 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3110 operands[2], operands[3]);
3111 output_asm_insn (\"cmp\\t%2, %3\", operands);
3115 output_asm_insn (\"ite\\t%d5\", operands);
3117 output_asm_insn (\"it\\t%d5\", operands);
3119 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3121 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3124 [(set_attr "conds" "clob")
3125 (set (attr "length")
3126 (if_then_else (eq_attr "is_thumb" "yes")
3129 (set_attr "type" "multiple")]
3132 ; Reject the frame pointer in operand[1], since reloading this after
3133 ; it has been eliminated can cause carnage.
;; Non-canonical form: operand 1 minus a min/max.  Split after reload into a
;; compare followed by two cond_exec subtractions, one per outcome; the prep
;; statement builds the condition (operand 4), its reverse (operand 5, with
;; FP-aware reversal), and the second subtrahend (operand 6, folded to a
;; plus_constant when operand 3 is a constant).  Frame registers are rejected
;; in operand 1 for the same reload reason as *minmax_arithsi.
3134 (define_insn_and_split "*minmax_arithsi_non_canon"
3135 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3137 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3138 (match_operator:SI 4 "minmax_operator"
3139 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3140 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3141 (clobber (reg:CC CC_REGNUM))]
3142 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3143 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3145 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3146 [(set (reg:CC CC_REGNUM)
3147 (compare:CC (match_dup 2) (match_dup 3)))
3149 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3151 (minus:SI (match_dup 1)
3153 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3157 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3158 operands[2], operands[3]);
3159 enum rtx_code rc = minmax_code (operands[4]);
3160 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3161 operands[2], operands[3]);
3163 if (mode == CCFPmode || mode == CCFPEmode)
3164 rc = reverse_condition_maybe_unordered (rc);
3166 rc = reverse_condition (rc);
3167 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3168 if (CONST_INT_P (operands[3]))
3169 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3171 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3173 [(set_attr "conds" "clob")
3174 (set (attr "length")
3175 (if_then_else (eq_attr "is_thumb" "yes")
3178 (set_attr "type" "multiple")]
;; Saturation helpers: SAT iterates over smin/smax; SATrev gives the opposite
;; code, and SATlo/SAThi select which operand holds the low/high bound for
;; each orientation (used to index operands[] in the patterns below).
3181 (define_code_iterator SAT [smin smax])
3182 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3183 (define_code_attr SATlo [(smin "1") (smax "2")])
3184 (define_code_attr SAThi [(smin "2") (smax "1")])
;; Recognize a clamp expressed as nested smin/smax of constants and emit a
;; single SSAT or USAT (ARMv6+).  arm_sat_operator_match validates the bound
;; pair and returns the bit-width mask and the signedness; gcc_unreachable if
;; it fails at output time after matching in the condition.
3186 (define_insn "*satsi_<SAT:code>"
3187 [(set (match_operand:SI 0 "s_register_operand" "=r")
3188 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3189 (match_operand:SI 1 "const_int_operand" "i"))
3190 (match_operand:SI 2 "const_int_operand" "i")))]
3191 "TARGET_32BIT && arm_arch6
3192 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3196 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3197 &mask, &signed_sat))
3200 operands[1] = GEN_INT (mask);
3202 return "ssat%?\t%0, %1, %3";
3204 return "usat%?\t%0, %1, %3";
3206 [(set_attr "predicable" "yes")
3207 (set_attr "type" "alus_imm")]
;; As *satsi_<SAT:code>, but the saturated value is itself shifted: SSAT/USAT
;; accept a shifted source operand, printed via %S3.
3210 (define_insn "*satsi_<SAT:code>_shift"
3211 [(set (match_operand:SI 0 "s_register_operand" "=r")
3212 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3213 [(match_operand:SI 4 "s_register_operand" "r")
3214 (match_operand:SI 5 "const_int_operand" "i")])
3215 (match_operand:SI 1 "const_int_operand" "i"))
3216 (match_operand:SI 2 "const_int_operand" "i")))]
3217 "TARGET_32BIT && arm_arch6
3218 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3222 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3223 &mask, &signed_sat))
3226 operands[1] = GEN_INT (mask);
3228 return "ssat%?\t%0, %1, %4%S3";
3230 return "usat%?\t%0, %1, %4%S3";
3232 [(set_attr "predicable" "yes")
3233 (set_attr "shift" "3")
3234 (set_attr "type" "logic_shift_reg")])
3236 ;; Shift and rotation insns
;; 64-bit left shift: expanded entirely by arm_emit_coreregs_64bit_shift
;; on the core-register pair, using two SImode scratch registers.
3238 (define_expand "ashldi3"
3239 [(set (match_operand:DI 0 "s_register_operand")
3240 (ashift:DI (match_operand:DI 1 "s_register_operand")
3241 (match_operand:SI 2 "reg_or_int_operand")))]
3244 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3245 operands[2], gen_reg_rtx (SImode),
3246 gen_reg_rtx (SImode));
;; 32-bit left shift; a constant shift count above 31 degenerates to
;; moving zero into the destination.
3250 (define_expand "ashlsi3"
3251 [(set (match_operand:SI 0 "s_register_operand")
3252 (ashift:SI (match_operand:SI 1 "s_register_operand")
3253 (match_operand:SI 2 "arm_rhs_operand")))]
3256 if (CONST_INT_P (operands[2])
3257 && (UINTVAL (operands[2])) > 31)
3259 emit_insn (gen_movsi (operands[0], const0_rtx));
;; 64-bit arithmetic right shift via arm_emit_coreregs_64bit_shift, as
;; ashldi3 above.
3265 (define_expand "ashrdi3"
3266 [(set (match_operand:DI 0 "s_register_operand")
3267 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3268 (match_operand:SI 2 "reg_or_int_operand")))]
3271 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3272 operands[2], gen_reg_rtx (SImode),
3273 gen_reg_rtx (SImode));
3277 (define_expand "ashrsi3"
3278 [(set (match_operand:SI 0 "s_register_operand")
3279 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3280 (match_operand:SI 2 "arm_rhs_operand")))]
3283 if (CONST_INT_P (operands[2])
3284 && UINTVAL (operands[2]) > 31)
3285 operands[2] = GEN_INT (31);
3289 (define_expand "lshrdi3"
3290 [(set (match_operand:DI 0 "s_register_operand")
3291 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3292 (match_operand:SI 2 "reg_or_int_operand")))]
3295 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3296 operands[2], gen_reg_rtx (SImode),
3297 gen_reg_rtx (SImode));
3301 (define_expand "lshrsi3"
3302 [(set (match_operand:SI 0 "s_register_operand")
3303 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3304 (match_operand:SI 2 "arm_rhs_operand")))]
3307 if (CONST_INT_P (operands[2])
3308 && (UINTVAL (operands[2])) > 31)
3310 emit_insn (gen_movsi (operands[0], const0_rtx));
3316 (define_expand "rotlsi3"
3317 [(set (match_operand:SI 0 "s_register_operand")
3318 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3319 (match_operand:SI 2 "reg_or_int_operand")))]
3322 if (CONST_INT_P (operands[2]))
3323 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3326 rtx reg = gen_reg_rtx (SImode);
3327 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3333 (define_expand "rotrsi3"
3334 [(set (match_operand:SI 0 "s_register_operand")
3335 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3336 (match_operand:SI 2 "arm_rhs_operand")))]
3341 if (CONST_INT_P (operands[2])
3342 && UINTVAL (operands[2]) > 31)
3343 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3345 else /* TARGET_THUMB1 */
3347 if (CONST_INT_P (operands [2]))
3348 operands [2] = force_reg (SImode, operands[2]);
;; Generic SImode shift insn; the assembly text is produced by
;; arm_output_shift (second argument 0 = non-flag-setting form).
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
3353 (define_insn "*arm_shiftsi3"
3354 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3355 (match_operator:SI 3 "shift_operator"
3356 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3357 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3359 "* return arm_output_shift(operands, 0);"
3360 [(set_attr "predicable" "yes")
3361 (set_attr "arch" "t2,t2,*,*")
3362 (set_attr "predicable_short_it" "yes,yes,no,no")
3363 (set_attr "length" "4")
3364 (set_attr "shift" "1")
3365 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
;; Shift that also sets the condition codes (compare of the shifted value).
3368 (define_insn "*shiftsi3_compare0"
3369 [(set (reg:CC_NOOV CC_REGNUM)
3370 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3371 [(match_operand:SI 1 "s_register_operand" "r,r")
3372 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3374 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3375 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3377 "* return arm_output_shift(operands, 1);"
3378 [(set_attr "conds" "set")
3379 (set_attr "shift" "1")
3380 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As above, but the shifted result itself is discarded (scratch destination).
3383 (define_insn "*shiftsi3_compare0_scratch"
3384 [(set (reg:CC_NOOV CC_REGNUM)
3385 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3386 [(match_operand:SI 1 "s_register_operand" "r,r")
3387 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3389 (clobber (match_scratch:SI 0 "=r,r"))]
3391 "* return arm_output_shift(operands, 1);"
3392 [(set_attr "conds" "set")
3393 (set_attr "shift" "1")
3394 (set_attr "type" "shift_imm,shift_reg")]
;; MVN of a shifted operand.
3397 (define_insn "*not_shiftsi"
3398 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3399 (not:SI (match_operator:SI 3 "shift_operator"
3400 [(match_operand:SI 1 "s_register_operand" "r,r")
3401 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3404 [(set_attr "predicable" "yes")
3405 (set_attr "shift" "1")
3406 (set_attr "arch" "32,a")
3407 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS: MVN of a shifted operand, also setting the condition codes.
3409 (define_insn "*not_shiftsi_compare0"
3410 [(set (reg:CC_NOOV CC_REGNUM)
3412 (not:SI (match_operator:SI 3 "shift_operator"
3413 [(match_operand:SI 1 "s_register_operand" "r,r")
3414 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3416 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3417 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3419 "mvns%?\\t%0, %1%S3"
3420 [(set_attr "conds" "set")
3421 (set_attr "shift" "1")
3422 (set_attr "arch" "32,a")
3423 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; Flag-setting MVN-of-shift with the result discarded.
3425 (define_insn "*not_shiftsi_compare0_scratch"
3426 [(set (reg:CC_NOOV CC_REGNUM)
3428 (not:SI (match_operator:SI 3 "shift_operator"
3429 [(match_operand:SI 1 "s_register_operand" "r,r")
3430 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3432 (clobber (match_scratch:SI 0 "=r,r"))]
3434 "mvns%?\\t%0, %1%S3"
3435 [(set_attr "conds" "set")
3436 (set_attr "shift" "1")
3437 (set_attr "arch" "32,a")
3438 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3440 ;; We don't really have extzv, but defining this using shifts helps
3441 ;; to reduce register pressure later on.
;; extzv: zero_extract expander.  On Thumb-2, 16/32-bit byte-aligned MEM
;; extractions use the unaligned-load insns and register sources use ubfx
;; (gen_extzv_t2); otherwise the extraction is done with a left/right shift
;; pair (lshift/rshift computed below).
;; NOTE(review): embedded line numbers jump inside this body; elided lines
;; are not reconstructed — code kept byte-identical.
3443 (define_expand "extzv"
3444 [(set (match_operand 0 "s_register_operand")
3445 (zero_extract (match_operand 1 "nonimmediate_operand")
3446 (match_operand 2 "const_int_operand")
3447 (match_operand 3 "const_int_operand")))]
3448 "TARGET_THUMB1 || arm_arch_thumb2"
3451 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3452 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3454 if (arm_arch_thumb2)
3456 HOST_WIDE_INT width = INTVAL (operands[2]);
3457 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3459 if (unaligned_access && MEM_P (operands[1])
3460 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3464 if (BYTES_BIG_ENDIAN)
3465 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3470 base_addr = adjust_address (operands[1], SImode,
3471 bitpos / BITS_PER_UNIT);
3472 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3476 rtx dest = operands[0];
3477 rtx tmp = gen_reg_rtx (SImode);
3479 /* We may get a paradoxical subreg here. Strip it off. */
3480 if (GET_CODE (dest) == SUBREG
3481 && GET_MODE (dest) == SImode
3482 && GET_MODE (SUBREG_REG (dest)) == HImode)
3483 dest = SUBREG_REG (dest);
3485 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3488 base_addr = adjust_address (operands[1], HImode,
3489 bitpos / BITS_PER_UNIT);
3490 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3491 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3495 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3497 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3505 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3508 operands[3] = GEN_INT (rshift);
3512 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3516 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3517 operands[3], gen_reg_rtx (SImode)));
3522 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; extzv_t1: shift left by operand 2 into scratch operand 4, then logical
;; shift right by operand 3 into operand 0.
3524 (define_expand "extzv_t1"
3525 [(set (match_operand:SI 4 "s_register_operand")
3526 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3527 (match_operand:SI 2 "const_int_operand")))
3528 (set (match_operand:SI 0 "s_register_operand")
3529 (lshiftrt:SI (match_dup 4)
3530 (match_operand:SI 3 "const_int_operand")))]
;; extv: sign_extract expander, structured like extzv above: byte-aligned
;; 16/32-bit MEM sources go through the unaligned (signed) loads; SImode
;; register sources go through extv_regsi (sbfx).
;; NOTE(review): embedded line numbers jump inside this body; elided lines
;; are not reconstructed — code kept byte-identical.
3534 (define_expand "extv"
3535 [(set (match_operand 0 "s_register_operand")
3536 (sign_extract (match_operand 1 "nonimmediate_operand")
3537 (match_operand 2 "const_int_operand")
3538 (match_operand 3 "const_int_operand")))]
3541 HOST_WIDE_INT width = INTVAL (operands[2]);
3542 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3544 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3545 && (bitpos % BITS_PER_UNIT) == 0)
3549 if (BYTES_BIG_ENDIAN)
3550 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3554 base_addr = adjust_address (operands[1], SImode,
3555 bitpos / BITS_PER_UNIT);
3556 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3560 rtx dest = operands[0];
3561 rtx tmp = gen_reg_rtx (SImode);
3563 /* We may get a paradoxical subreg here. Strip it off. */
3564 if (GET_CODE (dest) == SUBREG
3565 && GET_MODE (dest) == SImode
3566 && GET_MODE (SUBREG_REG (dest)) == HImode)
3567 dest = SUBREG_REG (dest);
3569 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3572 base_addr = adjust_address (operands[1], HImode,
3573 bitpos / BITS_PER_UNIT);
3574 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3575 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3580 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3582 else if (GET_MODE (operands[0]) == SImode
3583 && GET_MODE (operands[1]) == SImode)
3585 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3593 ; Helper to expand register forms of extv with the proper modes.
;; extv_regsi: SImode-only sign_extract on a register source.
3595 (define_expand "extv_regsi"
3596 [(set (match_operand:SI 0 "s_register_operand")
3597 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3598 (match_operand 2 "const_int_operand")
3599 (match_operand 3 "const_int_operand")))]
3604 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; unaligned_loaddi: DImode unaligned load, emitted as a register-pair move
;; by output_move_double (8 bytes, needs TARGET_LDRD).
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
3606 (define_insn "unaligned_loaddi"
3607 [(set (match_operand:DI 0 "s_register_operand" "=r")
3608 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3609 UNSPEC_UNALIGNED_LOAD))]
3610 "TARGET_32BIT && TARGET_LDRD"
3612 return output_move_double (operands, true, NULL);
3614 [(set_attr "length" "8")
3615 (set_attr "type" "load_8")])
;; unaligned_loadsi: SImode unaligned load; alternatives cover Thumb-1,
;; Thumb-2 and 32-bit ARM encodings of LDR.
3617 (define_insn "unaligned_loadsi"
3618 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3619 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3620 UNSPEC_UNALIGNED_LOAD))]
3623 ldr\t%0, %1\t@ unaligned
3624 ldr%?\t%0, %1\t@ unaligned
3625 ldr%?\t%0, %1\t@ unaligned"
3626 [(set_attr "arch" "t1,t2,32")
3627 (set_attr "length" "2,2,4")
3628 (set_attr "predicable" "no,yes,yes")
3629 (set_attr "predicable_short_it" "no,yes,no")
3630 (set_attr "type" "load_4")])
3632 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3633 ;; address (there's no immediate format). That's tricky to support
3634 ;; here and we don't really need this pattern for that case, so only
3635 ;; enable for 32-bit ISAs.
;; unaligned_loadhis: sign-extending unaligned halfword load (LDRSH).
3636 (define_insn "unaligned_loadhis"
3637 [(set (match_operand:SI 0 "s_register_operand" "=r")
3639 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3640 UNSPEC_UNALIGNED_LOAD)))]
3641 "unaligned_access && TARGET_32BIT"
3642 "ldrsh%?\t%0, %1\t@ unaligned"
3643 [(set_attr "predicable" "yes")
3644 (set_attr "type" "load_byte")])
;; unaligned_loadhiu: zero-extending unaligned halfword load (LDRH).
3646 (define_insn "unaligned_loadhiu"
3647 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3649 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3650 UNSPEC_UNALIGNED_LOAD)))]
3653 ldrh\t%0, %1\t@ unaligned
3654 ldrh%?\t%0, %1\t@ unaligned
3655 ldrh%?\t%0, %1\t@ unaligned"
3656 [(set_attr "arch" "t1,t2,32")
3657 (set_attr "length" "2,2,4")
3658 (set_attr "predicable" "no,yes,yes")
3659 (set_attr "predicable_short_it" "no,yes,no")
3660 (set_attr "type" "load_byte")])
;; unaligned_storedi: DImode unaligned store via output_move_double.
3662 (define_insn "unaligned_storedi"
3663 [(set (match_operand:DI 0 "memory_operand" "=m")
3664 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3665 UNSPEC_UNALIGNED_STORE))]
3666 "TARGET_32BIT && TARGET_LDRD"
3668 return output_move_double (operands, true, NULL);
3670 [(set_attr "length" "8")
3671 (set_attr "type" "store_8")])
;; unaligned_storesi: SImode unaligned store (STR), t1/t2/ARM alternatives.
3673 (define_insn "unaligned_storesi"
3674 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3675 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3676 UNSPEC_UNALIGNED_STORE))]
3679 str\t%1, %0\t@ unaligned
3680 str%?\t%1, %0\t@ unaligned
3681 str%?\t%1, %0\t@ unaligned"
3682 [(set_attr "arch" "t1,t2,32")
3683 (set_attr "length" "2,2,4")
3684 (set_attr "predicable" "no,yes,yes")
3685 (set_attr "predicable_short_it" "no,yes,no")
3686 (set_attr "type" "store_4")])
;; unaligned_storehi: halfword unaligned store (STRH).
3688 (define_insn "unaligned_storehi"
3689 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3690 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3691 UNSPEC_UNALIGNED_STORE))]
3694 strh\t%1, %0\t@ unaligned
3695 strh%?\t%1, %0\t@ unaligned
3696 strh%?\t%1, %0\t@ unaligned"
3697 [(set_attr "arch" "t1,t2,32")
3698 (set_attr "length" "2,2,4")
3699 (set_attr "predicable" "no,yes,yes")
3700 (set_attr "predicable_short_it" "no,yes,no")
3701 (set_attr "type" "store_4")])
;; *extv_reg: SBFX — signed bitfield extract from a register; the condition
;; range-checks bit position (0..31) and width (1..32-pos).
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
3704 (define_insn "*extv_reg"
3705 [(set (match_operand:SI 0 "s_register_operand" "=r")
3706 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3707 (match_operand:SI 2 "const_int_operand" "n")
3708 (match_operand:SI 3 "const_int_operand" "n")))]
3710 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3711 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3712 "sbfx%?\t%0, %1, %3, %2"
3713 [(set_attr "length" "4")
3714 (set_attr "predicable" "yes")
3715 (set_attr "type" "bfm")]
;; extzv_t2: UBFX — unsigned bitfield extract, same range checks as above.
3718 (define_insn "extzv_t2"
3719 [(set (match_operand:SI 0 "s_register_operand" "=r")
3720 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3721 (match_operand:SI 2 "const_int_operand" "n")
3722 (match_operand:SI 3 "const_int_operand" "n")))]
3724 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3725 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3726 "ubfx%?\t%0, %1, %3, %2"
3727 [(set_attr "length" "4")
3728 (set_attr "predicable" "yes")
3729 (set_attr "type" "bfm")]
3733 ;; Division instructions
;; divsi3: signed hardware divide (alternatives for 32-bit and v8-M Baseline).
3734 (define_insn "divsi3"
3735 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3736 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3737 (match_operand:SI 2 "s_register_operand" "r,r")))]
3742 [(set_attr "arch" "32,v8mb")
3743 (set_attr "predicable" "yes")
3744 (set_attr "type" "sdiv")]
;; udivsi3: unsigned hardware divide.
3747 (define_insn "udivsi3"
3748 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3749 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3750 (match_operand:SI 2 "s_register_operand" "r,r")))]
3755 [(set_attr "arch" "32,v8mb")
3756 (set_attr "predicable" "yes")
3757 (set_attr "type" "udiv")]
3761 ;; Unary arithmetic insns
;; negvsi3: negate with signed-overflow check — subtract from zero with flags,
;; then branch on the V flag via arm_gen_unlikely_cbranch.
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
3763 (define_expand "negvsi3"
3764 [(match_operand:SI 0 "register_operand")
3765 (match_operand:SI 1 "register_operand")
3766 (match_operand 2 "")]
3769 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3770 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; negvdi3: DImode variant, built on negdi2_compare.
3775 (define_expand "negvdi3"
3776 [(match_operand:DI 0 "s_register_operand")
3777 (match_operand:DI 1 "s_register_operand")
3778 (match_operand 2 "")]
3781 emit_insn (gen_negdi2_compare (operands[0], operands[1]))
3782 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; negdi2_compare: flag-setting 64-bit negate; ARM uses RSBS/RSCS, Thumb-2
;; (no RSC) uses the RSBS/SBCS-with-lsl trick shown in alternative two.
3788 (define_insn "negdi2_compare"
3789 [(set (reg:CC CC_REGNUM)
3792 (match_operand:DI 1 "register_operand" "r,r")))
3793 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3794 (minus:DI (const_int 0) (match_dup 1)))]
3797 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3798 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3799 [(set_attr "conds" "set")
3800 (set_attr "arch" "a,t2")
3801 (set_attr "length" "8")
3802 (set_attr "type" "multiple")]
;; negsi2: standard-name expander for SImode negate.
3805 (define_expand "negsi2"
3806 [(set (match_operand:SI 0 "s_register_operand")
3807 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; *arm_negsi2: RSB from zero.
3812 (define_insn "*arm_negsi2"
3813 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3814 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3816 "rsb%?\\t%0, %1, #0"
3817 [(set_attr "predicable" "yes")
3818 (set_attr "predicable_short_it" "yes,no")
3819 (set_attr "arch" "t2,*")
3820 (set_attr "length" "4")
3821 (set_attr "type" "alu_imm")]
3824 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3825 ;; rather than (0 cmp reg). This gives the same results for unsigned
3826 ;; and equality compares which is what we mostly need here.
;; negsi2_0compare: flag-setting negate using the canonical (~reg cmp ~0) form.
3827 (define_insn "negsi2_0compare"
3828 [(set (reg:CC_RSB CC_REGNUM)
3829 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3831 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3832 (neg:SI (match_dup 1)))]
3837 [(set_attr "conds" "set")
3838 (set_attr "arch" "t2,*")
3839 (set_attr "length" "2,*")
3840 (set_attr "type" "alus_imm")]
;; negsi2_carryin: negate minus an incoming borrow (used in DI sequences).
3843 (define_insn "negsi2_carryin"
3844 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3845 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3846 (match_operand:SI 2 "arm_borrow_operation" "")))]
3850 sbc\\t%0, %1, %1, lsl #1"
3851 [(set_attr "conds" "use")
3852 (set_attr "arch" "a,t2")
3853 (set_attr "type" "adc_imm,adc_reg")]
;; negsf2 / negdf2: FP negate expanders, hard-float only.
3856 (define_expand "negsf2"
3857 [(set (match_operand:SF 0 "s_register_operand")
3858 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3859 "TARGET_32BIT && TARGET_HARD_FLOAT"
3863 (define_expand "negdf2"
3864 [(set (match_operand:DF 0 "s_register_operand")
3865 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3866 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; *zextendsidi_negsi: zero-extended SImode negate split into a low-part
;; negate plus (per the elided split pattern) a high-part clear.
3869 (define_insn_and_split "*zextendsidi_negsi"
3870 [(set (match_operand:DI 0 "s_register_operand" "=r")
3871 (zero_extend:DI (neg:SI (match_operand:SI 1 "s_register_operand" "r"))))]
3876 (neg:SI (match_dup 1)))
3880 operands[2] = gen_lowpart (SImode, operands[0]);
3881 operands[3] = gen_highpart (SImode, operands[0]);
3883 [(set_attr "length" "8")
3884 (set_attr "type" "multiple")]
3887 ;; Negate an extended 32-bit value.
;; *negdi_extendsidi: neg of a sign-extended SImode value, split after reload.
;; Two emit strategies depending on whether the input overlaps the output's
;; low word: RSBS + RSC (or the Thumb-2 SBC-with-lsl equivalent), or a
;; flag-free NEG / AND-NOT / ASR sequence.
;; NOTE(review): embedded line numbers jump inside this body; elided lines
;; are not reconstructed — code kept byte-identical.
3888 (define_insn_and_split "*negdi_extendsidi"
3889 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
3890 (neg:DI (sign_extend:DI
3891 (match_operand:SI 1 "s_register_operand" "l,r"))))
3892 (clobber (reg:CC CC_REGNUM))]
3895 "&& reload_completed"
3898 rtx low = gen_lowpart (SImode, operands[0]);
3899 rtx high = gen_highpart (SImode, operands[0]);
3901 if (reg_overlap_mentioned_p (low, operands[1]))
3903 /* Input overlaps the low word of the output. Use:
3906 rsc Rhi, Rhi, #0 (thumb2: sbc Rhi, Rhi, Rhi, lsl #1). */
3907 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
3909 emit_insn (gen_rtx_SET (high,
3910 gen_rtx_ASHIFTRT (SImode, operands[1],
3913 emit_insn (gen_subsi3_compare (low, const0_rtx, operands[1]));
3915 emit_insn (gen_rtx_SET (high,
3916 gen_rtx_MINUS (SImode,
3917 gen_rtx_MINUS (SImode,
3920 gen_rtx_LTU (SImode,
3925 rtx two_x = gen_rtx_ASHIFT (SImode, high, GEN_INT (1));
3926 emit_insn (gen_rtx_SET (high,
3927 gen_rtx_MINUS (SImode,
3928 gen_rtx_MINUS (SImode,
3931 gen_rtx_LTU (SImode,
3938 /* No overlap, or overlap on high word. Use:
3942 Flags not needed for this sequence. */
3943 emit_insn (gen_rtx_SET (low, gen_rtx_NEG (SImode, operands[1])));
3944 emit_insn (gen_rtx_SET (high,
3945 gen_rtx_AND (SImode,
3946 gen_rtx_NOT (SImode, operands[1]),
3948 emit_insn (gen_rtx_SET (high,
3949 gen_rtx_ASHIFTRT (SImode, high,
3954 [(set_attr "length" "12")
3955 (set_attr "arch" "t2,*")
3956 (set_attr "type" "multiple")]
3959 ;; abssi2 doesn't really clobber the condition codes if a different register
3960 ;; is being set. To keep things simple, assume during rtl manipulations that
3961 ;; it does, but tell the final scan operator the truth. Similarly for
;; abssi2: expander attaching either a scratch or the real CC register as
;; operand 2, per the comment above.
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
3964 (define_expand "abssi2"
3966 [(set (match_operand:SI 0 "s_register_operand")
3967 (abs:SI (match_operand:SI 1 "s_register_operand")))
3968 (clobber (match_dup 2))])]
3972 operands[2] = gen_rtx_SCRATCH (SImode);
3974 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; *arm_abssi2: split after reload; in-place operands use CMP + conditional
;; RSB, otherwise the flag-free EOR/SUB-with-asr-31 idiom.
3977 (define_insn_and_split "*arm_abssi2"
3978 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3979 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3980 (clobber (reg:CC CC_REGNUM))]
3983 "&& reload_completed"
3986 /* if (which_alternative == 0) */
3987 if (REGNO(operands[0]) == REGNO(operands[1]))
3989 /* Emit the pattern:
3990 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3991 [(set (reg:CC CC_REGNUM)
3992 (compare:CC (match_dup 0) (const_int 0)))
3993 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3994 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3996 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3997 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3998 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3999 (gen_rtx_LT (SImode,
4000 gen_rtx_REG (CCmode, CC_REGNUM),
4002 (gen_rtx_SET (operands[0],
4003 (gen_rtx_MINUS (SImode,
4010 /* Emit the pattern:
4011 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4013 (xor:SI (match_dup 1)
4014 (ashiftrt:SI (match_dup 1) (const_int 31))))
4016 (minus:SI (match_dup 0)
4017 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4019 emit_insn (gen_rtx_SET (operands[0],
4020 gen_rtx_XOR (SImode,
4021 gen_rtx_ASHIFTRT (SImode,
4025 emit_insn (gen_rtx_SET (operands[0],
4026 gen_rtx_MINUS (SImode,
4028 gen_rtx_ASHIFTRT (SImode,
4034 [(set_attr "conds" "clob,*")
4035 (set_attr "shift" "1")
4036 (set_attr "predicable" "no, yes")
4037 (set_attr "length" "8")
4038 (set_attr "type" "multiple")]
;; *arm_neg_abssi2: -abs(x); mirrors *arm_abssi2 with the comparison sense
;; and final subtraction reversed.
4041 (define_insn_and_split "*arm_neg_abssi2"
4042 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4043 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4044 (clobber (reg:CC CC_REGNUM))]
4047 "&& reload_completed"
4050 /* if (which_alternative == 0) */
4051 if (REGNO (operands[0]) == REGNO (operands[1]))
4053 /* Emit the pattern:
4054 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4056 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4057 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4058 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4060 gen_rtx_REG (CCmode, CC_REGNUM),
4062 gen_rtx_SET (operands[0],
4063 (gen_rtx_MINUS (SImode,
4069 /* Emit the pattern:
4070 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4072 emit_insn (gen_rtx_SET (operands[0],
4073 gen_rtx_XOR (SImode,
4074 gen_rtx_ASHIFTRT (SImode,
4078 emit_insn (gen_rtx_SET (operands[0],
4079 gen_rtx_MINUS (SImode,
4080 gen_rtx_ASHIFTRT (SImode,
4087 [(set_attr "conds" "clob,*")
4088 (set_attr "shift" "1")
4089 (set_attr "predicable" "no, yes")
4090 (set_attr "length" "8")
4091 (set_attr "type" "multiple")]
;; FP abs/sqrt expanders (hard-float; DF forms need double-precision VFP),
;; followed by the SImode one's-complement patterns.
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
4094 (define_expand "abssf2"
4095 [(set (match_operand:SF 0 "s_register_operand")
4096 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4097 "TARGET_32BIT && TARGET_HARD_FLOAT"
4100 (define_expand "absdf2"
4101 [(set (match_operand:DF 0 "s_register_operand")
4102 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4103 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4106 (define_expand "sqrtsf2"
4107 [(set (match_operand:SF 0 "s_register_operand")
4108 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4109 "TARGET_32BIT && TARGET_HARD_FLOAT"
4112 (define_expand "sqrtdf2"
4113 [(set (match_operand:DF 0 "s_register_operand")
4114 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4115 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; one_cmplsi2: standard-name expander for bitwise NOT.
4118 (define_expand "one_cmplsi2"
4119 [(set (match_operand:SI 0 "s_register_operand")
4120 (not:SI (match_operand:SI 1 "s_register_operand")))]
;; *arm_one_cmplsi2: the MVN insn itself.
4125 (define_insn "*arm_one_cmplsi2"
4126 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4127 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4130 [(set_attr "predicable" "yes")
4131 (set_attr "predicable_short_it" "yes,no")
4132 (set_attr "arch" "t2,*")
4133 (set_attr "length" "4")
4134 (set_attr "type" "mvn_reg")]
;; *notsi_compare0: flag-setting MVN.
4137 (define_insn "*notsi_compare0"
4138 [(set (reg:CC_NOOV CC_REGNUM)
4139 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4141 (set (match_operand:SI 0 "s_register_operand" "=r")
4142 (not:SI (match_dup 1)))]
4145 [(set_attr "conds" "set")
4146 (set_attr "type" "mvn_reg")]
;; *notsi_compare0_scratch: flag-setting MVN with the result discarded.
4149 (define_insn "*notsi_compare0_scratch"
4150 [(set (reg:CC_NOOV CC_REGNUM)
4151 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4153 (clobber (match_scratch:SI 0 "=r"))]
4156 [(set_attr "conds" "set")
4157 (set_attr "type" "mvn_reg")]
4160 ;; Fixed <--> Floating conversion insns
;; floatsihf2 / floatdihf2: int -> HFmode goes via SFmode (expand_float then
;; convert_to_mode), since there is no direct int->half conversion here.
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
4162 (define_expand "floatsihf2"
4163 [(set (match_operand:HF 0 "general_operand")
4164 (float:HF (match_operand:SI 1 "general_operand")))]
4168 rtx op1 = gen_reg_rtx (SFmode);
4169 expand_float (op1, operands[1], 0);
4170 op1 = convert_to_mode (HFmode, op1, 0);
4171 emit_move_insn (operands[0], op1);
4176 (define_expand "floatdihf2"
4177 [(set (match_operand:HF 0 "general_operand")
4178 (float:HF (match_operand:DI 1 "general_operand")))]
4182 rtx op1 = gen_reg_rtx (SFmode);
4183 expand_float (op1, operands[1], 0);
4184 op1 = convert_to_mode (HFmode, op1, 0);
4185 emit_move_insn (operands[0], op1);
;; floatsisf2 / floatsidf2: int -> SF/DF, hard-float only.
4190 (define_expand "floatsisf2"
4191 [(set (match_operand:SF 0 "s_register_operand")
4192 (float:SF (match_operand:SI 1 "s_register_operand")))]
4193 "TARGET_32BIT && TARGET_HARD_FLOAT"
4197 (define_expand "floatsidf2"
4198 [(set (match_operand:DF 0 "s_register_operand")
4199 (float:DF (match_operand:SI 1 "s_register_operand")))]
4200 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; fix_trunchfsi2 / fix_trunchfdi2: HF -> int, again routed through SFmode.
4204 (define_expand "fix_trunchfsi2"
4205 [(set (match_operand:SI 0 "general_operand")
4206 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4210 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4211 expand_fix (operands[0], op1, 0);
4216 (define_expand "fix_trunchfdi2"
4217 [(set (match_operand:DI 0 "general_operand")
4218 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4222 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4223 expand_fix (operands[0], op1, 0);
;; fix_truncsfsi2 / fix_truncdfsi2: SF/DF -> int, hard-float only.
4228 (define_expand "fix_truncsfsi2"
4229 [(set (match_operand:SI 0 "s_register_operand")
4230 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4231 "TARGET_32BIT && TARGET_HARD_FLOAT"
4235 (define_expand "fix_truncdfsi2"
4236 [(set (match_operand:SI 0 "s_register_operand")
4237 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4238 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; truncdfsf2: DF -> SF narrowing.
4244 (define_expand "truncdfsf2"
4245 [(set (match_operand:SF 0 "s_register_operand")
4247 (match_operand:DF 1 "s_register_operand")))]
4248 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4252 ;; DFmode to HFmode conversions on targets without a single-step hardware
4253 ;; instruction for it would have to go through SFmode. This is dangerous
4254 ;; as it introduces double rounding.
4256 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4257 ;; a single-step instruction.
;; truncdfhf2: double-rounding-aware DF -> HF (see the comment above).
4259 (define_expand "truncdfhf2"
4260 [(set (match_operand:HF 0 "s_register_operand")
4262 (match_operand:DF 1 "s_register_operand")))]
4263 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4264 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4266 /* We don't have a direct instruction for this, so we must be in
4267 an unsafe math mode, and going via SFmode. */
4269 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4272 op1 = convert_to_mode (SFmode, operands[1], 0);
4273 op1 = convert_to_mode (HFmode, op1, 0);
4274 emit_move_insn (operands[0], op1);
4277 /* Otherwise, we will pick this up as a single instruction with
4278 no intermediary rounding. */
4282 ;; Zero and sign extension instructions.
;; zero_extend<mode>di2: QI/HI/SI -> DI zero-extend; extends (or copies) the
;; low word and zeroes the high word, with pseudo vs. hard-reg handling.
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
4284 (define_expand "zero_extend<mode>di2"
4285 [(set (match_operand:DI 0 "s_register_operand" "")
4286 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4287 "TARGET_32BIT <qhs_zextenddi_cond>"
4289 rtx res_lo, res_hi, op0_lo, op0_hi;
4290 res_lo = gen_lowpart (SImode, operands[0]);
4291 res_hi = gen_highpart (SImode, operands[0]);
4292 if (can_create_pseudo_p ())
4294 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4295 op0_hi = gen_reg_rtx (SImode);
4299 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4302 if (<MODE>mode != SImode)
4303 emit_insn (gen_rtx_SET (op0_lo,
4304 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4305 emit_insn (gen_movsi (op0_hi, const0_rtx));
4306 if (res_lo != op0_lo)
4307 emit_move_insn (res_lo, op0_lo);
4308 if (res_hi != op0_hi)
4309 emit_move_insn (res_hi, op0_hi);
;; extend<mode>di2: sign-extend variant; the high word is the low word
;; shifted arithmetically right by 31.
4314 (define_expand "extend<mode>di2"
4315 [(set (match_operand:DI 0 "s_register_operand" "")
4316 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4317 "TARGET_32BIT <qhs_sextenddi_cond>"
4319 rtx res_lo, res_hi, op0_lo, op0_hi;
4320 res_lo = gen_lowpart (SImode, operands[0]);
4321 res_hi = gen_highpart (SImode, operands[0]);
4322 if (can_create_pseudo_p ())
4324 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4325 op0_hi = gen_reg_rtx (SImode);
4329 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4332 if (<MODE>mode != SImode)
4333 emit_insn (gen_rtx_SET (op0_lo,
4334 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4335 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4336 if (res_lo != op0_lo)
4337 emit_move_insn (res_lo, op0_lo);
4338 if (res_hi != op0_hi)
4339 emit_move_insn (res_hi, op0_hi);
4344 ;; Splits for all extensions to DImode
;; Split for zero_extend to DI: low part extended/copied, high part zeroed.
;; NOTE(review): the "(define_split" opener line itself is elided from this
;; dump (embedded numbers jump 4344 -> 4346).
4346 [(set (match_operand:DI 0 "s_register_operand" "")
4347 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4349 [(set (match_dup 0) (match_dup 1))]
4351 rtx lo_part = gen_lowpart (SImode, operands[0]);
4352 machine_mode src_mode = GET_MODE (operands[1]);
4354 if (src_mode == SImode)
4355 emit_move_insn (lo_part, operands[1]);
4357 emit_insn (gen_rtx_SET (lo_part,
4358 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4359 operands[0] = gen_highpart (SImode, operands[0]);
4360 operands[1] = const0_rtx;
;; Split for sign_extend to DI: high part becomes low part >> 31.
;; NOTE(review): the "(define_split" opener line is likewise elided here.
4364 [(set (match_operand:DI 0 "s_register_operand" "")
4365 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4367 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4369 rtx lo_part = gen_lowpart (SImode, operands[0]);
4370 machine_mode src_mode = GET_MODE (operands[1]);
4372 if (src_mode == SImode)
4373 emit_move_insn (lo_part, operands[1]);
4375 emit_insn (gen_rtx_SET (lo_part,
4376 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4377 operands[1] = lo_part;
4378 operands[0] = gen_highpart (SImode, operands[0]);
;; zero_extendhisi2: HI -> SI zero-extend.  Pre-v4 ARM loads via
;; movhi_bytes; pre-v6 register sources use a shift-left-16 /
;; shift-right-16 pair.
;; NOTE(review): embedded line numbers jump in this section; elided lines are
;; not reconstructed — code kept byte-identical.
4381 (define_expand "zero_extendhisi2"
4382 [(set (match_operand:SI 0 "s_register_operand")
4383 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4386 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4388 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4391 if (!arm_arch6 && !MEM_P (operands[1]))
4393 rtx t = gen_lowpart (SImode, operands[1]);
4394 rtx tmp = gen_reg_rtx (SImode);
4395 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4396 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Split: pre-v6 HI zero-extend as a lsl #16 / lsr #16 pair.
;; NOTE(review): the "(define_split" opener line is elided from this dump.
4402 [(set (match_operand:SI 0 "s_register_operand" "")
4403 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4404 "!TARGET_THUMB2 && !arm_arch6"
4405 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4408 operands[2] = gen_lowpart (SImode, operands[1]);
;; *arm_zero_extendhisi2: pre-v6 (v4+) insn form; register or memory source.
4411 (define_insn "*arm_zero_extendhisi2"
4412 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4413 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4414 "TARGET_ARM && arm_arch4 && !arm_arch6"
4418 [(set_attr "type" "alu_shift_reg,load_byte")
4419 (set_attr "predicable" "yes")]
;; *arm_zero_extendhisi2_v6: v6 form (uxth available).
4422 (define_insn "*arm_zero_extendhisi2_v6"
4423 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4424 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4425 "TARGET_ARM && arm_arch6"
4429 [(set_attr "predicable" "yes")
4430 (set_attr "type" "extend,load_byte")]
;; *arm_zero_extendhisi2addsi: fused zero-extend + add (UXTAH).
4433 (define_insn "*arm_zero_extendhisi2addsi"
4434 [(set (match_operand:SI 0 "s_register_operand" "=r")
4435 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4436 (match_operand:SI 2 "s_register_operand" "r")))]
4438 "uxtah%?\\t%0, %2, %1"
4439 [(set_attr "type" "alu_shift_reg")
4440 (set_attr "predicable" "yes")]
;; zero_extendqisi2: QI -> SI zero-extend; pre-v6 ARM register sources use
;; AND #255, other pre-v6 cases a lsl #24 / lsr #24 pair.
4443 (define_expand "zero_extendqisi2"
4444 [(set (match_operand:SI 0 "s_register_operand")
4445 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4448 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4450 emit_insn (gen_andsi3 (operands[0],
4451 gen_lowpart (SImode, operands[1]),
4455 if (!arm_arch6 && !MEM_P (operands[1]))
4457 rtx t = gen_lowpart (SImode, operands[1]);
4458 rtx tmp = gen_reg_rtx (SImode);
4459 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4460 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Split: QI zero-extend as lsl #24 / lsr #24 (or AND #255 — see body).
;; NOTE(review): the "(define_split" opener line is elided from this dump.
4466 [(set (match_operand:SI 0 "s_register_operand" "")
4467 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4469 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4470 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4472 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4475 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; *arm_zero_extendqisi2: pre-v6 insn form (shift pair or LDRB).
4480 (define_insn "*arm_zero_extendqisi2"
4481 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4482 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4483 "TARGET_ARM && !arm_arch6"
4486 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4487 [(set_attr "length" "8,4")
4488 (set_attr "type" "alu_shift_reg,load_byte")
4489 (set_attr "predicable" "yes")]
;; *arm_zero_extendqisi2_v6: v6 form (uxtb available).
4492 (define_insn "*arm_zero_extendqisi2_v6"
4493 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4494 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4495 "TARGET_ARM && arm_arch6"
4498 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4499 [(set_attr "type" "extend,load_byte")
4500 (set_attr "predicable" "yes")]
;; *arm_zero_extendqisi2addsi: fused zero-extend + add (UXTAB).
4503 (define_insn "*arm_zero_extendqisi2addsi"
4504 [(set (match_operand:SI 0 "s_register_operand" "=r")
4505 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4506 (match_operand:SI 2 "s_register_operand" "r")))]
4508 "uxtab%?\\t%0, %2, %1"
4509 [(set_attr "predicable" "yes")
4510 (set_attr "type" "alu_shift_reg")]
4514 [(set (match_operand:SI 0 "s_register_operand" "")
4515 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4516 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4517 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4518 [(set (match_dup 2) (match_dup 1))
4519 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4524 [(set (match_operand:SI 0 "s_register_operand" "")
4525 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4526 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4527 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4528 [(set (match_dup 2) (match_dup 1))
4529 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4535 [(set (match_operand:SI 0 "s_register_operand" "")
4536 (IOR_XOR:SI (and:SI (ashift:SI
4537 (match_operand:SI 1 "s_register_operand" "")
4538 (match_operand:SI 2 "const_int_operand" ""))
4539 (match_operand:SI 3 "const_int_operand" ""))
4541 (match_operator 5 "subreg_lowpart_operator"
4542 [(match_operand:SI 4 "s_register_operand" "")]))))]
4544 && (UINTVAL (operands[3])
4545 == (GET_MODE_MASK (GET_MODE (operands[5]))
4546 & (GET_MODE_MASK (GET_MODE (operands[5]))
4547 << (INTVAL (operands[2])))))"
4548 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4550 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4551 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4554 (define_insn "*compareqi_eq0"
4555 [(set (reg:CC_Z CC_REGNUM)
4556 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4560 [(set_attr "conds" "set")
4561 (set_attr "predicable" "yes")
4562 (set_attr "type" "logic_imm")]
4565 (define_expand "extendhisi2"
4566 [(set (match_operand:SI 0 "s_register_operand")
4567 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4572 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4575 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4577 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4581 if (!arm_arch6 && !MEM_P (operands[1]))
4583 rtx t = gen_lowpart (SImode, operands[1]);
4584 rtx tmp = gen_reg_rtx (SImode);
4585 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4586 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4593 [(set (match_operand:SI 0 "register_operand" "")
4594 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4595 (clobber (match_scratch:SI 2 ""))])]
4597 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4598 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4600 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4603 ;; This pattern will only be used when ldsh is not available
4604 (define_expand "extendhisi2_mem"
4605 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4607 (zero_extend:SI (match_dup 7)))
4608 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4609 (set (match_operand:SI 0 "" "")
4610 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4615 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4617 mem1 = change_address (operands[1], QImode, addr);
4618 mem2 = change_address (operands[1], QImode,
4619 plus_constant (Pmode, addr, 1));
4620 operands[0] = gen_lowpart (SImode, operands[0]);
4622 operands[2] = gen_reg_rtx (SImode);
4623 operands[3] = gen_reg_rtx (SImode);
4624 operands[6] = gen_reg_rtx (SImode);
4627 if (BYTES_BIG_ENDIAN)
4629 operands[4] = operands[2];
4630 operands[5] = operands[3];
4634 operands[4] = operands[3];
4635 operands[5] = operands[2];
4641 [(set (match_operand:SI 0 "register_operand" "")
4642 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4644 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4645 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4647 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4650 (define_insn "*arm_extendhisi2"
4651 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4652 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4653 "TARGET_ARM && arm_arch4 && !arm_arch6"
4657 [(set_attr "length" "8,4")
4658 (set_attr "type" "alu_shift_reg,load_byte")
4659 (set_attr "predicable" "yes")]
4662 ;; ??? Check Thumb-2 pool range
4663 (define_insn "*arm_extendhisi2_v6"
4664 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4665 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4666 "TARGET_32BIT && arm_arch6"
4670 [(set_attr "type" "extend,load_byte")
4671 (set_attr "predicable" "yes")]
4674 (define_insn "*arm_extendhisi2addsi"
4675 [(set (match_operand:SI 0 "s_register_operand" "=r")
4676 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4677 (match_operand:SI 2 "s_register_operand" "r")))]
4679 "sxtah%?\\t%0, %2, %1"
4680 [(set_attr "type" "alu_shift_reg")]
4683 (define_expand "extendqihi2"
4685 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4687 (set (match_operand:HI 0 "s_register_operand")
4688 (ashiftrt:SI (match_dup 2)
4693 if (arm_arch4 && MEM_P (operands[1]))
4695 emit_insn (gen_rtx_SET (operands[0],
4696 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4699 if (!s_register_operand (operands[1], QImode))
4700 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4701 operands[0] = gen_lowpart (SImode, operands[0]);
4702 operands[1] = gen_lowpart (SImode, operands[1]);
4703 operands[2] = gen_reg_rtx (SImode);
4707 (define_insn "*arm_extendqihi_insn"
4708 [(set (match_operand:HI 0 "s_register_operand" "=r")
4709 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4710 "TARGET_ARM && arm_arch4"
4712 [(set_attr "type" "load_byte")
4713 (set_attr "predicable" "yes")]
4716 (define_expand "extendqisi2"
4717 [(set (match_operand:SI 0 "s_register_operand")
4718 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4721 if (!arm_arch4 && MEM_P (operands[1]))
4722 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4724 if (!arm_arch6 && !MEM_P (operands[1]))
4726 rtx t = gen_lowpart (SImode, operands[1]);
4727 rtx tmp = gen_reg_rtx (SImode);
4728 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4729 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4735 [(set (match_operand:SI 0 "register_operand" "")
4736 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4738 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4739 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4741 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4744 (define_insn "*arm_extendqisi"
4745 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4746 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4747 "TARGET_ARM && arm_arch4 && !arm_arch6"
4751 [(set_attr "length" "8,4")
4752 (set_attr "type" "alu_shift_reg,load_byte")
4753 (set_attr "predicable" "yes")]
4756 (define_insn "*arm_extendqisi_v6"
4757 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4759 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4760 "TARGET_ARM && arm_arch6"
4764 [(set_attr "type" "extend,load_byte")
4765 (set_attr "predicable" "yes")]
4768 (define_insn "*arm_extendqisi2addsi"
4769 [(set (match_operand:SI 0 "s_register_operand" "=r")
4770 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4771 (match_operand:SI 2 "s_register_operand" "r")))]
4773 "sxtab%?\\t%0, %2, %1"
4774 [(set_attr "type" "alu_shift_reg")
4775 (set_attr "predicable" "yes")]
4778 (define_insn "arm_<sup>xtb16"
4779 [(set (match_operand:SI 0 "s_register_operand" "=r")
4781 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4783 "<sup>xtb16%?\\t%0, %1"
4784 [(set_attr "predicable" "yes")
4785 (set_attr "type" "alu_dsp_reg")])
4787 (define_insn "arm_<simd32_op>"
4788 [(set (match_operand:SI 0 "s_register_operand" "=r")
4790 [(match_operand:SI 1 "s_register_operand" "r")
4791 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4793 "<simd32_op>%?\\t%0, %1, %2"
4794 [(set_attr "predicable" "yes")
4795 (set_attr "type" "alu_dsp_reg")])
4797 (define_insn "arm_usada8"
4798 [(set (match_operand:SI 0 "s_register_operand" "=r")
4800 [(match_operand:SI 1 "s_register_operand" "r")
4801 (match_operand:SI 2 "s_register_operand" "r")
4802 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4804 "usada8%?\\t%0, %1, %2, %3"
4805 [(set_attr "predicable" "yes")
4806 (set_attr "type" "alu_dsp_reg")])
4808 (define_insn "arm_<simd32_op>"
4809 [(set (match_operand:DI 0 "s_register_operand" "=r")
4811 [(match_operand:SI 1 "s_register_operand" "r")
4812 (match_operand:SI 2 "s_register_operand" "r")
4813 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4815 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4816 [(set_attr "predicable" "yes")
4817 (set_attr "type" "smlald")])
4819 (define_expand "extendsfdf2"
4820 [(set (match_operand:DF 0 "s_register_operand")
4821 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4822 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4826 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4827 ;; must go through SFmode.
4829 ;; This is always safe for an extend.
4831 (define_expand "extendhfdf2"
4832 [(set (match_operand:DF 0 "s_register_operand")
4833 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4836 /* We don't have a direct instruction for this, so go via SFmode. */
4837 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4840 op1 = convert_to_mode (SFmode, operands[1], 0);
4841 op1 = convert_to_mode (DFmode, op1, 0);
4842 emit_insn (gen_movdf (operands[0], op1));
4845 /* Otherwise, we're done producing RTL and will pick up the correct
4846 pattern to do this with one rounding-step in a single instruction. */
4850 ;; Move insns (including loads and stores)
4852 ;; XXX Just some ideas about movti.
4853 ;; I don't think these are a good idea on the arm, there just aren't enough
4855 ;;(define_expand "loadti"
4856 ;; [(set (match_operand:TI 0 "s_register_operand")
4857 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4860 ;;(define_expand "storeti"
4861 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4862 ;; (match_operand:TI 1 "s_register_operand"))]
4865 ;;(define_expand "movti"
4866 ;; [(set (match_operand:TI 0 "general_operand")
4867 ;; (match_operand:TI 1 "general_operand"))]
4873 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4874 ;; operands[1] = copy_to_reg (operands[1]);
4875 ;; if (MEM_P (operands[0]))
4876 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4877 ;; else if (MEM_P (operands[1]))
4878 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4882 ;; emit_insn (insn);
4886 ;; Recognize garbage generated above.
4889 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4890 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4894 ;; register mem = (which_alternative < 3);
4895 ;; register const char *template;
4897 ;; operands[mem] = XEXP (operands[mem], 0);
4898 ;; switch (which_alternative)
4900 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4901 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4902 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4903 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4904 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4905 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4907 ;; output_asm_insn (template, operands);
4911 (define_expand "movdi"
4912 [(set (match_operand:DI 0 "general_operand")
4913 (match_operand:DI 1 "general_operand"))]
4916 gcc_checking_assert (aligned_operand (operands[0], DImode));
4917 gcc_checking_assert (aligned_operand (operands[1], DImode));
4918 if (can_create_pseudo_p ())
4920 if (!REG_P (operands[0]))
4921 operands[1] = force_reg (DImode, operands[1]);
4923 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4924 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4926 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4927 when expanding function calls. */
4928 gcc_assert (can_create_pseudo_p ());
4929 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4931 /* Perform load into legal reg pair first, then move. */
4932 rtx reg = gen_reg_rtx (DImode);
4933 emit_insn (gen_movdi (reg, operands[1]));
4936 emit_move_insn (gen_lowpart (SImode, operands[0]),
4937 gen_lowpart (SImode, operands[1]));
4938 emit_move_insn (gen_highpart (SImode, operands[0]),
4939 gen_highpart (SImode, operands[1]));
4942 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4943 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4945 /* Avoid STRD's from an odd-numbered register pair in ARM state
4946 when expanding function prologue. */
4947 gcc_assert (can_create_pseudo_p ());
4948 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4949 ? gen_reg_rtx (DImode)
4951 emit_move_insn (gen_lowpart (SImode, split_dest),
4952 gen_lowpart (SImode, operands[1]));
4953 emit_move_insn (gen_highpart (SImode, split_dest),
4954 gen_highpart (SImode, operands[1]));
4955 if (split_dest != operands[0])
4956 emit_insn (gen_movdi (operands[0], split_dest));
4962 (define_insn "*arm_movdi"
4963 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4964 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4966 && !(TARGET_HARD_FLOAT)
4968 && ( register_operand (operands[0], DImode)
4969 || register_operand (operands[1], DImode))"
4971 switch (which_alternative)
4978 /* Cannot load it directly, split to load it via MOV / MOVT. */
4979 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4983 return output_move_double (operands, true, NULL);
4986 [(set_attr "length" "8,12,16,8,8")
4987 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4988 (set_attr "arm_pool_range" "*,*,*,1020,*")
4989 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4990 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4991 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4995 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4996 (match_operand:ANY64 1 "immediate_operand" ""))]
4999 && (arm_disable_literal_pool
5000 || (arm_const_double_inline_cost (operands[1])
5001 <= arm_max_const_double_inline_cost ()))"
5004 arm_split_constant (SET, SImode, curr_insn,
5005 INTVAL (gen_lowpart (SImode, operands[1])),
5006 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5007 arm_split_constant (SET, SImode, curr_insn,
5008 INTVAL (gen_highpart_mode (SImode,
5009 GET_MODE (operands[0]),
5011 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5016 ; If optimizing for size, or if we have load delay slots, then
5017 ; we want to split the constant into two separate operations.
5018 ; In both cases this may split a trivial part into a single data op
5019 ; leaving a single complex constant to load. We can also get longer
5020 ; offsets in a LDR which means we get better chances of sharing the pool
5021 ; entries. Finally, we can normally do a better job of scheduling
5022 ; LDR instructions than we can with LDM.
5023 ; This pattern will only match if the one above did not.
5025 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5026 (match_operand:ANY64 1 "const_double_operand" ""))]
5027 "TARGET_ARM && reload_completed
5028 && arm_const_double_by_parts (operands[1])"
5029 [(set (match_dup 0) (match_dup 1))
5030 (set (match_dup 2) (match_dup 3))]
5032 operands[2] = gen_highpart (SImode, operands[0]);
5033 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5035 operands[0] = gen_lowpart (SImode, operands[0]);
5036 operands[1] = gen_lowpart (SImode, operands[1]);
5041 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5042 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5043 "TARGET_EITHER && reload_completed"
5044 [(set (match_dup 0) (match_dup 1))
5045 (set (match_dup 2) (match_dup 3))]
5047 operands[2] = gen_highpart (SImode, operands[0]);
5048 operands[3] = gen_highpart (SImode, operands[1]);
5049 operands[0] = gen_lowpart (SImode, operands[0]);
5050 operands[1] = gen_lowpart (SImode, operands[1]);
5052 /* Handle a partial overlap. */
5053 if (rtx_equal_p (operands[0], operands[3]))
5055 rtx tmp0 = operands[0];
5056 rtx tmp1 = operands[1];
5058 operands[0] = operands[2];
5059 operands[1] = operands[3];
5066 ;; We can't actually do base+index doubleword loads if the index and
5067 ;; destination overlap. Split here so that we at least have chance to
5070 [(set (match_operand:DI 0 "s_register_operand" "")
5071 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5072 (match_operand:SI 2 "s_register_operand" ""))))]
5074 && reg_overlap_mentioned_p (operands[0], operands[1])
5075 && reg_overlap_mentioned_p (operands[0], operands[2])"
5077 (plus:SI (match_dup 1)
5080 (mem:DI (match_dup 4)))]
5082 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5086 (define_expand "movsi"
5087 [(set (match_operand:SI 0 "general_operand")
5088 (match_operand:SI 1 "general_operand"))]
5092 rtx base, offset, tmp;
5094 gcc_checking_assert (aligned_operand (operands[0], SImode));
5095 gcc_checking_assert (aligned_operand (operands[1], SImode));
5096 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5098 /* Everything except mem = const or mem = mem can be done easily. */
5099 if (MEM_P (operands[0]))
5100 operands[1] = force_reg (SImode, operands[1]);
5101 if (arm_general_register_operand (operands[0], SImode)
5102 && CONST_INT_P (operands[1])
5103 && !(const_ok_for_arm (INTVAL (operands[1]))
5104 || const_ok_for_arm (~INTVAL (operands[1]))))
5106 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5108 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5113 arm_split_constant (SET, SImode, NULL_RTX,
5114 INTVAL (operands[1]), operands[0], NULL_RTX,
5115 optimize && can_create_pseudo_p ());
5120 else /* Target doesn't have MOVT... */
5122 if (can_create_pseudo_p ())
5124 if (!REG_P (operands[0]))
5125 operands[1] = force_reg (SImode, operands[1]);
5129 split_const (operands[1], &base, &offset);
5130 if (INTVAL (offset) != 0
5131 && targetm.cannot_force_const_mem (SImode, operands[1]))
5133 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5134 emit_move_insn (tmp, base);
5135 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5139 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5141 /* Recognize the case where operand[1] is a reference to thread-local
5142 data and load its address to a register. Offsets have been split off
5144 if (arm_tls_referenced_p (operands[1]))
5145 operands[1] = legitimize_tls_address (operands[1], tmp);
5147 && (CONSTANT_P (operands[1])
5148 || symbol_mentioned_p (operands[1])
5149 || label_mentioned_p (operands[1])))
5151 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5156 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5157 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5158 ;; so this does not matter.
5159 (define_insn "*arm_movt"
5160 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5161 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5162 (match_operand:SI 2 "general_operand" "i,i")))]
5163 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5165 movt%?\t%0, #:upper16:%c2
5166 movt\t%0, #:upper16:%c2"
5167 [(set_attr "arch" "32,v8mb")
5168 (set_attr "predicable" "yes")
5169 (set_attr "length" "4")
5170 (set_attr "type" "alu_sreg")]
5173 (define_insn "*arm_movsi_insn"
5174 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5175 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5176 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5177 && ( register_operand (operands[0], SImode)
5178 || register_operand (operands[1], SImode))"
5186 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5187 (set_attr "predicable" "yes")
5188 (set_attr "arch" "*,*,*,v6t2,*,*")
5189 (set_attr "pool_range" "*,*,*,*,4096,*")
5190 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5194 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5195 (match_operand:SI 1 "const_int_operand" ""))]
5196 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5197 && (!(const_ok_for_arm (INTVAL (operands[1]))
5198 || const_ok_for_arm (~INTVAL (operands[1]))))"
5199 [(clobber (const_int 0))]
5201 arm_split_constant (SET, SImode, NULL_RTX,
5202 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5207 ;; A normal way to do (symbol + offset) requires three instructions at least
5208 ;; (depends on how big the offset is) as below:
5209 ;; movw r0, #:lower16:g
5210 ;; movw r0, #:upper16:g
5213 ;; A better way would be:
5214 ;; movw r0, #:lower16:g+4
5215 ;; movw r0, #:upper16:g+4
5217 ;; The limitation of this way is that the length of offset should be a 16-bit
5218 ;; signed value, because current assembler only supports REL type relocation for
5219 ;; such case. If the more powerful RELA type is supported in future, we should
5220 ;; update this pattern to go with better way.
5222 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5223 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5224 (match_operand:SI 2 "const_int_operand" ""))))]
5227 && arm_disable_literal_pool
5229 && GET_CODE (operands[1]) == SYMBOL_REF"
5230 [(clobber (const_int 0))]
5232 int offset = INTVAL (operands[2]);
5234 if (offset < -0x8000 || offset > 0x7fff)
5236 arm_emit_movpair (operands[0], operands[1]);
5237 emit_insn (gen_rtx_SET (operands[0],
5238 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5242 rtx op = gen_rtx_CONST (SImode,
5243 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5244 arm_emit_movpair (operands[0], op);
5249 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5250 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5251 ;; and lo_sum would be merged back into memory load at cprop. However,
5252 ;; if the default is to prefer movt/movw rather than a load from the constant
5253 ;; pool, the performance is better.
5255 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5256 (match_operand:SI 1 "general_operand" ""))]
5257 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5258 && !target_word_relocations
5259 && !arm_tls_referenced_p (operands[1])"
5260 [(clobber (const_int 0))]
5262 arm_emit_movpair (operands[0], operands[1]);
5266 ;; When generating pic, we need to load the symbol offset into a register.
5267 ;; So that the optimizer does not confuse this with a normal symbol load
5268 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5269 ;; since that is the only type of relocation we can use.
5271 ;; Wrap calculation of the whole PIC address in a single pattern for the
5272 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5273 ;; a PIC address involves two loads from memory, so we want to CSE it
5274 ;; as often as possible.
5275 ;; This pattern will be split into one of the pic_load_addr_* patterns
5276 ;; and a move after GCSE optimizations.
5278 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5279 (define_expand "calculate_pic_address"
5280 [(set (match_operand:SI 0 "register_operand")
5281 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5282 (unspec:SI [(match_operand:SI 2 "" "")]
5287 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5289 [(set (match_operand:SI 0 "register_operand" "")
5290 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5291 (unspec:SI [(match_operand:SI 2 "" "")]
5294 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5295 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5296 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5299 ;; operand1 is the memory address to go into
5300 ;; pic_load_addr_32bit.
5301 ;; operand2 is the PIC label to be emitted
5302 ;; from pic_add_dot_plus_eight.
5303 ;; We do this to allow hoisting of the entire insn.
5304 (define_insn_and_split "pic_load_addr_unified"
5305 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5306 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5307 (match_operand:SI 2 "" "")]
5308 UNSPEC_PIC_UNIFIED))]
5311 "&& reload_completed"
5312 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5313 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5314 (match_dup 2)] UNSPEC_PIC_BASE))]
5315 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5316 [(set_attr "type" "load_4,load_4,load_4")
5317 (set_attr "pool_range" "4096,4094,1022")
5318 (set_attr "neg_pool_range" "4084,0,0")
5319 (set_attr "arch" "a,t2,t1")
5320 (set_attr "length" "8,6,4")]
5323 ;; The rather odd constraints on the following are to force reload to leave
5324 ;; the insn alone, and to force the minipool generation pass to then move
5325 ;; the GOT symbol to memory.
5327 (define_insn "pic_load_addr_32bit"
5328 [(set (match_operand:SI 0 "s_register_operand" "=r")
5329 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5330 "TARGET_32BIT && flag_pic"
5332 [(set_attr "type" "load_4")
5333 (set (attr "pool_range")
5334 (if_then_else (eq_attr "is_thumb" "no")
5337 (set (attr "neg_pool_range")
5338 (if_then_else (eq_attr "is_thumb" "no")
5343 (define_insn "pic_load_addr_thumb1"
5344 [(set (match_operand:SI 0 "s_register_operand" "=l")
5345 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5346 "TARGET_THUMB1 && flag_pic"
5348 [(set_attr "type" "load_4")
5349 (set (attr "pool_range") (const_int 1018))]
5352 (define_insn "pic_add_dot_plus_four"
5353 [(set (match_operand:SI 0 "register_operand" "=r")
5354 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5356 (match_operand 2 "" "")]
5360 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5361 INTVAL (operands[2]));
5362 return \"add\\t%0, %|pc\";
5364 [(set_attr "length" "2")
5365 (set_attr "type" "alu_sreg")]
5368 (define_insn "pic_add_dot_plus_eight"
5369 [(set (match_operand:SI 0 "register_operand" "=r")
5370 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5372 (match_operand 2 "" "")]
5376 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5377 INTVAL (operands[2]));
5378 return \"add%?\\t%0, %|pc, %1\";
5380 [(set_attr "predicable" "yes")
5381 (set_attr "type" "alu_sreg")]
5384 (define_insn "tls_load_dot_plus_eight"
5385 [(set (match_operand:SI 0 "register_operand" "=r")
5386 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5388 (match_operand 2 "" "")]
5392 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5393 INTVAL (operands[2]));
5394 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5396 [(set_attr "predicable" "yes")
5397 (set_attr "type" "load_4")]
5400 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5401 ;; followed by a load. These sequences can be crunched down to
5402 ;; tls_load_dot_plus_eight by a peephole.
5405 [(set (match_operand:SI 0 "register_operand" "")
5406 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5408 (match_operand 1 "" "")]
5410 (set (match_operand:SI 2 "arm_general_register_operand" "")
5411 (mem:SI (match_dup 0)))]
5412 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5414 (mem:SI (unspec:SI [(match_dup 3)
5421 (define_insn "pic_offset_arm"
5422 [(set (match_operand:SI 0 "register_operand" "=r")
5423 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5424 (unspec:SI [(match_operand:SI 2 "" "X")]
5425 UNSPEC_PIC_OFFSET))))]
5426 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5427 "ldr%?\\t%0, [%1,%2]"
5428 [(set_attr "type" "load_4")]
5431 (define_expand "builtin_setjmp_receiver"
5432 [(label_ref (match_operand 0 "" ""))]
5436 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5438 if (arm_pic_register != INVALID_REGNUM)
5439 arm_load_pic_register (1UL << 3, NULL_RTX);
5443 ;; If copying one reg to another we can set the condition codes according to
5444 ;; its value. Such a move is common after a return from subroutine and the
5445 ;; result is being tested against zero.
5447 (define_insn "*movsi_compare0"
5448 [(set (reg:CC CC_REGNUM)
5449 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5451 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5456 subs%?\\t%0, %1, #0"
5457 [(set_attr "conds" "set")
5458 (set_attr "type" "alus_imm,alus_imm")]
5461 ;; Subroutine to store a half word from a register into memory.
5462 ;; Operand 0 is the source register (HImode)
5463 ;; Operand 1 is the destination address in a register (SImode)
5465 ;; In both this routine and the next, we must be careful not to spill
5466 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5467 ;; can generate unrecognizable rtl.
;; NOTE(review): the original line numbering embedded in this extract has
;; gaps, so parts of the patterns below (closing braces, insn conditions,
;; else-arms) are not visible here.  Comments describe only what is visible.
;;
;; "storehi": store an HImode value to memory as two QImode stores on
;; targets without a halfword store: the low byte first (operand 1), then
;; the high byte (operand 0 shifted right by 8) at address+1.  A
;; non-offsettable address (e.g. reg+reg PLUS) is first forced into a
;; register via replace_equiv_address/force_reg.
5469 (define_expand "storehi"
5470 [;; store the low byte
5471 (set (match_operand 1 "" "") (match_dup 3))
5472 ;; extract the high byte
5474 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5475 ;; store the high byte
5476 (set (match_dup 4) (match_dup 5))]
5480 rtx op1 = operands[1];
5481 rtx addr = XEXP (op1, 0);
5482 enum rtx_code code = GET_CODE (addr);
5484 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5486 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5488 operands[4] = adjust_address (op1, QImode, 1);
5489 operands[1] = adjust_address (operands[1], QImode, 0);
5490 operands[3] = gen_lowpart (QImode, operands[0]);
5491 operands[0] = gen_lowpart (SImode, operands[0]);
5492 operands[2] = gen_reg_rtx (SImode);
5493 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of "storehi": the two byte stores are issued in the
;; opposite order (the byte at address+1 is written first).
5497 (define_expand "storehi_bigend"
5498 [(set (match_dup 4) (match_dup 3))
5500 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5501 (set (match_operand 1 "" "") (match_dup 5))]
5505 rtx op1 = operands[1];
5506 rtx addr = XEXP (op1, 0);
5507 enum rtx_code code = GET_CODE (addr);
5509 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5511 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5513 operands[4] = adjust_address (op1, QImode, 1);
5514 operands[1] = adjust_address (operands[1], QImode, 0);
5515 operands[3] = gen_lowpart (QImode, operands[0]);
5516 operands[0] = gen_lowpart (SImode, operands[0]);
5517 operands[2] = gen_reg_rtx (SImode);
5518 operands[5] = gen_lowpart (QImode, operands[2]);
5522 ;; Subroutine to store a half word integer constant into memory.
;; The two byte values are materialised into SImode registers with movsi;
;; when the low and high bytes of the constant are equal, the same register
;; is reused for both stores.  Byte order follows BYTES_BIG_ENDIAN.
5523 (define_expand "storeinthi"
5524 [(set (match_operand 0 "" "")
5525 (match_operand 1 "" ""))
5526 (set (match_dup 3) (match_dup 2))]
5530 HOST_WIDE_INT value = INTVAL (operands[1]);
5531 rtx addr = XEXP (operands[0], 0);
5532 rtx op0 = operands[0];
5533 enum rtx_code code = GET_CODE (addr);
5535 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5537 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5539 operands[1] = gen_reg_rtx (SImode);
5540 if (BYTES_BIG_ENDIAN)
5542 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5543 if ((value & 255) == ((value >> 8) & 255))
5544 operands[2] = operands[1];
5547 operands[2] = gen_reg_rtx (SImode);
5548 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5553 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5554 if ((value & 255) == ((value >> 8) & 255))
5555 operands[2] = operands[1];
5558 operands[2] = gen_reg_rtx (SImode);
5559 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5563 operands[3] = adjust_address (op0, QImode, 1);
5564 operands[0] = adjust_address (operands[0], QImode, 0);
5565 operands[2] = gen_lowpart (QImode, operands[2]);
5566 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single halfword store (strh) for ARMv4+ 32-bit targets; the source is
;; forced into a core register if it is not already one.
5570 (define_expand "storehi_single_op"
5571 [(set (match_operand:HI 0 "memory_operand")
5572 (match_operand:HI 1 "general_operand"))]
5573 "TARGET_32BIT && arm_arch4"
5575 if (!s_register_operand (operands[1], HImode))
5576 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; Main HImode move expander.  Visible structure (this extract has elided
;; lines, so branch/brace pairing is partly hidden):
;;  - For a MEM destination: use storehi_single_op on ARMv4+, storeinthi
;;    for constant sources, otherwise storehi/storehi_bigend byte pairs.
;;  - Constant sources are massaged (masked to 16 bits, possibly
;;    sign-extended or top-bits-set so const_ok_for_arm accepts them) and
;;    kept in an SImode register, taking the HImode lowpart.
;;  - Pre-ARMv4 loads: if the base register is known 32-bit aligned, a
;;    word load plus shift extracts the halfword; otherwise movhi_bytes.
;;  - Thumb-2 and Thumb-1 arms follow, plus reload-time handling of large
;;    constants (SECONDARY_RELOAD comment retained below).
;; NOTE(review): exact conditions of several if/else arms are not visible
;; here — confirm against the complete file before modifying.
5580 (define_expand "movhi"
5581 [(set (match_operand:HI 0 "general_operand")
5582 (match_operand:HI 1 "general_operand"))]
5585 gcc_checking_assert (aligned_operand (operands[0], HImode));
5586 gcc_checking_assert (aligned_operand (operands[1], HImode));
5589 if (can_create_pseudo_p ())
5591 if (MEM_P (operands[0]))
5595 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5598 if (CONST_INT_P (operands[1]))
5599 emit_insn (gen_storeinthi (operands[0], operands[1]));
5602 if (MEM_P (operands[1]))
5603 operands[1] = force_reg (HImode, operands[1]);
5604 if (BYTES_BIG_ENDIAN)
5605 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5607 emit_insn (gen_storehi (operands[1], operands[0]));
5611 /* Sign extend a constant, and keep it in an SImode reg. */
5612 else if (CONST_INT_P (operands[1]))
5614 rtx reg = gen_reg_rtx (SImode);
5615 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5617 /* If the constant is already valid, leave it alone. */
5618 if (!const_ok_for_arm (val))
5620 /* If setting all the top bits will make the constant
5621 loadable in a single instruction, then set them.
5622 Otherwise, sign extend the number. */
5624 if (const_ok_for_arm (~(val | ~0xffff)))
5626 else if (val & 0x8000)
5630 emit_insn (gen_movsi (reg, GEN_INT (val)));
5631 operands[1] = gen_lowpart (HImode, reg);
5633 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5634 && MEM_P (operands[1]))
5636 rtx reg = gen_reg_rtx (SImode);
5638 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5639 operands[1] = gen_lowpart (HImode, reg);
5641 else if (!arm_arch4)
5643 if (MEM_P (operands[1]))
5646 rtx offset = const0_rtx;
5647 rtx reg = gen_reg_rtx (SImode);
5649 if ((REG_P (base = XEXP (operands[1], 0))
5650 || (GET_CODE (base) == PLUS
5651 && (CONST_INT_P (offset = XEXP (base, 1)))
5652 && ((INTVAL(offset) & 1) != 1)
5653 && REG_P (base = XEXP (base, 0))))
5654 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5658 new_rtx = widen_memory_access (operands[1], SImode,
5659 ((INTVAL (offset) & ~3)
5660 - INTVAL (offset)));
5661 emit_insn (gen_movsi (reg, new_rtx));
5662 if (((INTVAL (offset) & 2) != 0)
5663 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5665 rtx reg2 = gen_reg_rtx (SImode);
5667 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5672 emit_insn (gen_movhi_bytes (reg, operands[1]));
5674 operands[1] = gen_lowpart (HImode, reg);
5678 /* Handle loading a large integer during reload. */
5679 else if (CONST_INT_P (operands[1])
5680 && !const_ok_for_arm (INTVAL (operands[1]))
5681 && !const_ok_for_arm (~INTVAL (operands[1])))
5683 /* Writing a constant to memory needs a scratch, which should
5684 be handled with SECONDARY_RELOADs. */
5685 gcc_assert (REG_P (operands[0]));
5687 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5688 emit_insn (gen_movsi (operands[0], operands[1]));
5692 else if (TARGET_THUMB2)
5694 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5695 if (can_create_pseudo_p ())
5697 if (!REG_P (operands[0]))
5698 operands[1] = force_reg (HImode, operands[1]);
5699 /* Zero extend a constant, and keep it in an SImode reg. */
5700 else if (CONST_INT_P (operands[1]))
5702 rtx reg = gen_reg_rtx (SImode);
5703 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5705 emit_insn (gen_movsi (reg, GEN_INT (val)));
5706 operands[1] = gen_lowpart (HImode, reg);
5710 else /* TARGET_THUMB1 */
5712 if (can_create_pseudo_p ())
5714 if (CONST_INT_P (operands[1]))
5716 rtx reg = gen_reg_rtx (SImode);
5718 emit_insn (gen_movsi (reg, operands[1]));
5719 operands[1] = gen_lowpart (HImode, reg);
5722 /* ??? We shouldn't really get invalid addresses here, but this can
5723 happen if we are passed a SP (never OK for HImode/QImode) or
5724 virtual register (also rejected as illegitimate for HImode/QImode)
5725 relative address. */
5726 /* ??? This should perhaps be fixed elsewhere, for instance, in
5727 fixup_stack_1, by checking for other kinds of invalid addresses,
5728 e.g. a bare reference to a virtual register. This may confuse the
5729 alpha though, which must handle this case differently. */
5730 if (MEM_P (operands[0])
5731 && !memory_address_p (GET_MODE (operands[0]),
5732 XEXP (operands[0], 0)))
5734 = replace_equiv_address (operands[0],
5735 copy_to_reg (XEXP (operands[0], 0)));
5737 if (MEM_P (operands[1])
5738 && !memory_address_p (GET_MODE (operands[1]),
5739 XEXP (operands[1], 0)))
5741 = replace_equiv_address (operands[1],
5742 copy_to_reg (XEXP (operands[1], 0)));
5744 if (MEM_P (operands[1]) && optimize > 0)
5746 rtx reg = gen_reg_rtx (SImode);
5748 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5749 operands[1] = gen_lowpart (HImode, reg);
5752 if (MEM_P (operands[0]))
5753 operands[1] = force_reg (HImode, operands[1]);
5755 else if (CONST_INT_P (operands[1])
5756 && !satisfies_constraint_I (operands[1]))
5758 /* Handle loading a large integer during reload. */
5760 /* Writing a constant to memory needs a scratch, which should
5761 be handled with SECONDARY_RELOADs. */
5762 gcc_assert (REG_P (operands[0]));
5764 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5765 emit_insn (gen_movsi (operands[0], operands[1]));
;; "movhi_bytes": load an HImode value as two QImode loads (address and
;; address+1), combining them with (ior (ashift high 8) low).  Which of the
;; two zero-extended bytes is the high part depends on BYTES_BIG_ENDIAN.
;; Used by the pre-ARMv4 path of "movhi" above, which lacks ldrh.
5772 (define_expand "movhi_bytes"
5773 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5775 (zero_extend:SI (match_dup 6)))
5776 (set (match_operand:SI 0 "" "")
5777 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5782 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5784 mem1 = change_address (operands[1], QImode, addr);
5785 mem2 = change_address (operands[1], QImode,
5786 plus_constant (Pmode, addr, 1));
5787 operands[0] = gen_lowpart (SImode, operands[0]);
5789 operands[2] = gen_reg_rtx (SImode);
5790 operands[3] = gen_reg_rtx (SImode);
5793 if (BYTES_BIG_ENDIAN)
5795 operands[4] = operands[2];
5796 operands[5] = operands[3];
5800 operands[4] = operands[3];
5801 operands[5] = operands[2];
;; "movhi_bigend": big-endian HImode load — rotate the word containing the
;; halfword, arithmetic-shift right by 16, and take the HImode lowpart.
;; NOTE(review): some RTL of this pattern is elided in this extract.
5806 (define_expand "movhi_bigend"
5808 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5811 (ashiftrt:SI (match_dup 2) (const_int 16)))
5812 (set (match_operand:HI 0 "s_register_operand")
5816 operands[2] = gen_reg_rtx (SImode);
5817 operands[3] = gen_reg_rtx (SImode);
5818 operands[4] = gen_lowpart (HImode, operands[3]);
5822 ;; Pattern to recognize insn generated default case above
;; Five alternatives: reg<-reg/imm (mov), reg<-~imm (mvn), reg<-16-bit imm
;; (movw, v6t2 only per the "arch" attr), mem<-reg (strh), reg<-mem (ldrh).
;; The pool_range attrs bound literal-pool offsets for the load alternative.
5823 (define_insn "*movhi_insn_arch4"
5824 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5825 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5827 && arm_arch4 && !TARGET_HARD_FLOAT
5828 && (register_operand (operands[0], HImode)
5829 || register_operand (operands[1], HImode))"
5831 mov%?\\t%0, %1\\t%@ movhi
5832 mvn%?\\t%0, #%B1\\t%@ movhi
5833 movw%?\\t%0, %L1\\t%@ movhi
5834 strh%?\\t%1, %0\\t%@ movhi
5835 ldrh%?\\t%0, %1\\t%@ movhi"
5836 [(set_attr "predicable" "yes")
5837 (set_attr "pool_range" "*,*,*,*,256")
5838 (set_attr "neg_pool_range" "*,*,*,*,244")
5839 (set_attr "arch" "*,*,v6t2,*,*")
5840 (set_attr_alternative "type"
5841 [(if_then_else (match_operand 1 "const_int_operand" "")
5842 (const_string "mov_imm" )
5843 (const_string "mov_reg"))
5844 (const_string "mvn_imm")
5845 (const_string "mov_imm")
5846 (const_string "store_4")
5847 (const_string "load_4")])]
;; Register/immediate HImode moves for ARM state without hard float:
;; mov with an I-constraint immediate, mov reg-to-reg, or mvn of the
;; bitwise complement (K constraint).
5850 (define_insn "*movhi_bytes"
5851 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5852 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5853 "TARGET_ARM && !TARGET_HARD_FLOAT"
5855 mov%?\\t%0, %1\\t%@ movhi
5856 mov%?\\t%0, %1\\t%@ movhi
5857 mvn%?\\t%0, #%B1\\t%@ movhi"
5858 [(set_attr "predicable" "yes")
5859 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5862 ;; We use a DImode scratch because we may occasionally need an additional
5863 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5864 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5865 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5866 ;; to be correctly handled in default_secondary_reload function.
;; Dispatches to arm_reload_out_hi or thumb_reload_out_hi (the selecting
;; condition between the two calls is elided in this extract).
5867 (define_expand "reload_outhi"
5868 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5869 (match_operand:HI 1 "s_register_operand" "r")
5870 (match_operand:DI 2 "s_register_operand" "=&l")])]
5873 arm_reload_out_hi (operands);
5875 thumb_reload_out_hi (operands);
;; Input-reload counterpart.  NOTE(review): the fallback arm calls
;; thumb_reload_out_hi (not *_in_hi) — this matches the upstream file,
;; where the Thumb helper handles both directions.
5880 (define_expand "reload_inhi"
5881 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5882 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5883 (match_operand:DI 2 "s_register_operand" "=&r")])]
5887 arm_reload_in_hi (operands);
5889 thumb_reload_out_hi (operands);
;; QImode move expander.  Constants are forced through an SImode register
;; (masked to 255 for Thumb so a flag-setting movs can be used); invalid
;; MEM addresses are legitimised via copy_to_reg; with optimisation a MEM
;; source is zero-extended through SImode; mem=mem is split by forcing the
;; source into a register.  Large Thumb constants during reload go through
;; an SImode subreg, mirroring "movhi" above.
5893 (define_expand "movqi"
5894 [(set (match_operand:QI 0 "general_operand")
5895 (match_operand:QI 1 "general_operand"))]
5898 /* Everything except mem = const or mem = mem can be done easily */
5900 if (can_create_pseudo_p ())
5902 if (CONST_INT_P (operands[1]))
5904 rtx reg = gen_reg_rtx (SImode);
5906 /* For thumb we want an unsigned immediate, then we are more likely
5907 to be able to use a movs insn. */
5909 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5911 emit_insn (gen_movsi (reg, operands[1]));
5912 operands[1] = gen_lowpart (QImode, reg);
5917 /* ??? We shouldn't really get invalid addresses here, but this can
5918 happen if we are passed a SP (never OK for HImode/QImode) or
5919 virtual register (also rejected as illegitimate for HImode/QImode)
5920 relative address. */
5921 /* ??? This should perhaps be fixed elsewhere, for instance, in
5922 fixup_stack_1, by checking for other kinds of invalid addresses,
5923 e.g. a bare reference to a virtual register. This may confuse the
5924 alpha though, which must handle this case differently. */
5925 if (MEM_P (operands[0])
5926 && !memory_address_p (GET_MODE (operands[0]),
5927 XEXP (operands[0], 0)))
5929 = replace_equiv_address (operands[0],
5930 copy_to_reg (XEXP (operands[0], 0)));
5931 if (MEM_P (operands[1])
5932 && !memory_address_p (GET_MODE (operands[1]),
5933 XEXP (operands[1], 0)))
5935 = replace_equiv_address (operands[1],
5936 copy_to_reg (XEXP (operands[1], 0)));
5939 if (MEM_P (operands[1]) && optimize > 0)
5941 rtx reg = gen_reg_rtx (SImode);
5943 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5944 operands[1] = gen_lowpart (QImode, reg);
5947 if (MEM_P (operands[0]))
5948 operands[1] = force_reg (QImode, operands[1]);
5950 else if (TARGET_THUMB
5951 && CONST_INT_P (operands[1])
5952 && !satisfies_constraint_I (operands[1]))
5954 /* Handle loading a large integer during reload. */
5956 /* Writing a constant to memory needs a scratch, which should
5957 be handled with SECONDARY_RELOADs. */
5958 gcc_assert (REG_P (operands[0]));
5960 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5961 emit_insn (gen_movsi (operands[0], operands[1]));
;; QImode move insn; nine alternatives spanning ARM and Thumb-2 encodings
;; (see "arch"/"length" attrs).  NOTE(review): the insn condition and the
;; output templates are elided in this extract.
5967 (define_insn "*arm_movqi_insn"
5968 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5969 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5971 && ( register_operand (operands[0], QImode)
5972 || register_operand (operands[1], QImode))"
5983 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5984 (set_attr "predicable" "yes")
5985 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5986 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5987 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; HFmode (__fp16) move expander: mem destinations force the source into a
;; register; likewise for Thumb-1 when the destination is not a register.
5991 (define_expand "movhf"
5992 [(set (match_operand:HF 0 "general_operand")
5993 (match_operand:HF 1 "general_operand"))]
5996 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5997 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6000 if (MEM_P (operands[0]))
6001 operands[1] = force_reg (HFmode, operands[1]);
6003 else /* TARGET_THUMB1 */
6005 if (can_create_pseudo_p ())
6007 if (!REG_P (operands[0]))
6008 operands[1] = force_reg (HFmode, operands[1]);
;; Soft-float HFmode move: ldrh/strh for memory, mov for registers; a
;; constant is rebuilt from its target bit pattern, using a single movw on
;; Thumb-2-capable cores or a mov+orr pair of 8-bit chunks otherwise.
6014 (define_insn "*arm32_movhf"
6015 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6016 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6017 "TARGET_32BIT && !TARGET_HARD_FLOAT
6018 && ( s_register_operand (operands[0], HFmode)
6019 || s_register_operand (operands[1], HFmode))"
6021 switch (which_alternative)
6023 case 0: /* ARM register from memory */
6024 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6025 case 1: /* memory from ARM register */
6026 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6027 case 2: /* ARM register from ARM register */
6028 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6029 case 3: /* ARM register from constant */
6034 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6036 ops[0] = operands[0];
6037 ops[1] = GEN_INT (bits);
6038 ops[2] = GEN_INT (bits & 0xff00);
6039 ops[3] = GEN_INT (bits & 0x00ff);
6041 if (arm_arch_thumb2)
6042 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6044 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6051 [(set_attr "conds" "unconditional")
6052 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6053 (set_attr "length" "4,4,4,8")
6054 (set_attr "predicable" "yes")]
;; SFmode move expander.  Memory destinations force the source into a
;; register; when the literal pool is disabled, a hard-float constant that
;; VFP cannot encode is loaded via a clobber pattern so it can go through
;; a GPR with MOV/MOVT.
6057 (define_expand "movsf"
6058 [(set (match_operand:SF 0 "general_operand")
6059 (match_operand:SF 1 "general_operand"))]
6062 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6063 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6066 if (MEM_P (operands[0]))
6067 operands[1] = force_reg (SFmode, operands[1]);
6069 else /* TARGET_THUMB1 */
6071 if (can_create_pseudo_p ())
6073 if (!REG_P (operands[0]))
6074 operands[1] = force_reg (SFmode, operands[1]);
6078 /* Cannot load it directly, generate a load with clobber so that it can be
6079 loaded via GPR with MOV / MOVT. */
6080 if (arm_disable_literal_pool
6081 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6082 && CONST_DOUBLE_P (operands[1])
6083 && TARGET_HARD_FLOAT
6084 && !vfp3_const_double_rtx (operands[1]))
6086 rtx clobreg = gen_reg_rtx (SFmode);
6087 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6094 ;; Transform a floating-point move of a constant into a core register into
6095 ;; an SImode operation.
6097 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6098 (match_operand:SF 1 "immediate_operand" ""))]
6101 && CONST_DOUBLE_P (operands[1])"
6102 [(set (match_dup 2) (match_dup 3))]
6104 operands[2] = gen_lowpart (SImode, operands[0]);
6105 operands[3] = gen_lowpart (SImode, operands[1]);
6106 if (operands[2] == 0 || operands[3] == 0)
;; Soft-float SFmode move: mov reg-reg, ldr from memory/pool, str to
;; memory.  The pool-range attrs bound literal-pool displacement; with
;; arm_disable_literal_pool a non-MEM source falls through to the MOV/MOVT
;; splitter below (the return in that branch is elided here).
6111 (define_insn "*arm_movsf_soft_insn"
6112 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6113 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6115 && TARGET_SOFT_FLOAT
6116 && (!MEM_P (operands[0])
6117 || register_operand (operands[1], SFmode))"
6119 switch (which_alternative)
6121 case 0: return \"mov%?\\t%0, %1\";
6123 /* Cannot load it directly, split to load it via MOV / MOVT. */
6124 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6126 return \"ldr%?\\t%0, %1\\t%@ float\";
6127 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6128 default: gcc_unreachable ();
6131 [(set_attr "predicable" "yes")
6132 (set_attr "type" "mov_reg,load_4,store_4")
6133 (set_attr "arm_pool_range" "*,4096,*")
6134 (set_attr "thumb2_pool_range" "*,4094,*")
6135 (set_attr "arm_neg_pool_range" "*,4084,*")
6136 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6139 ;; Splitter for the above.
;; Converts an SF constant into its 32-bit target representation and moves
;; it through an SImode subreg (becomes MOV/MOVT when pools are disabled).
6141 [(set (match_operand:SF 0 "s_register_operand")
6142 (match_operand:SF 1 "const_double_operand"))]
6143 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6147 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6148 rtx cst = gen_int_mode (buf, SImode);
6149 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; DFmode move expander; structure parallels "movsf" above, with the
;; additional arm_const_double_rtx / vfp3_const_double_rtx checks before
;; falling back to the no-literal-pool clobber load.
6154 (define_expand "movdf"
6155 [(set (match_operand:DF 0 "general_operand")
6156 (match_operand:DF 1 "general_operand"))]
6159 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6160 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6163 if (MEM_P (operands[0]))
6164 operands[1] = force_reg (DFmode, operands[1]);
6166 else /* TARGET_THUMB */
6168 if (can_create_pseudo_p ())
6170 if (!REG_P (operands[0]))
6171 operands[1] = force_reg (DFmode, operands[1]);
6175 /* Cannot load it directly, generate a load with clobber so that it can be
6176 loaded via GPR with MOV / MOVT. */
6177 if (arm_disable_literal_pool
6178 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6179 && CONSTANT_P (operands[1])
6180 && TARGET_HARD_FLOAT
6181 && !arm_const_double_rtx (operands[1])
6182 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6184 rtx clobreg = gen_reg_rtx (DFmode);
6185 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6192 ;; Reloading a df mode value stored in integer regs to memory can require a
6194 ;; Another reload_out<m> pattern that requires special constraints.
;; Handles offsettable addresses directly, turns POST_INC/PRE_DEC into a
;; DImode move, materialises PRE_INC/POST_DEC by adjusting the base
;; register, and otherwise computes the address into the scratch (op 2).
6195 (define_expand "reload_outdf"
6196 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6197 (match_operand:DF 1 "s_register_operand" "r")
6198 (match_operand:SI 2 "s_register_operand" "=&r")]
6202 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6205 operands[2] = XEXP (operands[0], 0);
6206 else if (code == POST_INC || code == PRE_DEC)
6208 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6209 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6210 emit_insn (gen_movdi (operands[0], operands[1]));
6213 else if (code == PRE_INC)
6215 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6217 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6220 else if (code == POST_DEC)
6221 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6223 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6224 XEXP (XEXP (operands[0], 0), 1)));
6226 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6229 if (code == POST_DEC)
6230 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; Soft-float DFmode move over core register pairs; alternatives for
;; different constant classes (Da/Db/Dc), memory load and store.  Output
;; goes through output_move_double; constants with pools disabled are
;; split via the DF splitter below.
6236 (define_insn "*movdf_soft_insn"
6237 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6238 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6239 "TARGET_32BIT && TARGET_SOFT_FLOAT
6240 && ( register_operand (operands[0], DFmode)
6241 || register_operand (operands[1], DFmode))"
6243 switch (which_alternative)
6250 /* Cannot load it directly, split to load it via MOV / MOVT. */
6251 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6255 return output_move_double (operands, true, NULL);
6258 [(set_attr "length" "8,12,16,8,8")
6259 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6260 (set_attr "arm_pool_range" "*,*,*,1020,*")
6261 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6262 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6263 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6266 ;; Splitter for the above.
;; Packs the two 32-bit halves of the DF constant (endian-aware via
;; BYTES_BIG_ENDIAN) into a DImode immediate and moves it through a subreg.
6268 [(set (match_operand:DF 0 "s_register_operand")
6269 (match_operand:DF 1 "const_double_operand"))]
6270 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6274 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6275 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6276 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6277 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6278 rtx cst = gen_int_mode (ival, DImode);
6279 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6285 ;; load- and store-multiple insns
6286 ;; The arm can load/store any set of registers, provided that they are in
6287 ;; ascending order, but these expanders assume a contiguous set.
;; "load_multiple": ldm of INTVAL(op2) consecutive core registers starting
;; at REGNO(op0).  The guards reject non-constant counts, counts outside
;; [2, MAX_LDM_STM_OPS], non-MEM sources, non-REG destinations, and ranges
;; that would run past LAST_ARM_REGNUM.
6289 (define_expand "load_multiple"
6290 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6291 (match_operand:SI 1 "" ""))
6292 (use (match_operand:SI 2 "" ""))])]
6295 HOST_WIDE_INT offset = 0;
6297 /* Support only fixed point registers. */
6298 if (!CONST_INT_P (operands[2])
6299 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6300 || INTVAL (operands[2]) < 2
6301 || !MEM_P (operands[1])
6302 || !REG_P (operands[0])
6303 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6304 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6308 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6309 INTVAL (operands[2]),
6310 force_reg (SImode, XEXP (operands[1], 0)),
6311 FALSE, operands[1], &offset);
;; "store_multiple": stm counterpart with the same guard conditions
;; (operand roles swapped: op1 is the first register, op0 the MEM).
6314 (define_expand "store_multiple"
6315 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6316 (match_operand:SI 1 "" ""))
6317 (use (match_operand:SI 2 "" ""))])]
6320 HOST_WIDE_INT offset = 0;
6322 /* Support only fixed point registers. */
6323 if (!CONST_INT_P (operands[2])
6324 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6325 || INTVAL (operands[2]) < 2
6326 || !REG_P (operands[1])
6327 || !MEM_P (operands[0])
6328 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6329 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6333 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6334 INTVAL (operands[2]),
6335 force_reg (SImode, XEXP (operands[0], 0)),
6336 FALSE, operands[0], &offset);
;; memset expander: delegates entirely to arm_gen_setmem.
6340 (define_expand "setmemsi"
6341 [(match_operand:BLK 0 "general_operand")
6342 (match_operand:SI 1 "const_int_operand")
6343 (match_operand:SI 2 "const_int_operand")
6344 (match_operand:SI 3 "const_int_operand")]
6347 if (arm_gen_setmem (operands))
6354 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6355 ;; We could let this apply for blocks of less than this, but it clobbers so
6356 ;; many registers that there is then probably a better way.
;; Block copy: prefers the ldrd/strd path when tuned for it and not
;; optimising for size, falls back to arm_gen_cpymemqi; the Thumb-1 arm
;; requires word alignment (op3 == 4) and length <= 48 bytes.
6358 (define_expand "cpymemqi"
6359 [(match_operand:BLK 0 "general_operand")
6360 (match_operand:BLK 1 "general_operand")
6361 (match_operand:SI 2 "const_int_operand")
6362 (match_operand:SI 3 "const_int_operand")]
6367 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6368 && !optimize_function_for_size_p (cfun))
6370 if (gen_cpymem_ldrd_strd (operands))
6375 if (arm_gen_cpymemqi (operands))
6379 else /* TARGET_THUMB1 */
6381 if ( INTVAL (operands[3]) != 4
6382 || INTVAL (operands[2]) > 48)
6385 thumb_expand_cpymemqi (operands);
6392 ;; Compare & branch insns
6393 ;; The range calculations are based as follows:
6394 ;; For forward branches, the address calculation returns the address of
6395 ;; the next instruction. This is 2 beyond the branch instruction.
6396 ;; For backward branches, the address calculation returns the address of
6397 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6398 ;; instruction for the shortest sequence, and 4 before the branch instruction
6399 ;; if we have to jump around an unconditional branch.
6400 ;; To the basic branch range the PC offset must be added (this is +4).
6401 ;; So for forward branches we have
6402 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6403 ;; And for backward branches we have
6404 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6406 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6407 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; SImode compare-and-branch: validates the comparison then emits
;; cbranch_cc; Thumb-1 negated-constant compares use cbranchsi4_scratch,
;; and other non-cmp operands are forced into a register.
6409 (define_expand "cbranchsi4"
6410 [(set (pc) (if_then_else
6411 (match_operator 0 "expandable_comparison_operator"
6412 [(match_operand:SI 1 "s_register_operand")
6413 (match_operand:SI 2 "nonmemory_operand")])
6414 (label_ref (match_operand 3 "" ""))
6420 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6422 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6426 if (thumb1_cmpneg_operand (operands[2], SImode))
6428 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6429 operands[3], operands[0]));
6432 if (!thumb1_cmp_operand (operands[2], SImode))
6433 operands[2] = force_reg (SImode, operands[2]);
;; SFmode compare-and-branch for hard-float targets: direct cbranch_cc.
6436 (define_expand "cbranchsf4"
6437 [(set (pc) (if_then_else
6438 (match_operator 0 "expandable_comparison_operator"
6439 [(match_operand:SF 1 "s_register_operand")
6440 (match_operand:SF 2 "vfp_compare_operand")])
6441 (label_ref (match_operand 3 "" ""))
6443 "TARGET_32BIT && TARGET_HARD_FLOAT"
6444 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6445 operands[3])); DONE;"
;; DFmode variant; additionally requires double-precision VFP.
6448 (define_expand "cbranchdf4"
6449 [(set (pc) (if_then_else
6450 (match_operator 0 "expandable_comparison_operator"
6451 [(match_operand:DF 1 "s_register_operand")
6452 (match_operand:DF 2 "vfp_compare_operand")])
6453 (label_ref (match_operand 3 "" ""))
6455 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6456 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6457 operands[3])); DONE;"
;; DImode compare-and-branch: validate, then cbranch_cc.
6460 (define_expand "cbranchdi4"
6461 [(set (pc) (if_then_else
6462 (match_operator 0 "expandable_comparison_operator"
6463 [(match_operand:DI 1 "s_register_operand")
6464 (match_operand:DI 2 "cmpdi_operand")])
6465 (label_ref (match_operand 3 "" ""))
6469 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6471 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6477 ;; Comparison and test insns
;; SImode compare setting the full CC register; alternatives cover Thumb-2
;; short encodings (Py) and ARM register/immediate forms (I positive, L
;; negated).  NOTE(review): the output templates are elided in this extract.
6479 (define_insn "*arm_cmpsi_insn"
6480 [(set (reg:CC CC_REGNUM)
6481 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6482 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6490 [(set_attr "conds" "set")
6491 (set_attr "arch" "t2,t2,any,any,any")
6492 (set_attr "length" "2,2,4,4,4")
6493 (set_attr "predicable" "yes")
6494 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6495 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; Compare a register against a shifted register (cmp rN, rM, <shift>).
6498 (define_insn "*cmpsi_shiftsi"
6499 [(set (reg:CC CC_REGNUM)
6500 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6501 (match_operator:SI 3 "shift_operator"
6502 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6503 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6506 [(set_attr "conds" "set")
6507 (set_attr "shift" "1")
6508 (set_attr "arch" "32,a,a")
6509 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; As above with the operands swapped, producing a CC_SWP-mode result.
6511 (define_insn "*cmpsi_shiftsi_swp"
6512 [(set (reg:CC_SWP CC_REGNUM)
6513 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6514 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6515 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6516 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6519 [(set_attr "conds" "set")
6520 (set_attr "shift" "1")
6521 (set_attr "arch" "32,a,a")
6522 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Compare of a register against a negated shifted register; only the Z
;; flag result is valid (CC_Z mode).
6524 (define_insn "*arm_cmpsi_negshiftsi_si"
6525 [(set (reg:CC_Z CC_REGNUM)
6527 (neg:SI (match_operator:SI 1 "shift_operator"
6528 [(match_operand:SI 2 "s_register_operand" "r")
6529 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6530 (match_operand:SI 0 "s_register_operand" "r")))]
6533 [(set_attr "conds" "set")
6534 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6535 (const_string "alus_shift_imm")
6536 (const_string "alus_shift_reg")))
6537 (set_attr "predicable" "yes")]
6540 ;; DImode comparisons. The generic code generates branches that
6541 ;; if-conversion cannot reduce to a conditional compare, so we do
;; Signed DImode compare: cmp on the low words, sbcs into a scratch on the
;; high words, producing an N/C/V-valid (CC_NCV) result.
6544 (define_insn "*arm_cmpdi_insn"
6545 [(set (reg:CC_NCV CC_REGNUM)
6546 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6547 (match_operand:DI 1 "arm_di_operand" "rDi")))
6548 (clobber (match_scratch:SI 2 "=r"))]
6550 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6551 [(set_attr "conds" "set")
6552 (set_attr "length" "8")
6553 (set_attr "type" "multiple")]
;; Unsigned DImode compare: after reload, split into a compare of the high
;; words followed by a conditionally-executed compare of the low words.
6556 (define_insn_and_split "*arm_cmpdi_unsigned"
6557 [(set (reg:CC_CZ CC_REGNUM)
6558 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6559 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6562 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6563 "&& reload_completed"
6564 [(set (reg:CC CC_REGNUM)
6565 (compare:CC (match_dup 2) (match_dup 3)))
6566 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6567 (set (reg:CC CC_REGNUM)
6568 (compare:CC (match_dup 0) (match_dup 1))))]
6570 operands[2] = gen_highpart (SImode, operands[0]);
6571 operands[0] = gen_lowpart (SImode, operands[0]);
6572 if (CONST_INT_P (operands[1]))
6573 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6575 operands[3] = gen_highpart (SImode, operands[1]);
6576 operands[1] = gen_lowpart (SImode, operands[1]);
6578 [(set_attr "conds" "set")
6579 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6580 (set_attr "arch" "t2,t2,t2,a")
6581 (set_attr "length" "6,6,10,8")
6582 (set_attr "type" "multiple")]
;; DImode compare against zero via a flag-setting OR of the two halves.
6585 (define_insn "*arm_cmpdi_zero"
6586 [(set (reg:CC_Z CC_REGNUM)
6587 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6589 (clobber (match_scratch:SI 1 "=r"))]
6591 "orrs%?\\t%1, %Q0, %R0"
6592 [(set_attr "conds" "set")
6593 (set_attr "type" "logics_reg")]
6596 ; This insn allows redundant compares to be removed by cse, nothing should
6597 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6598 ; is deleted later on. The match_dup will match the mode here, so that
6599 ; mode changes of the condition codes aren't lost by this even though we don't
6600 ; specify what they are.
6602 (define_insn "*deleted_compare"
6603 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6605 "\\t%@ deleted compare"
6606 [(set_attr "conds" "set")
6607 (set_attr "length" "0")
6608 (set_attr "type" "no_insn")]
6612 ;; Conditional branch insns
;; Shared helper expander: materialise the comparison into the CC register
;; via arm_gen_compare_reg, then branch on CC against zero.
6614 (define_expand "cbranch_cc"
6616 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6617 (match_operand 2 "" "")])
6618 (label_ref (match_operand 3 "" ""))
6621 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6622 operands[1], operands[2], NULL_RTX);
6623 operands[2] = const0_rtx;"
6627 ;; Patterns to match conditional branch insns.
;; Conditional branch on the CC register.  The arm_ccfsm_state check
;; cooperates with the conditional-execution finite-state machine (the
;; body of that branch is elided here); the length attr picks the short
;; Thumb-2 encoding when the target is within (-250, 256).
6630 (define_insn "arm_cond_branch"
6632 (if_then_else (match_operator 1 "arm_comparison_operator"
6633 [(match_operand 2 "cc_register" "") (const_int 0)])
6634 (label_ref (match_operand 0 "" ""))
6638 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6640 arm_ccfsm_state += 2;
6643 return \"b%d1\\t%l0\";
6645 [(set_attr "conds" "use")
6646 (set_attr "type" "branch")
6647 (set (attr "length")
6649 (and (match_test "TARGET_THUMB2")
6650 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6651 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; As above but with the branch arms swapped, so the condition is emitted
;; reversed (%D1 instead of %d1).
6656 (define_insn "*arm_cond_branch_reversed"
6658 (if_then_else (match_operator 1 "arm_comparison_operator"
6659 [(match_operand 2 "cc_register" "") (const_int 0)])
6661 (label_ref (match_operand 0 "" ""))))]
6664 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6666 arm_ccfsm_state += 2;
6669 return \"b%D1\\t%l0\";
6671 [(set_attr "conds" "use")
6672 (set_attr "type" "branch")
6673 (set (attr "length")
6675 (and (match_test "TARGET_THUMB2")
6676 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6677 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Helper expander for scc-style patterns: generate the CC-register
;; compare, then store the condition result (mirrors cbranch_cc above).
6686 (define_expand "cstore_cc"
6687 [(set (match_operand:SI 0 "s_register_operand")
6688 (match_operator:SI 1 "" [(match_operand 2 "" "")
6689 (match_operand 3 "" "")]))]
6691 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6692 operands[2], operands[3], NULL_RTX);
6693 operands[3] = const0_rtx;"
;; scc: set register to 1/0 from a condition; split after reload into a
;; conditional-move if_then_else on the CC register.
6696 (define_insn_and_split "*mov_scc"
6697 [(set (match_operand:SI 0 "s_register_operand" "=r")
6698 (match_operator:SI 1 "arm_comparison_operator_mode"
6699 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6701 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6704 (if_then_else:SI (match_dup 1)
6708 [(set_attr "conds" "use")
6709 (set_attr "length" "8")
6710 (set_attr "type" "multiple")]
;; Negated scc: result is -1/0 (all-ones from GEN_INT (~0)).
6713 (define_insn_and_split "*mov_negscc"
6714 [(set (match_operand:SI 0 "s_register_operand" "=r")
6715 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6716 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6718 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6721 (if_then_else:SI (match_dup 1)
6725 operands[3] = GEN_INT (~0);
6727 [(set_attr "conds" "use")
6728 (set_attr "length" "8")
6729 (set_attr "type" "multiple")]
;; Complemented scc: result is ~1/~0 per the constants built below.
6732 (define_insn_and_split "*mov_notscc"
6733 [(set (match_operand:SI 0 "s_register_operand" "=r")
6734 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6735 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6737 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6740 (if_then_else:SI (match_dup 1)
6744 operands[3] = GEN_INT (~1);
6745 operands[4] = GEN_INT (~0);
6747 [(set_attr "conds" "use")
6748 (set_attr "length" "8")
6749 (set_attr "type" "multiple")]
6752 (define_expand "cstoresi4"
6753 [(set (match_operand:SI 0 "s_register_operand")
6754 (match_operator:SI 1 "expandable_comparison_operator"
6755 [(match_operand:SI 2 "s_register_operand")
6756 (match_operand:SI 3 "reg_or_int_operand")]))]
6757 "TARGET_32BIT || TARGET_THUMB1"
6759 rtx op3, scratch, scratch2;
6763 if (!arm_add_operand (operands[3], SImode))
6764 operands[3] = force_reg (SImode, operands[3]);
6765 emit_insn (gen_cstore_cc (operands[0], operands[1],
6766 operands[2], operands[3]));
6770 if (operands[3] == const0_rtx)
6772 switch (GET_CODE (operands[1]))
6775 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6779 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6783 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6784 NULL_RTX, 0, OPTAB_WIDEN);
6785 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6786 NULL_RTX, 0, OPTAB_WIDEN);
6787 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6788 operands[0], 1, OPTAB_WIDEN);
6792 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6794 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6795 NULL_RTX, 1, OPTAB_WIDEN);
6799 scratch = expand_binop (SImode, ashr_optab, operands[2],
6800 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6801 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6802 NULL_RTX, 0, OPTAB_WIDEN);
6803 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6807 /* LT is handled by generic code. No need for unsigned with 0. */
6814 switch (GET_CODE (operands[1]))
6817 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6818 NULL_RTX, 0, OPTAB_WIDEN);
6819 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6823 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6824 NULL_RTX, 0, OPTAB_WIDEN);
6825 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6829 op3 = force_reg (SImode, operands[3]);
6831 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6832 NULL_RTX, 1, OPTAB_WIDEN);
6833 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6834 NULL_RTX, 0, OPTAB_WIDEN);
6835 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6841 if (!thumb1_cmp_operand (op3, SImode))
6842 op3 = force_reg (SImode, op3);
6843 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6844 NULL_RTX, 0, OPTAB_WIDEN);
6845 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6846 NULL_RTX, 1, OPTAB_WIDEN);
6847 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6852 op3 = force_reg (SImode, operands[3]);
6853 scratch = force_reg (SImode, const0_rtx);
6854 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6860 if (!thumb1_cmp_operand (op3, SImode))
6861 op3 = force_reg (SImode, op3);
6862 scratch = force_reg (SImode, const0_rtx);
6863 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6869 if (!thumb1_cmp_operand (op3, SImode))
6870 op3 = force_reg (SImode, op3);
6871 scratch = gen_reg_rtx (SImode);
6872 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6876 op3 = force_reg (SImode, operands[3]);
6877 scratch = gen_reg_rtx (SImode);
6878 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6881 /* No good sequences for GT, LT. */
6888 (define_expand "cstorehf4"
6889 [(set (match_operand:SI 0 "s_register_operand")
6890 (match_operator:SI 1 "expandable_comparison_operator"
6891 [(match_operand:HF 2 "s_register_operand")
6892 (match_operand:HF 3 "vfp_compare_operand")]))]
6893 "TARGET_VFP_FP16INST"
6895 if (!arm_validize_comparison (&operands[1],
6900 emit_insn (gen_cstore_cc (operands[0], operands[1],
6901 operands[2], operands[3]));
6906 (define_expand "cstoresf4"
6907 [(set (match_operand:SI 0 "s_register_operand")
6908 (match_operator:SI 1 "expandable_comparison_operator"
6909 [(match_operand:SF 2 "s_register_operand")
6910 (match_operand:SF 3 "vfp_compare_operand")]))]
6911 "TARGET_32BIT && TARGET_HARD_FLOAT"
6912 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6913 operands[2], operands[3])); DONE;"
6916 (define_expand "cstoredf4"
6917 [(set (match_operand:SI 0 "s_register_operand")
6918 (match_operator:SI 1 "expandable_comparison_operator"
6919 [(match_operand:DF 2 "s_register_operand")
6920 (match_operand:DF 3 "vfp_compare_operand")]))]
6921 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6922 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6923 operands[2], operands[3])); DONE;"
6926 (define_expand "cstoredi4"
6927 [(set (match_operand:SI 0 "s_register_operand")
6928 (match_operator:SI 1 "expandable_comparison_operator"
6929 [(match_operand:DI 2 "s_register_operand")
6930 (match_operand:DI 3 "cmpdi_operand")]))]
6933 if (!arm_validize_comparison (&operands[1],
6937 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6944 ;; Conditional move insns
6946 (define_expand "movsicc"
6947 [(set (match_operand:SI 0 "s_register_operand")
6948 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6949 (match_operand:SI 2 "arm_not_operand")
6950 (match_operand:SI 3 "arm_not_operand")))]
6957 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6958 &XEXP (operands[1], 1)))
6961 code = GET_CODE (operands[1]);
6962 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6963 XEXP (operands[1], 1), NULL_RTX);
6964 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6968 (define_expand "movhfcc"
6969 [(set (match_operand:HF 0 "s_register_operand")
6970 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6971 (match_operand:HF 2 "s_register_operand")
6972 (match_operand:HF 3 "s_register_operand")))]
6973 "TARGET_VFP_FP16INST"
6976 enum rtx_code code = GET_CODE (operands[1]);
6979 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6980 &XEXP (operands[1], 1)))
6983 code = GET_CODE (operands[1]);
6984 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6985 XEXP (operands[1], 1), NULL_RTX);
6986 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6990 (define_expand "movsfcc"
6991 [(set (match_operand:SF 0 "s_register_operand")
6992 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6993 (match_operand:SF 2 "s_register_operand")
6994 (match_operand:SF 3 "s_register_operand")))]
6995 "TARGET_32BIT && TARGET_HARD_FLOAT"
6998 enum rtx_code code = GET_CODE (operands[1]);
7001 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7002 &XEXP (operands[1], 1)))
7005 code = GET_CODE (operands[1]);
7006 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7007 XEXP (operands[1], 1), NULL_RTX);
7008 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7012 (define_expand "movdfcc"
7013 [(set (match_operand:DF 0 "s_register_operand")
7014 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7015 (match_operand:DF 2 "s_register_operand")
7016 (match_operand:DF 3 "s_register_operand")))]
7017 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7020 enum rtx_code code = GET_CODE (operands[1]);
7023 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7024 &XEXP (operands[1], 1)))
7026 code = GET_CODE (operands[1]);
7027 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7028 XEXP (operands[1], 1), NULL_RTX);
7029 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7033 (define_insn "*cmov<mode>"
7034 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7035 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7036 [(match_operand 2 "cc_register" "") (const_int 0)])
7037 (match_operand:SDF 3 "s_register_operand"
7039 (match_operand:SDF 4 "s_register_operand"
7040 "<F_constraint>")))]
7041 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7044 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7051 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7056 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7062 [(set_attr "conds" "use")
7063 (set_attr "type" "fcsel")]
7066 (define_insn "*cmovhf"
7067 [(set (match_operand:HF 0 "s_register_operand" "=t")
7068 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7069 [(match_operand 2 "cc_register" "") (const_int 0)])
7070 (match_operand:HF 3 "s_register_operand" "t")
7071 (match_operand:HF 4 "s_register_operand" "t")))]
7072 "TARGET_VFP_FP16INST"
7075 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7082 return \"vsel%d1.f16\\t%0, %3, %4\";
7087 return \"vsel%D1.f16\\t%0, %4, %3\";
7093 [(set_attr "conds" "use")
7094 (set_attr "type" "fcsel")]
7097 (define_insn_and_split "*movsicc_insn"
7098 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7100 (match_operator 3 "arm_comparison_operator"
7101 [(match_operand 4 "cc_register" "") (const_int 0)])
7102 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7103 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7114 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7115 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7116 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7117 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7118 "&& reload_completed"
7121 enum rtx_code rev_code;
7125 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7127 gen_rtx_SET (operands[0], operands[1])));
7129 rev_code = GET_CODE (operands[3]);
7130 mode = GET_MODE (operands[4]);
7131 if (mode == CCFPmode || mode == CCFPEmode)
7132 rev_code = reverse_condition_maybe_unordered (rev_code);
7134 rev_code = reverse_condition (rev_code);
7136 rev_cond = gen_rtx_fmt_ee (rev_code,
7140 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7142 gen_rtx_SET (operands[0], operands[2])));
7145 [(set_attr "length" "4,4,4,4,8,8,8,8")
7146 (set_attr "conds" "use")
7147 (set_attr_alternative "type"
7148 [(if_then_else (match_operand 2 "const_int_operand" "")
7149 (const_string "mov_imm")
7150 (const_string "mov_reg"))
7151 (const_string "mvn_imm")
7152 (if_then_else (match_operand 1 "const_int_operand" "")
7153 (const_string "mov_imm")
7154 (const_string "mov_reg"))
7155 (const_string "mvn_imm")
7156 (const_string "multiple")
7157 (const_string "multiple")
7158 (const_string "multiple")
7159 (const_string "multiple")])]
7162 (define_insn "*movsfcc_soft_insn"
7163 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7164 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7165 [(match_operand 4 "cc_register" "") (const_int 0)])
7166 (match_operand:SF 1 "s_register_operand" "0,r")
7167 (match_operand:SF 2 "s_register_operand" "r,0")))]
7168 "TARGET_ARM && TARGET_SOFT_FLOAT"
7172 [(set_attr "conds" "use")
7173 (set_attr "type" "mov_reg")]
7177 ;; Jump and linkage insns
7179 (define_expand "jump"
7181 (label_ref (match_operand 0 "" "")))]
7186 (define_insn "*arm_jump"
7188 (label_ref (match_operand 0 "" "")))]
7192 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7194 arm_ccfsm_state += 2;
7197 return \"b%?\\t%l0\";
7200 [(set_attr "predicable" "yes")
7201 (set (attr "length")
7203 (and (match_test "TARGET_THUMB2")
7204 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7205 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7208 (set_attr "type" "branch")]
7211 (define_expand "call"
7212 [(parallel [(call (match_operand 0 "memory_operand")
7213 (match_operand 1 "general_operand"))
7214 (use (match_operand 2 "" ""))
7215 (clobber (reg:SI LR_REGNUM))])]
7220 tree addr = MEM_EXPR (operands[0]);
7222 /* In an untyped call, we can get NULL for operand 2. */
7223 if (operands[2] == NULL_RTX)
7224 operands[2] = const0_rtx;
7226 /* Decide if we should generate indirect calls by loading the
7227 32-bit address of the callee into a register before performing the
7229 callee = XEXP (operands[0], 0);
7230 if (GET_CODE (callee) == SYMBOL_REF
7231 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7233 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7235 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7236 /* Indirect call: set r9 with FDPIC value of callee. */
7237 XEXP (operands[0], 0)
7238 = arm_load_function_descriptor (XEXP (operands[0], 0));
7240 if (detect_cmse_nonsecure_call (addr))
7242 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7244 emit_call_insn (pat);
7248 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7249 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7252 /* Restore FDPIC register (r9) after call. */
7255 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7256 rtx initial_fdpic_reg
7257 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7259 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7260 initial_fdpic_reg));
7267 (define_insn "restore_pic_register_after_call"
7268 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7269 (unspec:SI [(match_dup 0)
7270 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7271 UNSPEC_PIC_RESTORE))]
;; call_internal -- expander invoked by the "call" pattern above once the
;; callee address has been legitimized.  It emits the call grouped in a
;; parallel with:
;;   - a (use) of operand 2 (extra call information; the "call" expander
;;     above substitutes const0_rtx when it is NULL -- presumably a call
;;     cookie, but that is established outside this file), and
;;   - a clobber of LR_REGNUM, since a branch-with-link overwrites the
;;     return-address register.
;; No condition string and no preparation statements: the RTL is matched
;; directly by the *call_* insn patterns elsewhere in this file.
7278 (define_expand "call_internal"
7279 [(parallel [(call (match_operand 0 "memory_operand")
7280 (match_operand 1 "general_operand"))
7281 (use (match_operand 2 "" ""))
7282 (clobber (reg:SI LR_REGNUM))])])
7284 (define_expand "nonsecure_call_internal"
7285 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7286 UNSPEC_NONSECURE_MEM)
7287 (match_operand 1 "general_operand"))
7288 (use (match_operand 2 "" ""))
7289 (clobber (reg:SI LR_REGNUM))])]
7294 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7295 gen_rtx_REG (SImode, R4_REGNUM),
7298 operands[0] = replace_equiv_address (operands[0], tmp);
7301 (define_insn "*call_reg_armv5"
7302 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7303 (match_operand 1 "" ""))
7304 (use (match_operand 2 "" ""))
7305 (clobber (reg:SI LR_REGNUM))]
7306 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7308 [(set_attr "type" "call")]
7311 (define_insn "*call_reg_arm"
7312 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7313 (match_operand 1 "" ""))
7314 (use (match_operand 2 "" ""))
7315 (clobber (reg:SI LR_REGNUM))]
7316 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7318 return output_call (operands);
7320 ;; length is worst case, normally it is only two
7321 [(set_attr "length" "12")
7322 (set_attr "type" "call")]
7326 (define_expand "call_value"
7327 [(parallel [(set (match_operand 0 "" "")
7328 (call (match_operand 1 "memory_operand")
7329 (match_operand 2 "general_operand")))
7330 (use (match_operand 3 "" ""))
7331 (clobber (reg:SI LR_REGNUM))])]
7336 tree addr = MEM_EXPR (operands[1]);
7338 /* In an untyped call, we can get NULL for operand 3. */
7339 if (operands[3] == 0)
7340 operands[3] = const0_rtx;
7342 /* Decide if we should generate indirect calls by loading the
7343 32-bit address of the callee into a register before performing the
7345 callee = XEXP (operands[1], 0);
7346 if (GET_CODE (callee) == SYMBOL_REF
7347 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7349 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7351 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7352 /* Indirect call: set r9 with FDPIC value of callee. */
7353 XEXP (operands[1], 0)
7354 = arm_load_function_descriptor (XEXP (operands[1], 0));
7356 if (detect_cmse_nonsecure_call (addr))
7358 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7359 operands[2], operands[3]);
7360 emit_call_insn (pat);
7364 pat = gen_call_value_internal (operands[0], operands[1],
7365 operands[2], operands[3]);
7366 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7369 /* Restore FDPIC register (r9) after call. */
7372 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7373 rtx initial_fdpic_reg
7374 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7376 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7377 initial_fdpic_reg));
;; call_value_internal -- expander invoked by the "call_value" pattern
;; above for calls whose return value is used.  Identical in structure to
;; call_internal, with operand 0 receiving the callee's result:
;;   - operand 1 is the callee address (a MEM),
;;   - operand 2 is the argument-size/next-arg rtx,
;;   - operand 3 is extra call information; the "call_value" expander
;;     above substitutes const0_rtx when it is 0,
;;   - LR_REGNUM is clobbered because a branch-with-link overwrites the
;;     return-address register.
;; No condition string and no preparation statements: matched directly by
;; the *call_value_* insn patterns elsewhere in this file.
7384 (define_expand "call_value_internal"
7385 [(parallel [(set (match_operand 0 "" "")
7386 (call (match_operand 1 "memory_operand")
7387 (match_operand 2 "general_operand")))
7388 (use (match_operand 3 "" ""))
7389 (clobber (reg:SI LR_REGNUM))])])
7391 (define_expand "nonsecure_call_value_internal"
7392 [(parallel [(set (match_operand 0 "" "")
7393 (call (unspec:SI [(match_operand 1 "memory_operand")]
7394 UNSPEC_NONSECURE_MEM)
7395 (match_operand 2 "general_operand")))
7396 (use (match_operand 3 "" ""))
7397 (clobber (reg:SI LR_REGNUM))])]
7402 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7403 gen_rtx_REG (SImode, R4_REGNUM),
7406 operands[1] = replace_equiv_address (operands[1], tmp);
7409 (define_insn "*call_value_reg_armv5"
7410 [(set (match_operand 0 "" "")
7411 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7412 (match_operand 2 "" "")))
7413 (use (match_operand 3 "" ""))
7414 (clobber (reg:SI LR_REGNUM))]
7415 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7417 [(set_attr "type" "call")]
7420 (define_insn "*call_value_reg_arm"
7421 [(set (match_operand 0 "" "")
7422 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7423 (match_operand 2 "" "")))
7424 (use (match_operand 3 "" ""))
7425 (clobber (reg:SI LR_REGNUM))]
7426 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7428 return output_call (&operands[1]);
7430 [(set_attr "length" "12")
7431 (set_attr "type" "call")]
7434 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7435 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7437 (define_insn "*call_symbol"
7438 [(call (mem:SI (match_operand:SI 0 "" ""))
7439 (match_operand 1 "" ""))
7440 (use (match_operand 2 "" ""))
7441 (clobber (reg:SI LR_REGNUM))]
7443 && !SIBLING_CALL_P (insn)
7444 && (GET_CODE (operands[0]) == SYMBOL_REF)
7445 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7448 rtx op = operands[0];
7450 /* Switch mode now when possible. */
7451 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7452 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7453 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7455 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7457 [(set_attr "type" "call")]
7460 (define_insn "*call_value_symbol"
7461 [(set (match_operand 0 "" "")
7462 (call (mem:SI (match_operand:SI 1 "" ""))
7463 (match_operand:SI 2 "" "")))
7464 (use (match_operand 3 "" ""))
7465 (clobber (reg:SI LR_REGNUM))]
7467 && !SIBLING_CALL_P (insn)
7468 && (GET_CODE (operands[1]) == SYMBOL_REF)
7469 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7472 rtx op = operands[1];
7474 /* Switch mode now when possible. */
7475 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7476 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7477 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7479 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7481 [(set_attr "type" "call")]
7484 (define_expand "sibcall_internal"
7485 [(parallel [(call (match_operand 0 "memory_operand")
7486 (match_operand 1 "general_operand"))
7488 (use (match_operand 2 "" ""))])])
7490 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7491 (define_expand "sibcall"
7492 [(parallel [(call (match_operand 0 "memory_operand")
7493 (match_operand 1 "general_operand"))
7495 (use (match_operand 2 "" ""))])]
7501 if ((!REG_P (XEXP (operands[0], 0))
7502 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7503 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7504 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7505 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7507 if (operands[2] == NULL_RTX)
7508 operands[2] = const0_rtx;
7510 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7511 arm_emit_call_insn (pat, operands[0], true);
7516 (define_expand "sibcall_value_internal"
7517 [(parallel [(set (match_operand 0 "" "")
7518 (call (match_operand 1 "memory_operand")
7519 (match_operand 2 "general_operand")))
7521 (use (match_operand 3 "" ""))])])
7523 (define_expand "sibcall_value"
7524 [(parallel [(set (match_operand 0 "" "")
7525 (call (match_operand 1 "memory_operand")
7526 (match_operand 2 "general_operand")))
7528 (use (match_operand 3 "" ""))])]
7534 if ((!REG_P (XEXP (operands[1], 0))
7535 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7536 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7537 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7538 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7540 if (operands[3] == NULL_RTX)
7541 operands[3] = const0_rtx;
7543 pat = gen_sibcall_value_internal (operands[0], operands[1],
7544 operands[2], operands[3]);
7545 arm_emit_call_insn (pat, operands[1], true);
7550 (define_insn "*sibcall_insn"
7551 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7552 (match_operand 1 "" ""))
7554 (use (match_operand 2 "" ""))]
7555 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7557 if (which_alternative == 1)
7558 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7561 if (arm_arch5t || arm_arch4t)
7562 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7564 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7567 [(set_attr "type" "call")]
7570 (define_insn "*sibcall_value_insn"
7571 [(set (match_operand 0 "" "")
7572 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7573 (match_operand 2 "" "")))
7575 (use (match_operand 3 "" ""))]
7576 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7578 if (which_alternative == 1)
7579 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7582 if (arm_arch5t || arm_arch4t)
7583 return \"bx%?\\t%1\";
7585 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7588 [(set_attr "type" "call")]
7591 (define_expand "<return_str>return"
7593 "(TARGET_ARM || (TARGET_THUMB2
7594 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7595 && !IS_STACKALIGN (arm_current_func_type ())))
7596 <return_cond_false>"
7601 thumb2_expand_return (<return_simple_p>);
7608 ;; Often the return insn will be the same as loading from memory, so set attr
7609 (define_insn "*arm_return"
7611 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7614 if (arm_ccfsm_state == 2)
7616 arm_ccfsm_state += 2;
7619 return output_return_instruction (const_true_rtx, true, false, false);
7621 [(set_attr "type" "load_4")
7622 (set_attr "length" "12")
7623 (set_attr "predicable" "yes")]
7626 (define_insn "*cond_<return_str>return"
7628 (if_then_else (match_operator 0 "arm_comparison_operator"
7629 [(match_operand 1 "cc_register" "") (const_int 0)])
7632 "TARGET_ARM <return_cond_true>"
7635 if (arm_ccfsm_state == 2)
7637 arm_ccfsm_state += 2;
7640 return output_return_instruction (operands[0], true, false,
7643 [(set_attr "conds" "use")
7644 (set_attr "length" "12")
7645 (set_attr "type" "load_4")]
7648 (define_insn "*cond_<return_str>return_inverted"
7650 (if_then_else (match_operator 0 "arm_comparison_operator"
7651 [(match_operand 1 "cc_register" "") (const_int 0)])
7654 "TARGET_ARM <return_cond_true>"
7657 if (arm_ccfsm_state == 2)
7659 arm_ccfsm_state += 2;
7662 return output_return_instruction (operands[0], true, true,
7665 [(set_attr "conds" "use")
7666 (set_attr "length" "12")
7667 (set_attr "type" "load_4")]
7670 (define_insn "*arm_simple_return"
7675 if (arm_ccfsm_state == 2)
7677 arm_ccfsm_state += 2;
7680 return output_return_instruction (const_true_rtx, true, false, true);
7682 [(set_attr "type" "branch")
7683 (set_attr "length" "4")
7684 (set_attr "predicable" "yes")]
7687 ;; Generate a sequence of instructions to determine if the processor is
7688 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7691 (define_expand "return_addr_mask"
7693 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7695 (set (match_operand:SI 0 "s_register_operand")
7696 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7698 (const_int 67108860)))] ; 0x03fffffc
7701 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7704 (define_insn "*check_arch2"
7705 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7706 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7709 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7710 [(set_attr "length" "8")
7711 (set_attr "conds" "set")
7712 (set_attr "type" "multiple")]
7715 ;; Call subroutine returning any type.
7717 (define_expand "untyped_call"
7718 [(parallel [(call (match_operand 0 "" "")
7720 (match_operand 1 "" "")
7721 (match_operand 2 "" "")])]
7722 "TARGET_EITHER && !TARGET_FDPIC"
7726 rtx par = gen_rtx_PARALLEL (VOIDmode,
7727 rtvec_alloc (XVECLEN (operands[2], 0)));
7728 rtx addr = gen_reg_rtx (Pmode);
7732 emit_move_insn (addr, XEXP (operands[1], 0));
7733 mem = change_address (operands[1], BLKmode, addr);
7735 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7737 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7739 /* Default code only uses r0 as a return value, but we could
7740 be using anything up to 4 registers. */
7741 if (REGNO (src) == R0_REGNUM)
7742 src = gen_rtx_REG (TImode, R0_REGNUM);
7744 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7746 size += GET_MODE_SIZE (GET_MODE (src));
7749 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7753 for (i = 0; i < XVECLEN (par, 0); i++)
7755 HOST_WIDE_INT offset = 0;
7756 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7759 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7761 mem = change_address (mem, GET_MODE (reg), NULL);
7762 if (REGNO (reg) == R0_REGNUM)
7764 /* On thumb we have to use a write-back instruction. */
7765 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7766 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7767 size = TARGET_ARM ? 16 : 0;
7771 emit_move_insn (mem, reg);
7772 size = GET_MODE_SIZE (GET_MODE (reg));
7776 /* The optimizer does not know that the call sets the function value
7777 registers we stored in the result block. We avoid problems by
7778 claiming that all hard registers are used and clobbered at this
7780 emit_insn (gen_blockage ());
7786 (define_expand "untyped_return"
7787 [(match_operand:BLK 0 "memory_operand")
7788 (match_operand 1 "" "")]
7789 "TARGET_EITHER && !TARGET_FDPIC"
7793 rtx addr = gen_reg_rtx (Pmode);
7797 emit_move_insn (addr, XEXP (operands[0], 0));
7798 mem = change_address (operands[0], BLKmode, addr);
7800 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7802 HOST_WIDE_INT offset = 0;
7803 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7806 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7808 mem = change_address (mem, GET_MODE (reg), NULL);
7809 if (REGNO (reg) == R0_REGNUM)
7811 /* On thumb we have to use a write-back instruction. */
7812 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7813 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7814 size = TARGET_ARM ? 16 : 0;
7818 emit_move_insn (reg, mem);
7819 size = GET_MODE_SIZE (GET_MODE (reg));
7823 /* Emit USE insns before the return. */
7824 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7825 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7827 /* Construct the return. */
7828 expand_naked_return ();
7834 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7835 ;; all of memory. This blocks insns from being moved across this point.
7837 (define_insn "blockage"
7838 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7841 [(set_attr "length" "0")
7842 (set_attr "type" "block")]
7845 ;; Since we hard code r0 here use the 'o' constraint to prevent
7846 ;; provoking undefined behaviour in the hardware with putting out
7847 ;; auto-increment operations with potentially r0 as the base register.
7848 (define_insn "probe_stack"
7849 [(set (match_operand:SI 0 "memory_operand" "=o")
7850 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7853 [(set_attr "type" "store_4")
7854 (set_attr "predicable" "yes")]
7857 (define_insn "probe_stack_range"
7858 [(set (match_operand:SI 0 "register_operand" "=r")
7859 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7860 (match_operand:SI 2 "register_operand" "r")]
7861 VUNSPEC_PROBE_STACK_RANGE))]
7864 return output_probe_stack_range (operands[0], operands[2]);
7866 [(set_attr "type" "multiple")
7867 (set_attr "conds" "clob")]
7870 ;; Named patterns for stack smashing protection.
7871 (define_expand "stack_protect_combined_set"
7873 [(set (match_operand:SI 0 "memory_operand")
7874 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7876 (clobber (match_scratch:SI 2 ""))
7877 (clobber (match_scratch:SI 3 ""))])]
7882 ;; Use a separate insn from the above expand to be able to have the mem outside
7883 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7884 ;; try to reload the guard since we need to control how PIC access is done in
7885 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7886 ;; legitimize_pic_address ()).
7887 (define_insn_and_split "*stack_protect_combined_set_insn"
7888 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7889 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7891 (clobber (match_scratch:SI 2 "=&l,&r"))
7892 (clobber (match_scratch:SI 3 "=&l,&r"))]
7896 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7898 (clobber (match_dup 2))])]
7906 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7908 pic_reg = operands[3];
7910 /* Forces recomputing of GOT base now. */
7911 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7912 true /*compute_now*/);
7916 if (address_operand (operands[1], SImode))
7917 operands[2] = operands[1];
7920 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7921 emit_move_insn (operands[2], mem);
7925 [(set_attr "arch" "t1,32")]
7928 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7929 ;; canary value does not live beyond the life of this sequence.
7930 (define_insn "*stack_protect_set_insn"
7931 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7932 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7934 (clobber (match_dup 1))]
7937 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7938 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7939 [(set_attr "length" "8,12")
7940 (set_attr "conds" "clob,nocond")
7941 (set_attr "type" "multiple")
7942 (set_attr "arch" "t1,32")]
7945 (define_expand "stack_protect_combined_test"
7949 (eq (match_operand:SI 0 "memory_operand")
7950 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7952 (label_ref (match_operand 2))
7954 (clobber (match_scratch:SI 3 ""))
7955 (clobber (match_scratch:SI 4 ""))
7956 (clobber (reg:CC CC_REGNUM))])]
;; Combined canary-test insn: the split recomputes the PIC/FDPIC GOT
;; base (as in the set pattern above), then emits the target-specific
;; test insn plus a conditional branch to operand 2.
;; NOTE(review): interior lines (unspec tags, conditions, some closing
;; arguments) are elided in this extract.
7961 ;; Use a separate insn from the above expand to be able to have the mem outside
7962 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7963 ;; try to reload the guard since we need to control how PIC access is done in
7964 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7965 ;; legitimize_pic_address ()).
7966 (define_insn_and_split "*stack_protect_combined_test_insn"
7969 (eq (match_operand:SI 0 "memory_operand" "m,m")
7970 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7972 (label_ref (match_operand 2))
7974 (clobber (match_scratch:SI 3 "=&l,&r"))
7975 (clobber (match_scratch:SI 4 "=&l,&r"))
7976 (clobber (reg:CC CC_REGNUM))]
;; FDPIC uses its fixed register; otherwise scratch 4 holds the PIC base.
7989 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7991 pic_reg = operands[4];
7993 /* Forces recomputing of GOT base now. */
7994 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7995 true /*compute_now*/);
;; Non-PIC: use the address directly if legitimate, else load it from
;; the constant pool into scratch 3.
7999 if (address_operand (operands[1], SImode))
8000 operands[3] = operands[1];
8003 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8004 emit_move_insn (operands[3], mem);
;; 32-bit path: EOR-based test sets CC_Z, then a conditional branch.
8009 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8011 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8012 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8013 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
;; Thumb-1 path: test leaves a value in operands[4]; branch via cbranchsi4.
8017 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8019 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8020 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8025 [(set_attr "arch" "t1,32")]
;; 32-bit canary test: load the guard (via reg operand 2) and the canary
;; (memory operand 1), EOR them into scratch operand 0 setting Z.  Both
;; temporaries are clobbered so the canary value does not survive.
8028 (define_insn "arm_stack_protect_test_insn"
8029 [(set (reg:CC_Z CC_REGNUM)
8030 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8031 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8034 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8035 (clobber (match_dup 2))]
8037 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8038 [(set_attr "length" "8,12")
8039 (set_attr "conds" "set")
8040 (set_attr "type" "multiple")
8041 (set_attr "arch" "t,32")]
;; casesi expander: if the lower bound (operand 1) is non-zero, bias the
;; index into a fresh register first; then dispatch to the ARM, Thumb-1
;; or Thumb-2 internal casesi pattern, forcing the range to a register
;; when the chosen pattern's predicate rejects it.
8044 (define_expand "casesi"
8045 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8046 (match_operand:SI 1 "const_int_operand") ; lower bound
8047 (match_operand:SI 2 "const_int_operand") ; total range
8048 (match_operand:SI 3 "" "") ; table label
8049 (match_operand:SI 4 "" "")] ; Out of range label
8050 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8053 enum insn_code code;
8054 if (operands[1] != const0_rtx)
8056 rtx reg = gen_reg_rtx (SImode);
8058 emit_insn (gen_addsi3 (reg, operands[0],
8059 gen_int_mode (-INTVAL (operands[1]),
;; Select the internal pattern for the current ISA (conditions elided
;; in this extract).
8065 code = CODE_FOR_arm_casesi_internal;
8066 else if (TARGET_THUMB1)
8067 code = CODE_FOR_thumb1_casesi_internal_pic;
8069 code = CODE_FOR_thumb2_casesi_internal_pic;
8071 code = CODE_FOR_thumb2_casesi_internal;
8073 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8074 operands[2] = force_reg (SImode, operands[2]);
8076 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8077 operands[3], operands[4]))
;; Builds the dispatch MEM (table label + index*4) into operands[4];
;; marked read-only and non-trapping since jump tables are constant.
8082 ;; The USE in this pattern is needed to tell flow analysis that this is
8083 ;; a CASESI insn. It has no other purpose.
8084 (define_expand "arm_casesi_internal"
8085 [(parallel [(set (pc)
8087 (leu (match_operand:SI 0 "s_register_operand")
8088 (match_operand:SI 1 "arm_rhs_operand"))
8090 (label_ref:SI (match_operand 3 ""))))
8091 (clobber (reg:CC CC_REGNUM))
8092 (use (label_ref:SI (match_operand 2 "")))])]
8095 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8096 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8097 gen_rtx_LABEL_REF (SImode, operands[2]));
8098 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8099 MEM_READONLY_P (operands[4]) = 1;
8100 MEM_NOTRAP_P (operands[4]) = 1;
;; ARM-mode casesi: cmp index against range, then a conditional
;; pc-relative dispatch (addls or ldrls into pc -- selection condition
;; elided in this extract), else branch to the default label %l3.
8103 (define_insn "*arm_casesi_internal"
8104 [(parallel [(set (pc)
8106 (leu (match_operand:SI 0 "s_register_operand" "r")
8107 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8108 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8109 (label_ref:SI (match_operand 2 "" ""))))
8110 (label_ref:SI (match_operand 3 "" ""))))
8111 (clobber (reg:CC CC_REGNUM))
8112 (use (label_ref:SI (match_dup 2)))])]
8116 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8117 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8119 [(set_attr "conds" "clob")
8120 (set_attr "length" "12")
8121 (set_attr "type" "multiple")]
;; Indirect jump expander.  For Thumb-2 the low bit of the target
;; address is set (orr #1) so bx stays in Thumb state.
8124 (define_expand "indirect_jump"
8126 (match_operand:SI 0 "s_register_operand"))]
8129 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8130 address and use bx. */
8134 tmp = gen_reg_rtx (SImode);
8135 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
;; ARM-mode register-indirect jump via mov to pc (never bx, per NB).
8141 ;; NB Never uses BX.
8142 (define_insn "*arm_indirect_jump"
8144 (match_operand:SI 0 "s_register_operand" "r"))]
8146 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8147 [(set_attr "predicable" "yes")
8148 (set_attr "type" "branch")]
;; Memory-indirect jump: load the target straight into pc.
8151 (define_insn "*load_indirect_jump"
8153 (match_operand:SI 0 "memory_operand" "m"))]
8155 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8156 [(set_attr "type" "load_4")
8157 (set_attr "pool_range" "4096")
8158 (set_attr "neg_pool_range" "4084")
8159 (set_attr "predicable" "yes")]
;; NOTE(review): the define_insn header lines for these two patterns are
;; elided in this extract; only attributes and the trap body remain.
;; The first fragment sets length/type for an insn of type mov_reg; the
;; second is the trap pattern emitting permanently-undefined encodings
;; (0xe7f000f0 for ARM, 0xdeff for Thumb).
8169 [(set (attr "length")
8170 (if_then_else (eq_attr "is_thumb" "yes")
8173 (set_attr "type" "mov_reg")]
8177 [(trap_if (const_int 1) (const_int 0))]
8181 return \".inst\\t0xe7f000f0\";
8183 return \".inst\\t0xdeff\";
8185 [(set (attr "length")
8186 (if_then_else (eq_attr "is_thumb" "yes")
8189 (set_attr "type" "trap")
8190 (set_attr "conds" "unconditional")]
;; Arithmetic op combined with a power-of-two multiply, emitted as the
;; op with an LSL-shifted operand (%b3 prints the log2 of operand 3).
8194 ;; Patterns to allow combination of arithmetic, cond code and shifts
8196 (define_insn "*<arith_shift_insn>_multsi"
8197 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8199 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8200 (match_operand:SI 3 "power_of_two_operand" ""))
8201 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8203 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8204 [(set_attr "predicable" "yes")
8205 (set_attr "shift" "2")
8206 (set_attr "arch" "a,t2")
8207 (set_attr "type" "alu_shift_imm")])
;; Arithmetic op combined with a non-multiply shift of operand 3 by
;; operand 4 (%S2 prints the shift); MULT is handled by the pattern
;; above, hence the GET_CODE exclusion.
8209 (define_insn "*<arith_shift_insn>_shiftsi"
8210 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8212 (match_operator:SI 2 "shift_nomul_operator"
8213 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8214 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8215 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8216 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8217 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8218 [(set_attr "predicable" "yes")
8219 (set_attr "shift" "3")
8220 (set_attr "arch" "a,t2,a")
8221 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
;; NOTE(review): the leading (define_split line of this pattern is
;; elided in this extract.  Splits a nested shiftable-op expression
;; using scratch operand 8 so the shift+op pair can be emitted first.
8224 [(set (match_operand:SI 0 "s_register_operand" "")
8225 (match_operator:SI 1 "shiftable_operator"
8226 [(match_operator:SI 2 "shiftable_operator"
8227 [(match_operator:SI 3 "shift_operator"
8228 [(match_operand:SI 4 "s_register_operand" "")
8229 (match_operand:SI 5 "reg_or_int_operand" "")])
8230 (match_operand:SI 6 "s_register_operand" "")])
8231 (match_operand:SI 7 "arm_rhs_operand" "")]))
8232 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8235 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8238 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting form of op-with-shift: performs the shiftable op on a
;; shifted operand, writes the result AND sets CC_NOOV (%i1s prints the
;; op with the s suffix).
8241 (define_insn "*arith_shiftsi_compare0"
8242 [(set (reg:CC_NOOV CC_REGNUM)
8244 (match_operator:SI 1 "shiftable_operator"
8245 [(match_operator:SI 3 "shift_operator"
8246 [(match_operand:SI 4 "s_register_operand" "r,r")
8247 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8248 (match_operand:SI 2 "s_register_operand" "r,r")])
8250 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8251 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8254 "%i1s%?\\t%0, %2, %4%S3"
8255 [(set_attr "conds" "set")
8256 (set_attr "shift" "4")
8257 (set_attr "arch" "32,a")
8258 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As *arith_shiftsi_compare0 but only the flags are wanted; the result
;; register is a scratch.
8260 (define_insn "*arith_shiftsi_compare0_scratch"
8261 [(set (reg:CC_NOOV CC_REGNUM)
8263 (match_operator:SI 1 "shiftable_operator"
8264 [(match_operator:SI 3 "shift_operator"
8265 [(match_operand:SI 4 "s_register_operand" "r,r")
8266 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8267 (match_operand:SI 2 "s_register_operand" "r,r")])
8269 (clobber (match_scratch:SI 0 "=r,r"))]
8271 "%i1s%?\\t%0, %2, %4%S3"
8272 [(set_attr "conds" "set")
8273 (set_attr "shift" "4")
8274 (set_attr "arch" "32,a")
8275 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Subtract a shifted operand: sub rd, rn, rm <shift>.
8277 (define_insn "*sub_shiftsi"
8278 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8279 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8280 (match_operator:SI 2 "shift_operator"
8281 [(match_operand:SI 3 "s_register_operand" "r,r")
8282 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8284 "sub%?\\t%0, %1, %3%S2"
8285 [(set_attr "predicable" "yes")
8286 (set_attr "predicable_short_it" "no")
8287 (set_attr "shift" "3")
8288 (set_attr "arch" "32,a")
8289 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting subtract of a shifted operand (subs), writing both the
;; result and CC_NOOV.
8291 (define_insn "*sub_shiftsi_compare0"
8292 [(set (reg:CC_NOOV CC_REGNUM)
8294 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8295 (match_operator:SI 2 "shift_operator"
8296 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8297 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8299 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8300 (minus:SI (match_dup 1)
8301 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8303 "subs%?\\t%0, %1, %3%S2"
8304 [(set_attr "conds" "set")
8305 (set_attr "shift" "3")
8306 (set_attr "arch" "32,a,a")
8307 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; As *sub_shiftsi_compare0 but only the flags are wanted; result goes
;; to a scratch register.
8309 (define_insn "*sub_shiftsi_compare0_scratch"
8310 [(set (reg:CC_NOOV CC_REGNUM)
8312 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8313 (match_operator:SI 2 "shift_operator"
8314 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8315 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8317 (clobber (match_scratch:SI 0 "=r,r,r"))]
8319 "subs%?\\t%0, %1, %3%S2"
8320 [(set_attr "conds" "set")
8321 (set_attr "shift" "3")
8322 (set_attr "arch" "32,a,a")
8323 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; AND of a store-condition with a register: after reload, splits into
;; two cond_exec moves -- zero on the reversed condition (operand 5),
;; "and #1" on the original condition (operand 4).
8326 (define_insn_and_split "*and_scc"
8327 [(set (match_operand:SI 0 "s_register_operand" "=r")
8328 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8329 [(match_operand 2 "cc_register" "") (const_int 0)])
8330 (match_operand:SI 3 "s_register_operand" "r")))]
8332 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8333 "&& reload_completed"
8334 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8335 (cond_exec (match_dup 4) (set (match_dup 0)
8336 (and:SI (match_dup 3) (const_int 1))))]
8338 machine_mode mode = GET_MODE (operands[2]);
8339 enum rtx_code rc = GET_CODE (operands[1]);
8341 /* Note that operands[4] is the same as operands[1],
8342 but with VOIDmode as the result. */
8343 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
;; FP compares need the unordered-aware reversal.
8344 if (mode == CCFPmode || mode == CCFPEmode)
8345 rc = reverse_condition_maybe_unordered (rc);
8347 rc = reverse_condition (rc);
8348 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8350 [(set_attr "conds" "use")
8351 (set_attr "type" "multiple")
8352 (set_attr "length" "8")]
;; IOR of a store-condition with a register: split (only when operands 0
;; and 3 differ, i.e. the second alternative) into a cond_exec move of
;; operand 3 on the reversed condition and an "orr #1" on the original.
8355 (define_insn_and_split "*ior_scc"
8356 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8357 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8358 [(match_operand 2 "cc_register" "") (const_int 0)])
8359 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8364 "&& reload_completed
8365 && REGNO (operands [0]) != REGNO (operands[3])"
8366 ;; && which_alternative == 1
8367 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8368 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8369 (cond_exec (match_dup 4) (set (match_dup 0)
8370 (ior:SI (match_dup 3) (const_int 1))))]
8372 machine_mode mode = GET_MODE (operands[2]);
8373 enum rtx_code rc = GET_CODE (operands[1]);
8375 /* Note that operands[4] is the same as operands[1],
8376 but with VOIDmode as the result. */
8377 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
;; FP compares need the unordered-aware reversal.
8378 if (mode == CCFPmode || mode == CCFPEmode)
8379 rc = reverse_condition_maybe_unordered (rc);
8381 rc = reverse_condition (rc);
8382 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8384 [(set_attr "conds" "use")
8385 (set_attr "length" "4,8")
8386 (set_attr "type" "logic_imm,multiple")]
8389 ; A series of splitters for the compare_scc pattern below. Note that
8390 ; order is important.
;; x < 0  ->  logical shift right by 31 (sign bit).
8392 [(set (match_operand:SI 0 "s_register_operand" "")
8393 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8395 (clobber (reg:CC CC_REGNUM))]
8396 "TARGET_32BIT && reload_completed"
8397 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; x >= 0  ->  invert, then shift the (now inverted) sign bit down.
8400 [(set (match_operand:SI 0 "s_register_operand" "")
8401 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8403 (clobber (reg:CC CC_REGNUM))]
8404 "TARGET_32BIT && reload_completed"
8405 [(set (match_dup 0) (not:SI (match_dup 1)))
8406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; x == 0 with CLZ available: clz gives 32 only for zero; >>5 yields 0/1.
8409 [(set (match_operand:SI 0 "s_register_operand" "")
8410 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8412 (clobber (reg:CC CC_REGNUM))]
8413 "arm_arch5t && TARGET_32BIT"
8414 [(set (match_dup 0) (clz:SI (match_dup 1)))
8415 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; x == 0 without CLZ: rsbs/conditional zero via the carry flag.
8419 [(set (match_operand:SI 0 "s_register_operand" "")
8420 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8422 (clobber (reg:CC CC_REGNUM))]
8423 "TARGET_32BIT && reload_completed"
8425 [(set (reg:CC CC_REGNUM)
8426 (compare:CC (const_int 1) (match_dup 1)))
8428 (minus:SI (const_int 1) (match_dup 1)))])
8429 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8430 (set (match_dup 0) (const_int 0)))])
;; x != const: subs then conditionally set 1 (operand 3 is -const).
8433 [(set (match_operand:SI 0 "s_register_operand" "")
8434 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8435 (match_operand:SI 2 "const_int_operand" "")))
8436 (clobber (reg:CC CC_REGNUM))]
8437 "TARGET_32BIT && reload_completed"
8439 [(set (reg:CC CC_REGNUM)
8440 (compare:CC (match_dup 1) (match_dup 2)))
8441 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8442 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8443 (set (match_dup 0) (const_int 1)))]
8445 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
;; x != y (register/add operand): subs then conditionally set 1.
8449 [(set (match_operand:SI 0 "s_register_operand" "")
8450 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8451 (match_operand:SI 2 "arm_add_operand" "")))
8452 (clobber (reg:CC CC_REGNUM))]
8453 "TARGET_32BIT && reload_completed"
8455 [(set (reg:CC_NOOV CC_REGNUM)
8456 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8458 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8459 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8460 (set (match_dup 0) (const_int 1)))])
;; Generic store-condition: compare operands 2/3, then cond_exec moves
;; of 0 (reversed condition, operand 4) and 1 (condition, operand 5).
;; The splitters above catch the cheaper special cases first.
8462 (define_insn_and_split "*compare_scc"
8463 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8464 (match_operator:SI 1 "arm_comparison_operator"
8465 [(match_operand:SI 2 "s_register_operand" "r,r")
8466 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8467 (clobber (reg:CC CC_REGNUM))]
8470 "&& reload_completed"
8471 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8472 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8473 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8476 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8477 operands[2], operands[3]);
8478 enum rtx_code rc = GET_CODE (operands[1]);
8480 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8482 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
;; FP compares need the unordered-aware reversal.
8483 if (mode == CCFPmode || mode == CCFPEmode)
8484 rc = reverse_condition_maybe_unordered (rc);
8486 rc = reverse_condition (rc);
8487 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8489 [(set_attr "type" "multiple")]
8492 ;; Attempt to improve the sequence generated by the compare_scc splitters
8493 ;; not to use conditional execution.
;; Rd = (x == 0), CLZ available: replace the cmp + cond_exec pair with
;; clz + lsr #5 when the flags die after the sequence.
8495 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
8499 [(set (reg:CC CC_REGNUM)
8500 (compare:CC (match_operand:SI 1 "register_operand" "")
8502 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8503 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8504 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8505 (set (match_dup 0) (const_int 1)))]
8506 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8507 [(set (match_dup 0) (clz:SI (match_dup 1)))
8508 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Rd = (x == 0) without CLZ: rsbs into a scratch then adc-style
;; carry-add sequence (scratch operand 2 required).
8511 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
8515 [(set (reg:CC CC_REGNUM)
8516 (compare:CC (match_operand:SI 1 "register_operand" "")
8518 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8519 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8520 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8521 (set (match_dup 0) (const_int 1)))
8522 (match_scratch:SI 2 "r")]
8523 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8525 [(set (reg:CC CC_REGNUM)
8526 (compare:CC (const_int 0) (match_dup 1)))
8527 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8529 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8530 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
;; Rd = (x == y), CLZ available and optimising for speed:
;; sub + clz + lsr #5 (skipped for Thumb-2 size optimisation).
8533 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8534 ;; sub Rd, Reg1, reg2
8538 [(set (reg:CC CC_REGNUM)
8539 (compare:CC (match_operand:SI 1 "register_operand" "")
8540 (match_operand:SI 2 "arm_rhs_operand" "")))
8541 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8542 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8543 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8544 (set (match_dup 0) (const_int 1)))]
8545 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8546 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8547 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8548 (set (match_dup 0) (clz:SI (match_dup 0)))
8549 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Rd = (x == y) without CLZ (or optimising for size): difference in a
;; scratch (operand 4 is x-y, folded for constants), then the carry-add
;; trick as above.
8553 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8554 ;; sub T1, Reg1, reg2
8558 [(set (reg:CC CC_REGNUM)
8559 (compare:CC (match_operand:SI 1 "register_operand" "")
8560 (match_operand:SI 2 "arm_rhs_operand" "")))
8561 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8562 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8563 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8564 (set (match_dup 0) (const_int 1)))
8565 (match_scratch:SI 3 "r")]
8566 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8567 [(set (match_dup 3) (match_dup 4))
8569 [(set (reg:CC CC_REGNUM)
8570 (compare:CC (const_int 0) (match_dup 3)))
8571 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8573 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8574 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8576 if (CONST_INT_P (operands[2]))
8577 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8579 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move on an existing CC value: emits mov%d4/mov%D4 pairs,
;; skipping the move whose source already occupies the destination
;; (alternatives 0 and 1 tie an input to operand 0).
8582 (define_insn "*cond_move"
8583 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8584 (if_then_else:SI (match_operator 3 "equality_operator"
8585 [(match_operator 4 "arm_comparison_operator"
8586 [(match_operand 5 "cc_register" "") (const_int 0)])
8588 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8589 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
;; NE selects operand 1 on the condition; EQ swaps the roles.
8592 if (GET_CODE (operands[3]) == NE)
8594 if (which_alternative != 1)
8595 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8596 if (which_alternative != 0)
8597 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8600 if (which_alternative != 0)
8601 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8602 if (which_alternative != 1)
8603 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8606 [(set_attr "conds" "use")
8607 (set_attr_alternative "type"
8608 [(if_then_else (match_operand 2 "const_int_operand" "")
8609 (const_string "mov_imm")
8610 (const_string "mov_reg"))
8611 (if_then_else (match_operand 1 "const_int_operand" "")
8612 (const_string "mov_imm")
8613 (const_string "mov_reg"))
8614 (const_string "multiple")])
8615 (set_attr "length" "4,4,8")]
;; Shiftable op applied to a store-condition result: special-cases
;; (x < 0) as "op rd, rn, rm, lsr #31"; otherwise cmp then a
;; conditional variant chosen by the operator (AND zeroes, MINUS
;; negates, else conditional move) before "op%d4 rd, rn, #1".
8618 (define_insn "*cond_arith"
8619 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8620 (match_operator:SI 5 "shiftable_operator"
8621 [(match_operator:SI 4 "arm_comparison_operator"
8622 [(match_operand:SI 2 "s_register_operand" "r,r")
8623 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8624 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8625 (clobber (reg:CC CC_REGNUM))]
8628 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8629 return \"%i5\\t%0, %1, %2, lsr #31\";
8631 output_asm_insn (\"cmp\\t%2, %3\", operands);
8632 if (GET_CODE (operands[5]) == AND)
8633 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8634 else if (GET_CODE (operands[5]) == MINUS)
8635 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8636 else if (which_alternative != 0)
8637 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8638 return \"%i5%d4\\t%0, %1, #1\";
8640 [(set_attr "conds" "clob")
8641 (set_attr "length" "12")
8642 (set_attr "type" "multiple")]
;; Subtract a store-condition result: cmp, optional move of operand 1
;; into the destination, then conditional "sub rd, rn, #1".
8645 (define_insn "*cond_sub"
8646 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8647 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8648 (match_operator:SI 4 "arm_comparison_operator"
8649 [(match_operand:SI 2 "s_register_operand" "r,r")
8650 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8651 (clobber (reg:CC CC_REGNUM))]
8654 output_asm_insn (\"cmp\\t%2, %3\", operands);
8655 if (which_alternative != 0)
8656 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8657 return \"sub%d4\\t%0, %1, #1\";
8659 [(set_attr "conds" "clob")
8660 (set_attr "length" "8,12")
8661 (set_attr "type" "multiple")]
;; Two compares whose results feed an if-then-else into a dominant CC
;; register; emits cmp/cmn pairs chosen by cmp_idx from the constraint
;; alternatives, with an IT block on Thumb-2.  "swap" selects which
;; comparison dominates (see comparison_dominates_p).
;; NOTE(review): some table rows and the closing braces of this pattern
;; are elided in this extract.
8664 (define_insn "*cmp_ite0"
8665 [(set (match_operand 6 "dominant_cc_register" "")
8668 (match_operator 4 "arm_comparison_operator"
8669 [(match_operand:SI 0 "s_register_operand"
8670 "l,l,l,r,r,r,r,r,r")
8671 (match_operand:SI 1 "arm_add_operand"
8672 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8673 (match_operator:SI 5 "arm_comparison_operator"
8674 [(match_operand:SI 2 "s_register_operand"
8675 "l,r,r,l,l,r,r,r,r")
8676 (match_operand:SI 3 "arm_add_operand"
8677 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
;; cmp1: conditional second compare; cmp2: unconditional first compare.
8683 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8685 {\"cmp%d5\\t%0, %1\",
8686 \"cmp%d4\\t%2, %3\"},
8687 {\"cmn%d5\\t%0, #%n1\",
8688 \"cmp%d4\\t%2, %3\"},
8689 {\"cmp%d5\\t%0, %1\",
8690 \"cmn%d4\\t%2, #%n3\"},
8691 {\"cmn%d5\\t%0, #%n1\",
8692 \"cmn%d4\\t%2, #%n3\"}
8694 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8699 \"cmn\\t%0, #%n1\"},
8700 {\"cmn\\t%2, #%n3\",
8702 {\"cmn\\t%2, #%n3\",
8705 static const char * const ite[2] =
8710 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8711 CMP_CMP, CMN_CMP, CMP_CMP,
8712 CMN_CMP, CMP_CMN, CMN_CMN};
8714 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8716 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8717 if (TARGET_THUMB2) {
8718 output_asm_insn (ite[swap], operands);
8720 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8723 [(set_attr "conds" "set")
8724 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8725 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8726 (set_attr "type" "multiple")
8727 (set_attr_alternative "length"
8733 (if_then_else (eq_attr "is_thumb" "no")
8736 (if_then_else (eq_attr "is_thumb" "no")
8739 (if_then_else (eq_attr "is_thumb" "no")
8742 (if_then_else (eq_attr "is_thumb" "no")
;; As *cmp_ite0 but with the first comparison reversed: cmp1 rows are
;; unconditional and cmp2 rows use %d4/%D5 suffixes; dominance is
;; tested against the reversed code of operand 4.
;; NOTE(review): some table rows and closing braces are elided in this
;; extract.
8747 (define_insn "*cmp_ite1"
8748 [(set (match_operand 6 "dominant_cc_register" "")
8751 (match_operator 4 "arm_comparison_operator"
8752 [(match_operand:SI 0 "s_register_operand"
8753 "l,l,l,r,r,r,r,r,r")
8754 (match_operand:SI 1 "arm_add_operand"
8755 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8756 (match_operator:SI 5 "arm_comparison_operator"
8757 [(match_operand:SI 2 "s_register_operand"
8758 "l,r,r,l,l,r,r,r,r")
8759 (match_operand:SI 3 "arm_add_operand"
8760 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8766 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8770 {\"cmn\\t%0, #%n1\",
8773 \"cmn\\t%2, #%n3\"},
8774 {\"cmn\\t%0, #%n1\",
8777 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8779 {\"cmp%d4\\t%2, %3\",
8780 \"cmp%D5\\t%0, %1\"},
8781 {\"cmp%d4\\t%2, %3\",
8782 \"cmn%D5\\t%0, #%n1\"},
8783 {\"cmn%d4\\t%2, #%n3\",
8784 \"cmp%D5\\t%0, %1\"},
8785 {\"cmn%d4\\t%2, #%n3\",
8786 \"cmn%D5\\t%0, #%n1\"}
8788 static const char * const ite[2] =
8793 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8794 CMP_CMP, CMN_CMP, CMP_CMP,
8795 CMN_CMP, CMP_CMN, CMN_CMN};
8797 comparison_dominates_p (GET_CODE (operands[5]),
8798 reverse_condition (GET_CODE (operands[4])));
8800 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8801 if (TARGET_THUMB2) {
8802 output_asm_insn (ite[swap], operands);
8804 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8807 [(set_attr "conds" "set")
8808 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8809 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8810 (set_attr_alternative "length"
8816 (if_then_else (eq_attr "is_thumb" "no")
8819 (if_then_else (eq_attr "is_thumb" "no")
8822 (if_then_else (eq_attr "is_thumb" "no")
8825 (if_then_else (eq_attr "is_thumb" "no")
8828 (set_attr "type" "multiple")]
;; AND of two comparisons into a dominant CC register: unconditional
;; first compare, conditional second (cmp1 indexed by cmp_idx/swap),
;; with an IT block on Thumb-2; ten constraint alternatives.
;; NOTE(review): some table rows and closing braces are elided in this
;; extract.
8831 (define_insn "*cmp_and"
8832 [(set (match_operand 6 "dominant_cc_register" "")
8835 (match_operator 4 "arm_comparison_operator"
8836 [(match_operand:SI 0 "s_register_operand"
8837 "l,l,l,r,r,r,r,r,r,r")
8838 (match_operand:SI 1 "arm_add_operand"
8839 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8840 (match_operator:SI 5 "arm_comparison_operator"
8841 [(match_operand:SI 2 "s_register_operand"
8842 "l,r,r,l,l,r,r,r,r,r")
8843 (match_operand:SI 3 "arm_add_operand"
8844 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8849 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8851 {\"cmp%d5\\t%0, %1\",
8852 \"cmp%d4\\t%2, %3\"},
8853 {\"cmn%d5\\t%0, #%n1\",
8854 \"cmp%d4\\t%2, %3\"},
8855 {\"cmp%d5\\t%0, %1\",
8856 \"cmn%d4\\t%2, #%n3\"},
8857 {\"cmn%d5\\t%0, #%n1\",
8858 \"cmn%d4\\t%2, #%n3\"}
8860 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8865 \"cmn\\t%0, #%n1\"},
8866 {\"cmn\\t%2, #%n3\",
8868 {\"cmn\\t%2, #%n3\",
8871 static const char *const ite[2] =
8876 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8877 CMP_CMP, CMN_CMP, CMP_CMP,
8878 CMP_CMP, CMN_CMP, CMP_CMN,
8881 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8883 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8884 if (TARGET_THUMB2) {
8885 output_asm_insn (ite[swap], operands);
8887 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8890 [(set_attr "conds" "set")
8891 (set_attr "predicable" "no")
8892 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8893 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8894 (set_attr_alternative "length"
8901 (if_then_else (eq_attr "is_thumb" "no")
8904 (if_then_else (eq_attr "is_thumb" "no")
8907 (if_then_else (eq_attr "is_thumb" "no")
8910 (if_then_else (eq_attr "is_thumb" "no")
8913 (set_attr "type" "multiple")]
;; IOR of two comparisons into a dominant CC register: unconditional
;; first compare, second compare guarded on the INVERSE (%D4/%D5
;; suffixes) of the first; IT block on Thumb-2.
;; NOTE(review): some table rows and closing braces are elided in this
;; extract.
8916 (define_insn "*cmp_ior"
8917 [(set (match_operand 6 "dominant_cc_register" "")
8920 (match_operator 4 "arm_comparison_operator"
8921 [(match_operand:SI 0 "s_register_operand"
8922 "l,l,l,r,r,r,r,r,r,r")
8923 (match_operand:SI 1 "arm_add_operand"
8924 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8925 (match_operator:SI 5 "arm_comparison_operator"
8926 [(match_operand:SI 2 "s_register_operand"
8927 "l,r,r,l,l,r,r,r,r,r")
8928 (match_operand:SI 3 "arm_add_operand"
8929 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8934 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8938 {\"cmn\\t%0, #%n1\",
8941 \"cmn\\t%2, #%n3\"},
8942 {\"cmn\\t%0, #%n1\",
8945 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8947 {\"cmp%D4\\t%2, %3\",
8948 \"cmp%D5\\t%0, %1\"},
8949 {\"cmp%D4\\t%2, %3\",
8950 \"cmn%D5\\t%0, #%n1\"},
8951 {\"cmn%D4\\t%2, #%n3\",
8952 \"cmp%D5\\t%0, %1\"},
8953 {\"cmn%D4\\t%2, #%n3\",
8954 \"cmn%D5\\t%0, #%n1\"}
8956 static const char *const ite[2] =
8961 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8962 CMP_CMP, CMN_CMP, CMP_CMP,
8963 CMP_CMP, CMN_CMP, CMP_CMN,
8966 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8968 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8969 if (TARGET_THUMB2) {
8970 output_asm_insn (ite[swap], operands);
8972 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8976 [(set_attr "conds" "set")
8977 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8978 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8979 (set_attr_alternative "length"
8986 (if_then_else (eq_attr "is_thumb" "no")
8989 (if_then_else (eq_attr "is_thumb" "no")
8992 (if_then_else (eq_attr "is_thumb" "no")
8995 (if_then_else (eq_attr "is_thumb" "no")
8998 (set_attr "type" "multiple")]
;; OR of two store-conditions: after reload, split into a dominance
;; compare (CC mode chosen by arm_select_dominance_cc_mode with
;; DOM_CC_X_OR_Y) followed by a ne:SI store of the combined flag.
9001 (define_insn_and_split "*ior_scc_scc"
9002 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9003 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9004 [(match_operand:SI 1 "s_register_operand" "l,r")
9005 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9006 (match_operator:SI 6 "arm_comparison_operator"
9007 [(match_operand:SI 4 "s_register_operand" "l,r")
9008 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9009 (clobber (reg:CC CC_REGNUM))]
9011 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9014 "TARGET_32BIT && reload_completed"
9018 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9019 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9021 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9023 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9026 [(set_attr "conds" "clob")
9027 (set_attr "enabled_for_short_it" "yes,no")
9028 (set_attr "length" "16")
9029 (set_attr "type" "multiple")]
;; Fused form of *ior_scc_scc whose combined flags are also compared:
;; the dominant CC register (operand 0) already holds the result, so
;; the trailing CMP is folded away by the split.
9032 ; If the above pattern is followed by a CMP insn, then the compare is
9033 ; redundant, since we can rework the conditional instruction that follows.
9034 (define_insn_and_split "*ior_scc_scc_cmp"
9035 [(set (match_operand 0 "dominant_cc_register" "")
9036 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9037 [(match_operand:SI 1 "s_register_operand" "l,r")
9038 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9039 (match_operator:SI 6 "arm_comparison_operator"
9040 [(match_operand:SI 4 "s_register_operand" "l,r")
9041 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9043 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9044 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9045 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9048 "TARGET_32BIT && reload_completed"
9052 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9053 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9055 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9057 [(set_attr "conds" "set")
9058 (set_attr "enabled_for_short_it" "yes,no")
9059 (set_attr "length" "16")
9060 (set_attr "type" "multiple")]
;; AND of two store-conditions: mirror of *ior_scc_scc using
;; DOM_CC_X_AND_Y for the dominance CC mode selection.
9063 (define_insn_and_split "*and_scc_scc"
9064 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9065 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9066 [(match_operand:SI 1 "s_register_operand" "l,r")
9067 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9068 (match_operator:SI 6 "arm_comparison_operator"
9069 [(match_operand:SI 4 "s_register_operand" "l,r")
9070 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9071 (clobber (reg:CC CC_REGNUM))]
9073 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9076 "TARGET_32BIT && reload_completed
9077 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9082 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9083 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9085 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9087 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9090 [(set_attr "conds" "clob")
9091 (set_attr "enabled_for_short_it" "yes,no")
9092 (set_attr "length" "16")
9093 (set_attr "type" "multiple")]
;; Fused form of *and_scc_scc with a redundant trailing compare folded
;; away, exactly as *ior_scc_scc_cmp does for the IOR case.
9096 ; If the above pattern is followed by a CMP insn, then the compare is
9097 ; redundant, since we can rework the conditional instruction that follows.
9098 (define_insn_and_split "*and_scc_scc_cmp"
9099 [(set (match_operand 0 "dominant_cc_register" "")
9100 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9101 [(match_operand:SI 1 "s_register_operand" "l,r")
9102 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9103 (match_operator:SI 6 "arm_comparison_operator"
9104 [(match_operand:SI 4 "s_register_operand" "l,r")
9105 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9107 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9108 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9109 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9112 "TARGET_32BIT && reload_completed"
9116 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9117 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9119 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9121 [(set_attr "conds" "set")
9122 (set_attr "enabled_for_short_it" "yes,no")
9123 (set_attr "length" "16")
9124 (set_attr "type" "multiple")]
9127 ;; If there is no dominance in the comparison, then we can still save an
9128 ;; instruction in the AND case, since we can know that the second compare
9129 ;; need only zero the value if false (if true, then the value is already
9131 (define_insn_and_split "*and_scc_scc_nodom"
9132 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9133 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9134 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9135 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9136 (match_operator:SI 6 "arm_comparison_operator"
9137 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9138 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9139 (clobber (reg:CC CC_REGNUM))]
9141 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9144 "TARGET_32BIT && reload_completed"
9145 [(parallel [(set (match_dup 0)
9146 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9147 (clobber (reg:CC CC_REGNUM))])
9148 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9150 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9153 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9154 operands[4], operands[5]),
9156 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9158 [(set_attr "conds" "clob")
9159 (set_attr "length" "20")
9160 (set_attr "type" "multiple")]
9164 [(set (reg:CC_NOOV CC_REGNUM)
9165 (compare:CC_NOOV (ior:SI
9166 (and:SI (match_operand:SI 0 "s_register_operand" "")
9168 (match_operator:SI 1 "arm_comparison_operator"
9169 [(match_operand:SI 2 "s_register_operand" "")
9170 (match_operand:SI 3 "arm_add_operand" "")]))
9172 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9175 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9177 (set (reg:CC_NOOV CC_REGNUM)
9178 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9183 [(set (reg:CC_NOOV CC_REGNUM)
9184 (compare:CC_NOOV (ior:SI
9185 (match_operator:SI 1 "arm_comparison_operator"
9186 [(match_operand:SI 2 "s_register_operand" "")
9187 (match_operand:SI 3 "arm_add_operand" "")])
9188 (and:SI (match_operand:SI 0 "s_register_operand" "")
9191 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9194 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9196 (set (reg:CC_NOOV CC_REGNUM)
9197 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9200 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9202 (define_insn_and_split "*negscc"
9203 [(set (match_operand:SI 0 "s_register_operand" "=r")
9204 (neg:SI (match_operator 3 "arm_comparison_operator"
9205 [(match_operand:SI 1 "s_register_operand" "r")
9206 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9207 (clobber (reg:CC CC_REGNUM))]
9210 "&& reload_completed"
9213 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9215 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9217 /* Emit mov\\t%0, %1, asr #31 */
9218 emit_insn (gen_rtx_SET (operands[0],
9219 gen_rtx_ASHIFTRT (SImode,
9224 else if (GET_CODE (operands[3]) == NE)
9226 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9227 if (CONST_INT_P (operands[2]))
9228 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9229 gen_int_mode (-INTVAL (operands[2]),
9232 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9234 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9238 gen_rtx_SET (operands[0],
9244 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9245 emit_insn (gen_rtx_SET (cc_reg,
9246 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9247 enum rtx_code rc = GET_CODE (operands[3]);
9249 rc = reverse_condition (rc);
9250 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9255 gen_rtx_SET (operands[0], const0_rtx)));
9256 rc = GET_CODE (operands[3]);
9257 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9262 gen_rtx_SET (operands[0],
9268 [(set_attr "conds" "clob")
9269 (set_attr "length" "12")
9270 (set_attr "type" "multiple")]
9273 (define_insn_and_split "movcond_addsi"
9274 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9276 (match_operator 5 "comparison_operator"
9277 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9278 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9280 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9281 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9282 (clobber (reg:CC CC_REGNUM))]
9285 "&& reload_completed"
9286 [(set (reg:CC_NOOV CC_REGNUM)
9288 (plus:SI (match_dup 3)
9291 (set (match_dup 0) (match_dup 1))
9292 (cond_exec (match_dup 6)
9293 (set (match_dup 0) (match_dup 2)))]
9296 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9297 operands[3], operands[4]);
9298 enum rtx_code rc = GET_CODE (operands[5]);
9299 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9300 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9301 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9302 rc = reverse_condition (rc);
9304 std::swap (operands[1], operands[2]);
9306 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9309 [(set_attr "conds" "clob")
9310 (set_attr "enabled_for_short_it" "no,yes,yes")
9311 (set_attr "type" "multiple")]
9314 (define_insn "movcond"
9315 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9317 (match_operator 5 "arm_comparison_operator"
9318 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9319 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9320 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9321 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9322 (clobber (reg:CC CC_REGNUM))]
9325 if (GET_CODE (operands[5]) == LT
9326 && (operands[4] == const0_rtx))
9328 if (which_alternative != 1 && REG_P (operands[1]))
9330 if (operands[2] == const0_rtx)
9331 return \"and\\t%0, %1, %3, asr #31\";
9332 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9334 else if (which_alternative != 0 && REG_P (operands[2]))
9336 if (operands[1] == const0_rtx)
9337 return \"bic\\t%0, %2, %3, asr #31\";
9338 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9340 /* The only case that falls through to here is when both ops 1 & 2
9344 if (GET_CODE (operands[5]) == GE
9345 && (operands[4] == const0_rtx))
9347 if (which_alternative != 1 && REG_P (operands[1]))
9349 if (operands[2] == const0_rtx)
9350 return \"bic\\t%0, %1, %3, asr #31\";
9351 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9353 else if (which_alternative != 0 && REG_P (operands[2]))
9355 if (operands[1] == const0_rtx)
9356 return \"and\\t%0, %2, %3, asr #31\";
9357 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9359 /* The only case that falls through to here is when both ops 1 & 2
9362 if (CONST_INT_P (operands[4])
9363 && !const_ok_for_arm (INTVAL (operands[4])))
9364 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9366 output_asm_insn (\"cmp\\t%3, %4\", operands);
9367 if (which_alternative != 0)
9368 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9369 if (which_alternative != 1)
9370 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9373 [(set_attr "conds" "clob")
9374 (set_attr "length" "8,8,12")
9375 (set_attr "type" "multiple")]
9378 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9380 (define_insn "*ifcompare_plus_move"
9381 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9382 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9383 [(match_operand:SI 4 "s_register_operand" "r,r")
9384 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9386 (match_operand:SI 2 "s_register_operand" "r,r")
9387 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9388 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9389 (clobber (reg:CC CC_REGNUM))]
9392 [(set_attr "conds" "clob")
9393 (set_attr "length" "8,12")
9394 (set_attr "type" "multiple")]
9397 (define_insn "*if_plus_move"
9398 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9400 (match_operator 4 "arm_comparison_operator"
9401 [(match_operand 5 "cc_register" "") (const_int 0)])
9403 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9404 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9405 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9409 sub%d4\\t%0, %2, #%n3
9410 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9411 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9412 [(set_attr "conds" "use")
9413 (set_attr "length" "4,4,8,8")
9414 (set_attr_alternative "type"
9415 [(if_then_else (match_operand 3 "const_int_operand" "")
9416 (const_string "alu_imm" )
9417 (const_string "alu_sreg"))
9418 (const_string "alu_imm")
9419 (const_string "multiple")
9420 (const_string "multiple")])]
9423 (define_insn "*ifcompare_move_plus"
9424 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9425 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9426 [(match_operand:SI 4 "s_register_operand" "r,r")
9427 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9428 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9430 (match_operand:SI 2 "s_register_operand" "r,r")
9431 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9432 (clobber (reg:CC CC_REGNUM))]
9435 [(set_attr "conds" "clob")
9436 (set_attr "length" "8,12")
9437 (set_attr "type" "multiple")]
9440 (define_insn "*if_move_plus"
9441 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9443 (match_operator 4 "arm_comparison_operator"
9444 [(match_operand 5 "cc_register" "") (const_int 0)])
9445 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9447 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9448 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9452 sub%D4\\t%0, %2, #%n3
9453 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9454 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9455 [(set_attr "conds" "use")
9456 (set_attr "length" "4,4,8,8")
9457 (set_attr_alternative "type"
9458 [(if_then_else (match_operand 3 "const_int_operand" "")
9459 (const_string "alu_imm" )
9460 (const_string "alu_sreg"))
9461 (const_string "alu_imm")
9462 (const_string "multiple")
9463 (const_string "multiple")])]
9466 (define_insn "*ifcompare_arith_arith"
9467 [(set (match_operand:SI 0 "s_register_operand" "=r")
9468 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9469 [(match_operand:SI 5 "s_register_operand" "r")
9470 (match_operand:SI 6 "arm_add_operand" "rIL")])
9471 (match_operator:SI 8 "shiftable_operator"
9472 [(match_operand:SI 1 "s_register_operand" "r")
9473 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9474 (match_operator:SI 7 "shiftable_operator"
9475 [(match_operand:SI 3 "s_register_operand" "r")
9476 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9477 (clobber (reg:CC CC_REGNUM))]
9480 [(set_attr "conds" "clob")
9481 (set_attr "length" "12")
9482 (set_attr "type" "multiple")]
9485 (define_insn "*if_arith_arith"
9486 [(set (match_operand:SI 0 "s_register_operand" "=r")
9487 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9488 [(match_operand 8 "cc_register" "") (const_int 0)])
9489 (match_operator:SI 6 "shiftable_operator"
9490 [(match_operand:SI 1 "s_register_operand" "r")
9491 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9492 (match_operator:SI 7 "shiftable_operator"
9493 [(match_operand:SI 3 "s_register_operand" "r")
9494 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9496 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9497 [(set_attr "conds" "use")
9498 (set_attr "length" "8")
9499 (set_attr "type" "multiple")]
9502 (define_insn "*ifcompare_arith_move"
9503 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9504 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9505 [(match_operand:SI 2 "s_register_operand" "r,r")
9506 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9507 (match_operator:SI 7 "shiftable_operator"
9508 [(match_operand:SI 4 "s_register_operand" "r,r")
9509 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9510 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9511 (clobber (reg:CC CC_REGNUM))]
9514 /* If we have an operation where (op x 0) is the identity operation and
9515 the conditional operator is LT or GE and we are comparing against zero and
9516 everything is in registers then we can do this in two instructions. */
9517 if (operands[3] == const0_rtx
9518 && GET_CODE (operands[7]) != AND
9519 && REG_P (operands[5])
9520 && REG_P (operands[1])
9521 && REGNO (operands[1]) == REGNO (operands[4])
9522 && REGNO (operands[4]) != REGNO (operands[0]))
9524 if (GET_CODE (operands[6]) == LT)
9525 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9526 else if (GET_CODE (operands[6]) == GE)
9527 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9529 if (CONST_INT_P (operands[3])
9530 && !const_ok_for_arm (INTVAL (operands[3])))
9531 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9533 output_asm_insn (\"cmp\\t%2, %3\", operands);
9534 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9535 if (which_alternative != 0)
9536 return \"mov%D6\\t%0, %1\";
9539 [(set_attr "conds" "clob")
9540 (set_attr "length" "8,12")
9541 (set_attr "type" "multiple")]
9544 (define_insn "*if_arith_move"
9545 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9546 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9547 [(match_operand 6 "cc_register" "") (const_int 0)])
9548 (match_operator:SI 5 "shiftable_operator"
9549 [(match_operand:SI 2 "s_register_operand" "r,r")
9550 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9551 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9555 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9556 [(set_attr "conds" "use")
9557 (set_attr "length" "4,8")
9558 (set_attr_alternative "type"
9559 [(if_then_else (match_operand 3 "const_int_operand" "")
9560 (const_string "alu_shift_imm" )
9561 (const_string "alu_shift_reg"))
9562 (const_string "multiple")])]
9565 (define_insn "*ifcompare_move_arith"
9566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9567 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9568 [(match_operand:SI 4 "s_register_operand" "r,r")
9569 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9570 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9571 (match_operator:SI 7 "shiftable_operator"
9572 [(match_operand:SI 2 "s_register_operand" "r,r")
9573 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9574 (clobber (reg:CC CC_REGNUM))]
9577 /* If we have an operation where (op x 0) is the identity operation and
9578 the conditional operator is LT or GE and we are comparing against zero and
9579 everything is in registers then we can do this in two instructions */
9580 if (operands[5] == const0_rtx
9581 && GET_CODE (operands[7]) != AND
9582 && REG_P (operands[3])
9583 && REG_P (operands[1])
9584 && REGNO (operands[1]) == REGNO (operands[2])
9585 && REGNO (operands[2]) != REGNO (operands[0]))
9587 if (GET_CODE (operands[6]) == GE)
9588 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9589 else if (GET_CODE (operands[6]) == LT)
9590 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9593 if (CONST_INT_P (operands[5])
9594 && !const_ok_for_arm (INTVAL (operands[5])))
9595 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9597 output_asm_insn (\"cmp\\t%4, %5\", operands);
9599 if (which_alternative != 0)
9600 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9601 return \"%I7%D6\\t%0, %2, %3\";
9603 [(set_attr "conds" "clob")
9604 (set_attr "length" "8,12")
9605 (set_attr "type" "multiple")]
9608 (define_insn "*if_move_arith"
9609 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9611 (match_operator 4 "arm_comparison_operator"
9612 [(match_operand 6 "cc_register" "") (const_int 0)])
9613 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9614 (match_operator:SI 5 "shiftable_operator"
9615 [(match_operand:SI 2 "s_register_operand" "r,r")
9616 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9620 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9621 [(set_attr "conds" "use")
9622 (set_attr "length" "4,8")
9623 (set_attr_alternative "type"
9624 [(if_then_else (match_operand 3 "const_int_operand" "")
9625 (const_string "alu_shift_imm" )
9626 (const_string "alu_shift_reg"))
9627 (const_string "multiple")])]
9630 (define_insn "*ifcompare_move_not"
9631 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9633 (match_operator 5 "arm_comparison_operator"
9634 [(match_operand:SI 3 "s_register_operand" "r,r")
9635 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9636 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9638 (match_operand:SI 2 "s_register_operand" "r,r"))))
9639 (clobber (reg:CC CC_REGNUM))]
9642 [(set_attr "conds" "clob")
9643 (set_attr "length" "8,12")
9644 (set_attr "type" "multiple")]
9647 (define_insn "*if_move_not"
9648 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9650 (match_operator 4 "arm_comparison_operator"
9651 [(match_operand 3 "cc_register" "") (const_int 0)])
9652 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9653 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9657 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9658 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9659 [(set_attr "conds" "use")
9660 (set_attr "type" "mvn_reg")
9661 (set_attr "length" "4,8,8")
9662 (set_attr "type" "mvn_reg,multiple,multiple")]
9665 (define_insn "*ifcompare_not_move"
9666 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9668 (match_operator 5 "arm_comparison_operator"
9669 [(match_operand:SI 3 "s_register_operand" "r,r")
9670 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9672 (match_operand:SI 2 "s_register_operand" "r,r"))
9673 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9674 (clobber (reg:CC CC_REGNUM))]
9677 [(set_attr "conds" "clob")
9678 (set_attr "length" "8,12")
9679 (set_attr "type" "multiple")]
9682 (define_insn "*if_not_move"
9683 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9685 (match_operator 4 "arm_comparison_operator"
9686 [(match_operand 3 "cc_register" "") (const_int 0)])
9687 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9688 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9692 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9693 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9694 [(set_attr "conds" "use")
9695 (set_attr "type" "mvn_reg,multiple,multiple")
9696 (set_attr "length" "4,8,8")]
9699 (define_insn "*ifcompare_shift_move"
9700 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9702 (match_operator 6 "arm_comparison_operator"
9703 [(match_operand:SI 4 "s_register_operand" "r,r")
9704 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9705 (match_operator:SI 7 "shift_operator"
9706 [(match_operand:SI 2 "s_register_operand" "r,r")
9707 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9708 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9709 (clobber (reg:CC CC_REGNUM))]
9712 [(set_attr "conds" "clob")
9713 (set_attr "length" "8,12")
9714 (set_attr "type" "multiple")]
9717 (define_insn "*if_shift_move"
9718 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9720 (match_operator 5 "arm_comparison_operator"
9721 [(match_operand 6 "cc_register" "") (const_int 0)])
9722 (match_operator:SI 4 "shift_operator"
9723 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9724 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9725 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9729 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9730 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9731 [(set_attr "conds" "use")
9732 (set_attr "shift" "2")
9733 (set_attr "length" "4,8,8")
9734 (set_attr_alternative "type"
9735 [(if_then_else (match_operand 3 "const_int_operand" "")
9736 (const_string "mov_shift" )
9737 (const_string "mov_shift_reg"))
9738 (const_string "multiple")
9739 (const_string "multiple")])]
9742 (define_insn "*ifcompare_move_shift"
9743 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9745 (match_operator 6 "arm_comparison_operator"
9746 [(match_operand:SI 4 "s_register_operand" "r,r")
9747 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9748 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9749 (match_operator:SI 7 "shift_operator"
9750 [(match_operand:SI 2 "s_register_operand" "r,r")
9751 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9752 (clobber (reg:CC CC_REGNUM))]
9755 [(set_attr "conds" "clob")
9756 (set_attr "length" "8,12")
9757 (set_attr "type" "multiple")]
9760 (define_insn "*if_move_shift"
9761 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9763 (match_operator 5 "arm_comparison_operator"
9764 [(match_operand 6 "cc_register" "") (const_int 0)])
9765 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9766 (match_operator:SI 4 "shift_operator"
9767 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9768 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9772 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9773 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9774 [(set_attr "conds" "use")
9775 (set_attr "shift" "2")
9776 (set_attr "length" "4,8,8")
9777 (set_attr_alternative "type"
9778 [(if_then_else (match_operand 3 "const_int_operand" "")
9779 (const_string "mov_shift" )
9780 (const_string "mov_shift_reg"))
9781 (const_string "multiple")
9782 (const_string "multiple")])]
9785 (define_insn "*ifcompare_shift_shift"
9786 [(set (match_operand:SI 0 "s_register_operand" "=r")
9788 (match_operator 7 "arm_comparison_operator"
9789 [(match_operand:SI 5 "s_register_operand" "r")
9790 (match_operand:SI 6 "arm_add_operand" "rIL")])
9791 (match_operator:SI 8 "shift_operator"
9792 [(match_operand:SI 1 "s_register_operand" "r")
9793 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9794 (match_operator:SI 9 "shift_operator"
9795 [(match_operand:SI 3 "s_register_operand" "r")
9796 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9797 (clobber (reg:CC CC_REGNUM))]
9800 [(set_attr "conds" "clob")
9801 (set_attr "length" "12")
9802 (set_attr "type" "multiple")]
9805 (define_insn "*if_shift_shift"
9806 [(set (match_operand:SI 0 "s_register_operand" "=r")
9808 (match_operator 5 "arm_comparison_operator"
9809 [(match_operand 8 "cc_register" "") (const_int 0)])
9810 (match_operator:SI 6 "shift_operator"
9811 [(match_operand:SI 1 "s_register_operand" "r")
9812 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9813 (match_operator:SI 7 "shift_operator"
9814 [(match_operand:SI 3 "s_register_operand" "r")
9815 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9817 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9818 [(set_attr "conds" "use")
9819 (set_attr "shift" "1")
9820 (set_attr "length" "8")
9821 (set (attr "type") (if_then_else
9822 (and (match_operand 2 "const_int_operand" "")
9823 (match_operand 4 "const_int_operand" ""))
9824 (const_string "mov_shift")
9825 (const_string "mov_shift_reg")))]
9828 (define_insn "*ifcompare_not_arith"
9829 [(set (match_operand:SI 0 "s_register_operand" "=r")
9831 (match_operator 6 "arm_comparison_operator"
9832 [(match_operand:SI 4 "s_register_operand" "r")
9833 (match_operand:SI 5 "arm_add_operand" "rIL")])
9834 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9835 (match_operator:SI 7 "shiftable_operator"
9836 [(match_operand:SI 2 "s_register_operand" "r")
9837 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9838 (clobber (reg:CC CC_REGNUM))]
9841 [(set_attr "conds" "clob")
9842 (set_attr "length" "12")
9843 (set_attr "type" "multiple")]
9846 (define_insn "*if_not_arith"
9847 [(set (match_operand:SI 0 "s_register_operand" "=r")
9849 (match_operator 5 "arm_comparison_operator"
9850 [(match_operand 4 "cc_register" "") (const_int 0)])
9851 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9852 (match_operator:SI 6 "shiftable_operator"
9853 [(match_operand:SI 2 "s_register_operand" "r")
9854 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9856 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9857 [(set_attr "conds" "use")
9858 (set_attr "type" "mvn_reg")
9859 (set_attr "length" "8")]
9862 (define_insn "*ifcompare_arith_not"
9863 [(set (match_operand:SI 0 "s_register_operand" "=r")
9865 (match_operator 6 "arm_comparison_operator"
9866 [(match_operand:SI 4 "s_register_operand" "r")
9867 (match_operand:SI 5 "arm_add_operand" "rIL")])
9868 (match_operator:SI 7 "shiftable_operator"
9869 [(match_operand:SI 2 "s_register_operand" "r")
9870 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9871 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9872 (clobber (reg:CC CC_REGNUM))]
9875 [(set_attr "conds" "clob")
9876 (set_attr "length" "12")
9877 (set_attr "type" "multiple")]
9880 (define_insn "*if_arith_not"
9881 [(set (match_operand:SI 0 "s_register_operand" "=r")
9883 (match_operator 5 "arm_comparison_operator"
9884 [(match_operand 4 "cc_register" "") (const_int 0)])
9885 (match_operator:SI 6 "shiftable_operator"
9886 [(match_operand:SI 2 "s_register_operand" "r")
9887 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9888 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9890 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9891 [(set_attr "conds" "use")
9892 (set_attr "type" "multiple")
9893 (set_attr "length" "8")]
9896 (define_insn "*ifcompare_neg_move"
9897 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9899 (match_operator 5 "arm_comparison_operator"
9900 [(match_operand:SI 3 "s_register_operand" "r,r")
9901 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9902 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9903 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9904 (clobber (reg:CC CC_REGNUM))]
9907 [(set_attr "conds" "clob")
9908 (set_attr "length" "8,12")
9909 (set_attr "type" "multiple")]
9912 (define_insn_and_split "*if_neg_move"
9913 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9915 (match_operator 4 "arm_comparison_operator"
9916 [(match_operand 3 "cc_register" "") (const_int 0)])
9917 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9918 (match_operand:SI 1 "s_register_operand" "0,0")))]
9921 "&& reload_completed"
9922 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9923 (set (match_dup 0) (neg:SI (match_dup 2))))]
9925 [(set_attr "conds" "use")
9926 (set_attr "length" "4")
9927 (set_attr "arch" "t2,32")
9928 (set_attr "enabled_for_short_it" "yes,no")
9929 (set_attr "type" "logic_shift_imm")]
9932 (define_insn "*ifcompare_move_neg"
9933 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9935 (match_operator 5 "arm_comparison_operator"
9936 [(match_operand:SI 3 "s_register_operand" "r,r")
9937 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9938 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9939 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9940 (clobber (reg:CC CC_REGNUM))]
9943 [(set_attr "conds" "clob")
9944 (set_attr "length" "8,12")
9945 (set_attr "type" "multiple")]
9948 (define_insn_and_split "*if_move_neg"
9949 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9951 (match_operator 4 "arm_comparison_operator"
9952 [(match_operand 3 "cc_register" "") (const_int 0)])
9953 (match_operand:SI 1 "s_register_operand" "0,0")
9954 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9957 "&& reload_completed"
9958 [(cond_exec (match_dup 5)
9959 (set (match_dup 0) (neg:SI (match_dup 2))))]
9961 machine_mode mode = GET_MODE (operands[3]);
9962 rtx_code rc = GET_CODE (operands[4]);
9964 if (mode == CCFPmode || mode == CCFPEmode)
9965 rc = reverse_condition_maybe_unordered (rc);
9967 rc = reverse_condition (rc);
9969 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9971 [(set_attr "conds" "use")
9972 (set_attr "length" "4")
9973 (set_attr "arch" "t2,32")
9974 (set_attr "enabled_for_short_it" "yes,no")
9975 (set_attr "type" "logic_shift_imm")]
9978 (define_insn "*arith_adjacentmem"
9979 [(set (match_operand:SI 0 "s_register_operand" "=r")
9980 (match_operator:SI 1 "shiftable_operator"
9981 [(match_operand:SI 2 "memory_operand" "m")
9982 (match_operand:SI 3 "memory_operand" "m")]))
9983 (clobber (match_scratch:SI 4 "=r"))]
9984 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9990 HOST_WIDE_INT val1 = 0, val2 = 0;
9992 if (REGNO (operands[0]) > REGNO (operands[4]))
9994 ldm[1] = operands[4];
9995 ldm[2] = operands[0];
9999 ldm[1] = operands[0];
10000 ldm[2] = operands[4];
10003 base_reg = XEXP (operands[2], 0);
10005 if (!REG_P (base_reg))
10007 val1 = INTVAL (XEXP (base_reg, 1));
10008 base_reg = XEXP (base_reg, 0);
10011 if (!REG_P (XEXP (operands[3], 0)))
10012 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10014 arith[0] = operands[0];
10015 arith[3] = operands[1];
10029 if (val1 !=0 && val2 != 0)
10033 if (val1 == 4 || val2 == 4)
10034 /* Other val must be 8, since we know they are adjacent and neither
10036 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10037 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10039 ldm[0] = ops[0] = operands[4];
10041 ops[2] = GEN_INT (val1);
10042 output_add_immediate (ops);
10044 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10046 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10050 /* Offset is out of range for a single add, so use two ldr. */
10053 ops[2] = GEN_INT (val1);
10054 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10056 ops[2] = GEN_INT (val2);
10057 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10060 else if (val1 != 0)
10063 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10065 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10070 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10072 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10074 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10077 [(set_attr "length" "12")
10078 (set_attr "predicable" "yes")
10079 (set_attr "type" "load_4")]
10082 ; This pattern is never tried by combine, so do it as a peephole
10085 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10086 (match_operand:SI 1 "arm_general_register_operand" ""))
10087 (set (reg:CC CC_REGNUM)
10088 (compare:CC (match_dup 1) (const_int 0)))]
10090 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10091 (set (match_dup 0) (match_dup 1))])]
10096 [(set (match_operand:SI 0 "s_register_operand" "")
10097 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10099 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10100 [(match_operand:SI 3 "s_register_operand" "")
10101 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10102 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10104 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10105 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10110 ;; This split can be used because CC_Z mode implies that the following
10111 ;; branch will be an equality, or an unsigned inequality, so the sign
10112 ;; extension is not needed.
10115 [(set (reg:CC_Z CC_REGNUM)
10117 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10119 (match_operand 1 "const_int_operand" "")))
10120 (clobber (match_scratch:SI 2 ""))]
10122 && ((UINTVAL (operands[1]))
10123 == ((UINTVAL (operands[1])) >> 24) << 24)"
10124 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10125 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10127 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10130 ;; ??? Check the patterns above for Thumb-2 usefulness
;; Function entry: dispatch to the target-specific prologue expander
;; (arm_expand_prologue for 32-bit, thumb1_expand_prologue otherwise).
10132 (define_expand "prologue"
10133 [(clobber (const_int 0))]
10136 arm_expand_prologue ();
10138 thumb1_expand_prologue ();
;; Function exit: for eh_return frames keep r2 live first, then emit
;; either the Thumb-1 epilogue plus a VUNSPEC_EPILOGUE return jump, a
;; bare return insn when HAVE_return allows it, or the 32-bit epilogue.
10143 (define_expand "epilogue"
10144 [(clobber (const_int 0))]
10147 if (crtl->calls_eh_return)
10148 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10151 thumb1_expand_epilogue ();
10152 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10153 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10155 else if (HAVE_return)
10157 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10158 no need for explicit testing again. */
10159 emit_jump_insn (gen_return ());
10161 else if (TARGET_32BIT)
10163 arm_expand_epilogue (true);
10169 ;; Note - although unspec_volatiles USE all hard registers,
10170 ;; USEs are ignored after reload has completed. Thus we need
10171 ;; to add an unspec of the link register to ensure that flow
10172 ;; does not think that it is unused by the sibcall branch that
10173 ;; will replace the standard function epilogue.
;; Epilogue emitted before a sibling call; LR is wrapped in
;; UNSPEC_REGISTER_USE so dataflow keeps it live across the branch.
10174 (define_expand "sibcall_epilogue"
10175 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10176 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10179 arm_expand_epilogue (false);
;; Exception-handler epilogue: operand 1 records the stack adjustment;
;; operand 2 is the return address, copied into r2 when not already there.
10184 (define_expand "eh_epilogue"
10185 [(use (match_operand:SI 0 "register_operand"))
10186 (use (match_operand:SI 1 "register_operand"))
10187 (use (match_operand:SI 2 "register_operand"))]
10191 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10192 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10194 rtx ra = gen_rtx_REG (Pmode, 2);
10196 emit_move_insn (ra, operands[2]);
10199 /* This is a hack -- we may have crystallized the function type too
10201 cfun->machine->func_type = 0;
10205 ;; This split is only used during output to reduce the number of patterns
10206 ;; that need assembler instructions adding to them. We allowed the setting
10207 ;; of the conditions to be implicit during rtl generation so that
10208 ;; the conditional compare patterns would work. However this conflicts to
10209 ;; some extent with the conditional data operations, so we have to split them
10212 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10213 ;; conditional execution sufficient?
;; Split a conditional move into an explicit compare that sets the CC
;; register, followed by a cond_exec'd move.  Here the move runs under
;; the REVERSED condition (operand 7); FP compares must use
;; reverse_condition_maybe_unordered so NaNs are handled correctly.
10216 [(set (match_operand:SI 0 "s_register_operand" "")
10217 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10218 [(match_operand 2 "" "") (match_operand 3 "" "")])
10220 (match_operand 4 "" "")))
10221 (clobber (reg:CC CC_REGNUM))]
10222 "TARGET_ARM && reload_completed"
10223 [(set (match_dup 5) (match_dup 6))
10224 (cond_exec (match_dup 7)
10225 (set (match_dup 0) (match_dup 4)))]
10228 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10229 operands[2], operands[3]);
10230 enum rtx_code rc = GET_CODE (operands[1]);
10232 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10233 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10234 if (mode == CCFPmode || mode == CCFPEmode)
10235 rc = reverse_condition_maybe_unordered (rc);
10237 rc = reverse_condition (rc);
10239 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
;; Companion split: the value is used when the condition holds directly
;; (cond_exec reuses operator 1 on the CC register), so no reversal.
10244 [(set (match_operand:SI 0 "s_register_operand" "")
10245 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10246 [(match_operand 2 "" "") (match_operand 3 "" "")])
10247 (match_operand 4 "" "")
10249 (clobber (reg:CC CC_REGNUM))]
10250 "TARGET_ARM && reload_completed"
10251 [(set (match_dup 5) (match_dup 6))
10252 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10253 (set (match_dup 0) (match_dup 4)))]
10256 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10257 operands[2], operands[3]);
10259 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10260 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
;; Split an if_then_else with two arbitrary arms: one compare, then two
;; cond_exec moves -- operand 4 under the condition itself, operand 5
;; under the reversed condition (operand 8).
10265 [(set (match_operand:SI 0 "s_register_operand" "")
10266 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10267 [(match_operand 2 "" "") (match_operand 3 "" "")])
10268 (match_operand 4 "" "")
10269 (match_operand 5 "" "")))
10270 (clobber (reg:CC CC_REGNUM))]
10271 "TARGET_ARM && reload_completed"
10272 [(set (match_dup 6) (match_dup 7))
10273 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10274 (set (match_dup 0) (match_dup 4)))
10275 (cond_exec (match_dup 8)
10276 (set (match_dup 0) (match_dup 5)))]
10279 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10280 operands[2], operands[3]);
10281 enum rtx_code rc = GET_CODE (operands[1]);
10283 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10284 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10285 if (mode == CCFPmode || mode == CCFPEmode)
10286 rc = reverse_condition_maybe_unordered (rc);
10288 rc = reverse_condition (rc);
10290 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; As the previous split, but the false arm is the bitwise NOT of a
;; register, so the second cond_exec emits the NOT under the reversed
;; condition (an MVN at assembly level).
10295 [(set (match_operand:SI 0 "s_register_operand" "")
10296 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10297 [(match_operand:SI 2 "s_register_operand" "")
10298 (match_operand:SI 3 "arm_add_operand" "")])
10299 (match_operand:SI 4 "arm_rhs_operand" "")
10301 (match_operand:SI 5 "s_register_operand" ""))))
10302 (clobber (reg:CC CC_REGNUM))]
10303 "TARGET_ARM && reload_completed"
10304 [(set (match_dup 6) (match_dup 7))
10305 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10306 (set (match_dup 0) (match_dup 4)))
10307 (cond_exec (match_dup 8)
10308 (set (match_dup 0) (not:SI (match_dup 5))))]
10311 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10312 operands[2], operands[3]);
10313 enum rtx_code rc = GET_CODE (operands[1]);
10315 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10316 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10317 if (mode == CCFPmode || mode == CCFPEmode)
10318 rc = reverse_condition_maybe_unordered (rc);
10320 rc = reverse_condition (rc);
10322 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional "move or move-inverted" on an existing CC value: the
;; second alternative needs both a MOV and an MVN, hence length 4,8.
10326 (define_insn "*cond_move_not"
10327 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10328 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10329 [(match_operand 3 "cc_register" "") (const_int 0)])
10330 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10332 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10336 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10337 [(set_attr "conds" "use")
10338 (set_attr "type" "mvn_reg,multiple")
10339 (set_attr "length" "4,8")]
10342 ;; The next two patterns occur when an AND operation is followed by a
10343 ;; scc insn sequence
;; Extract one bit and sign-extend it across the word: ANDS with the
;; single-bit mask (operand 2 is converted from a bit number to a mask
;; in the output code), then MVNNE yields -1 when the bit was set.
10345 (define_insn "*sign_extract_onebit"
10346 [(set (match_operand:SI 0 "s_register_operand" "=r")
10347 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10349 (match_operand:SI 2 "const_int_operand" "n")))
10350 (clobber (reg:CC CC_REGNUM))]
10353 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10354 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10355 return \"mvnne\\t%0, #0\";
10357 [(set_attr "conds" "clob")
10358 (set_attr "length" "8")
10359 (set_attr "type" "multiple")]
;; Complement of the above: TST the bit, then MVNEQ/MOVNE gives -1 when
;; the bit is clear and 0 when it is set (three insns, length 12).
10362 (define_insn "*not_signextract_onebit"
10363 [(set (match_operand:SI 0 "s_register_operand" "=r")
10365 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10367 (match_operand:SI 2 "const_int_operand" "n"))))
10368 (clobber (reg:CC CC_REGNUM))]
10371 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10372 output_asm_insn (\"tst\\t%1, %2\", operands);
10373 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10374 return \"movne\\t%0, #0\";
10376 [(set_attr "conds" "clob")
10377 (set_attr "length" "12")
10378 (set_attr "type" "multiple")]
10380 ;; ??? The above patterns need auditing for Thumb-2
10382 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10383 ;; expressions. For simplicity, the first register is also in the unspec
10385 ;; To avoid the usage of GNU extension, the length attribute is computed
10386 ;; in a C function arm_attr_length_push_multi.
;; Prologue push of multiple registers.  A single register on ARM uses
;; STR with pre-decrement writeback instead; otherwise the push list is
;; assembled into the `pattern' buffer one register name at a time.
10387 (define_insn "*push_multi"
10388 [(match_parallel 2 "multi_register_push"
10389 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10390 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10391 UNSPEC_PUSH_MULT))])]
10395 int num_saves = XVECLEN (operands[2], 0);
10397 /* For the StrongARM at least it is faster to
10398 use STR to store only a single register.
10399 In Thumb mode always use push, and the assembler will pick
10400 something appropriate. */
10401 if (num_saves == 1 && TARGET_ARM)
10402 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10409 strcpy (pattern, \"push%?\\t{%1\");
10411 strcpy (pattern, \"push\\t{%1\");
10413 for (i = 1; i < num_saves; i++)
10415 strcat (pattern, \", %|\");
10417 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10420 strcat (pattern, \"}\");
10421 output_asm_insn (pattern, operands);
10426 [(set_attr "type" "store_16")
10427 (set (attr "length")
10428 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Scheduling barrier that ties stack memory to the two address
;; registers; emits no code at all (length 0, type "block").
10431 (define_insn "stack_tie"
10432 [(set (mem:BLK (scratch))
10433 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10434 (match_operand:SI 1 "s_register_operand" "rk")]
10438 [(set_attr "length" "0")
10439 (set_attr "type" "block")]
10442 ;; Pop (as used in epilogue RTL)
;; Epilogue LDM with base-register writeback (operand 2 is the base
;; adjustment).  Only valid during/after reload; the assembly is
;; produced by arm_output_multireg_pop.
10444 (define_insn "*load_multiple_with_writeback"
10445 [(match_parallel 0 "load_multiple_operation"
10446 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10447 (plus:SI (match_dup 1)
10448 (match_operand:SI 2 "const_int_I_operand" "I")))
10449 (set (match_operand:SI 3 "s_register_operand" "=rk")
10450 (mem:SI (match_dup 1)))
10452 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10455 arm_output_multireg_pop (operands, /*return_pc=*/false,
10456 /*cond=*/const_true_rtx,
10462 [(set_attr "type" "load_16")
10463 (set_attr "predicable" "yes")
10464 (set (attr "length")
10465 (symbol_ref "arm_attr_length_pop_multi (operands,
10466 /*return_pc=*/false,
10467 /*write_back_p=*/true)"))]
10470 ;; Pop with return (as used in epilogue RTL)
10472 ;; This instruction is generated when the registers are popped at the end of
10473 ;; epilogue. Here, instead of popping the value into LR and then generating
10474 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
;; Pop straight into the PC (combined register restore + function
;; return), with base-register writeback.
10476 (define_insn "*pop_multiple_with_writeback_and_return"
10477 [(match_parallel 0 "pop_multiple_return"
10479 (set (match_operand:SI 1 "s_register_operand" "+rk")
10480 (plus:SI (match_dup 1)
10481 (match_operand:SI 2 "const_int_I_operand" "I")))
10482 (set (match_operand:SI 3 "s_register_operand" "=rk")
10483 (mem:SI (match_dup 1)))
10485 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10488 arm_output_multireg_pop (operands, /*return_pc=*/true,
10489 /*cond=*/const_true_rtx,
10495 [(set_attr "type" "load_16")
10496 (set_attr "predicable" "yes")
10497 (set (attr "length")
10498 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10499 /*write_back_p=*/true)"))]
;; Same combined pop-and-return, but without writeback of the base.
10502 (define_insn "*pop_multiple_with_return"
10503 [(match_parallel 0 "pop_multiple_return"
10505 (set (match_operand:SI 2 "s_register_operand" "=rk")
10506 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10508 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10511 arm_output_multireg_pop (operands, /*return_pc=*/true,
10512 /*cond=*/const_true_rtx,
10518 [(set_attr "type" "load_16")
10519 (set_attr "predicable" "yes")
10520 (set (attr "length")
10521 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10522 /*write_back_p=*/false)"))]
10525 ;; Load into PC and return
;; Single-register pop of the return address directly into the PC,
;; using post-increment addressing on the stack pointer operand.
10526 (define_insn "*ldr_with_return"
10528 (set (reg:SI PC_REGNUM)
10529 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10530 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10531 "ldr%?\t%|pc, [%0], #4"
10532 [(set_attr "type" "load_4")
10533 (set_attr "predicable" "yes")]
10535 ;; Pop for floating point registers (as used in epilogue RTL)
;; VLDM pop of consecutive VFP double registers with base writeback.
;; The "first-last" register range string is assembled by hand into
;; `pattern' from the first and last SETs of the parallel.
10536 (define_insn "*vfp_pop_multiple_with_writeback"
10537 [(match_parallel 0 "pop_multiple_fp"
10538 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10539 (plus:SI (match_dup 1)
10540 (match_operand:SI 2 "const_int_I_operand" "I")))
10541 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10542 (mem:DF (match_dup 1)))])]
10543 "TARGET_32BIT && TARGET_HARD_FLOAT"
10546 int num_regs = XVECLEN (operands[0], 0);
10549 strcpy (pattern, \"vldm\\t\");
10550 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10551 strcat (pattern, \"!, {\");
10552 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10553 strcat (pattern, \"%P0\");
10554 if ((num_regs - 1) > 1)
10556 strcat (pattern, \"-%P1\");
10557 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10560 strcat (pattern, \"}\");
10561 output_asm_insn (pattern, op_list);
10565 [(set_attr "type" "load_16")
10566 (set_attr "conds" "unconditional")
10567 (set_attr "predicable" "no")]
10570 ;; Special patterns for dealing with the constant pool
;; Constant-pool bookkeeping: align the pool to 4 or 8 bytes and mark
;; its end.  These emit assembler directives only, no instructions.
10572 (define_insn "align_4"
10573 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10576 assemble_align (32);
10579 [(set_attr "type" "no_insn")]
10582 (define_insn "align_8"
10583 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10586 assemble_align (64);
10589 [(set_attr "type" "no_insn")]
10592 (define_insn "consttable_end"
10593 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10596 making_const_table = FALSE;
10599 [(set_attr "type" "no_insn")]
;; 1- and 2-byte constant-pool entries, zero-padded up to 4 bytes.
;; For consttable_2, a float-class value (fp16) is emitted through
;; arm_emit_fp16_const rather than assemble_integer.
10602 (define_insn "consttable_1"
10603 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10606 making_const_table = TRUE;
10607 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10608 assemble_zeros (3);
10611 [(set_attr "length" "4")
10612 (set_attr "type" "no_insn")]
10615 (define_insn "consttable_2"
10616 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10620 rtx x = operands[0];
10621 making_const_table = TRUE;
10622 switch (GET_MODE_CLASS (GET_MODE (x)))
10625 arm_emit_fp16_const (x);
10628 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10629 assemble_zeros (2);
10634 [(set_attr "length" "4")
10635 (set_attr "type" "no_insn")]
;; 4-, 8- and 16-byte constant-pool entries: scalar floats go through
;; assemble_real, everything else through assemble_integer.  The
;; 4-byte case also strips a stray HIGH (see the XXX comment below).
10638 (define_insn "consttable_4"
10639 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10643 rtx x = operands[0];
10644 making_const_table = TRUE;
10645 scalar_float_mode float_mode;
10646 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10647 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10650 /* XXX: Sometimes gcc does something really dumb and ends up with
10651 a HIGH in a constant pool entry, usually because it's trying to
10652 load into a VFP register. We know this will always be used in
10653 combination with a LO_SUM which ignores the high bits, so just
10654 strip off the HIGH. */
10655 if (GET_CODE (x) == HIGH)
10657 assemble_integer (x, 4, BITS_PER_WORD, 1);
10658 mark_symbol_refs_as_used (x);
10662 [(set_attr "length" "4")
10663 (set_attr "type" "no_insn")]
10666 (define_insn "consttable_8"
10667 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10671 making_const_table = TRUE;
10672 scalar_float_mode float_mode;
10673 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10674 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10675 float_mode, BITS_PER_WORD);
10677 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10680 [(set_attr "length" "8")
10681 (set_attr "type" "no_insn")]
10684 (define_insn "consttable_16"
10685 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10689 making_const_table = TRUE;
10690 scalar_float_mode float_mode;
10691 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10692 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10693 float_mode, BITS_PER_WORD);
10695 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10698 [(set_attr "length" "16")
10699 (set_attr "type" "no_insn")]
10702 ;; V5 Instructions,
;; Bit counting: CLZ needs ARMv5T (arm_arch5t); RBIT is modelled as an
;; UNSPEC and gated on Thumb-2-capable cores.
10704 (define_insn "clzsi2"
10705 [(set (match_operand:SI 0 "s_register_operand" "=r")
10706 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10707 "TARGET_32BIT && arm_arch5t"
10709 [(set_attr "predicable" "yes")
10710 (set_attr "type" "clz")])
10712 (define_insn "rbitsi2"
10713 [(set (match_operand:SI 0 "s_register_operand" "=r")
10714 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10715 "TARGET_32BIT && arm_arch_thumb2"
10717 [(set_attr "predicable" "yes")
10718 (set_attr "type" "clz")])
10720 ;; Keep this as a CTZ expression until after reload and then split
10721 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10722 ;; to fold with any other expression.
10724 (define_insn_and_split "ctzsi2"
10725 [(set (match_operand:SI 0 "s_register_operand" "=r")
10726 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10727 "TARGET_32BIT && arm_arch_thumb2"
10729 "&& reload_completed"
10732 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10733 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10737 ;; V5E instructions.
;; Data prefetch (PLD family, ARMv5TE+).  Operands 1 and 2 are the
;; generic prefetch expander's read/write and locality hints.
10739 (define_insn "prefetch"
10740 [(prefetch (match_operand:SI 0 "address_operand" "p")
10741 (match_operand:SI 1 "" "")
10742 (match_operand:SI 2 "" ""))]
10743 "TARGET_32BIT && arm_arch5te"
10745 [(set_attr "type" "load_4")]
10748 ;; General predication pattern
10751 [(match_operator 0 "arm_comparison_operator"
10752 [(match_operand 1 "cc_register" "")
10755 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10757 [(set_attr "predicated" "yes")]
;; Zero-length marker that keeps a register live for dataflow purposes
;; (used e.g. by the epilogue expander for eh_return); emits nothing.
10760 (define_insn "force_register_use"
10761 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10764 [(set_attr "length" "0")
10765 (set_attr "type" "no_insn")]
10769 ;; Patterns for exception handling
;; eh_return dispatches to the ARM or Thumb variant; arm_eh_return is
;; split after reload (once the LR save slot is known) into code that
;; stores the EH return address via arm_set_return_address.
10771 (define_expand "eh_return"
10772 [(use (match_operand 0 "general_operand"))]
10777 emit_insn (gen_arm_eh_return (operands[0]));
10779 emit_insn (gen_thumb_eh_return (operands[0]));
10784 ;; We can't expand this before we know where the link register is stored.
10785 (define_insn_and_split "arm_eh_return"
10786 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10788 (clobber (match_scratch:SI 1 "=&r"))]
10791 "&& reload_completed"
10795 arm_set_return_address (operands[0], operands[1]);
;; TLS thread-pointer reads: from CP15 c13 when the core has the
;; hardware register, otherwise via the __aeabi_read_tp helper call
;; (separate FDPIC and non-FDPIC variants differing in clobbers).
10803 (define_insn "load_tp_hard"
10804 [(set (match_operand:SI 0 "register_operand" "=r")
10805 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10807 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10808 [(set_attr "predicable" "yes")
10809 (set_attr "type" "mrs")]
10812 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10813 (define_insn "load_tp_soft_fdpic"
10814 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10815 (clobber (reg:SI FDPIC_REGNUM))
10816 (clobber (reg:SI LR_REGNUM))
10817 (clobber (reg:SI IP_REGNUM))
10818 (clobber (reg:CC CC_REGNUM))]
10819 "TARGET_SOFT_TP && TARGET_FDPIC"
10820 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10821 [(set_attr "conds" "clob")
10822 (set_attr "type" "branch")]
10825 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10826 (define_insn "load_tp_soft"
10827 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10828 (clobber (reg:SI LR_REGNUM))
10829 (clobber (reg:SI IP_REGNUM))
10830 (clobber (reg:CC CC_REGNUM))]
10831 "TARGET_SOFT_TP && !TARGET_FDPIC"
10832 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10833 [(set_attr "conds" "clob")
10834 (set_attr "type" "branch")]
;; TLS descriptor call: emits the local "LPIC<n>" label (operand 1 is
;; the label number) followed by "bl <sym>(tlscall)"; result in r0.
10837 ;; tls descriptor call
10838 (define_insn "tlscall"
10839 [(set (reg:SI R0_REGNUM)
10840 (unspec:SI [(reg:SI R0_REGNUM)
10841 (match_operand:SI 0 "" "X")
10842 (match_operand 1 "" "")] UNSPEC_TLS))
10843 (clobber (reg:SI R1_REGNUM))
10844 (clobber (reg:SI LR_REGNUM))
10845 (clobber (reg:SI CC_REGNUM))]
10848 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10849 INTVAL (operands[1]));
10850 return "bl\\t%c0(tlscall)";
10852 [(set_attr "conds" "clob")
10853 (set_attr "length" "4")
10854 (set_attr "type" "branch")]
10857 ;; For thread pointer builtin
10858 (define_expand "get_thread_pointersi"
10859 [(match_operand:SI 0 "s_register_operand")]
10863 arm_load_tp (operands[0]);
10869 ;; We only care about the lower 16 bits of the constant
10870 ;; being inserted into the upper 16 bits of the register.
10871 (define_insn "*arm_movtas_ze"
10872 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10875 (match_operand:SI 1 "const_int_operand" ""))]
10880 [(set_attr "arch" "32,v8mb")
10881 (set_attr "predicable" "yes")
10882 (set_attr "length" "4")
10883 (set_attr "type" "alu_sreg")]
;; Word byte-reverse (bswap:SI); alternatives cover Thumb-1, Thumb-2
;; and 32-bit ARM encodings (2-, 2- and 4-byte lengths respectively).
10886 (define_insn "*arm_rev"
10887 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10888 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10894 [(set_attr "arch" "t1,t2,32")
10895 (set_attr "length" "2,2,4")
10896 (set_attr "predicable" "no,yes,yes")
10897 (set_attr "type" "rev")]
;; Open-coded byte-swap sequences for cores without the REV insn.
;; The ARM variant works with XOR/AND/rotates; the Thumb variant with
;; shifts and ORs, reusing temporaries to limit register pressure.
10900 (define_expand "arm_legacy_rev"
10901 [(set (match_operand:SI 2 "s_register_operand")
10902 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10906 (lshiftrt:SI (match_dup 2)
10908 (set (match_operand:SI 3 "s_register_operand")
10909 (rotatert:SI (match_dup 1)
10912 (and:SI (match_dup 2)
10913 (const_int -65281)))
10914 (set (match_operand:SI 0 "s_register_operand")
10915 (xor:SI (match_dup 3)
10921 ;; Reuse temporaries to keep register pressure down.
10922 (define_expand "thumb_legacy_rev"
10923 [(set (match_operand:SI 2 "s_register_operand")
10924 (ashift:SI (match_operand:SI 1 "s_register_operand")
10926 (set (match_operand:SI 3 "s_register_operand")
10927 (lshiftrt:SI (match_dup 1)
10930 (ior:SI (match_dup 3)
10932 (set (match_operand:SI 4 "s_register_operand")
10934 (set (match_operand:SI 5 "s_register_operand")
10935 (rotatert:SI (match_dup 1)
10938 (ashift:SI (match_dup 5)
10941 (lshiftrt:SI (match_dup 5)
10944 (ior:SI (match_dup 5)
10947 (rotatert:SI (match_dup 5)
10949 (set (match_operand:SI 0 "s_register_operand")
10950 (ior:SI (match_dup 5)
10956 ;; ARM-specific expansion of signed mod by power of 2
10957 ;; using conditional negate.
10958 ;; For r0 % n where n is a power of 2 produce:
10960 ;; and r0, r0, #(n - 1)
10961 ;; and r1, r1, #(n - 1)
10962 ;; rsbpl r0, r1, #0
;; Appears to bail out to the generic expander when operand 2 is not a
;; positive power of two (the exact_log2 test) -- verify against
;; upstream arm.md, interior lines are missing from this extract.
10964 (define_expand "modsi3"
10965 [(match_operand:SI 0 "register_operand")
10966 (match_operand:SI 1 "register_operand")
10967 (match_operand:SI 2 "const_int_operand")]
10970 HOST_WIDE_INT val = INTVAL (operands[2]);
10973 || exact_log2 (val) <= 0)
10976 rtx mask = GEN_INT (val - 1);
10978 /* In the special case of x0 % 2 we can do the even shorter:
10981 rsblt r0, r0, #0. */
10985 rtx cc_reg = arm_gen_compare_reg (LT,
10986 operands[1], const0_rtx, NULL_RTX);
10987 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10988 rtx masked = gen_reg_rtx (SImode);
10990 emit_insn (gen_andsi3 (masked, operands[1], mask));
10991 emit_move_insn (operands[0],
10992 gen_rtx_IF_THEN_ELSE (SImode, cond,
10993 gen_rtx_NEG (SImode,
10999 rtx neg_op = gen_reg_rtx (SImode);
11000 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11003 /* Extract the condition register and mode. */
11004 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11005 rtx cc_reg = SET_DEST (cmp);
11006 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11008 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11010 rtx masked_neg = gen_reg_rtx (SImode);
11011 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11013 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11014 during expand does not always work. Do an IF_THEN_ELSE instead. */
11015 emit_move_insn (operands[0],
11016 gen_rtx_IF_THEN_ELSE (SImode, cond,
11017 gen_rtx_NEG (SImode, masked_neg),
;; Word byte-swap expander: when REV is unavailable, fall back to the
;; legacy multi-insn sequences, allocating their temporaries here
;; (the Thumb sequence needs four scratch registers, the ARM one two).
11025 (define_expand "bswapsi2"
11026 [(set (match_operand:SI 0 "s_register_operand")
11027 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11028 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11032 rtx op2 = gen_reg_rtx (SImode);
11033 rtx op3 = gen_reg_rtx (SImode);
11037 rtx op4 = gen_reg_rtx (SImode);
11038 rtx op5 = gen_reg_rtx (SImode);
11040 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11041 op2, op3, op4, op5));
11045 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11054 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11055 ;; and unsigned variants, respectively. For rev16, expose
11056 ;; byte-swapping in the lower 16 bits only.
;; Alternatives mirror *arm_rev: Thumb-1, Thumb-2 and ARM encodings.
11057 (define_insn "*arm_revsh"
11058 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11059 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11065 [(set_attr "arch" "t1,t2,32")
11066 (set_attr "length" "2,2,4")
11067 (set_attr "type" "rev")]
11070 (define_insn "*arm_rev16"
11071 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11072 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11078 [(set_attr "arch" "t1,t2,32")
11079 (set_attr "length" "2,2,4")
11080 (set_attr "type" "rev")]
11083 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11084 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11085 ;; each valid permutation.
;; Both match "swap bytes within each halfword" (REV16): shift-left-
;; and-mask OR shift-right-and-mask, with the masks validated by the
;; aarch_rev16_*_mask_imm_p helpers.
11087 (define_insn "arm_rev16si2"
11088 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11089 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11091 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11092 (and:SI (lshiftrt:SI (match_dup 1)
11094 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11096 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11097 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11099 [(set_attr "arch" "t1,t2,32")
11100 (set_attr "length" "2,2,4")
11101 (set_attr "type" "rev")]
11104 (define_insn "arm_rev16si2_alt"
11105 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11106 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11108 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11109 (and:SI (ashift:SI (match_dup 1)
11111 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11113 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11114 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11116 [(set_attr "arch" "t1,t2,32")
11117 (set_attr "length" "2,2,4")
11118 (set_attr "type" "rev")]
;; HImode byte-swap expander, matched by the revsh/rev16 insns above.
11121 (define_expand "bswaphi2"
11122 [(set (match_operand:HI 0 "s_register_operand")
11123 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11128 ;; Patterns for LDRD/STRD in Thumb2 mode
;; LDRD variants: base+immediate pair (the two offsets must be 4
;; apart), plain base, and base-4 ("_neg").  operands_ok_ldrd_strd
;; enforces the register/offset restrictions; split only after reload.
11130 (define_insn "*thumb2_ldrd"
11131 [(set (match_operand:SI 0 "s_register_operand" "=r")
11132 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11133 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11134 (set (match_operand:SI 3 "s_register_operand" "=r")
11135 (mem:SI (plus:SI (match_dup 1)
11136 (match_operand:SI 4 "const_int_operand" ""))))]
11137 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11138 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11139 && (operands_ok_ldrd_strd (operands[0], operands[3],
11140 operands[1], INTVAL (operands[2]),
11142 "ldrd%?\t%0, %3, [%1, %2]"
11143 [(set_attr "type" "load_8")
11144 (set_attr "predicable" "yes")])
11146 (define_insn "*thumb2_ldrd_base"
11147 [(set (match_operand:SI 0 "s_register_operand" "=r")
11148 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11149 (set (match_operand:SI 2 "s_register_operand" "=r")
11150 (mem:SI (plus:SI (match_dup 1)
11152 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11153 && (operands_ok_ldrd_strd (operands[0], operands[2],
11154 operands[1], 0, false, true))"
11155 "ldrd%?\t%0, %2, [%1]"
11156 [(set_attr "type" "load_8")
11157 (set_attr "predicable" "yes")])
11159 (define_insn "*thumb2_ldrd_base_neg"
11160 [(set (match_operand:SI 0 "s_register_operand" "=r")
11161 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11163 (set (match_operand:SI 2 "s_register_operand" "=r")
11164 (mem:SI (match_dup 1)))]
11165 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11166 && (operands_ok_ldrd_strd (operands[0], operands[2],
11167 operands[1], -4, false, true))"
11168 "ldrd%?\t%0, %2, [%1, #-4]"
11169 [(set_attr "type" "load_8")
11170 (set_attr "predicable" "yes")])
;; STRD variants, mirroring the LDRD patterns above: base+immediate
;; pair, plain base, and base-4 ("_neg").
11172 (define_insn "*thumb2_strd"
11173 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11174 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11175 (match_operand:SI 2 "s_register_operand" "r"))
11176 (set (mem:SI (plus:SI (match_dup 0)
11177 (match_operand:SI 3 "const_int_operand" "")))
11178 (match_operand:SI 4 "s_register_operand" "r"))]
11179 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11180 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11181 && (operands_ok_ldrd_strd (operands[2], operands[4],
11182 operands[0], INTVAL (operands[1]),
11184 "strd%?\t%2, %4, [%0, %1]"
11185 [(set_attr "type" "store_8")
11186 (set_attr "predicable" "yes")])
11188 (define_insn "*thumb2_strd_base"
11189 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11190 (match_operand:SI 1 "s_register_operand" "r"))
11191 (set (mem:SI (plus:SI (match_dup 0)
11193 (match_operand:SI 2 "s_register_operand" "r"))]
11194 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11195 && (operands_ok_ldrd_strd (operands[1], operands[2],
11196 operands[0], 0, false, false))"
11197 "strd%?\t%1, %2, [%0]"
11198 [(set_attr "type" "store_8")
11199 (set_attr "predicable" "yes")])
11201 (define_insn "*thumb2_strd_base_neg"
11202 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11204 (match_operand:SI 1 "s_register_operand" "r"))
11205 (set (mem:SI (match_dup 0))
11206 (match_operand:SI 2 "s_register_operand" "r"))]
11207 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11208 && (operands_ok_ldrd_strd (operands[1], operands[2],
11209 operands[0], -4, false, false))"
11210 "strd%?\t%1, %2, [%0, #-4]"
11211 [(set_attr "type" "store_8")
11212 (set_attr "predicable" "yes")])
11214 ;; ARMv8 CRC32 instructions.
;; Iterator-expanded over the CRC variants/modes (crc_variant and
;; crc_mode come from iterator definitions outside this extract).
11215 (define_insn "arm_<crc_variant>"
11216 [(set (match_operand:SI 0 "s_register_operand" "=r")
11217 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11218 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11221 "<crc_variant>\\t%0, %1, %2"
11222 [(set_attr "type" "crc")
11223 (set_attr "conds" "unconditional")]
11226 ;; Load the load/store double peephole optimizations.
11227 (include "ldrdstrd.md")
11229 ;; Load the load/store multiple patterns
11230 (include "ldmstm.md")
11232 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11233 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11234 ;; The operands are validated through the load_multiple_operation
11235 ;; match_parallel predicate rather than through constraints so enable it only
;; Output goes through arm_output_multireg_pop, without return-to-PC
;; and without base writeback.
11237 (define_insn "*load_multiple"
11238 [(match_parallel 0 "load_multiple_operation"
11239 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11240 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11242 "TARGET_32BIT && reload_completed"
11245 arm_output_multireg_pop (operands, /*return_pc=*/false,
11246 /*cond=*/const_true_rtx,
11252 [(set_attr "predicable" "yes")]
;; Soft-float copysign for Thumb-2: copy the sign source (operand 2)
;; into the result, then bit-field-insert the low 31 bits (magnitude)
;; of operand 1 with insv_t2, leaving operand 2's sign bit in place.
11255 (define_expand "copysignsf3"
11256 [(match_operand:SF 0 "register_operand")
11257 (match_operand:SF 1 "register_operand")
11258 (match_operand:SF 2 "register_operand")]
11259 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11261 emit_move_insn (operands[0], operands[2]);
11262 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11263 GEN_INT (31), GEN_INT (0),
11264 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; DFmode variant: only the high word carries the sign, so the low word
;; of operand 1 is copied unchanged and operand 2's sign bit (its high
;; word shifted right by 31) is spliced into bit 31 of the high word.
11269 (define_expand "copysigndf3"
11270 [(match_operand:DF 0 "register_operand")
11271 (match_operand:DF 1 "register_operand")
11272 (match_operand:DF 2 "register_operand")]
11273 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11275 rtx op0_low = gen_lowpart (SImode, operands[0]);
11276 rtx op0_high = gen_highpart (SImode, operands[0]);
11277 rtx op1_low = gen_lowpart (SImode, operands[1]);
11278 rtx op1_high = gen_highpart (SImode, operands[1]);
11279 rtx op2_high = gen_highpart (SImode, operands[2]);
11281 rtx scratch1 = gen_reg_rtx (SImode);
11282 rtx scratch2 = gen_reg_rtx (SImode);
11283 emit_move_insn (scratch1, op2_high);
11284 emit_move_insn (scratch2, op1_high);
11286 emit_insn(gen_rtx_SET(scratch1,
11287 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11288 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11289 emit_move_insn (op0_low, op1_low);
11290 emit_move_insn (op0_high, scratch2);
11296 ;; movmisalign patterns for HImode and SImode.
;; Misaligned move for HImode/SImode.  The expander must not FAIL, so
;; when neither operand is a core register (e.g. memory := constant),
;; operand 1 is forced into a register first.  HImode loads go through
;; an SImode temporary using gen_unaligned_loadhiu (the "u" suffix
;; suggests a zero-extending load -- confirm against its definition)
;; followed by a lowpart move into the HImode destination; SImode loads
;; use gen_unaligned_loadsi directly; non-load cases fall through to
;; gen_unaligned_store<mode>.
;; NOTE(review): several structural lines ({ } braces, else branch,
;; DONE;) of this expander are truncated in this excerpt.
11297 (define_expand "movmisalign<mode>"
11298 [(match_operand:HSI 0 "general_operand")
11299 (match_operand:HSI 1 "general_operand")]
11302 /* This pattern is not permitted to fail during expansion: if both arguments
11303 are non-registers (e.g. memory := constant), force operand 1 into a
11305 rtx (* gen_unaligned_load)(rtx, rtx);
11306 rtx tmp_dest = operands[0];
11307 if (!s_register_operand (operands[0], <MODE>mode)
11308 && !s_register_operand (operands[1], <MODE>mode))
11309 operands[1] = force_reg (<MODE>mode, operands[1]);
11311 if (<MODE>mode == HImode)
11313 gen_unaligned_load = gen_unaligned_loadhiu;
11314 tmp_dest = gen_reg_rtx (SImode);
11317 gen_unaligned_load = gen_unaligned_loadsi;
11319 if (MEM_P (operands[1]))
11321 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11322 if (<MODE>mode == HImode)
11323 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11326 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor data-processing builtin (CDP family, iterator CDPI).
;; Per the bounds checks and the output template: operand 0 = coprocessor
;; number (0..15), operand 1 = opc1 (0..15), operands 2-4 = coprocessor
;; registers CRd/CRn/CRm (each bounded to 0..31 here), operand 5 = opc2
;; (0..7).  arm_const_bounds diagnoses out-of-range immediates before
;; the template is emitted.  unspec_volatile keeps the access ordered.
11331 (define_insn "arm_<cdp>"
11332 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11333 (match_operand:SI 1 "immediate_operand" "n")
11334 (match_operand:SI 2 "immediate_operand" "n")
11335 (match_operand:SI 3 "immediate_operand" "n")
11336 (match_operand:SI 4 "immediate_operand" "n")
11337 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11338 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11340 arm_const_bounds (operands[0], 0, 16);
11341 arm_const_bounds (operands[1], 0, 16);
11342 arm_const_bounds (operands[2], 0, (1 << 5));
11343 arm_const_bounds (operands[3], 0, (1 << 5));
11344 arm_const_bounds (operands[4], 0, (1 << 5));
11345 arm_const_bounds (operands[5], 0, 8);
11346 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11348 [(set_attr "length" "4")
11349 (set_attr "type" "coproc")])
;; Coprocessor load (LDC family, iterator LDCI): operand 0 = coprocessor
;; number (bounded 0..15), operand 1 = destination coprocessor register
;; (bounded 0..31), operand 2 = memory source restricted by the "Uz"
;; constraint (a coprocessor-load addressing mode).
11351 (define_insn "*ldc"
11352 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11353 (match_operand:SI 1 "immediate_operand" "n")
11354 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11355 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11357 arm_const_bounds (operands[0], 0, 16);
11358 arm_const_bounds (operands[1], 0, (1 << 5));
11359 return "<ldc>\\tp%c0, CR%c1, %2";
11361 [(set_attr "length" "4")
11362 (set_attr "type" "coproc")])
;; Coprocessor store (STC family, iterator STCI): mirror of *ldc above.
;; Operand 0 = coprocessor number (0..15), operand 1 = source
;; coprocessor register (0..31), operand 2 = memory destination ("=Uz":
;; written, coprocessor-store addressing mode).
11364 (define_insn "*stc"
11365 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11366 (match_operand:SI 1 "immediate_operand" "n")
11367 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11368 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11370 arm_const_bounds (operands[0], 0, 16);
11371 arm_const_bounds (operands[1], 0, (1 << 5));
11372 return "<stc>\\tp%c0, CR%c1, %2";
11374 [(set_attr "length" "4")
11375 (set_attr "type" "coproc")])
;; Expander for the LDC builtins: takes the base address as a plain
;; register operand and wraps it in a MEM so the resulting insn matches
;; the *ldc pattern.  No preparation code needed.
11377 (define_expand "arm_<ldc>"
11378 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11379 (match_operand:SI 1 "immediate_operand")
11380 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11381 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Expander for the STC builtins: same shape as arm_<ldc>, wrapping the
;; register address in a MEM to match the *stc pattern.
11383 (define_expand "arm_<stc>"
11384 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11385 (match_operand:SI 1 "immediate_operand")
11386 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11387 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Core-register-to-coprocessor move (MCR family, iterator MCRI).
;; Operand 2 is the core register transferred; the extra
;; (use (match_dup 2)) re-mentions it outside the unspec -- presumably
;; to guarantee liveness; confirm intent against the full file's
;; history.  Immediates per the bounds checks: 0 = coprocessor number
;; (0..15), 1 = opc1 (0..7), 3/4 = CRn/CRm (bounded 0..31),
;; 5 = opc2 (0..7).
11389 (define_insn "arm_<mcr>"
11390 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11391 (match_operand:SI 1 "immediate_operand" "n")
11392 (match_operand:SI 2 "s_register_operand" "r")
11393 (match_operand:SI 3 "immediate_operand" "n")
11394 (match_operand:SI 4 "immediate_operand" "n")
11395 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11396 (use (match_dup 2))]
11397 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11399 arm_const_bounds (operands[0], 0, 16);
11400 arm_const_bounds (operands[1], 0, 8);
11401 arm_const_bounds (operands[3], 0, (1 << 5));
11402 arm_const_bounds (operands[4], 0, (1 << 5));
11403 arm_const_bounds (operands[5], 0, 8);
11404 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11406 [(set_attr "length" "4")
11407 (set_attr "type" "coproc")])
;; Coprocessor-to-core-register move (MRC family, iterator MRCI): sets
;; SImode operand 0 from the coprocessor.  Immediates per the bounds
;; checks: 1 = coprocessor number (0..15), 2 = opc1 (0..7),
;; 3/4 = CRn/CRm (bounded 0..31), 5 = opc2 (0..7).
11409 (define_insn "arm_<mrc>"
11410 [(set (match_operand:SI 0 "s_register_operand" "=r")
11411 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11412 (match_operand:SI 2 "immediate_operand" "n")
11413 (match_operand:SI 3 "immediate_operand" "n")
11414 (match_operand:SI 4 "immediate_operand" "n")
11415 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11416 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11418 arm_const_bounds (operands[1], 0, 16);
11419 arm_const_bounds (operands[2], 0, 8);
11420 arm_const_bounds (operands[3], 0, (1 << 5));
11421 arm_const_bounds (operands[4], 0, (1 << 5));
11422 arm_const_bounds (operands[5], 0, 8);
11423 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11425 [(set_attr "length" "4")
11426 (set_attr "type" "coproc")])
;; Two-register transfer to coprocessor (MCRR family, iterator MCRRI).
;; Operand 2 is a DImode core register pair, emitted as its low (%Q2)
;; and high (%R2) words in the template; the extra (use (match_dup 2))
;; re-mentions it outside the unspec, as in arm_<mcr>.  Immediates:
;; 0 = coprocessor number (0..15), 1 = opc1 (0..7), 3 = CRm (bounded
;; 0..31).
11428 (define_insn "arm_<mcrr>"
11429 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11430 (match_operand:SI 1 "immediate_operand" "n")
11431 (match_operand:DI 2 "s_register_operand" "r")
11432 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11433 (use (match_dup 2))]
11434 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11436 arm_const_bounds (operands[0], 0, 16);
11437 arm_const_bounds (operands[1], 0, 8);
11438 arm_const_bounds (operands[3], 0, (1 << 5));
11439 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11441 [(set_attr "length" "4")
11442 (set_attr "type" "coproc")])
;; Two-register transfer from coprocessor (MRRC family, iterator MRRCI):
;; sets DImode operand 0, emitted as low (%Q0) and high (%R0) words.
;; Immediates: 1 = coprocessor number (0..15), 2 = opc1 (0..7),
;; 3 = CRm (bounded 0..31).
11444 (define_insn "arm_<mrrc>"
11445 [(set (match_operand:DI 0 "s_register_operand" "=r")
11446 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11447 (match_operand:SI 2 "immediate_operand" "n")
11448 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11449 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11451 arm_const_bounds (operands[1], 0, 16);
11452 arm_const_bounds (operands[2], 0, 8);
11453 arm_const_bounds (operands[3], 0, (1 << 5));
11454 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11456 [(set_attr "length" "4")
11457 (set_attr "type" "coproc")])
;; Expand a speculation barrier.  When the target has no usable barrier
;; instruction (neither Armv7 nor Armv8), emit a call to the libgcc
;; helper that performs the equivalent at run time; otherwise the
;; pattern falls through to *speculation_barrier_insn.
;; NOTE(review): the DONE;/closing lines of this expander are truncated
;; in this excerpt.
11459 (define_expand "speculation_barrier"
11460 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11463 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11464 have a usable barrier (and probably don't need one in practice).
11465 But to be safe if such code is run on later architectures, call a
11466 helper function in libgcc that will do the thing for the active
11468 if (!(arm_arch7 || arm_arch8))
11470 arm_emit_speculation_barrier_function ();
11476 ;; Generate a hard speculation barrier when we have not enabled speculation
;; Hard speculation barrier insn for Armv7/Armv8.  length 8 indicates a
;; two-instruction sequence; the output template itself is not visible
;; in this excerpt (presumably DSB followed by ISB -- confirm against
;; the full file).
11478 (define_insn "*speculation_barrier_insn"
11479 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11480 "arm_arch7 || arm_arch8"
11482 [(set_attr "type" "block")
11483 (set_attr "length" "8")]
11486 ;; Vector bits common to IWMMXT and Neon
11487 (include "vec-common.md")
11488 ;; Load the Intel Wireless Multimedia Extension patterns
11489 (include "iwmmxt.md")
11490 ;; Load the VFP co-processor patterns
11492 ;; Thumb-1 patterns
11493 (include "thumb1.md")
11494 ;; Thumb-2 patterns
11495 (include "thumb2.md")
11497 (include "neon.md")
11499 (include "crypto.md")
11500 ;; Synchronization Primitives
11501 (include "sync.md")
11502 ;; Fixed-point patterns
11503 (include "arm-fixed.md")