1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally exectuted instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 if (hi_op2 == const0_rtx)
475 emit_insn (gen_add0si3_carryin_ltu (hi_dest, hi_op1));
477 emit_insn (gen_addsi3_carryin_ltu (hi_dest, hi_op1, hi_op2));
480 if (lo_result != lo_dest)
481 emit_move_insn (lo_result, lo_dest);
482 if (hi_result != hi_dest)
483 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
489 (define_expand "addv<mode>4"
490 [(match_operand:SIDI 0 "register_operand")
491 (match_operand:SIDI 1 "register_operand")
492 (match_operand:SIDI 2 "register_operand")
493 (match_operand 3 "")]
496 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
497 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
502 (define_expand "uaddv<mode>4"
503 [(match_operand:SIDI 0 "register_operand")
504 (match_operand:SIDI 1 "register_operand")
505 (match_operand:SIDI 2 "register_operand")
506 (match_operand 3 "")]
509 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
510 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
515 (define_expand "addsi3"
516 [(set (match_operand:SI 0 "s_register_operand")
517 (plus:SI (match_operand:SI 1 "s_register_operand")
518 (match_operand:SI 2 "reg_or_int_operand")))]
521 if (TARGET_32BIT && CONST_INT_P (operands[2]))
523 arm_split_constant (PLUS, SImode, NULL_RTX,
524 INTVAL (operands[2]), operands[0], operands[1],
525 optimize && can_create_pseudo_p ());
531 ; If there is a scratch available, this will be faster than synthesizing the
534 [(match_scratch:SI 3 "r")
535 (set (match_operand:SI 0 "arm_general_register_operand" "")
536 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
537 (match_operand:SI 2 "const_int_operand" "")))]
539 !(const_ok_for_arm (INTVAL (operands[2]))
540 || const_ok_for_arm (-INTVAL (operands[2])))
541 && const_ok_for_arm (~INTVAL (operands[2]))"
542 [(set (match_dup 3) (match_dup 2))
543 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
547 ;; The r/r/k alternative is required when reloading the address
548 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
549 ;; put the duplicated register first, and not try the commutative version.
550 (define_insn_and_split "*arm_addsi3"
551 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
552 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
553 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
569 subw%?\\t%0, %1, #%n2
570 subw%?\\t%0, %1, #%n2
573 && CONST_INT_P (operands[2])
574 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
575 && (reload_completed || !arm_eliminable_register (operands[1]))"
576 [(clobber (const_int 0))]
578 arm_split_constant (PLUS, SImode, curr_insn,
579 INTVAL (operands[2]), operands[0],
583 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
584 (set_attr "predicable" "yes")
585 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
586 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
587 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
588 (const_string "alu_imm")
589 (const_string "alu_sreg")))
593 (define_insn "adddi3_compareV"
594 [(set (reg:CC_V CC_REGNUM)
597 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
598 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
599 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
600 (set (match_operand:DI 0 "s_register_operand" "=&r")
601 (plus:DI (match_dup 1) (match_dup 2)))]
603 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
604 [(set_attr "conds" "set")
605 (set_attr "length" "8")
606 (set_attr "type" "multiple")]
609 (define_insn "addsi3_compareV"
610 [(set (reg:CC_V CC_REGNUM)
613 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
614 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
615 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
616 (set (match_operand:SI 0 "register_operand" "=r")
617 (plus:SI (match_dup 1) (match_dup 2)))]
619 "adds%?\\t%0, %1, %2"
620 [(set_attr "conds" "set")
621 (set_attr "type" "alus_sreg")]
624 (define_insn "adddi3_compareC"
625 [(set (reg:CC_C CC_REGNUM)
628 (match_operand:DI 1 "register_operand" "r")
629 (match_operand:DI 2 "register_operand" "r"))
631 (set (match_operand:DI 0 "register_operand" "=&r")
632 (plus:DI (match_dup 1) (match_dup 2)))]
634 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
635 [(set_attr "conds" "set")
636 (set_attr "length" "8")
637 (set_attr "type" "multiple")]
640 (define_insn "addsi3_compareC"
641 [(set (reg:CC_C CC_REGNUM)
642 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
643 (match_operand:SI 2 "register_operand" "r"))
645 (set (match_operand:SI 0 "register_operand" "=r")
646 (plus:SI (match_dup 1) (match_dup 2)))]
648 "adds%?\\t%0, %1, %2"
649 [(set_attr "conds" "set")
650 (set_attr "type" "alus_sreg")]
653 (define_insn "addsi3_compare0"
654 [(set (reg:CC_NOOV CC_REGNUM)
656 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
657 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
659 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
660 (plus:SI (match_dup 1) (match_dup 2)))]
664 subs%?\\t%0, %1, #%n2
666 [(set_attr "conds" "set")
667 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
670 (define_insn "*addsi3_compare0_scratch"
671 [(set (reg:CC_NOOV CC_REGNUM)
673 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
674 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
681 [(set_attr "conds" "set")
682 (set_attr "predicable" "yes")
683 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
686 (define_insn "*compare_negsi_si"
687 [(set (reg:CC_Z CC_REGNUM)
689 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
690 (match_operand:SI 1 "s_register_operand" "l,r")))]
693 [(set_attr "conds" "set")
694 (set_attr "predicable" "yes")
695 (set_attr "arch" "t2,*")
696 (set_attr "length" "2,4")
697 (set_attr "predicable_short_it" "yes,no")
698 (set_attr "type" "alus_sreg")]
701 ;; This is the canonicalization of subsi3_compare when the
702 ;; addend is a constant.
703 (define_insn "cmpsi2_addneg"
704 [(set (reg:CC CC_REGNUM)
706 (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1)
710 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
712 && (INTVAL (operands[2])
713 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
715 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
716 in different condition codes (like cmn rather than like cmp), so that
717 alternative comes first. Both alternatives can match for any 0x??000000
718 where except for 0 and INT_MIN it doesn't matter what we choose, and also
719 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
721 if (which_alternative == 0 && operands[3] != const1_rtx)
722 return "subs%?\\t%0, %1, #%n3";
724 return "adds%?\\t%0, %1, %3";
726 [(set_attr "conds" "set")
727 (set_attr "type" "alus_sreg")]
730 ;; Convert the sequence
732 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
736 ;; bcs dest ((unsigned)rn >= 1)
737 ;; similarly for the beq variant using bcc.
738 ;; This is a common looping idiom (while (n--))
740 [(set (match_operand:SI 0 "arm_general_register_operand" "")
741 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
743 (set (match_operand 2 "cc_register" "")
744 (compare (match_dup 0) (const_int -1)))
746 (if_then_else (match_operator 3 "equality_operator"
747 [(match_dup 2) (const_int 0)])
748 (match_operand 4 "" "")
749 (match_operand 5 "" "")))]
750 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
754 (match_dup 1) (const_int 1)))
755 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
757 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
760 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
761 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
764 operands[2], const0_rtx);"
767 ;; The next four insns work because they compare the result with one of
768 ;; the operands, and we know that the use of the condition code is
769 ;; either GEU or LTU, so we can use the carry flag from the addition
770 ;; instead of doing the compare a second time.
771 (define_insn "*addsi3_compare_op1"
772 [(set (reg:CC_C CC_REGNUM)
774 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
775 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
777 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
778 (plus:SI (match_dup 1) (match_dup 2)))]
783 subs%?\\t%0, %1, #%n2
784 subs%?\\t%0, %0, #%n2
786 subs%?\\t%0, %1, #%n2
788 [(set_attr "conds" "set")
789 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
790 (set_attr "length" "2,2,2,2,4,4,4")
792 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
795 (define_insn "*addsi3_compare_op2"
796 [(set (reg:CC_C CC_REGNUM)
798 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
799 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
801 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
807 subs%?\\t%0, %1, #%n2
808 subs%?\\t%0, %0, #%n2
810 subs%?\\t%0, %1, #%n2
812 [(set_attr "conds" "set")
813 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
814 (set_attr "length" "2,2,2,2,4,4,4")
816 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
819 (define_insn "*compare_addsi2_op0"
820 [(set (reg:CC_C CC_REGNUM)
822 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
823 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
832 [(set_attr "conds" "set")
833 (set_attr "predicable" "yes")
834 (set_attr "arch" "t2,t2,*,*,*")
835 (set_attr "predicable_short_it" "yes,yes,no,no,no")
836 (set_attr "length" "2,2,4,4,4")
837 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
840 (define_insn "*compare_addsi2_op1"
841 [(set (reg:CC_C CC_REGNUM)
843 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
844 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
853 [(set_attr "conds" "set")
854 (set_attr "predicable" "yes")
855 (set_attr "arch" "t2,t2,*,*,*")
856 (set_attr "predicable_short_it" "yes,yes,no,no,no")
857 (set_attr "length" "2,2,4,4,4")
858 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
861 (define_insn "addsi3_carryin_<optab>"
862 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
863 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
864 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
865 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
870 sbc%?\\t%0, %1, #%B2"
871 [(set_attr "conds" "use")
872 (set_attr "predicable" "yes")
873 (set_attr "arch" "t2,*,*")
874 (set_attr "length" "4")
875 (set_attr "predicable_short_it" "yes,no,no")
876 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
879 ;; Canonicalization of the above when the immediate is zero.
880 (define_insn "add0si3_carryin_<optab>"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
883 (match_operand:SI 1 "arm_not_operand" "r")))]
886 [(set_attr "conds" "use")
887 (set_attr "predicable" "yes")
888 (set_attr "length" "4")
889 (set_attr "type" "adc_imm")]
892 (define_insn "*addsi3_carryin_alt2_<optab>"
893 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
894 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
895 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
896 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
901 sbc%?\\t%0, %1, #%B2"
902 [(set_attr "conds" "use")
903 (set_attr "predicable" "yes")
904 (set_attr "arch" "t2,*,*")
905 (set_attr "length" "4")
906 (set_attr "predicable_short_it" "yes,no,no")
907 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
910 (define_insn "*addsi3_carryin_shift_<optab>"
911 [(set (match_operand:SI 0 "s_register_operand" "=r")
913 (match_operator:SI 2 "shift_operator"
914 [(match_operand:SI 3 "s_register_operand" "r")
915 (match_operand:SI 4 "reg_or_int_operand" "rM")])
916 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0)))
917 (match_operand:SI 1 "s_register_operand" "r")))]
919 "adc%?\\t%0, %1, %3%S2"
920 [(set_attr "conds" "use")
921 (set_attr "predicable" "yes")
922 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
923 (const_string "alu_shift_imm")
924 (const_string "alu_shift_reg")))]
927 (define_insn "*addsi3_carryin_clobercc_<optab>"
928 [(set (match_operand:SI 0 "s_register_operand" "=r")
929 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
930 (match_operand:SI 2 "arm_rhs_operand" "rI"))
931 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
932 (clobber (reg:CC CC_REGNUM))]
934 "adcs%?\\t%0, %1, %2"
935 [(set_attr "conds" "set")
936 (set_attr "type" "adcs_reg")]
939 (define_expand "subv<mode>4"
940 [(match_operand:SIDI 0 "register_operand")
941 (match_operand:SIDI 1 "register_operand")
942 (match_operand:SIDI 2 "register_operand")
943 (match_operand 3 "")]
946 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
947 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
952 (define_expand "usubv<mode>4"
953 [(match_operand:SIDI 0 "register_operand")
954 (match_operand:SIDI 1 "register_operand")
955 (match_operand:SIDI 2 "register_operand")
956 (match_operand 3 "")]
959 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
960 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
965 (define_insn "subdi3_compare1"
966 [(set (reg:CC CC_REGNUM)
968 (match_operand:DI 1 "s_register_operand" "r")
969 (match_operand:DI 2 "s_register_operand" "r")))
970 (set (match_operand:DI 0 "s_register_operand" "=&r")
971 (minus:DI (match_dup 1) (match_dup 2)))]
973 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
974 [(set_attr "conds" "set")
975 (set_attr "length" "8")
976 (set_attr "type" "multiple")]
979 (define_insn "subsi3_compare1"
980 [(set (reg:CC CC_REGNUM)
982 (match_operand:SI 1 "register_operand" "r")
983 (match_operand:SI 2 "register_operand" "r")))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (minus:SI (match_dup 1) (match_dup 2)))]
987 "subs%?\\t%0, %1, %2"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_sreg")]
992 (define_insn "subsi3_carryin"
993 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
994 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
995 (match_operand:SI 2 "s_register_operand" "r,r,r"))
996 (match_operand:SI 3 "arm_borrow_operation" "")))]
1001 sbc%?\\t%0, %2, %2, lsl #1"
1002 [(set_attr "conds" "use")
1003 (set_attr "arch" "*,a,t2")
1004 (set_attr "predicable" "yes")
1005 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1008 (define_insn "*subsi3_carryin_const"
1009 [(set (match_operand:SI 0 "s_register_operand" "=r")
1011 (match_operand:SI 1 "s_register_operand" "r")
1012 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1013 (match_operand:SI 3 "arm_borrow_operation" "")))]
1015 "sbc\\t%0, %1, #%n2"
1016 [(set_attr "conds" "use")
1017 (set_attr "type" "adc_imm")]
1020 (define_insn "*subsi3_carryin_const0"
1021 [(set (match_operand:SI 0 "s_register_operand" "=r")
1022 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1023 (match_operand:SI 2 "arm_borrow_operation" "")))]
1026 [(set_attr "conds" "use")
1027 (set_attr "type" "adc_imm")]
1030 (define_insn "*subsi3_carryin_shift"
1031 [(set (match_operand:SI 0 "s_register_operand" "=r")
1033 (match_operand:SI 1 "s_register_operand" "r")
1034 (match_operator:SI 2 "shift_operator"
1035 [(match_operand:SI 3 "s_register_operand" "r")
1036 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1037 (match_operand:SI 5 "arm_borrow_operation" "")))]
1039 "sbc%?\\t%0, %1, %3%S2"
1040 [(set_attr "conds" "use")
1041 (set_attr "predicable" "yes")
1042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1043 (const_string "alu_shift_imm")
1044 (const_string "alu_shift_reg")))]
1047 (define_insn "*rsbsi3_carryin_shift"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r")
1050 (match_operator:SI 2 "shift_operator"
1051 [(match_operand:SI 3 "s_register_operand" "r")
1052 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1053 (match_operand:SI 1 "s_register_operand" "r"))
1054 (match_operand:SI 5 "arm_borrow_operation" "")))]
1056 "rsc%?\\t%0, %1, %3%S2"
1057 [(set_attr "conds" "use")
1058 (set_attr "predicable" "yes")
1059 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1060 (const_string "alu_shift_imm")
1061 (const_string "alu_shift_reg")))]
1064 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1066 [(set (match_operand:SI 0 "s_register_operand" "")
1067 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1068 (match_operand:SI 2 "s_register_operand" ""))
1070 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1072 [(set (match_dup 3) (match_dup 1))
1073 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1075 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1078 (define_expand "addsf3"
1079 [(set (match_operand:SF 0 "s_register_operand")
1080 (plus:SF (match_operand:SF 1 "s_register_operand")
1081 (match_operand:SF 2 "s_register_operand")))]
1082 "TARGET_32BIT && TARGET_HARD_FLOAT"
1086 (define_expand "adddf3"
1087 [(set (match_operand:DF 0 "s_register_operand")
1088 (plus:DF (match_operand:DF 1 "s_register_operand")
1089 (match_operand:DF 2 "s_register_operand")))]
1090 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1094 (define_expand "subdi3"
1096 [(set (match_operand:DI 0 "s_register_operand")
1097 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1098 (match_operand:DI 2 "s_register_operand")))
1099 (clobber (reg:CC CC_REGNUM))])]
1104 if (!REG_P (operands[1]))
1105 operands[1] = force_reg (DImode, operands[1]);
1109 rtx lo_result, hi_result, lo_dest, hi_dest;
1110 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1113 /* Since operands[1] may be an integer, pass it second, so that
1114 any necessary simplifications will be done on the decomposed
1116 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1118 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1119 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1121 if (!arm_rhs_operand (lo_op1, SImode))
1122 lo_op1 = force_reg (SImode, lo_op1);
1124 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1125 || !arm_rhs_operand (hi_op1, SImode))
1126 hi_op1 = force_reg (SImode, hi_op1);
1129 if (lo_op1 == const0_rtx)
1131 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1132 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1134 else if (CONST_INT_P (lo_op1))
1136 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1137 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1138 GEN_INT (~UINTVAL (lo_op1))));
1142 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1143 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1146 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1148 if (hi_op1 == const0_rtx)
1149 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1151 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1153 if (lo_result != lo_dest)
1154 emit_move_insn (lo_result, lo_dest);
1156 if (hi_result != hi_dest)
1157 emit_move_insn (hi_result, hi_dest);
1164 (define_expand "subsi3"
1165 [(set (match_operand:SI 0 "s_register_operand")
1166 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1167 (match_operand:SI 2 "s_register_operand")))]
1170 if (CONST_INT_P (operands[1]))
1174 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1175 operands[1] = force_reg (SImode, operands[1]);
1178 arm_split_constant (MINUS, SImode, NULL_RTX,
1179 INTVAL (operands[1]), operands[0],
1181 optimize && can_create_pseudo_p ());
1185 else /* TARGET_THUMB1 */
1186 operands[1] = force_reg (SImode, operands[1]);
1191 ; ??? Check Thumb-2 split length
1192 (define_insn_and_split "*arm_subsi3_insn"
1193 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1194 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1195 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1207 "&& (CONST_INT_P (operands[1])
1208 && !const_ok_for_arm (INTVAL (operands[1])))"
1209 [(clobber (const_int 0))]
1211 arm_split_constant (MINUS, SImode, curr_insn,
1212 INTVAL (operands[1]), operands[0], operands[2], 0);
1215 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1216 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1217 (set_attr "predicable" "yes")
1218 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1219 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1223 [(match_scratch:SI 3 "r")
1224 (set (match_operand:SI 0 "arm_general_register_operand" "")
1225 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1226 (match_operand:SI 2 "arm_general_register_operand" "")))]
1228 && !const_ok_for_arm (INTVAL (operands[1]))
1229 && const_ok_for_arm (~INTVAL (operands[1]))"
1230 [(set (match_dup 3) (match_dup 1))
1231 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1235 (define_insn "subsi3_compare0"
1236 [(set (reg:CC_NOOV CC_REGNUM)
1238 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1239 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1241 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1242 (minus:SI (match_dup 1) (match_dup 2)))]
1247 rsbs%?\\t%0, %2, %1"
1248 [(set_attr "conds" "set")
1249 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1252 (define_insn "subsi3_compare"
1253 [(set (reg:CC CC_REGNUM)
1254 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1255 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1256 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1257 (minus:SI (match_dup 1) (match_dup 2)))]
1262 rsbs%?\\t%0, %2, %1"
1263 [(set_attr "conds" "set")
1264 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1267 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1268 ;; rather than (0 cmp reg). This gives the same results for unsigned
1269 ;; and equality compares which is what we mostly need here.
1270 (define_insn "rsb_imm_compare"
1271 [(set (reg:CC_RSB CC_REGNUM)
1272 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1273 (match_operand 3 "const_int_operand" "")))
1274 (set (match_operand:SI 0 "s_register_operand" "=r")
1275 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1277 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1279 [(set_attr "conds" "set")
1280 (set_attr "type" "alus_imm")]
1283 (define_expand "subsf3"
1284 [(set (match_operand:SF 0 "s_register_operand")
1285 (minus:SF (match_operand:SF 1 "s_register_operand")
1286 (match_operand:SF 2 "s_register_operand")))]
1287 "TARGET_32BIT && TARGET_HARD_FLOAT"
1291 (define_expand "subdf3"
1292 [(set (match_operand:DF 0 "s_register_operand")
1293 (minus:DF (match_operand:DF 1 "s_register_operand")
1294 (match_operand:DF 2 "s_register_operand")))]
1295 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1300 ;; Multiplication insns
1302 (define_expand "mulhi3"
1303 [(set (match_operand:HI 0 "s_register_operand")
1304 (mult:HI (match_operand:HI 1 "s_register_operand")
1305 (match_operand:HI 2 "s_register_operand")))]
1306 "TARGET_DSP_MULTIPLY"
1309 rtx result = gen_reg_rtx (SImode);
1310 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1311 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1316 (define_expand "mulsi3"
1317 [(set (match_operand:SI 0 "s_register_operand")
1318 (mult:SI (match_operand:SI 2 "s_register_operand")
1319 (match_operand:SI 1 "s_register_operand")))]
1324 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1326 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1327 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1328 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1330 "mul%?\\t%0, %2, %1"
1331 [(set_attr "type" "mul")
1332 (set_attr "predicable" "yes")
1333 (set_attr "arch" "t2,v6,nov6,nov6")
1334 (set_attr "length" "4")
1335 (set_attr "predicable_short_it" "yes,no,*,*")]
1338 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
1339 ;; reusing the same register.
1342 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1344 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1345 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1346 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1348 "mla%?\\t%0, %3, %2, %1"
1349 [(set_attr "type" "mla")
1350 (set_attr "predicable" "yes")
1351 (set_attr "arch" "v6,nov6,nov6,nov6")]
1355 [(set (match_operand:SI 0 "s_register_operand" "=r")
1357 (match_operand:SI 1 "s_register_operand" "r")
1358 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1359 (match_operand:SI 2 "s_register_operand" "r"))))]
1360 "TARGET_32BIT && arm_arch_thumb2"
1361 "mls%?\\t%0, %3, %2, %1"
1362 [(set_attr "type" "mla")
1363 (set_attr "predicable" "yes")]
1366 (define_insn "*mulsi3_compare0"
1367 [(set (reg:CC_NOOV CC_REGNUM)
1368 (compare:CC_NOOV (mult:SI
1369 (match_operand:SI 2 "s_register_operand" "r,r")
1370 (match_operand:SI 1 "s_register_operand" "%0,r"))
1372 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1373 (mult:SI (match_dup 2) (match_dup 1)))]
1374 "TARGET_ARM && !arm_arch6"
1375 "muls%?\\t%0, %2, %1"
1376 [(set_attr "conds" "set")
1377 (set_attr "type" "muls")]
1380 (define_insn "*mulsi3_compare0_v6"
1381 [(set (reg:CC_NOOV CC_REGNUM)
1382 (compare:CC_NOOV (mult:SI
1383 (match_operand:SI 2 "s_register_operand" "r")
1384 (match_operand:SI 1 "s_register_operand" "r"))
1386 (set (match_operand:SI 0 "s_register_operand" "=r")
1387 (mult:SI (match_dup 2) (match_dup 1)))]
1388 "TARGET_ARM && arm_arch6 && optimize_size"
1389 "muls%?\\t%0, %2, %1"
1390 [(set_attr "conds" "set")
1391 (set_attr "type" "muls")]
1394 (define_insn "*mulsi_compare0_scratch"
1395 [(set (reg:CC_NOOV CC_REGNUM)
1396 (compare:CC_NOOV (mult:SI
1397 (match_operand:SI 2 "s_register_operand" "r,r")
1398 (match_operand:SI 1 "s_register_operand" "%0,r"))
1400 (clobber (match_scratch:SI 0 "=&r,&r"))]
1401 "TARGET_ARM && !arm_arch6"
1402 "muls%?\\t%0, %2, %1"
1403 [(set_attr "conds" "set")
1404 (set_attr "type" "muls")]
1407 (define_insn "*mulsi_compare0_scratch_v6"
1408 [(set (reg:CC_NOOV CC_REGNUM)
1409 (compare:CC_NOOV (mult:SI
1410 (match_operand:SI 2 "s_register_operand" "r")
1411 (match_operand:SI 1 "s_register_operand" "r"))
1413 (clobber (match_scratch:SI 0 "=r"))]
1414 "TARGET_ARM && arm_arch6 && optimize_size"
1415 "muls%?\\t%0, %2, %1"
1416 [(set_attr "conds" "set")
1417 (set_attr "type" "muls")]
1420 (define_insn "*mulsi3addsi_compare0"
1421 [(set (reg:CC_NOOV CC_REGNUM)
1424 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1425 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1426 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1428 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1429 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1431 "TARGET_ARM && arm_arch6"
1432 "mlas%?\\t%0, %2, %1, %3"
1433 [(set_attr "conds" "set")
1434 (set_attr "type" "mlas")]
1437 (define_insn "*mulsi3addsi_compare0_v6"
1438 [(set (reg:CC_NOOV CC_REGNUM)
1441 (match_operand:SI 2 "s_register_operand" "r")
1442 (match_operand:SI 1 "s_register_operand" "r"))
1443 (match_operand:SI 3 "s_register_operand" "r"))
1445 (set (match_operand:SI 0 "s_register_operand" "=r")
1446 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1448 "TARGET_ARM && arm_arch6 && optimize_size"
1449 "mlas%?\\t%0, %2, %1, %3"
1450 [(set_attr "conds" "set")
1451 (set_attr "type" "mlas")]
1454 (define_insn "*mulsi3addsi_compare0_scratch"
1455 [(set (reg:CC_NOOV CC_REGNUM)
1458 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1459 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1460 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1462 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1463 "TARGET_ARM && !arm_arch6"
1464 "mlas%?\\t%0, %2, %1, %3"
1465 [(set_attr "conds" "set")
1466 (set_attr "type" "mlas")]
1469 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1470 [(set (reg:CC_NOOV CC_REGNUM)
1473 (match_operand:SI 2 "s_register_operand" "r")
1474 (match_operand:SI 1 "s_register_operand" "r"))
1475 (match_operand:SI 3 "s_register_operand" "r"))
1477 (clobber (match_scratch:SI 0 "=r"))]
1478 "TARGET_ARM && arm_arch6 && optimize_size"
1479 "mlas%?\\t%0, %2, %1, %3"
1480 [(set_attr "conds" "set")
1481 (set_attr "type" "mlas")]
1484 ;; 32x32->64 widening multiply.
1485 ;; The only difference between the v3-5 and v6+ versions is the requirement
1486 ;; that the output does not overlap with either input.
1488 (define_expand "<Us>mulsidi3"
1489 [(set (match_operand:DI 0 "s_register_operand")
1491 (SE:DI (match_operand:SI 1 "s_register_operand"))
1492 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1495 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1496 gen_highpart (SImode, operands[0]),
1497 operands[1], operands[2]));
1502 (define_insn "<US>mull"
1503 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1505 (match_operand:SI 2 "s_register_operand" "%r,r")
1506 (match_operand:SI 3 "s_register_operand" "r,r")))
1507 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1510 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1513 "<US>mull%?\\t%0, %1, %2, %3"
1514 [(set_attr "type" "umull")
1515 (set_attr "predicable" "yes")
1516 (set_attr "arch" "v6,nov6")]
1519 (define_expand "<Us>maddsidi4"
1520 [(set (match_operand:DI 0 "s_register_operand")
1523 (SE:DI (match_operand:SI 1 "s_register_operand"))
1524 (SE:DI (match_operand:SI 2 "s_register_operand")))
1525 (match_operand:DI 3 "s_register_operand")))]
1528 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1529 gen_lowpart (SImode, operands[3]),
1530 gen_highpart (SImode, operands[0]),
1531 gen_highpart (SImode, operands[3]),
1532 operands[1], operands[2]));
1537 (define_insn "<US>mlal"
1538 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1541 (match_operand:SI 4 "s_register_operand" "%r,r")
1542 (match_operand:SI 5 "s_register_operand" "r,r"))
1543 (match_operand:SI 1 "s_register_operand" "0,0")))
1544 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1549 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1550 (zero_extend:DI (match_dup 1)))
1552 (match_operand:SI 3 "s_register_operand" "2,2")))]
1554 "<US>mlal%?\\t%0, %2, %4, %5"
1555 [(set_attr "type" "umlal")
1556 (set_attr "predicable" "yes")
1557 (set_attr "arch" "v6,nov6")]
1560 (define_expand "<US>mulsi3_highpart"
1562 [(set (match_operand:SI 0 "s_register_operand")
1566 (SE:DI (match_operand:SI 1 "s_register_operand"))
1567 (SE:DI (match_operand:SI 2 "s_register_operand")))
1569 (clobber (match_scratch:SI 3 ""))])]
1574 (define_insn "*<US>mull_high"
1575 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1579 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1580 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1582 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1584 "<US>mull%?\\t%3, %0, %2, %1"
1585 [(set_attr "type" "umull")
1586 (set_attr "predicable" "yes")
1587 (set_attr "arch" "v6,nov6,nov6")]
1590 (define_insn "mulhisi3"
1591 [(set (match_operand:SI 0 "s_register_operand" "=r")
1592 (mult:SI (sign_extend:SI
1593 (match_operand:HI 1 "s_register_operand" "%r"))
1595 (match_operand:HI 2 "s_register_operand" "r"))))]
1596 "TARGET_DSP_MULTIPLY"
1597 "smulbb%?\\t%0, %1, %2"
1598 [(set_attr "type" "smulxy")
1599 (set_attr "predicable" "yes")]
1602 (define_insn "*mulhisi3tb"
1603 [(set (match_operand:SI 0 "s_register_operand" "=r")
1604 (mult:SI (ashiftrt:SI
1605 (match_operand:SI 1 "s_register_operand" "r")
1608 (match_operand:HI 2 "s_register_operand" "r"))))]
1609 "TARGET_DSP_MULTIPLY"
1610 "smultb%?\\t%0, %1, %2"
1611 [(set_attr "type" "smulxy")
1612 (set_attr "predicable" "yes")]
1615 (define_insn "*mulhisi3bt"
1616 [(set (match_operand:SI 0 "s_register_operand" "=r")
1617 (mult:SI (sign_extend:SI
1618 (match_operand:HI 1 "s_register_operand" "r"))
1620 (match_operand:SI 2 "s_register_operand" "r")
1622 "TARGET_DSP_MULTIPLY"
1623 "smulbt%?\\t%0, %1, %2"
1624 [(set_attr "type" "smulxy")
1625 (set_attr "predicable" "yes")]
1628 (define_insn "*mulhisi3tt"
1629 [(set (match_operand:SI 0 "s_register_operand" "=r")
1630 (mult:SI (ashiftrt:SI
1631 (match_operand:SI 1 "s_register_operand" "r")
1634 (match_operand:SI 2 "s_register_operand" "r")
1636 "TARGET_DSP_MULTIPLY"
1637 "smultt%?\\t%0, %1, %2"
1638 [(set_attr "type" "smulxy")
1639 (set_attr "predicable" "yes")]
1642 (define_insn "maddhisi4"
1643 [(set (match_operand:SI 0 "s_register_operand" "=r")
1644 (plus:SI (mult:SI (sign_extend:SI
1645 (match_operand:HI 1 "s_register_operand" "r"))
1647 (match_operand:HI 2 "s_register_operand" "r")))
1648 (match_operand:SI 3 "s_register_operand" "r")))]
1649 "TARGET_DSP_MULTIPLY"
1650 "smlabb%?\\t%0, %1, %2, %3"
1651 [(set_attr "type" "smlaxy")
1652 (set_attr "predicable" "yes")]
1655 ;; Note: there is no maddhisi4ibt because this one is canonical form
1656 (define_insn "*maddhisi4tb"
1657 [(set (match_operand:SI 0 "s_register_operand" "=r")
1658 (plus:SI (mult:SI (ashiftrt:SI
1659 (match_operand:SI 1 "s_register_operand" "r")
1662 (match_operand:HI 2 "s_register_operand" "r")))
1663 (match_operand:SI 3 "s_register_operand" "r")))]
1664 "TARGET_DSP_MULTIPLY"
1665 "smlatb%?\\t%0, %1, %2, %3"
1666 [(set_attr "type" "smlaxy")
1667 (set_attr "predicable" "yes")]
1670 (define_insn "*maddhisi4tt"
1671 [(set (match_operand:SI 0 "s_register_operand" "=r")
1672 (plus:SI (mult:SI (ashiftrt:SI
1673 (match_operand:SI 1 "s_register_operand" "r")
1676 (match_operand:SI 2 "s_register_operand" "r")
1678 (match_operand:SI 3 "s_register_operand" "r")))]
1679 "TARGET_DSP_MULTIPLY"
1680 "smlatt%?\\t%0, %1, %2, %3"
1681 [(set_attr "type" "smlaxy")
1682 (set_attr "predicable" "yes")]
1685 (define_insn "maddhidi4"
1686 [(set (match_operand:DI 0 "s_register_operand" "=r")
1688 (mult:DI (sign_extend:DI
1689 (match_operand:HI 1 "s_register_operand" "r"))
1691 (match_operand:HI 2 "s_register_operand" "r")))
1692 (match_operand:DI 3 "s_register_operand" "0")))]
1693 "TARGET_DSP_MULTIPLY"
1694 "smlalbb%?\\t%Q0, %R0, %1, %2"
1695 [(set_attr "type" "smlalxy")
1696 (set_attr "predicable" "yes")])
1698 ;; Note: there is no maddhidi4ibt because this one is canonical form
1699 (define_insn "*maddhidi4tb"
1700 [(set (match_operand:DI 0 "s_register_operand" "=r")
1702 (mult:DI (sign_extend:DI
1704 (match_operand:SI 1 "s_register_operand" "r")
1707 (match_operand:HI 2 "s_register_operand" "r")))
1708 (match_operand:DI 3 "s_register_operand" "0")))]
1709 "TARGET_DSP_MULTIPLY"
1710 "smlaltb%?\\t%Q0, %R0, %1, %2"
1711 [(set_attr "type" "smlalxy")
1712 (set_attr "predicable" "yes")])
1714 (define_insn "*maddhidi4tt"
1715 [(set (match_operand:DI 0 "s_register_operand" "=r")
1717 (mult:DI (sign_extend:DI
1719 (match_operand:SI 1 "s_register_operand" "r")
1723 (match_operand:SI 2 "s_register_operand" "r")
1725 (match_operand:DI 3 "s_register_operand" "0")))]
1726 "TARGET_DSP_MULTIPLY"
1727 "smlaltt%?\\t%Q0, %R0, %1, %2"
1728 [(set_attr "type" "smlalxy")
1729 (set_attr "predicable" "yes")])
1731 (define_expand "mulsf3"
1732 [(set (match_operand:SF 0 "s_register_operand")
1733 (mult:SF (match_operand:SF 1 "s_register_operand")
1734 (match_operand:SF 2 "s_register_operand")))]
1735 "TARGET_32BIT && TARGET_HARD_FLOAT"
1739 (define_expand "muldf3"
1740 [(set (match_operand:DF 0 "s_register_operand")
1741 (mult:DF (match_operand:DF 1 "s_register_operand")
1742 (match_operand:DF 2 "s_register_operand")))]
1743 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1749 (define_expand "divsf3"
1750 [(set (match_operand:SF 0 "s_register_operand")
1751 (div:SF (match_operand:SF 1 "s_register_operand")
1752 (match_operand:SF 2 "s_register_operand")))]
1753 "TARGET_32BIT && TARGET_HARD_FLOAT"
1756 (define_expand "divdf3"
1757 [(set (match_operand:DF 0 "s_register_operand")
1758 (div:DF (match_operand:DF 1 "s_register_operand")
1759 (match_operand:DF 2 "s_register_operand")))]
1760 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1764 ; Expand logical operations. The mid-end expander does not split off memory
1765 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1766 ; So an explicit expander is needed to generate better code.
1768 (define_expand "<LOGICAL:optab>di3"
1769 [(set (match_operand:DI 0 "s_register_operand")
1770 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1771 (match_operand:DI 2 "arm_<optab>di_operand")))]
1774 rtx low = simplify_gen_binary (<CODE>, SImode,
1775 gen_lowpart (SImode, operands[1]),
1776 gen_lowpart (SImode, operands[2]));
1777 rtx high = simplify_gen_binary (<CODE>, SImode,
1778 gen_highpart (SImode, operands[1]),
1779 gen_highpart_mode (SImode, DImode,
1782 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1783 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1788 (define_expand "one_cmpldi2"
1789 [(set (match_operand:DI 0 "s_register_operand")
1790 (not:DI (match_operand:DI 1 "s_register_operand")))]
1793 rtx low = simplify_gen_unary (NOT, SImode,
1794 gen_lowpart (SImode, operands[1]),
1796 rtx high = simplify_gen_unary (NOT, SImode,
1797 gen_highpart_mode (SImode, DImode,
1801 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1802 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1807 ;; Split DImode and, ior, xor operations. Simply perform the logical
1808 ;; operation on the upper and lower halves of the registers.
1809 ;; This is needed for atomic operations in arm_split_atomic_op.
1810 ;; Avoid splitting IWMMXT instructions.
1812 [(set (match_operand:DI 0 "s_register_operand" "")
1813 (match_operator:DI 6 "logical_binary_operator"
1814 [(match_operand:DI 1 "s_register_operand" "")
1815 (match_operand:DI 2 "s_register_operand" "")]))]
1816 "TARGET_32BIT && reload_completed
1817 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1818 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1819 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1822 operands[3] = gen_highpart (SImode, operands[0]);
1823 operands[0] = gen_lowpart (SImode, operands[0]);
1824 operands[4] = gen_highpart (SImode, operands[1]);
1825 operands[1] = gen_lowpart (SImode, operands[1]);
1826 operands[5] = gen_highpart (SImode, operands[2]);
1827 operands[2] = gen_lowpart (SImode, operands[2]);
1831 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1832 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1834 [(set (match_operand:DI 0 "s_register_operand")
1835 (not:DI (match_operand:DI 1 "s_register_operand")))]
1837 [(set (match_dup 0) (not:SI (match_dup 1)))
1838 (set (match_dup 2) (not:SI (match_dup 3)))]
1841 operands[2] = gen_highpart (SImode, operands[0]);
1842 operands[0] = gen_lowpart (SImode, operands[0]);
1843 operands[3] = gen_highpart (SImode, operands[1]);
1844 operands[1] = gen_lowpart (SImode, operands[1]);
1848 (define_expand "andsi3"
1849 [(set (match_operand:SI 0 "s_register_operand")
1850 (and:SI (match_operand:SI 1 "s_register_operand")
1851 (match_operand:SI 2 "reg_or_int_operand")))]
1856 if (CONST_INT_P (operands[2]))
1858 if (INTVAL (operands[2]) == 255 && arm_arch6)
1860 operands[1] = convert_to_mode (QImode, operands[1], 1);
1861 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1865 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1866 operands[2] = force_reg (SImode, operands[2]);
1869 arm_split_constant (AND, SImode, NULL_RTX,
1870 INTVAL (operands[2]), operands[0],
1872 optimize && can_create_pseudo_p ());
1878 else /* TARGET_THUMB1 */
1880 if (!CONST_INT_P (operands[2]))
1882 rtx tmp = force_reg (SImode, operands[2]);
1883 if (rtx_equal_p (operands[0], operands[1]))
1887 operands[2] = operands[1];
1895 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1897 operands[2] = force_reg (SImode,
1898 GEN_INT (~INTVAL (operands[2])));
1900 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1905 for (i = 9; i <= 31; i++)
1907 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1909 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1913 else if ((HOST_WIDE_INT_1 << i) - 1
1914 == ~INTVAL (operands[2]))
1916 rtx shift = GEN_INT (i);
1917 rtx reg = gen_reg_rtx (SImode);
1919 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1920 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1926 operands[2] = force_reg (SImode, operands[2]);
1932 ; ??? Check split length for Thumb-2
1933 (define_insn_and_split "*arm_andsi3_insn"
1934 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1935 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1936 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1941 bic%?\\t%0, %1, #%B2
1945 && CONST_INT_P (operands[2])
1946 && !(const_ok_for_arm (INTVAL (operands[2]))
1947 || const_ok_for_arm (~INTVAL (operands[2])))"
1948 [(clobber (const_int 0))]
1950 arm_split_constant (AND, SImode, curr_insn,
1951 INTVAL (operands[2]), operands[0], operands[1], 0);
1954 [(set_attr "length" "4,4,4,4,16")
1955 (set_attr "predicable" "yes")
1956 (set_attr "predicable_short_it" "no,yes,no,no,no")
1957 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1960 (define_insn "*andsi3_compare0"
1961 [(set (reg:CC_NOOV CC_REGNUM)
1963 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1964 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1966 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1967 (and:SI (match_dup 1) (match_dup 2)))]
1971 bics%?\\t%0, %1, #%B2
1972 ands%?\\t%0, %1, %2"
1973 [(set_attr "conds" "set")
1974 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1977 (define_insn "*andsi3_compare0_scratch"
1978 [(set (reg:CC_NOOV CC_REGNUM)
1980 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1981 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1983 (clobber (match_scratch:SI 2 "=X,r,X"))]
1987 bics%?\\t%2, %0, #%B1
1989 [(set_attr "conds" "set")
1990 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1993 (define_insn "*zeroextractsi_compare0_scratch"
1994 [(set (reg:CC_NOOV CC_REGNUM)
1995 (compare:CC_NOOV (zero_extract:SI
1996 (match_operand:SI 0 "s_register_operand" "r")
1997 (match_operand 1 "const_int_operand" "n")
1998 (match_operand 2 "const_int_operand" "n"))
2001 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2002 && INTVAL (operands[1]) > 0
2003 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2004 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2006 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2007 << INTVAL (operands[2]));
2008 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2011 [(set_attr "conds" "set")
2012 (set_attr "predicable" "yes")
2013 (set_attr "type" "logics_imm")]
2016 (define_insn_and_split "*ne_zeroextractsi"
2017 [(set (match_operand:SI 0 "s_register_operand" "=r")
2018 (ne:SI (zero_extract:SI
2019 (match_operand:SI 1 "s_register_operand" "r")
2020 (match_operand:SI 2 "const_int_operand" "n")
2021 (match_operand:SI 3 "const_int_operand" "n"))
2023 (clobber (reg:CC CC_REGNUM))]
2025 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2026 && INTVAL (operands[2]) > 0
2027 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2028 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2031 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2032 && INTVAL (operands[2]) > 0
2033 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2034 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2035 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2036 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2038 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2040 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2041 (match_dup 0) (const_int 1)))]
2043 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2044 << INTVAL (operands[3]));
2046 [(set_attr "conds" "clob")
2047 (set (attr "length")
2048 (if_then_else (eq_attr "is_thumb" "yes")
2051 (set_attr "type" "multiple")]
2054 (define_insn_and_split "*ne_zeroextractsi_shifted"
2055 [(set (match_operand:SI 0 "s_register_operand" "=r")
2056 (ne:SI (zero_extract:SI
2057 (match_operand:SI 1 "s_register_operand" "r")
2058 (match_operand:SI 2 "const_int_operand" "n")
2061 (clobber (reg:CC CC_REGNUM))]
2065 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2066 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2068 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2070 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2071 (match_dup 0) (const_int 1)))]
2073 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2075 [(set_attr "conds" "clob")
2076 (set_attr "length" "8")
2077 (set_attr "type" "multiple")]
2080 (define_insn_and_split "*ite_ne_zeroextractsi"
2081 [(set (match_operand:SI 0 "s_register_operand" "=r")
2082 (if_then_else:SI (ne (zero_extract:SI
2083 (match_operand:SI 1 "s_register_operand" "r")
2084 (match_operand:SI 2 "const_int_operand" "n")
2085 (match_operand:SI 3 "const_int_operand" "n"))
2087 (match_operand:SI 4 "arm_not_operand" "rIK")
2089 (clobber (reg:CC CC_REGNUM))]
2091 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2092 && INTVAL (operands[2]) > 0
2093 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2094 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2095 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2098 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2099 && INTVAL (operands[2]) > 0
2100 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2101 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2102 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2103 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2104 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2106 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2108 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2109 (match_dup 0) (match_dup 4)))]
2111 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2112 << INTVAL (operands[3]));
2114 [(set_attr "conds" "clob")
2115 (set_attr "length" "8")
2116 (set_attr "type" "multiple")]
2119 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2120 [(set (match_operand:SI 0 "s_register_operand" "=r")
2121 (if_then_else:SI (ne (zero_extract:SI
2122 (match_operand:SI 1 "s_register_operand" "r")
2123 (match_operand:SI 2 "const_int_operand" "n")
2126 (match_operand:SI 3 "arm_not_operand" "rIK")
2128 (clobber (reg:CC CC_REGNUM))]
2129 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2131 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2132 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2133 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2135 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2137 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2138 (match_dup 0) (match_dup 3)))]
2140 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2142 [(set_attr "conds" "clob")
2143 (set_attr "length" "8")
2144 (set_attr "type" "multiple")]
2147 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2149 [(set (match_operand:SI 0 "s_register_operand" "")
2150 (match_operator:SI 1 "shiftable_operator"
2151 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2152 (match_operand:SI 3 "const_int_operand" "")
2153 (match_operand:SI 4 "const_int_operand" ""))
2154 (match_operand:SI 5 "s_register_operand" "")]))
2155 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2157 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2160 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2163 HOST_WIDE_INT temp = INTVAL (operands[3]);
2165 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2166 operands[4] = GEN_INT (32 - temp);
2171 [(set (match_operand:SI 0 "s_register_operand" "")
2172 (match_operator:SI 1 "shiftable_operator"
2173 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2174 (match_operand:SI 3 "const_int_operand" "")
2175 (match_operand:SI 4 "const_int_operand" ""))
2176 (match_operand:SI 5 "s_register_operand" "")]))
2177 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2179 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2182 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2185 HOST_WIDE_INT temp = INTVAL (operands[3]);
2187 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2188 operands[4] = GEN_INT (32 - temp);
2192 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2193 ;;; represented by the bitfield, then this will produce incorrect results.
2194 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2195 ;;; which have a real bit-field insert instruction, the truncation happens
2196 ;;; in the bit-field insert instruction itself. Since arm does not have a
2197 ;;; bit-field insert instruction, we would have to emit code here to truncate
2198 ;;; the value before we insert. This loses some of the advantage of having
2199 ;;; this insv pattern, so this pattern needs to be reevalutated.
2201 (define_expand "insv"
2202 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2203 (match_operand 1 "general_operand")
2204 (match_operand 2 "general_operand"))
2205 (match_operand 3 "reg_or_int_operand"))]
2206 "TARGET_ARM || arm_arch_thumb2"
2209 int start_bit = INTVAL (operands[2]);
2210 int width = INTVAL (operands[1]);
2211 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2212 rtx target, subtarget;
2214 if (arm_arch_thumb2)
2216 if (unaligned_access && MEM_P (operands[0])
2217 && s_register_operand (operands[3], GET_MODE (operands[3]))
2218 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2222 if (BYTES_BIG_ENDIAN)
2223 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2228 base_addr = adjust_address (operands[0], SImode,
2229 start_bit / BITS_PER_UNIT);
2230 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2234 rtx tmp = gen_reg_rtx (HImode);
2236 base_addr = adjust_address (operands[0], HImode,
2237 start_bit / BITS_PER_UNIT);
2238 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2239 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2243 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2245 bool use_bfi = TRUE;
2247 if (CONST_INT_P (operands[3]))
2249 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2253 emit_insn (gen_insv_zero (operands[0], operands[1],
2258 /* See if the set can be done with a single orr instruction. */
2259 if (val == mask && const_ok_for_arm (val << start_bit))
2265 if (!REG_P (operands[3]))
2266 operands[3] = force_reg (SImode, operands[3]);
2268 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2277 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2280 target = copy_rtx (operands[0]);
2281 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2282 subreg as the final target. */
2283 if (GET_CODE (target) == SUBREG)
2285 subtarget = gen_reg_rtx (SImode);
2286 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2287 < GET_MODE_SIZE (SImode))
2288 target = SUBREG_REG (target);
2293 if (CONST_INT_P (operands[3]))
2295 /* Since we are inserting a known constant, we may be able to
2296 reduce the number of bits that we have to clear so that
2297 the mask becomes simple. */
2298 /* ??? This code does not check to see if the new mask is actually
2299 simpler. It may not be. */
2300 rtx op1 = gen_reg_rtx (SImode);
2301 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2302 start of this pattern. */
2303 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2304 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2306 emit_insn (gen_andsi3 (op1, operands[0],
2307 gen_int_mode (~mask2, SImode)));
2308 emit_insn (gen_iorsi3 (subtarget, op1,
2309 gen_int_mode (op3_value << start_bit, SImode)));
2311 else if (start_bit == 0
2312 && !(const_ok_for_arm (mask)
2313 || const_ok_for_arm (~mask)))
2315 /* A Trick, since we are setting the bottom bits in the word,
2316 we can shift operand[3] up, operand[0] down, OR them together
2317 and rotate the result back again. This takes 3 insns, and
2318 the third might be mergeable into another op. */
2319 /* The shift up copes with the possibility that operand[3] is
2320 wider than the bitfield. */
2321 rtx op0 = gen_reg_rtx (SImode);
2322 rtx op1 = gen_reg_rtx (SImode);
2324 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2325 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2326 emit_insn (gen_iorsi3 (op1, op1, op0));
2327 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2329 else if ((width + start_bit == 32)
2330 && !(const_ok_for_arm (mask)
2331 || const_ok_for_arm (~mask)))
2333 /* Similar trick, but slightly less efficient. */
2335 rtx op0 = gen_reg_rtx (SImode);
2336 rtx op1 = gen_reg_rtx (SImode);
2338 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2339 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2340 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2341 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2345 rtx op0 = gen_int_mode (mask, SImode);
2346 rtx op1 = gen_reg_rtx (SImode);
2347 rtx op2 = gen_reg_rtx (SImode);
2349 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2351 rtx tmp = gen_reg_rtx (SImode);
2353 emit_insn (gen_movsi (tmp, op0));
2357 /* Mask out any bits in operand[3] that are not needed. */
2358 emit_insn (gen_andsi3 (op1, operands[3], op0));
2360 if (CONST_INT_P (op0)
2361 && (const_ok_for_arm (mask << start_bit)
2362 || const_ok_for_arm (~(mask << start_bit))))
2364 op0 = gen_int_mode (~(mask << start_bit), SImode);
2365 emit_insn (gen_andsi3 (op2, operands[0], op0));
2369 if (CONST_INT_P (op0))
2371 rtx tmp = gen_reg_rtx (SImode);
2373 emit_insn (gen_movsi (tmp, op0));
2378 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2380 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2384 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2386 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2389 if (subtarget != target)
2391 /* If TARGET is still a SUBREG, then it must be wider than a word,
2392 so we must be careful only to set the subword we were asked to. */
2393 if (GET_CODE (target) == SUBREG)
2394 emit_move_insn (target, subtarget);
2396 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2403 (define_insn "insv_zero"
2404 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2405 (match_operand:SI 1 "const_int_M_operand" "M")
2406 (match_operand:SI 2 "const_int_M_operand" "M"))
2410 [(set_attr "length" "4")
2411 (set_attr "predicable" "yes")
2412 (set_attr "type" "bfm")]
2415 (define_insn "insv_t2"
2416 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2417 (match_operand:SI 1 "const_int_M_operand" "M")
2418 (match_operand:SI 2 "const_int_M_operand" "M"))
2419 (match_operand:SI 3 "s_register_operand" "r"))]
2421 "bfi%?\t%0, %3, %2, %1"
2422 [(set_attr "length" "4")
2423 (set_attr "predicable" "yes")
2424 (set_attr "type" "bfm")]
2427 (define_insn "andsi_notsi_si"
2428 [(set (match_operand:SI 0 "s_register_operand" "=r")
2429 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2430 (match_operand:SI 1 "s_register_operand" "r")))]
2432 "bic%?\\t%0, %1, %2"
2433 [(set_attr "predicable" "yes")
2434 (set_attr "type" "logic_reg")]
2437 (define_insn "andsi_not_shiftsi_si"
2438 [(set (match_operand:SI 0 "s_register_operand" "=r")
2439 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2440 [(match_operand:SI 2 "s_register_operand" "r")
2441 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2442 (match_operand:SI 1 "s_register_operand" "r")))]
2444 "bic%?\\t%0, %1, %2%S4"
2445 [(set_attr "predicable" "yes")
2446 (set_attr "shift" "2")
2447 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2448 (const_string "logic_shift_imm")
2449 (const_string "logic_shift_reg")))]
2452 ;; Shifted bics pattern used to set up CC status register and not reusing
2453 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
2454 ;; does not support shift by register.
2455 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2456 [(set (reg:CC_NOOV CC_REGNUM)
2458 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2459 [(match_operand:SI 1 "s_register_operand" "r")
2460 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2461 (match_operand:SI 3 "s_register_operand" "r"))
2463 (clobber (match_scratch:SI 4 "=r"))]
2464 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2465 "bics%?\\t%4, %3, %1%S0"
2466 [(set_attr "predicable" "yes")
2467 (set_attr "conds" "set")
2468 (set_attr "shift" "1")
2469 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2470 (const_string "logic_shift_imm")
2471 (const_string "logic_shift_reg")))]
2474 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2475 ;; getting reused later.
2476 (define_insn "andsi_not_shiftsi_si_scc"
2477 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2479 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2480 [(match_operand:SI 1 "s_register_operand" "r")
2481 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2482 (match_operand:SI 3 "s_register_operand" "r"))
2484 (set (match_operand:SI 4 "s_register_operand" "=r")
2485 (and:SI (not:SI (match_op_dup 0
2489 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2490 "bics%?\\t%4, %3, %1%S0"
2491 [(set_attr "predicable" "yes")
2492 (set_attr "conds" "set")
2493 (set_attr "shift" "1")
2494 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2495 (const_string "logic_shift_imm")
2496 (const_string "logic_shift_reg")))]
2499 (define_insn "*andsi_notsi_si_compare0"
2500 [(set (reg:CC_NOOV CC_REGNUM)
2502 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2503 (match_operand:SI 1 "s_register_operand" "r"))
2505 (set (match_operand:SI 0 "s_register_operand" "=r")
2506 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2509 [(set_attr "conds" "set")
2510 (set_attr "type" "logics_shift_reg")]
2513 (define_insn "*andsi_notsi_si_compare0_scratch"
2514 [(set (reg:CC_NOOV CC_REGNUM)
2516 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2517 (match_operand:SI 1 "s_register_operand" "r"))
2519 (clobber (match_scratch:SI 0 "=r"))]
2522 [(set_attr "conds" "set")
2523 (set_attr "type" "logics_shift_reg")]
2526 (define_expand "iorsi3"
2527 [(set (match_operand:SI 0 "s_register_operand")
2528 (ior:SI (match_operand:SI 1 "s_register_operand")
2529 (match_operand:SI 2 "reg_or_int_operand")))]
2532 if (CONST_INT_P (operands[2]))
2536 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2537 operands[2] = force_reg (SImode, operands[2]);
2540 arm_split_constant (IOR, SImode, NULL_RTX,
2541 INTVAL (operands[2]), operands[0],
2543 optimize && can_create_pseudo_p ());
2547 else /* TARGET_THUMB1 */
2549 rtx tmp = force_reg (SImode, operands[2]);
2550 if (rtx_equal_p (operands[0], operands[1]))
2554 operands[2] = operands[1];
2562 (define_insn_and_split "*iorsi3_insn"
2563 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2564 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2565 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2570 orn%?\\t%0, %1, #%B2
2574 && CONST_INT_P (operands[2])
2575 && !(const_ok_for_arm (INTVAL (operands[2]))
2576 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2577 [(clobber (const_int 0))]
2579 arm_split_constant (IOR, SImode, curr_insn,
2580 INTVAL (operands[2]), operands[0], operands[1], 0);
2583 [(set_attr "length" "4,4,4,4,16")
2584 (set_attr "arch" "32,t2,t2,32,32")
2585 (set_attr "predicable" "yes")
2586 (set_attr "predicable_short_it" "no,yes,no,no,no")
2587 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2591 [(match_scratch:SI 3 "r")
2592 (set (match_operand:SI 0 "arm_general_register_operand" "")
2593 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2594 (match_operand:SI 2 "const_int_operand" "")))]
2596 && !const_ok_for_arm (INTVAL (operands[2]))
2597 && const_ok_for_arm (~INTVAL (operands[2]))"
2598 [(set (match_dup 3) (match_dup 2))
2599 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2603 (define_insn "*iorsi3_compare0"
2604 [(set (reg:CC_NOOV CC_REGNUM)
2606 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2607 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2609 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2610 (ior:SI (match_dup 1) (match_dup 2)))]
2612 "orrs%?\\t%0, %1, %2"
2613 [(set_attr "conds" "set")
2614 (set_attr "arch" "*,t2,*")
2615 (set_attr "length" "4,2,4")
2616 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2619 (define_insn "*iorsi3_compare0_scratch"
2620 [(set (reg:CC_NOOV CC_REGNUM)
2622 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2623 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2625 (clobber (match_scratch:SI 0 "=r,l,r"))]
2627 "orrs%?\\t%0, %1, %2"
2628 [(set_attr "conds" "set")
2629 (set_attr "arch" "*,t2,*")
2630 (set_attr "length" "4,2,4")
2631 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2634 (define_expand "xorsi3"
2635 [(set (match_operand:SI 0 "s_register_operand")
2636 (xor:SI (match_operand:SI 1 "s_register_operand")
2637 (match_operand:SI 2 "reg_or_int_operand")))]
2639 "if (CONST_INT_P (operands[2]))
2643 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2644 operands[2] = force_reg (SImode, operands[2]);
2647 arm_split_constant (XOR, SImode, NULL_RTX,
2648 INTVAL (operands[2]), operands[0],
2650 optimize && can_create_pseudo_p ());
2654 else /* TARGET_THUMB1 */
2656 rtx tmp = force_reg (SImode, operands[2]);
2657 if (rtx_equal_p (operands[0], operands[1]))
2661 operands[2] = operands[1];
2668 (define_insn_and_split "*arm_xorsi3"
2669 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2670 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2671 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2679 && CONST_INT_P (operands[2])
2680 && !const_ok_for_arm (INTVAL (operands[2]))"
2681 [(clobber (const_int 0))]
2683 arm_split_constant (XOR, SImode, curr_insn,
2684 INTVAL (operands[2]), operands[0], operands[1], 0);
2687 [(set_attr "length" "4,4,4,16")
2688 (set_attr "predicable" "yes")
2689 (set_attr "predicable_short_it" "no,yes,no,no")
2690 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2693 (define_insn "*xorsi3_compare0"
2694 [(set (reg:CC_NOOV CC_REGNUM)
2695 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2696 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2698 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2699 (xor:SI (match_dup 1) (match_dup 2)))]
2701 "eors%?\\t%0, %1, %2"
2702 [(set_attr "conds" "set")
2703 (set_attr "type" "logics_imm,logics_reg")]
2706 (define_insn "*xorsi3_compare0_scratch"
2707 [(set (reg:CC_NOOV CC_REGNUM)
2708 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2709 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2713 [(set_attr "conds" "set")
2714 (set_attr "type" "logics_imm,logics_reg")]
2717 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2718 ; (NOT D) we can sometimes merge the final NOT into one of the following
2722 [(set (match_operand:SI 0 "s_register_operand" "")
2723 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2724 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2725 (match_operand:SI 3 "arm_rhs_operand" "")))
2726 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2728 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2729 (not:SI (match_dup 3))))
2730 (set (match_dup 0) (not:SI (match_dup 4)))]
2734 (define_insn_and_split "*andsi_iorsi3_notsi"
2735 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2736 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2737 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2738 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2740 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2741 "&& reload_completed"
2742 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2743 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2745 /* If operands[3] is a constant make sure to fold the NOT into it
2746 to avoid creating a NOT of a CONST_INT. */
2747 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2748 if (CONST_INT_P (not_rtx))
2750 operands[4] = operands[0];
2751 operands[5] = not_rtx;
2755 operands[5] = operands[0];
2756 operands[4] = not_rtx;
2759 [(set_attr "length" "8")
2760 (set_attr "ce_count" "2")
2761 (set_attr "predicable" "yes")
2762 (set_attr "type" "multiple")]
2765 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2766 ; insns are available?
2768 [(set (match_operand:SI 0 "s_register_operand" "")
2769 (match_operator:SI 1 "logical_binary_operator"
2770 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2771 (match_operand:SI 3 "const_int_operand" "")
2772 (match_operand:SI 4 "const_int_operand" ""))
2773 (match_operator:SI 9 "logical_binary_operator"
2774 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2775 (match_operand:SI 6 "const_int_operand" ""))
2776 (match_operand:SI 7 "s_register_operand" "")])]))
2777 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2779 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2780 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2783 [(ashift:SI (match_dup 2) (match_dup 4))
2787 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2790 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2794 [(set (match_operand:SI 0 "s_register_operand" "")
2795 (match_operator:SI 1 "logical_binary_operator"
2796 [(match_operator:SI 9 "logical_binary_operator"
2797 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2798 (match_operand:SI 6 "const_int_operand" ""))
2799 (match_operand:SI 7 "s_register_operand" "")])
2800 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2801 (match_operand:SI 3 "const_int_operand" "")
2802 (match_operand:SI 4 "const_int_operand" ""))]))
2803 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2805 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2806 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2809 [(ashift:SI (match_dup 2) (match_dup 4))
2813 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2816 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2820 [(set (match_operand:SI 0 "s_register_operand" "")
2821 (match_operator:SI 1 "logical_binary_operator"
2822 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2823 (match_operand:SI 3 "const_int_operand" "")
2824 (match_operand:SI 4 "const_int_operand" ""))
2825 (match_operator:SI 9 "logical_binary_operator"
2826 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2827 (match_operand:SI 6 "const_int_operand" ""))
2828 (match_operand:SI 7 "s_register_operand" "")])]))
2829 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2831 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2832 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2835 [(ashift:SI (match_dup 2) (match_dup 4))
2839 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2842 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2846 [(set (match_operand:SI 0 "s_register_operand" "")
2847 (match_operator:SI 1 "logical_binary_operator"
2848 [(match_operator:SI 9 "logical_binary_operator"
2849 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2850 (match_operand:SI 6 "const_int_operand" ""))
2851 (match_operand:SI 7 "s_register_operand" "")])
2852 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2853 (match_operand:SI 3 "const_int_operand" "")
2854 (match_operand:SI 4 "const_int_operand" ""))]))
2855 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2857 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2858 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2861 [(ashift:SI (match_dup 2) (match_dup 4))
2865 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2868 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2872 ;; Minimum and maximum insns
2874 (define_expand "smaxsi3"
2876 (set (match_operand:SI 0 "s_register_operand")
2877 (smax:SI (match_operand:SI 1 "s_register_operand")
2878 (match_operand:SI 2 "arm_rhs_operand")))
2879 (clobber (reg:CC CC_REGNUM))])]
2882 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2884 /* No need for a clobber of the condition code register here. */
2885 emit_insn (gen_rtx_SET (operands[0],
2886 gen_rtx_SMAX (SImode, operands[1],
2892 (define_insn "*smax_0"
2893 [(set (match_operand:SI 0 "s_register_operand" "=r")
2894 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2897 "bic%?\\t%0, %1, %1, asr #31"
2898 [(set_attr "predicable" "yes")
2899 (set_attr "type" "logic_shift_reg")]
2902 (define_insn "*smax_m1"
2903 [(set (match_operand:SI 0 "s_register_operand" "=r")
2904 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2907 "orr%?\\t%0, %1, %1, asr #31"
2908 [(set_attr "predicable" "yes")
2909 (set_attr "type" "logic_shift_reg")]
2912 (define_insn_and_split "*arm_smax_insn"
2913 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2914 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2915 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2916 (clobber (reg:CC CC_REGNUM))]
2919 ; cmp\\t%1, %2\;movlt\\t%0, %2
2920 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2922 [(set (reg:CC CC_REGNUM)
2923 (compare:CC (match_dup 1) (match_dup 2)))
2925 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2929 [(set_attr "conds" "clob")
2930 (set_attr "length" "8,12")
2931 (set_attr "type" "multiple")]
2934 (define_expand "sminsi3"
2936 (set (match_operand:SI 0 "s_register_operand")
2937 (smin:SI (match_operand:SI 1 "s_register_operand")
2938 (match_operand:SI 2 "arm_rhs_operand")))
2939 (clobber (reg:CC CC_REGNUM))])]
2942 if (operands[2] == const0_rtx)
2944 /* No need for a clobber of the condition code register here. */
2945 emit_insn (gen_rtx_SET (operands[0],
2946 gen_rtx_SMIN (SImode, operands[1],
2952 (define_insn "*smin_0"
2953 [(set (match_operand:SI 0 "s_register_operand" "=r")
2954 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2957 "and%?\\t%0, %1, %1, asr #31"
2958 [(set_attr "predicable" "yes")
2959 (set_attr "type" "logic_shift_reg")]
2962 (define_insn_and_split "*arm_smin_insn"
2963 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2964 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2965 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2966 (clobber (reg:CC CC_REGNUM))]
2969 ; cmp\\t%1, %2\;movge\\t%0, %2
2970 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2972 [(set (reg:CC CC_REGNUM)
2973 (compare:CC (match_dup 1) (match_dup 2)))
2975 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2979 [(set_attr "conds" "clob")
2980 (set_attr "length" "8,12")
2981 (set_attr "type" "multiple,multiple")]
2984 (define_expand "umaxsi3"
2986 (set (match_operand:SI 0 "s_register_operand")
2987 (umax:SI (match_operand:SI 1 "s_register_operand")
2988 (match_operand:SI 2 "arm_rhs_operand")))
2989 (clobber (reg:CC CC_REGNUM))])]
2994 (define_insn_and_split "*arm_umaxsi3"
2995 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2996 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2997 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2998 (clobber (reg:CC CC_REGNUM))]
3001 ; cmp\\t%1, %2\;movcc\\t%0, %2
3002 ; cmp\\t%1, %2\;movcs\\t%0, %1
3003 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3005 [(set (reg:CC CC_REGNUM)
3006 (compare:CC (match_dup 1) (match_dup 2)))
3008 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3012 [(set_attr "conds" "clob")
3013 (set_attr "length" "8,8,12")
3014 (set_attr "type" "store_4")]
3017 (define_expand "uminsi3"
3019 (set (match_operand:SI 0 "s_register_operand")
3020 (umin:SI (match_operand:SI 1 "s_register_operand")
3021 (match_operand:SI 2 "arm_rhs_operand")))
3022 (clobber (reg:CC CC_REGNUM))])]
3027 (define_insn_and_split "*arm_uminsi3"
3028 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3029 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3030 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3031 (clobber (reg:CC CC_REGNUM))]
3034 ; cmp\\t%1, %2\;movcs\\t%0, %2
3035 ; cmp\\t%1, %2\;movcc\\t%0, %1
3036 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3038 [(set (reg:CC CC_REGNUM)
3039 (compare:CC (match_dup 1) (match_dup 2)))
3041 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3045 [(set_attr "conds" "clob")
3046 (set_attr "length" "8,8,12")
3047 (set_attr "type" "store_4")]
3050 (define_insn "*store_minmaxsi"
3051 [(set (match_operand:SI 0 "memory_operand" "=m")
3052 (match_operator:SI 3 "minmax_operator"
3053 [(match_operand:SI 1 "s_register_operand" "r")
3054 (match_operand:SI 2 "s_register_operand" "r")]))
3055 (clobber (reg:CC CC_REGNUM))]
3056 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3058 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3059 operands[1], operands[2]);
3060 output_asm_insn (\"cmp\\t%1, %2\", operands);
3062 output_asm_insn (\"ite\t%d3\", operands);
3063 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3064 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3067 [(set_attr "conds" "clob")
3068 (set (attr "length")
3069 (if_then_else (eq_attr "is_thumb" "yes")
3072 (set_attr "type" "store_4")]
3075 ; Reject the frame pointer in operand[1], since reloading this after
3076 ; it has been eliminated can cause carnage.
3077 (define_insn "*minmax_arithsi"
3078 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3079 (match_operator:SI 4 "shiftable_operator"
3080 [(match_operator:SI 5 "minmax_operator"
3081 [(match_operand:SI 2 "s_register_operand" "r,r")
3082 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3083 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3084 (clobber (reg:CC CC_REGNUM))]
3085 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3088 enum rtx_code code = GET_CODE (operands[4]);
3091 if (which_alternative != 0 || operands[3] != const0_rtx
3092 || (code != PLUS && code != IOR && code != XOR))
3097 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3098 operands[2], operands[3]);
3099 output_asm_insn (\"cmp\\t%2, %3\", operands);
3103 output_asm_insn (\"ite\\t%d5\", operands);
3105 output_asm_insn (\"it\\t%d5\", operands);
3107 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3109 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3112 [(set_attr "conds" "clob")
3113 (set (attr "length")
3114 (if_then_else (eq_attr "is_thumb" "yes")
3117 (set_attr "type" "multiple")]
3120 ; Reject the frame pointer in operand[1], since reloading this after
3121 ; it has been eliminated can cause carnage.
3122 (define_insn_and_split "*minmax_arithsi_non_canon"
3123 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3125 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3126 (match_operator:SI 4 "minmax_operator"
3127 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3128 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3129 (clobber (reg:CC CC_REGNUM))]
3130 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3131 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3133 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3134 [(set (reg:CC CC_REGNUM)
3135 (compare:CC (match_dup 2) (match_dup 3)))
3137 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3139 (minus:SI (match_dup 1)
3141 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3145 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3146 operands[2], operands[3]);
3147 enum rtx_code rc = minmax_code (operands[4]);
3148 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3149 operands[2], operands[3]);
3151 if (mode == CCFPmode || mode == CCFPEmode)
3152 rc = reverse_condition_maybe_unordered (rc);
3154 rc = reverse_condition (rc);
3155 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3156 if (CONST_INT_P (operands[3]))
3157 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3159 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3161 [(set_attr "conds" "clob")
3162 (set (attr "length")
3163 (if_then_else (eq_attr "is_thumb" "yes")
3166 (set_attr "type" "multiple")]
3169 (define_code_iterator SAT [smin smax])
3170 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3171 (define_code_attr SATlo [(smin "1") (smax "2")])
3172 (define_code_attr SAThi [(smin "2") (smax "1")])
3174 (define_insn "*satsi_<SAT:code>"
3175 [(set (match_operand:SI 0 "s_register_operand" "=r")
3176 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3177 (match_operand:SI 1 "const_int_operand" "i"))
3178 (match_operand:SI 2 "const_int_operand" "i")))]
3179 "TARGET_32BIT && arm_arch6
3180 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3184 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3185 &mask, &signed_sat))
3188 operands[1] = GEN_INT (mask);
3190 return "ssat%?\t%0, %1, %3";
3192 return "usat%?\t%0, %1, %3";
3194 [(set_attr "predicable" "yes")
3195 (set_attr "type" "alus_imm")]
3198 (define_insn "*satsi_<SAT:code>_shift"
3199 [(set (match_operand:SI 0 "s_register_operand" "=r")
3200 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3201 [(match_operand:SI 4 "s_register_operand" "r")
3202 (match_operand:SI 5 "const_int_operand" "i")])
3203 (match_operand:SI 1 "const_int_operand" "i"))
3204 (match_operand:SI 2 "const_int_operand" "i")))]
3205 "TARGET_32BIT && arm_arch6
3206 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3210 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3211 &mask, &signed_sat))
3214 operands[1] = GEN_INT (mask);
3216 return "ssat%?\t%0, %1, %4%S3";
3218 return "usat%?\t%0, %1, %4%S3";
3220 [(set_attr "predicable" "yes")
3221 (set_attr "shift" "3")
3222 (set_attr "type" "logic_shift_reg")])
3224 ;; Shift and rotation insns
3226 (define_expand "ashldi3"
3227 [(set (match_operand:DI 0 "s_register_operand")
3228 (ashift:DI (match_operand:DI 1 "s_register_operand")
3229 (match_operand:SI 2 "reg_or_int_operand")))]
3232 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3233 operands[2], gen_reg_rtx (SImode),
3234 gen_reg_rtx (SImode));
3238 (define_expand "ashlsi3"
3239 [(set (match_operand:SI 0 "s_register_operand")
3240 (ashift:SI (match_operand:SI 1 "s_register_operand")
3241 (match_operand:SI 2 "arm_rhs_operand")))]
3244 if (CONST_INT_P (operands[2])
3245 && (UINTVAL (operands[2])) > 31)
3247 emit_insn (gen_movsi (operands[0], const0_rtx));
3253 (define_expand "ashrdi3"
3254 [(set (match_operand:DI 0 "s_register_operand")
3255 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3256 (match_operand:SI 2 "reg_or_int_operand")))]
3259 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3260 operands[2], gen_reg_rtx (SImode),
3261 gen_reg_rtx (SImode));
3265 (define_expand "ashrsi3"
3266 [(set (match_operand:SI 0 "s_register_operand")
3267 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3268 (match_operand:SI 2 "arm_rhs_operand")))]
3271 if (CONST_INT_P (operands[2])
3272 && UINTVAL (operands[2]) > 31)
3273 operands[2] = GEN_INT (31);
3277 (define_expand "lshrdi3"
3278 [(set (match_operand:DI 0 "s_register_operand")
3279 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3280 (match_operand:SI 2 "reg_or_int_operand")))]
3283 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3284 operands[2], gen_reg_rtx (SImode),
3285 gen_reg_rtx (SImode));
3289 (define_expand "lshrsi3"
3290 [(set (match_operand:SI 0 "s_register_operand")
3291 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3292 (match_operand:SI 2 "arm_rhs_operand")))]
3295 if (CONST_INT_P (operands[2])
3296 && (UINTVAL (operands[2])) > 31)
3298 emit_insn (gen_movsi (operands[0], const0_rtx));
3304 (define_expand "rotlsi3"
3305 [(set (match_operand:SI 0 "s_register_operand")
3306 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3307 (match_operand:SI 2 "reg_or_int_operand")))]
3310 if (CONST_INT_P (operands[2]))
3311 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3314 rtx reg = gen_reg_rtx (SImode);
3315 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3321 (define_expand "rotrsi3"
3322 [(set (match_operand:SI 0 "s_register_operand")
3323 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3324 (match_operand:SI 2 "arm_rhs_operand")))]
3329 if (CONST_INT_P (operands[2])
3330 && UINTVAL (operands[2]) > 31)
3331 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3333 else /* TARGET_THUMB1 */
3335 if (CONST_INT_P (operands [2]))
3336 operands [2] = force_reg (SImode, operands[2]);
3341 (define_insn "*arm_shiftsi3"
3342 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3343 (match_operator:SI 3 "shift_operator"
3344 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3345 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3347 "* return arm_output_shift(operands, 0);"
3348 [(set_attr "predicable" "yes")
3349 (set_attr "arch" "t2,t2,*,*")
3350 (set_attr "predicable_short_it" "yes,yes,no,no")
3351 (set_attr "length" "4")
3352 (set_attr "shift" "1")
3353 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
3356 (define_insn "*shiftsi3_compare0"
3357 [(set (reg:CC_NOOV CC_REGNUM)
3358 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3359 [(match_operand:SI 1 "s_register_operand" "r,r")
3360 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3362 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3363 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3365 "* return arm_output_shift(operands, 1);"
3366 [(set_attr "conds" "set")
3367 (set_attr "shift" "1")
3368 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
3371 (define_insn "*shiftsi3_compare0_scratch"
3372 [(set (reg:CC_NOOV CC_REGNUM)
3373 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3374 [(match_operand:SI 1 "s_register_operand" "r,r")
3375 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3377 (clobber (match_scratch:SI 0 "=r,r"))]
3379 "* return arm_output_shift(operands, 1);"
3380 [(set_attr "conds" "set")
3381 (set_attr "shift" "1")
3382 (set_attr "type" "shift_imm,shift_reg")]
3385 (define_insn "*not_shiftsi"
3386 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3387 (not:SI (match_operator:SI 3 "shift_operator"
3388 [(match_operand:SI 1 "s_register_operand" "r,r")
3389 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3392 [(set_attr "predicable" "yes")
3393 (set_attr "shift" "1")
3394 (set_attr "arch" "32,a")
3395 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3397 (define_insn "*not_shiftsi_compare0"
3398 [(set (reg:CC_NOOV CC_REGNUM)
3400 (not:SI (match_operator:SI 3 "shift_operator"
3401 [(match_operand:SI 1 "s_register_operand" "r,r")
3402 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3404 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3405 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3407 "mvns%?\\t%0, %1%S3"
3408 [(set_attr "conds" "set")
3409 (set_attr "shift" "1")
3410 (set_attr "arch" "32,a")
3411 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3413 (define_insn "*not_shiftsi_compare0_scratch"
3414 [(set (reg:CC_NOOV CC_REGNUM)
3416 (not:SI (match_operator:SI 3 "shift_operator"
3417 [(match_operand:SI 1 "s_register_operand" "r,r")
3418 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3420 (clobber (match_scratch:SI 0 "=r,r"))]
3422 "mvns%?\\t%0, %1%S3"
3423 [(set_attr "conds" "set")
3424 (set_attr "shift" "1")
3425 (set_attr "arch" "32,a")
3426 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3428 ;; We don't really have extzv, but defining this using shifts helps
3429 ;; to reduce register pressure later on.
3431 (define_expand "extzv"
3432 [(set (match_operand 0 "s_register_operand")
3433 (zero_extract (match_operand 1 "nonimmediate_operand")
3434 (match_operand 2 "const_int_operand")
3435 (match_operand 3 "const_int_operand")))]
3436 "TARGET_THUMB1 || arm_arch_thumb2"
3439 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3440 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3442 if (arm_arch_thumb2)
3444 HOST_WIDE_INT width = INTVAL (operands[2]);
3445 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3447 if (unaligned_access && MEM_P (operands[1])
3448 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3452 if (BYTES_BIG_ENDIAN)
3453 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3458 base_addr = adjust_address (operands[1], SImode,
3459 bitpos / BITS_PER_UNIT);
3460 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3464 rtx dest = operands[0];
3465 rtx tmp = gen_reg_rtx (SImode);
3467 /* We may get a paradoxical subreg here. Strip it off. */
3468 if (GET_CODE (dest) == SUBREG
3469 && GET_MODE (dest) == SImode
3470 && GET_MODE (SUBREG_REG (dest)) == HImode)
3471 dest = SUBREG_REG (dest);
3473 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3476 base_addr = adjust_address (operands[1], HImode,
3477 bitpos / BITS_PER_UNIT);
3478 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3479 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3483 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3485 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3493 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3496 operands[3] = GEN_INT (rshift);
3500 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3504 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3505 operands[3], gen_reg_rtx (SImode)));
3510 ;; Helper for extzv, for the Thumb-1 register-shifts case.
3512 (define_expand "extzv_t1"
3513 [(set (match_operand:SI 4 "s_register_operand")
3514 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3515 (match_operand:SI 2 "const_int_operand")))
3516 (set (match_operand:SI 0 "s_register_operand")
3517 (lshiftrt:SI (match_dup 4)
3518 (match_operand:SI 3 "const_int_operand")))]
3522 (define_expand "extv"
3523 [(set (match_operand 0 "s_register_operand")
3524 (sign_extract (match_operand 1 "nonimmediate_operand")
3525 (match_operand 2 "const_int_operand")
3526 (match_operand 3 "const_int_operand")))]
3529 HOST_WIDE_INT width = INTVAL (operands[2]);
3530 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3532 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3533 && (bitpos % BITS_PER_UNIT) == 0)
3537 if (BYTES_BIG_ENDIAN)
3538 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3542 base_addr = adjust_address (operands[1], SImode,
3543 bitpos / BITS_PER_UNIT);
3544 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3548 rtx dest = operands[0];
3549 rtx tmp = gen_reg_rtx (SImode);
3551 /* We may get a paradoxical subreg here. Strip it off. */
3552 if (GET_CODE (dest) == SUBREG
3553 && GET_MODE (dest) == SImode
3554 && GET_MODE (SUBREG_REG (dest)) == HImode)
3555 dest = SUBREG_REG (dest);
3557 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3560 base_addr = adjust_address (operands[1], HImode,
3561 bitpos / BITS_PER_UNIT);
3562 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
3563 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3568 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3570 else if (GET_MODE (operands[0]) == SImode
3571 && GET_MODE (operands[1]) == SImode)
3573 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3581 ; Helper to expand register forms of extv with the proper modes.
3583 (define_expand "extv_regsi"
3584 [(set (match_operand:SI 0 "s_register_operand")
3585 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3586 (match_operand 2 "const_int_operand")
3587 (match_operand 3 "const_int_operand")))]
3592 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
3594 (define_insn "unaligned_loaddi"
3595 [(set (match_operand:DI 0 "s_register_operand" "=r")
3596 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3597 UNSPEC_UNALIGNED_LOAD))]
3598 "TARGET_32BIT && TARGET_LDRD"
3600 return output_move_double (operands, true, NULL);
3602 [(set_attr "length" "8")
3603 (set_attr "type" "load_8")])
3605 (define_insn "unaligned_loadsi"
3606 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3607 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3608 UNSPEC_UNALIGNED_LOAD))]
3611 ldr\t%0, %1\t@ unaligned
3612 ldr%?\t%0, %1\t@ unaligned
3613 ldr%?\t%0, %1\t@ unaligned"
3614 [(set_attr "arch" "t1,t2,32")
3615 (set_attr "length" "2,2,4")
3616 (set_attr "predicable" "no,yes,yes")
3617 (set_attr "predicable_short_it" "no,yes,no")
3618 (set_attr "type" "load_4")])
3620 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3621 ;; address (there's no immediate format). That's tricky to support
3622 ;; here and we don't really need this pattern for that case, so only
3623 ;; enable for 32-bit ISAs.
3624 (define_insn "unaligned_loadhis"
3625 [(set (match_operand:SI 0 "s_register_operand" "=r")
3627 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3628 UNSPEC_UNALIGNED_LOAD)))]
3629 "unaligned_access && TARGET_32BIT"
3630 "ldrsh%?\t%0, %1\t@ unaligned"
3631 [(set_attr "predicable" "yes")
3632 (set_attr "type" "load_byte")])
3634 (define_insn "unaligned_loadhiu"
3635 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3637 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3638 UNSPEC_UNALIGNED_LOAD)))]
3641 ldrh\t%0, %1\t@ unaligned
3642 ldrh%?\t%0, %1\t@ unaligned
3643 ldrh%?\t%0, %1\t@ unaligned"
3644 [(set_attr "arch" "t1,t2,32")
3645 (set_attr "length" "2,2,4")
3646 (set_attr "predicable" "no,yes,yes")
3647 (set_attr "predicable_short_it" "no,yes,no")
3648 (set_attr "type" "load_byte")])
3650 (define_insn "unaligned_storedi"
3651 [(set (match_operand:DI 0 "memory_operand" "=m")
3652 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3653 UNSPEC_UNALIGNED_STORE))]
3654 "TARGET_32BIT && TARGET_LDRD"
3656 return output_move_double (operands, true, NULL);
3658 [(set_attr "length" "8")
3659 (set_attr "type" "store_8")])
3661 (define_insn "unaligned_storesi"
3662 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3663 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3664 UNSPEC_UNALIGNED_STORE))]
3667 str\t%1, %0\t@ unaligned
3668 str%?\t%1, %0\t@ unaligned
3669 str%?\t%1, %0\t@ unaligned"
3670 [(set_attr "arch" "t1,t2,32")
3671 (set_attr "length" "2,2,4")
3672 (set_attr "predicable" "no,yes,yes")
3673 (set_attr "predicable_short_it" "no,yes,no")
3674 (set_attr "type" "store_4")])
3676 (define_insn "unaligned_storehi"
3677 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3678 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3679 UNSPEC_UNALIGNED_STORE))]
3682 strh\t%1, %0\t@ unaligned
3683 strh%?\t%1, %0\t@ unaligned
3684 strh%?\t%1, %0\t@ unaligned"
3685 [(set_attr "arch" "t1,t2,32")
3686 (set_attr "length" "2,2,4")
3687 (set_attr "predicable" "no,yes,yes")
3688 (set_attr "predicable_short_it" "no,yes,no")
3689 (set_attr "type" "store_4")])
3692 (define_insn "*extv_reg"
3693 [(set (match_operand:SI 0 "s_register_operand" "=r")
3694 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3695 (match_operand:SI 2 "const_int_operand" "n")
3696 (match_operand:SI 3 "const_int_operand" "n")))]
3698 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3699 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3700 "sbfx%?\t%0, %1, %3, %2"
3701 [(set_attr "length" "4")
3702 (set_attr "predicable" "yes")
3703 (set_attr "type" "bfm")]
3706 (define_insn "extzv_t2"
3707 [(set (match_operand:SI 0 "s_register_operand" "=r")
3708 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3709 (match_operand:SI 2 "const_int_operand" "n")
3710 (match_operand:SI 3 "const_int_operand" "n")))]
3712 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3713 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3714 "ubfx%?\t%0, %1, %3, %2"
3715 [(set_attr "length" "4")
3716 (set_attr "predicable" "yes")
3717 (set_attr "type" "bfm")]
3721 ;; Division instructions
3722 (define_insn "divsi3"
3723 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3724 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3725 (match_operand:SI 2 "s_register_operand" "r,r")))]
3730 [(set_attr "arch" "32,v8mb")
3731 (set_attr "predicable" "yes")
3732 (set_attr "type" "sdiv")]
3735 (define_insn "udivsi3"
3736 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3737 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3738 (match_operand:SI 2 "s_register_operand" "r,r")))]
3743 [(set_attr "arch" "32,v8mb")
3744 (set_attr "predicable" "yes")
3745 (set_attr "type" "udiv")]
3749 ;; Unary arithmetic insns
3751 (define_expand "negvsi3"
3752 [(match_operand:SI 0 "register_operand")
3753 (match_operand:SI 1 "register_operand")
3754 (match_operand 2 "")]
3757 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3758 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3763 (define_expand "negvdi3"
3764 [(match_operand:DI 0 "s_register_operand")
3765 (match_operand:DI 1 "s_register_operand")
3766 (match_operand 2 "")]
3769 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3770 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
3776 (define_insn "negdi2_compare"
3777 [(set (reg:CC CC_REGNUM)
3780 (match_operand:DI 1 "register_operand" "r,r")))
3781 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3782 (minus:DI (const_int 0) (match_dup 1)))]
3785 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3786 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3787 [(set_attr "conds" "set")
3788 (set_attr "arch" "a,t2")
3789 (set_attr "length" "8")
3790 (set_attr "type" "multiple")]
3793 (define_expand "negsi2"
3794 [(set (match_operand:SI 0 "s_register_operand")
3795 (neg:SI (match_operand:SI 1 "s_register_operand")))]
3800 (define_insn "*arm_negsi2"
3801 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3802 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3804 "rsb%?\\t%0, %1, #0"
3805 [(set_attr "predicable" "yes")
3806 (set_attr "predicable_short_it" "yes,no")
3807 (set_attr "arch" "t2,*")
3808 (set_attr "length" "4")
3809 (set_attr "type" "alu_imm")]
3812 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
3813 ;; rather than (0 cmp reg). This gives the same results for unsigned
3814 ;; and equality compares which is what we mostly need here.
3815 (define_insn "negsi2_0compare"
3816 [(set (reg:CC_RSB CC_REGNUM)
3817 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
3819 (set (match_operand:SI 0 "s_register_operand" "=l,r")
3820 (neg:SI (match_dup 1)))]
3825 [(set_attr "conds" "set")
3826 (set_attr "arch" "t2,*")
3827 (set_attr "length" "2,*")
3828 (set_attr "type" "alus_imm")]
3831 (define_insn "negsi2_carryin"
3832 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3833 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
3834 (match_operand:SI 2 "arm_borrow_operation" "")))]
3838 sbc\\t%0, %1, %1, lsl #1"
3839 [(set_attr "conds" "use")
3840 (set_attr "arch" "a,t2")
3841 (set_attr "type" "adc_imm,adc_reg")]
3844 (define_expand "negsf2"
3845 [(set (match_operand:SF 0 "s_register_operand")
3846 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3847 "TARGET_32BIT && TARGET_HARD_FLOAT"
3851 (define_expand "negdf2"
3852 [(set (match_operand:DF 0 "s_register_operand")
3853 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3854 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
3857 ;; abssi2 doesn't really clobber the condition codes if a different register
3858 ;; is being set. To keep things simple, assume during rtl manipulations that
3859 ;; it does, but tell the final scan operator the truth. Similarly for
3862 (define_expand "abssi2"
3864 [(set (match_operand:SI 0 "s_register_operand")
3865 (abs:SI (match_operand:SI 1 "s_register_operand")))
3866 (clobber (match_dup 2))])]
3870 operands[2] = gen_rtx_SCRATCH (SImode);
3872 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3875 (define_insn_and_split "*arm_abssi2"
3876 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3877 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3878 (clobber (reg:CC CC_REGNUM))]
3881 "&& reload_completed"
3884 /* if (which_alternative == 0) */
3885 if (REGNO(operands[0]) == REGNO(operands[1]))
3887 /* Emit the pattern:
3888 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3889 [(set (reg:CC CC_REGNUM)
3890 (compare:CC (match_dup 0) (const_int 0)))
3891 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3892 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3894 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3895 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3896 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3897 (gen_rtx_LT (SImode,
3898 gen_rtx_REG (CCmode, CC_REGNUM),
3900 (gen_rtx_SET (operands[0],
3901 (gen_rtx_MINUS (SImode,
3908 /* Emit the pattern:
3909 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3911 (xor:SI (match_dup 1)
3912 (ashiftrt:SI (match_dup 1) (const_int 31))))
3914 (minus:SI (match_dup 0)
3915 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3917 emit_insn (gen_rtx_SET (operands[0],
3918 gen_rtx_XOR (SImode,
3919 gen_rtx_ASHIFTRT (SImode,
3923 emit_insn (gen_rtx_SET (operands[0],
3924 gen_rtx_MINUS (SImode,
3926 gen_rtx_ASHIFTRT (SImode,
3932 [(set_attr "conds" "clob,*")
3933 (set_attr "shift" "1")
3934 (set_attr "predicable" "no, yes")
3935 (set_attr "length" "8")
3936 (set_attr "type" "multiple")]
3939 (define_insn_and_split "*arm_neg_abssi2"
3940 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3941 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3942 (clobber (reg:CC CC_REGNUM))]
3945 "&& reload_completed"
3948 /* if (which_alternative == 0) */
3949 if (REGNO (operands[0]) == REGNO (operands[1]))
3951 /* Emit the pattern:
3952 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3954 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3955 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3956 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3958 gen_rtx_REG (CCmode, CC_REGNUM),
3960 gen_rtx_SET (operands[0],
3961 (gen_rtx_MINUS (SImode,
3967 /* Emit the pattern:
3968 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
3970 emit_insn (gen_rtx_SET (operands[0],
3971 gen_rtx_XOR (SImode,
3972 gen_rtx_ASHIFTRT (SImode,
3976 emit_insn (gen_rtx_SET (operands[0],
3977 gen_rtx_MINUS (SImode,
3978 gen_rtx_ASHIFTRT (SImode,
3985 [(set_attr "conds" "clob,*")
3986 (set_attr "shift" "1")
3987 (set_attr "predicable" "no, yes")
3988 (set_attr "length" "8")
3989 (set_attr "type" "multiple")]
3992 (define_expand "abssf2"
3993 [(set (match_operand:SF 0 "s_register_operand")
3994 (abs:SF (match_operand:SF 1 "s_register_operand")))]
3995 "TARGET_32BIT && TARGET_HARD_FLOAT"
3998 (define_expand "absdf2"
3999 [(set (match_operand:DF 0 "s_register_operand")
4000 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4001 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4004 (define_expand "sqrtsf2"
4005 [(set (match_operand:SF 0 "s_register_operand")
4006 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4007 "TARGET_32BIT && TARGET_HARD_FLOAT"
4010 (define_expand "sqrtdf2"
4011 [(set (match_operand:DF 0 "s_register_operand")
4012 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4013 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4016 (define_expand "one_cmplsi2"
4017 [(set (match_operand:SI 0 "s_register_operand")
4018 (not:SI (match_operand:SI 1 "s_register_operand")))]
4023 (define_insn "*arm_one_cmplsi2"
4024 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4025 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4028 [(set_attr "predicable" "yes")
4029 (set_attr "predicable_short_it" "yes,no")
4030 (set_attr "arch" "t2,*")
4031 (set_attr "length" "4")
4032 (set_attr "type" "mvn_reg")]
4035 (define_insn "*notsi_compare0"
4036 [(set (reg:CC_NOOV CC_REGNUM)
4037 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4039 (set (match_operand:SI 0 "s_register_operand" "=r")
4040 (not:SI (match_dup 1)))]
4043 [(set_attr "conds" "set")
4044 (set_attr "type" "mvn_reg")]
4047 (define_insn "*notsi_compare0_scratch"
4048 [(set (reg:CC_NOOV CC_REGNUM)
4049 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4051 (clobber (match_scratch:SI 0 "=r"))]
4054 [(set_attr "conds" "set")
4055 (set_attr "type" "mvn_reg")]
4058 ;; Fixed <--> Floating conversion insns
4060 (define_expand "floatsihf2"
4061 [(set (match_operand:HF 0 "general_operand")
4062 (float:HF (match_operand:SI 1 "general_operand")))]
4066 rtx op1 = gen_reg_rtx (SFmode);
4067 expand_float (op1, operands[1], 0);
4068 op1 = convert_to_mode (HFmode, op1, 0);
4069 emit_move_insn (operands[0], op1);
4074 (define_expand "floatdihf2"
4075 [(set (match_operand:HF 0 "general_operand")
4076 (float:HF (match_operand:DI 1 "general_operand")))]
4080 rtx op1 = gen_reg_rtx (SFmode);
4081 expand_float (op1, operands[1], 0);
4082 op1 = convert_to_mode (HFmode, op1, 0);
4083 emit_move_insn (operands[0], op1);
4088 (define_expand "floatsisf2"
4089 [(set (match_operand:SF 0 "s_register_operand")
4090 (float:SF (match_operand:SI 1 "s_register_operand")))]
4091 "TARGET_32BIT && TARGET_HARD_FLOAT"
4095 (define_expand "floatsidf2"
4096 [(set (match_operand:DF 0 "s_register_operand")
4097 (float:DF (match_operand:SI 1 "s_register_operand")))]
4098 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4102 (define_expand "fix_trunchfsi2"
4103 [(set (match_operand:SI 0 "general_operand")
4104 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4108 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4109 expand_fix (operands[0], op1, 0);
4114 (define_expand "fix_trunchfdi2"
4115 [(set (match_operand:DI 0 "general_operand")
4116 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4120 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4121 expand_fix (operands[0], op1, 0);
4126 (define_expand "fix_truncsfsi2"
4127 [(set (match_operand:SI 0 "s_register_operand")
4128 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4129 "TARGET_32BIT && TARGET_HARD_FLOAT"
4133 (define_expand "fix_truncdfsi2"
4134 [(set (match_operand:SI 0 "s_register_operand")
4135 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4136 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4142 (define_expand "truncdfsf2"
4143 [(set (match_operand:SF 0 "s_register_operand")
4145 (match_operand:DF 1 "s_register_operand")))]
4146 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4150 ;; DFmode to HFmode conversions on targets without a single-step hardware
4151 ;; instruction for it would have to go through SFmode. This is dangerous
4152 ;; as it introduces double rounding.
4154 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4155 ;; a single-step instruction.
4157 (define_expand "truncdfhf2"
4158 [(set (match_operand:HF 0 "s_register_operand")
4160 (match_operand:DF 1 "s_register_operand")))]
4161 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4162 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4164 /* We don't have a direct instruction for this, so we must be in
4165 an unsafe math mode, and going via SFmode. */
4167 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4170 op1 = convert_to_mode (SFmode, operands[1], 0);
4171 op1 = convert_to_mode (HFmode, op1, 0);
4172 emit_move_insn (operands[0], op1);
4175 /* Otherwise, we will pick this up as a single instruction with
4176 no intermediary rounding. */
4180 ;; Zero and sign extension instructions.
4182 (define_expand "zero_extend<mode>di2"
4183 [(set (match_operand:DI 0 "s_register_operand" "")
4184 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4185 "TARGET_32BIT <qhs_zextenddi_cond>"
4187 rtx res_lo, res_hi, op0_lo, op0_hi;
4188 res_lo = gen_lowpart (SImode, operands[0]);
4189 res_hi = gen_highpart (SImode, operands[0]);
4190 if (can_create_pseudo_p ())
4192 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4193 op0_hi = gen_reg_rtx (SImode);
4197 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4200 if (<MODE>mode != SImode)
4201 emit_insn (gen_rtx_SET (op0_lo,
4202 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4203 emit_insn (gen_movsi (op0_hi, const0_rtx));
4204 if (res_lo != op0_lo)
4205 emit_move_insn (res_lo, op0_lo);
4206 if (res_hi != op0_hi)
4207 emit_move_insn (res_hi, op0_hi);
4212 (define_expand "extend<mode>di2"
4213 [(set (match_operand:DI 0 "s_register_operand" "")
4214 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4215 "TARGET_32BIT <qhs_sextenddi_cond>"
4217 rtx res_lo, res_hi, op0_lo, op0_hi;
4218 res_lo = gen_lowpart (SImode, operands[0]);
4219 res_hi = gen_highpart (SImode, operands[0]);
4220 if (can_create_pseudo_p ())
4222 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4223 op0_hi = gen_reg_rtx (SImode);
4227 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4230 if (<MODE>mode != SImode)
4231 emit_insn (gen_rtx_SET (op0_lo,
4232 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4233 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4234 if (res_lo != op0_lo)
4235 emit_move_insn (res_lo, op0_lo);
4236 if (res_hi != op0_hi)
4237 emit_move_insn (res_hi, op0_hi);
4242 ;; Splits for all extensions to DImode
4244 [(set (match_operand:DI 0 "s_register_operand" "")
4245 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4247 [(set (match_dup 0) (match_dup 1))]
4249 rtx lo_part = gen_lowpart (SImode, operands[0]);
4250 machine_mode src_mode = GET_MODE (operands[1]);
4252 if (src_mode == SImode)
4253 emit_move_insn (lo_part, operands[1]);
4255 emit_insn (gen_rtx_SET (lo_part,
4256 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4257 operands[0] = gen_highpart (SImode, operands[0]);
4258 operands[1] = const0_rtx;
4262 [(set (match_operand:DI 0 "s_register_operand" "")
4263 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4265 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4267 rtx lo_part = gen_lowpart (SImode, operands[0]);
4268 machine_mode src_mode = GET_MODE (operands[1]);
4270 if (src_mode == SImode)
4271 emit_move_insn (lo_part, operands[1]);
4273 emit_insn (gen_rtx_SET (lo_part,
4274 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4275 operands[1] = lo_part;
4276 operands[0] = gen_highpart (SImode, operands[0]);
4279 (define_expand "zero_extendhisi2"
4280 [(set (match_operand:SI 0 "s_register_operand")
4281 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4284 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4286 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4289 if (!arm_arch6 && !MEM_P (operands[1]))
4291 rtx t = gen_lowpart (SImode, operands[1]);
4292 rtx tmp = gen_reg_rtx (SImode);
4293 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4294 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4300 [(set (match_operand:SI 0 "s_register_operand" "")
4301 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4302 "!TARGET_THUMB2 && !arm_arch6"
4303 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4304 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4306 operands[2] = gen_lowpart (SImode, operands[1]);
4309 (define_insn "*arm_zero_extendhisi2"
4310 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4311 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4312 "TARGET_ARM && arm_arch4 && !arm_arch6"
4316 [(set_attr "type" "alu_shift_reg,load_byte")
4317 (set_attr "predicable" "yes")]
4320 (define_insn "*arm_zero_extendhisi2_v6"
4321 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4322 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4323 "TARGET_ARM && arm_arch6"
4327 [(set_attr "predicable" "yes")
4328 (set_attr "type" "extend,load_byte")]
4331 (define_insn "*arm_zero_extendhisi2addsi"
4332 [(set (match_operand:SI 0 "s_register_operand" "=r")
4333 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4334 (match_operand:SI 2 "s_register_operand" "r")))]
4336 "uxtah%?\\t%0, %2, %1"
4337 [(set_attr "type" "alu_shift_reg")
4338 (set_attr "predicable" "yes")]
4341 (define_expand "zero_extendqisi2"
4342 [(set (match_operand:SI 0 "s_register_operand")
4343 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4346 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4348 emit_insn (gen_andsi3 (operands[0],
4349 gen_lowpart (SImode, operands[1]),
4353 if (!arm_arch6 && !MEM_P (operands[1]))
4355 rtx t = gen_lowpart (SImode, operands[1]);
4356 rtx tmp = gen_reg_rtx (SImode);
4357 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4358 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4364 [(set (match_operand:SI 0 "s_register_operand" "")
4365 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4367 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4368 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4370 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4373 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4378 (define_insn "*arm_zero_extendqisi2"
4379 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4380 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4381 "TARGET_ARM && !arm_arch6"
4384 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4385 [(set_attr "length" "8,4")
4386 (set_attr "type" "alu_shift_reg,load_byte")
4387 (set_attr "predicable" "yes")]
4390 (define_insn "*arm_zero_extendqisi2_v6"
4391 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4392 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4393 "TARGET_ARM && arm_arch6"
4396 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4397 [(set_attr "type" "extend,load_byte")
4398 (set_attr "predicable" "yes")]
4401 (define_insn "*arm_zero_extendqisi2addsi"
4402 [(set (match_operand:SI 0 "s_register_operand" "=r")
4403 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4404 (match_operand:SI 2 "s_register_operand" "r")))]
4406 "uxtab%?\\t%0, %2, %1"
4407 [(set_attr "predicable" "yes")
4408 (set_attr "type" "alu_shift_reg")]
4412 [(set (match_operand:SI 0 "s_register_operand" "")
4413 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4414 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4415 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4416 [(set (match_dup 2) (match_dup 1))
4417 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4422 [(set (match_operand:SI 0 "s_register_operand" "")
4423 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4424 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4425 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4426 [(set (match_dup 2) (match_dup 1))
4427 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4433 [(set (match_operand:SI 0 "s_register_operand" "")
4434 (IOR_XOR:SI (and:SI (ashift:SI
4435 (match_operand:SI 1 "s_register_operand" "")
4436 (match_operand:SI 2 "const_int_operand" ""))
4437 (match_operand:SI 3 "const_int_operand" ""))
4439 (match_operator 5 "subreg_lowpart_operator"
4440 [(match_operand:SI 4 "s_register_operand" "")]))))]
4442 && (UINTVAL (operands[3])
4443 == (GET_MODE_MASK (GET_MODE (operands[5]))
4444 & (GET_MODE_MASK (GET_MODE (operands[5]))
4445 << (INTVAL (operands[2])))))"
4446 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4448 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4449 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4452 (define_insn "*compareqi_eq0"
4453 [(set (reg:CC_Z CC_REGNUM)
4454 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4458 [(set_attr "conds" "set")
4459 (set_attr "predicable" "yes")
4460 (set_attr "type" "logic_imm")]
4463 (define_expand "extendhisi2"
4464 [(set (match_operand:SI 0 "s_register_operand")
4465 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4470 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4473 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4475 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4479 if (!arm_arch6 && !MEM_P (operands[1]))
4481 rtx t = gen_lowpart (SImode, operands[1]);
4482 rtx tmp = gen_reg_rtx (SImode);
4483 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4484 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4491 [(set (match_operand:SI 0 "register_operand" "")
4492 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4493 (clobber (match_scratch:SI 2 ""))])]
4495 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4496 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4498 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4501 ;; This pattern will only be used when ldsh is not available
4502 (define_expand "extendhisi2_mem"
4503 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4505 (zero_extend:SI (match_dup 7)))
4506 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4507 (set (match_operand:SI 0 "" "")
4508 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4513 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4515 mem1 = change_address (operands[1], QImode, addr);
4516 mem2 = change_address (operands[1], QImode,
4517 plus_constant (Pmode, addr, 1));
4518 operands[0] = gen_lowpart (SImode, operands[0]);
4520 operands[2] = gen_reg_rtx (SImode);
4521 operands[3] = gen_reg_rtx (SImode);
4522 operands[6] = gen_reg_rtx (SImode);
4525 if (BYTES_BIG_ENDIAN)
4527 operands[4] = operands[2];
4528 operands[5] = operands[3];
4532 operands[4] = operands[3];
4533 operands[5] = operands[2];
4539 [(set (match_operand:SI 0 "register_operand" "")
4540 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4542 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4543 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4545 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4548 (define_insn "*arm_extendhisi2"
4549 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4550 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4551 "TARGET_ARM && arm_arch4 && !arm_arch6"
4555 [(set_attr "length" "8,4")
4556 (set_attr "type" "alu_shift_reg,load_byte")
4557 (set_attr "predicable" "yes")]
4560 ;; ??? Check Thumb-2 pool range
4561 (define_insn "*arm_extendhisi2_v6"
4562 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4563 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4564 "TARGET_32BIT && arm_arch6"
4568 [(set_attr "type" "extend,load_byte")
4569 (set_attr "predicable" "yes")]
4572 (define_insn "*arm_extendhisi2addsi"
4573 [(set (match_operand:SI 0 "s_register_operand" "=r")
4574 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4575 (match_operand:SI 2 "s_register_operand" "r")))]
4577 "sxtah%?\\t%0, %2, %1"
4578 [(set_attr "type" "alu_shift_reg")]
4581 (define_expand "extendqihi2"
4583 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4585 (set (match_operand:HI 0 "s_register_operand")
4586 (ashiftrt:SI (match_dup 2)
4591 if (arm_arch4 && MEM_P (operands[1]))
4593 emit_insn (gen_rtx_SET (operands[0],
4594 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4597 if (!s_register_operand (operands[1], QImode))
4598 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4599 operands[0] = gen_lowpart (SImode, operands[0]);
4600 operands[1] = gen_lowpart (SImode, operands[1]);
4601 operands[2] = gen_reg_rtx (SImode);
4605 (define_insn "*arm_extendqihi_insn"
4606 [(set (match_operand:HI 0 "s_register_operand" "=r")
4607 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4608 "TARGET_ARM && arm_arch4"
4610 [(set_attr "type" "load_byte")
4611 (set_attr "predicable" "yes")]
4614 (define_expand "extendqisi2"
4615 [(set (match_operand:SI 0 "s_register_operand")
4616 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4619 if (!arm_arch4 && MEM_P (operands[1]))
4620 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4622 if (!arm_arch6 && !MEM_P (operands[1]))
4624 rtx t = gen_lowpart (SImode, operands[1]);
4625 rtx tmp = gen_reg_rtx (SImode);
4626 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4627 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4633 [(set (match_operand:SI 0 "register_operand" "")
4634 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4636 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4637 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4639 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4642 (define_insn "*arm_extendqisi"
4643 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4644 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4645 "TARGET_ARM && arm_arch4 && !arm_arch6"
4649 [(set_attr "length" "8,4")
4650 (set_attr "type" "alu_shift_reg,load_byte")
4651 (set_attr "predicable" "yes")]
4654 (define_insn "*arm_extendqisi_v6"
4655 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4657 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4658 "TARGET_ARM && arm_arch6"
4662 [(set_attr "type" "extend,load_byte")
4663 (set_attr "predicable" "yes")]
4666 (define_insn "*arm_extendqisi2addsi"
4667 [(set (match_operand:SI 0 "s_register_operand" "=r")
4668 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4669 (match_operand:SI 2 "s_register_operand" "r")))]
4671 "sxtab%?\\t%0, %2, %1"
4672 [(set_attr "type" "alu_shift_reg")
4673 (set_attr "predicable" "yes")]
4676 (define_insn "arm_<sup>xtb16"
4677 [(set (match_operand:SI 0 "s_register_operand" "=r")
4679 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4681 "<sup>xtb16%?\\t%0, %1"
4682 [(set_attr "predicable" "yes")
4683 (set_attr "type" "alu_dsp_reg")])
4685 (define_insn "arm_<simd32_op>"
4686 [(set (match_operand:SI 0 "s_register_operand" "=r")
4688 [(match_operand:SI 1 "s_register_operand" "r")
4689 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4691 "<simd32_op>%?\\t%0, %1, %2"
4692 [(set_attr "predicable" "yes")
4693 (set_attr "type" "alu_dsp_reg")])
4695 (define_insn "arm_usada8"
4696 [(set (match_operand:SI 0 "s_register_operand" "=r")
4698 [(match_operand:SI 1 "s_register_operand" "r")
4699 (match_operand:SI 2 "s_register_operand" "r")
4700 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4702 "usada8%?\\t%0, %1, %2, %3"
4703 [(set_attr "predicable" "yes")
4704 (set_attr "type" "alu_dsp_reg")])
4706 (define_insn "arm_<simd32_op>"
4707 [(set (match_operand:DI 0 "s_register_operand" "=r")
4709 [(match_operand:SI 1 "s_register_operand" "r")
4710 (match_operand:SI 2 "s_register_operand" "r")
4711 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4713 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4714 [(set_attr "predicable" "yes")
4715 (set_attr "type" "smlald")])
4717 (define_expand "extendsfdf2"
4718 [(set (match_operand:DF 0 "s_register_operand")
4719 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4720 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4724 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4725 ;; must go through SFmode.
4727 ;; This is always safe for an extend.
4729 (define_expand "extendhfdf2"
4730 [(set (match_operand:DF 0 "s_register_operand")
4731 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4734 /* We don't have a direct instruction for this, so go via SFmode. */
4735 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4738 op1 = convert_to_mode (SFmode, operands[1], 0);
4739 op1 = convert_to_mode (DFmode, op1, 0);
4740 emit_insn (gen_movdf (operands[0], op1));
4743 /* Otherwise, we're done producing RTL and will pick up the correct
4744 pattern to do this with one rounding-step in a single instruction. */
4748 ;; Move insns (including loads and stores)
4750 ;; XXX Just some ideas about movti.
4751 ;; I don't think these are a good idea on the arm, there just aren't enough
4753 ;;(define_expand "loadti"
4754 ;; [(set (match_operand:TI 0 "s_register_operand")
4755 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4758 ;;(define_expand "storeti"
4759 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4760 ;; (match_operand:TI 1 "s_register_operand"))]
4763 ;;(define_expand "movti"
4764 ;; [(set (match_operand:TI 0 "general_operand")
4765 ;; (match_operand:TI 1 "general_operand"))]
4771 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4772 ;; operands[1] = copy_to_reg (operands[1]);
4773 ;; if (MEM_P (operands[0]))
4774 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4775 ;; else if (MEM_P (operands[1]))
4776 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4780 ;; emit_insn (insn);
4784 ;; Recognize garbage generated above.
4787 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4788 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4792 ;; register mem = (which_alternative < 3);
4793 ;; register const char *template;
4795 ;; operands[mem] = XEXP (operands[mem], 0);
4796 ;; switch (which_alternative)
4798 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4799 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4800 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4801 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4802 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4803 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4805 ;; output_asm_insn (template, operands);
4809 (define_expand "movdi"
4810 [(set (match_operand:DI 0 "general_operand")
4811 (match_operand:DI 1 "general_operand"))]
4814 gcc_checking_assert (aligned_operand (operands[0], DImode));
4815 gcc_checking_assert (aligned_operand (operands[1], DImode));
4816 if (can_create_pseudo_p ())
4818 if (!REG_P (operands[0]))
4819 operands[1] = force_reg (DImode, operands[1]);
4821 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4822 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4824 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4825 when expanding function calls. */
4826 gcc_assert (can_create_pseudo_p ());
4827 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4829 /* Perform load into legal reg pair first, then move. */
4830 rtx reg = gen_reg_rtx (DImode);
4831 emit_insn (gen_movdi (reg, operands[1]));
4834 emit_move_insn (gen_lowpart (SImode, operands[0]),
4835 gen_lowpart (SImode, operands[1]));
4836 emit_move_insn (gen_highpart (SImode, operands[0]),
4837 gen_highpart (SImode, operands[1]));
4840 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4841 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4843 /* Avoid STRD's from an odd-numbered register pair in ARM state
4844 when expanding function prologue. */
4845 gcc_assert (can_create_pseudo_p ());
4846 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4847 ? gen_reg_rtx (DImode)
4849 emit_move_insn (gen_lowpart (SImode, split_dest),
4850 gen_lowpart (SImode, operands[1]));
4851 emit_move_insn (gen_highpart (SImode, split_dest),
4852 gen_highpart (SImode, operands[1]));
4853 if (split_dest != operands[0])
4854 emit_insn (gen_movdi (operands[0], split_dest));
4860 (define_insn "*arm_movdi"
4861 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4862 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4864 && !(TARGET_HARD_FLOAT)
4866 && ( register_operand (operands[0], DImode)
4867 || register_operand (operands[1], DImode))"
4869 switch (which_alternative)
4876 /* Cannot load it directly, split to load it via MOV / MOVT. */
4877 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4881 return output_move_double (operands, true, NULL);
4884 [(set_attr "length" "8,12,16,8,8")
4885 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4886 (set_attr "arm_pool_range" "*,*,*,1020,*")
4887 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4888 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4889 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4893 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4894 (match_operand:ANY64 1 "immediate_operand" ""))]
4897 && (arm_disable_literal_pool
4898 || (arm_const_double_inline_cost (operands[1])
4899 <= arm_max_const_double_inline_cost ()))"
4902 arm_split_constant (SET, SImode, curr_insn,
4903 INTVAL (gen_lowpart (SImode, operands[1])),
4904 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4905 arm_split_constant (SET, SImode, curr_insn,
4906 INTVAL (gen_highpart_mode (SImode,
4907 GET_MODE (operands[0]),
4909 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4914 ; If optimizing for size, or if we have load delay slots, then
4915 ; we want to split the constant into two separate operations.
4916 ; In both cases this may split a trivial part into a single data op
4917 ; leaving a single complex constant to load. We can also get longer
4918 ; offsets in a LDR which means we get better chances of sharing the pool
4919 ; entries. Finally, we can normally do a better job of scheduling
4920 ; LDR instructions than we can with LDM.
4921 ; This pattern will only match if the one above did not.
4923 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4924 (match_operand:ANY64 1 "const_double_operand" ""))]
4925 "TARGET_ARM && reload_completed
4926 && arm_const_double_by_parts (operands[1])"
4927 [(set (match_dup 0) (match_dup 1))
4928 (set (match_dup 2) (match_dup 3))]
4930 operands[2] = gen_highpart (SImode, operands[0]);
4931 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4933 operands[0] = gen_lowpart (SImode, operands[0]);
4934 operands[1] = gen_lowpart (SImode, operands[1]);
4939 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4940 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4941 "TARGET_EITHER && reload_completed"
4942 [(set (match_dup 0) (match_dup 1))
4943 (set (match_dup 2) (match_dup 3))]
4945 operands[2] = gen_highpart (SImode, operands[0]);
4946 operands[3] = gen_highpart (SImode, operands[1]);
4947 operands[0] = gen_lowpart (SImode, operands[0]);
4948 operands[1] = gen_lowpart (SImode, operands[1]);
4950 /* Handle a partial overlap. */
4951 if (rtx_equal_p (operands[0], operands[3]))
4953 rtx tmp0 = operands[0];
4954 rtx tmp1 = operands[1];
4956 operands[0] = operands[2];
4957 operands[1] = operands[3];
4964 ;; We can't actually do base+index doubleword loads if the index and
4965 ;; destination overlap. Split here so that we at least have chance to
4968 [(set (match_operand:DI 0 "s_register_operand" "")
4969 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4970 (match_operand:SI 2 "s_register_operand" ""))))]
4972 && reg_overlap_mentioned_p (operands[0], operands[1])
4973 && reg_overlap_mentioned_p (operands[0], operands[2])"
4975 (plus:SI (match_dup 1)
4978 (mem:DI (match_dup 4)))]
4980 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4984 (define_expand "movsi"
4985 [(set (match_operand:SI 0 "general_operand")
4986 (match_operand:SI 1 "general_operand"))]
4990 rtx base, offset, tmp;
4992 gcc_checking_assert (aligned_operand (operands[0], SImode));
4993 gcc_checking_assert (aligned_operand (operands[1], SImode));
4994 if (TARGET_32BIT || TARGET_HAVE_MOVT)
4996 /* Everything except mem = const or mem = mem can be done easily. */
4997 if (MEM_P (operands[0]))
4998 operands[1] = force_reg (SImode, operands[1]);
4999 if (arm_general_register_operand (operands[0], SImode)
5000 && CONST_INT_P (operands[1])
5001 && !(const_ok_for_arm (INTVAL (operands[1]))
5002 || const_ok_for_arm (~INTVAL (operands[1]))))
5004 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5006 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5011 arm_split_constant (SET, SImode, NULL_RTX,
5012 INTVAL (operands[1]), operands[0], NULL_RTX,
5013 optimize && can_create_pseudo_p ());
5018 else /* Target doesn't have MOVT... */
5020 if (can_create_pseudo_p ())
5022 if (!REG_P (operands[0]))
5023 operands[1] = force_reg (SImode, operands[1]);
5027 split_const (operands[1], &base, &offset);
5028 if (INTVAL (offset) != 0
5029 && targetm.cannot_force_const_mem (SImode, operands[1]))
5031 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5032 emit_move_insn (tmp, base);
5033 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5037 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5039 /* Recognize the case where operand[1] is a reference to thread-local
5040 data and load its address to a register. Offsets have been split off
5042 if (arm_tls_referenced_p (operands[1]))
5043 operands[1] = legitimize_tls_address (operands[1], tmp);
5045 && (CONSTANT_P (operands[1])
5046 || symbol_mentioned_p (operands[1])
5047 || label_mentioned_p (operands[1])))
5049 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5054 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5055 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5056 ;; so this does not matter.
5057 (define_insn "*arm_movt"
5058 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5059 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5060 (match_operand:SI 2 "general_operand" "i,i")))]
5061 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5063 movt%?\t%0, #:upper16:%c2
5064 movt\t%0, #:upper16:%c2"
5065 [(set_attr "arch" "32,v8mb")
5066 (set_attr "predicable" "yes")
5067 (set_attr "length" "4")
5068 (set_attr "type" "alu_sreg")]
5071 (define_insn "*arm_movsi_insn"
5072 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5073 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5074 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5075 && ( register_operand (operands[0], SImode)
5076 || register_operand (operands[1], SImode))"
5084 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5085 (set_attr "predicable" "yes")
5086 (set_attr "arch" "*,*,*,v6t2,*,*")
5087 (set_attr "pool_range" "*,*,*,*,4096,*")
5088 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5092 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5093 (match_operand:SI 1 "const_int_operand" ""))]
5094 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5095 && (!(const_ok_for_arm (INTVAL (operands[1]))
5096 || const_ok_for_arm (~INTVAL (operands[1]))))"
5097 [(clobber (const_int 0))]
5099 arm_split_constant (SET, SImode, NULL_RTX,
5100 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5105 ;; A normal way to do (symbol + offset) requires three instructions at least
5106 ;; (depends on how big the offset is) as below:
5107 ;; movw r0, #:lower16:g
5108 ;; movw r0, #:upper16:g
5111 ;; A better way would be:
5112 ;; movw r0, #:lower16:g+4
5113 ;; movw r0, #:upper16:g+4
5115 ;; The limitation of this way is that the length of offset should be a 16-bit
5116 ;; signed value, because current assembler only supports REL type relocation for
5117 ;; such case. If the more powerful RELA type is supported in future, we should
5118 ;; update this pattern to go with better way.
5120 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5121 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5122 (match_operand:SI 2 "const_int_operand" ""))))]
5125 && arm_disable_literal_pool
5127 && GET_CODE (operands[1]) == SYMBOL_REF"
5128 [(clobber (const_int 0))]
5130 int offset = INTVAL (operands[2]);
5132 if (offset < -0x8000 || offset > 0x7fff)
5134 arm_emit_movpair (operands[0], operands[1]);
5135 emit_insn (gen_rtx_SET (operands[0],
5136 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5140 rtx op = gen_rtx_CONST (SImode,
5141 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5142 arm_emit_movpair (operands[0], op);
5147 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5148 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5149 ;; and lo_sum would be merged back into memory load at cprop. However,
5150 ;; if the default is to prefer movt/movw rather than a load from the constant
5151 ;; pool, the performance is better.
5153 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5154 (match_operand:SI 1 "general_operand" ""))]
5155 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5156 && !target_word_relocations
5157 && !arm_tls_referenced_p (operands[1])"
5158 [(clobber (const_int 0))]
5160 arm_emit_movpair (operands[0], operands[1]);
5164 ;; When generating pic, we need to load the symbol offset into a register.
5165 ;; So that the optimizer does not confuse this with a normal symbol load
5166 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5167 ;; since that is the only type of relocation we can use.
5169 ;; Wrap calculation of the whole PIC address in a single pattern for the
5170 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5171 ;; a PIC address involves two loads from memory, so we want to CSE it
5172 ;; as often as possible.
5173 ;; This pattern will be split into one of the pic_load_addr_* patterns
5174 ;; and a move after GCSE optimizations.
5176 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5177 (define_expand "calculate_pic_address"
5178 [(set (match_operand:SI 0 "register_operand")
5179 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5180 (unspec:SI [(match_operand:SI 2 "" "")]
5185 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5187 [(set (match_operand:SI 0 "register_operand" "")
5188 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5189 (unspec:SI [(match_operand:SI 2 "" "")]
5192 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5193 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5194 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5197 ;; operand1 is the memory address to go into
5198 ;; pic_load_addr_32bit.
5199 ;; operand2 is the PIC label to be emitted
5200 ;; from pic_add_dot_plus_eight.
5201 ;; We do this to allow hoisting of the entire insn.
5202 (define_insn_and_split "pic_load_addr_unified"
5203 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5204 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5205 (match_operand:SI 2 "" "")]
5206 UNSPEC_PIC_UNIFIED))]
5209 "&& reload_completed"
5210 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5211 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5212 (match_dup 2)] UNSPEC_PIC_BASE))]
5213 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5214 [(set_attr "type" "load_4,load_4,load_4")
5215 (set_attr "pool_range" "4096,4094,1022")
5216 (set_attr "neg_pool_range" "4084,0,0")
5217 (set_attr "arch" "a,t2,t1")
5218 (set_attr "length" "8,6,4")]
5221 ;; The rather odd constraints on the following are to force reload to leave
5222 ;; the insn alone, and to force the minipool generation pass to then move
5223 ;; the GOT symbol to memory.
5225 (define_insn "pic_load_addr_32bit"
5226 [(set (match_operand:SI 0 "s_register_operand" "=r")
5227 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5228 "TARGET_32BIT && flag_pic"
5230 [(set_attr "type" "load_4")
5231 (set (attr "pool_range")
5232 (if_then_else (eq_attr "is_thumb" "no")
5235 (set (attr "neg_pool_range")
5236 (if_then_else (eq_attr "is_thumb" "no")
5241 (define_insn "pic_load_addr_thumb1"
5242 [(set (match_operand:SI 0 "s_register_operand" "=l")
5243 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5244 "TARGET_THUMB1 && flag_pic"
5246 [(set_attr "type" "load_4")
5247 (set (attr "pool_range") (const_int 1018))]
5250 (define_insn "pic_add_dot_plus_four"
5251 [(set (match_operand:SI 0 "register_operand" "=r")
5252 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5254 (match_operand 2 "" "")]
5258 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5259 INTVAL (operands[2]));
5260 return \"add\\t%0, %|pc\";
5262 [(set_attr "length" "2")
5263 (set_attr "type" "alu_sreg")]
5266 (define_insn "pic_add_dot_plus_eight"
5267 [(set (match_operand:SI 0 "register_operand" "=r")
5268 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5270 (match_operand 2 "" "")]
5274 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5275 INTVAL (operands[2]));
5276 return \"add%?\\t%0, %|pc, %1\";
5278 [(set_attr "predicable" "yes")
5279 (set_attr "type" "alu_sreg")]
5282 (define_insn "tls_load_dot_plus_eight"
5283 [(set (match_operand:SI 0 "register_operand" "=r")
5284 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5286 (match_operand 2 "" "")]
5290 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5291 INTVAL (operands[2]));
5292 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5294 [(set_attr "predicable" "yes")
5295 (set_attr "type" "load_4")]
5298 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5299 ;; followed by a load. These sequences can be crunched down to
5300 ;; tls_load_dot_plus_eight by a peephole.
5303 [(set (match_operand:SI 0 "register_operand" "")
5304 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5306 (match_operand 1 "" "")]
5308 (set (match_operand:SI 2 "arm_general_register_operand" "")
5309 (mem:SI (match_dup 0)))]
5310 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5312 (mem:SI (unspec:SI [(match_dup 3)
5319 (define_insn "pic_offset_arm"
5320 [(set (match_operand:SI 0 "register_operand" "=r")
5321 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5322 (unspec:SI [(match_operand:SI 2 "" "X")]
5323 UNSPEC_PIC_OFFSET))))]
5324 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5325 "ldr%?\\t%0, [%1,%2]"
5326 [(set_attr "type" "load_4")]
5329 (define_expand "builtin_setjmp_receiver"
5330 [(label_ref (match_operand 0 "" ""))]
5334 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5336 if (arm_pic_register != INVALID_REGNUM)
5337 arm_load_pic_register (1UL << 3, NULL_RTX);
5341 ;; If copying one reg to another we can set the condition codes according to
5342 ;; its value. Such a move is common after a return from subroutine and the
5343 ;; result is being tested against zero.
5345 (define_insn "*movsi_compare0"
5346 [(set (reg:CC CC_REGNUM)
5347 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5349 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5354 subs%?\\t%0, %1, #0"
5355 [(set_attr "conds" "set")
5356 (set_attr "type" "alus_imm,alus_imm")]
5359 ;; Subroutine to store a half word from a register into memory.
5360 ;; Operand 0 is the source register (HImode)
5361 ;; Operand 1 is the destination address in a register (SImode)
5363 ;; In both this routine and the next, we must be careful not to spill
5364 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5365 ;; can generate unrecognizable rtl.
5367 (define_expand "storehi"
5368 [;; store the low byte
5369 (set (match_operand 1 "" "") (match_dup 3))
5370 ;; extract the high byte
5372 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5373 ;; store the high byte
5374 (set (match_dup 4) (match_dup 5))]
5378 rtx op1 = operands[1];
5379 rtx addr = XEXP (op1, 0);
5380 enum rtx_code code = GET_CODE (addr);
5382 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5384 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5386 operands[4] = adjust_address (op1, QImode, 1);
5387 operands[1] = adjust_address (operands[1], QImode, 0);
5388 operands[3] = gen_lowpart (QImode, operands[0]);
5389 operands[0] = gen_lowpart (SImode, operands[0]);
5390 operands[2] = gen_reg_rtx (SImode);
5391 operands[5] = gen_lowpart (QImode, operands[2]);
5395 (define_expand "storehi_bigend"
5396 [(set (match_dup 4) (match_dup 3))
5398 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5399 (set (match_operand 1 "" "") (match_dup 5))]
5403 rtx op1 = operands[1];
5404 rtx addr = XEXP (op1, 0);
5405 enum rtx_code code = GET_CODE (addr);
5407 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5409 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5411 operands[4] = adjust_address (op1, QImode, 1);
5412 operands[1] = adjust_address (operands[1], QImode, 0);
5413 operands[3] = gen_lowpart (QImode, operands[0]);
5414 operands[0] = gen_lowpart (SImode, operands[0]);
5415 operands[2] = gen_reg_rtx (SImode);
5416 operands[5] = gen_lowpart (QImode, operands[2]);
5420 ;; Subroutine to store a half word integer constant into memory.
5421 (define_expand "storeinthi"
5422 [(set (match_operand 0 "" "")
5423 (match_operand 1 "" ""))
5424 (set (match_dup 3) (match_dup 2))]
5428 HOST_WIDE_INT value = INTVAL (operands[1]);
5429 rtx addr = XEXP (operands[0], 0);
5430 rtx op0 = operands[0];
5431 enum rtx_code code = GET_CODE (addr);
5433 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5435 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5437 operands[1] = gen_reg_rtx (SImode);
5438 if (BYTES_BIG_ENDIAN)
5440 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5441 if ((value & 255) == ((value >> 8) & 255))
5442 operands[2] = operands[1];
5445 operands[2] = gen_reg_rtx (SImode);
5446 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5451 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5452 if ((value & 255) == ((value >> 8) & 255))
5453 operands[2] = operands[1];
5456 operands[2] = gen_reg_rtx (SImode);
5457 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5461 operands[3] = adjust_address (op0, QImode, 1);
5462 operands[0] = adjust_address (operands[0], QImode, 0);
5463 operands[2] = gen_lowpart (QImode, operands[2]);
5464 operands[1] = gen_lowpart (QImode, operands[1]);
5468 (define_expand "storehi_single_op"
5469 [(set (match_operand:HI 0 "memory_operand")
5470 (match_operand:HI 1 "general_operand"))]
5471 "TARGET_32BIT && arm_arch4"
5473 if (!s_register_operand (operands[1], HImode))
5474 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5478 (define_expand "movhi"
5479 [(set (match_operand:HI 0 "general_operand")
5480 (match_operand:HI 1 "general_operand"))]
5483 gcc_checking_assert (aligned_operand (operands[0], HImode));
5484 gcc_checking_assert (aligned_operand (operands[1], HImode));
5487 if (can_create_pseudo_p ())
5489 if (MEM_P (operands[0]))
5493 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5496 if (CONST_INT_P (operands[1]))
5497 emit_insn (gen_storeinthi (operands[0], operands[1]));
5500 if (MEM_P (operands[1]))
5501 operands[1] = force_reg (HImode, operands[1]);
5502 if (BYTES_BIG_ENDIAN)
5503 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5505 emit_insn (gen_storehi (operands[1], operands[0]));
5509 /* Sign extend a constant, and keep it in an SImode reg. */
5510 else if (CONST_INT_P (operands[1]))
5512 rtx reg = gen_reg_rtx (SImode);
5513 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5515 /* If the constant is already valid, leave it alone. */
5516 if (!const_ok_for_arm (val))
5518 /* If setting all the top bits will make the constant
5519 loadable in a single instruction, then set them.
5520 Otherwise, sign extend the number. */
5522 if (const_ok_for_arm (~(val | ~0xffff)))
5524 else if (val & 0x8000)
5528 emit_insn (gen_movsi (reg, GEN_INT (val)));
5529 operands[1] = gen_lowpart (HImode, reg);
5531 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5532 && MEM_P (operands[1]))
5534 rtx reg = gen_reg_rtx (SImode);
5536 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5537 operands[1] = gen_lowpart (HImode, reg);
5539 else if (!arm_arch4)
5541 if (MEM_P (operands[1]))
5544 rtx offset = const0_rtx;
5545 rtx reg = gen_reg_rtx (SImode);
5547 if ((REG_P (base = XEXP (operands[1], 0))
5548 || (GET_CODE (base) == PLUS
5549 && (CONST_INT_P (offset = XEXP (base, 1)))
5550 && ((INTVAL(offset) & 1) != 1)
5551 && REG_P (base = XEXP (base, 0))))
5552 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5556 new_rtx = widen_memory_access (operands[1], SImode,
5557 ((INTVAL (offset) & ~3)
5558 - INTVAL (offset)));
5559 emit_insn (gen_movsi (reg, new_rtx));
5560 if (((INTVAL (offset) & 2) != 0)
5561 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5563 rtx reg2 = gen_reg_rtx (SImode);
5565 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5570 emit_insn (gen_movhi_bytes (reg, operands[1]));
5572 operands[1] = gen_lowpart (HImode, reg);
5576 /* Handle loading a large integer during reload. */
5577 else if (CONST_INT_P (operands[1])
5578 && !const_ok_for_arm (INTVAL (operands[1]))
5579 && !const_ok_for_arm (~INTVAL (operands[1])))
5581 /* Writing a constant to memory needs a scratch, which should
5582 be handled with SECONDARY_RELOADs. */
5583 gcc_assert (REG_P (operands[0]));
5585 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5586 emit_insn (gen_movsi (operands[0], operands[1]));
5590 else if (TARGET_THUMB2)
5592 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5593 if (can_create_pseudo_p ())
5595 if (!REG_P (operands[0]))
5596 operands[1] = force_reg (HImode, operands[1]);
5597 /* Zero extend a constant, and keep it in an SImode reg. */
5598 else if (CONST_INT_P (operands[1]))
5600 rtx reg = gen_reg_rtx (SImode);
5601 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5603 emit_insn (gen_movsi (reg, GEN_INT (val)));
5604 operands[1] = gen_lowpart (HImode, reg);
5608 else /* TARGET_THUMB1 */
5610 if (can_create_pseudo_p ())
5612 if (CONST_INT_P (operands[1]))
5614 rtx reg = gen_reg_rtx (SImode);
5616 emit_insn (gen_movsi (reg, operands[1]));
5617 operands[1] = gen_lowpart (HImode, reg);
5620 /* ??? We shouldn't really get invalid addresses here, but this can
5621 happen if we are passed a SP (never OK for HImode/QImode) or
5622 virtual register (also rejected as illegitimate for HImode/QImode)
5623 relative address. */
5624 /* ??? This should perhaps be fixed elsewhere, for instance, in
5625 fixup_stack_1, by checking for other kinds of invalid addresses,
5626 e.g. a bare reference to a virtual register. This may confuse the
5627 alpha though, which must handle this case differently. */
5628 if (MEM_P (operands[0])
5629 && !memory_address_p (GET_MODE (operands[0]),
5630 XEXP (operands[0], 0)))
5632 = replace_equiv_address (operands[0],
5633 copy_to_reg (XEXP (operands[0], 0)));
5635 if (MEM_P (operands[1])
5636 && !memory_address_p (GET_MODE (operands[1]),
5637 XEXP (operands[1], 0)))
5639 = replace_equiv_address (operands[1],
5640 copy_to_reg (XEXP (operands[1], 0)));
5642 if (MEM_P (operands[1]) && optimize > 0)
5644 rtx reg = gen_reg_rtx (SImode);
5646 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5647 operands[1] = gen_lowpart (HImode, reg);
5650 if (MEM_P (operands[0]))
5651 operands[1] = force_reg (HImode, operands[1]);
5653 else if (CONST_INT_P (operands[1])
5654 && !satisfies_constraint_I (operands[1]))
5656 /* Handle loading a large integer during reload. */
5658 /* Writing a constant to memory needs a scratch, which should
5659 be handled with SECONDARY_RELOADs. */
5660 gcc_assert (REG_P (operands[0]));
5662 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5663 emit_insn (gen_movsi (operands[0], operands[1]));
5670 (define_expand "movhi_bytes"
5671 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5673 (zero_extend:SI (match_dup 6)))
5674 (set (match_operand:SI 0 "" "")
5675 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5680 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5682 mem1 = change_address (operands[1], QImode, addr);
5683 mem2 = change_address (operands[1], QImode,
5684 plus_constant (Pmode, addr, 1));
5685 operands[0] = gen_lowpart (SImode, operands[0]);
5687 operands[2] = gen_reg_rtx (SImode);
5688 operands[3] = gen_reg_rtx (SImode);
5691 if (BYTES_BIG_ENDIAN)
5693 operands[4] = operands[2];
5694 operands[5] = operands[3];
5698 operands[4] = operands[3];
5699 operands[5] = operands[2];
5704 (define_expand "movhi_bigend"
5706 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5709 (ashiftrt:SI (match_dup 2) (const_int 16)))
5710 (set (match_operand:HI 0 "s_register_operand")
5714 operands[2] = gen_reg_rtx (SImode);
5715 operands[3] = gen_reg_rtx (SImode);
5716 operands[4] = gen_lowpart (HImode, operands[3]);
5720 ;; Pattern to recognize insn generated default case above
5721 (define_insn "*movhi_insn_arch4"
5722 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5723 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5725 && arm_arch4 && !TARGET_HARD_FLOAT
5726 && (register_operand (operands[0], HImode)
5727 || register_operand (operands[1], HImode))"
5729 mov%?\\t%0, %1\\t%@ movhi
5730 mvn%?\\t%0, #%B1\\t%@ movhi
5731 movw%?\\t%0, %L1\\t%@ movhi
5732 strh%?\\t%1, %0\\t%@ movhi
5733 ldrh%?\\t%0, %1\\t%@ movhi"
5734 [(set_attr "predicable" "yes")
5735 (set_attr "pool_range" "*,*,*,*,256")
5736 (set_attr "neg_pool_range" "*,*,*,*,244")
5737 (set_attr "arch" "*,*,v6t2,*,*")
5738 (set_attr_alternative "type"
5739 [(if_then_else (match_operand 1 "const_int_operand" "")
5740 (const_string "mov_imm" )
5741 (const_string "mov_reg"))
5742 (const_string "mvn_imm")
5743 (const_string "mov_imm")
5744 (const_string "store_4")
5745 (const_string "load_4")])]
5748 (define_insn "*movhi_bytes"
5749 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5750 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5751 "TARGET_ARM && !TARGET_HARD_FLOAT"
5753 mov%?\\t%0, %1\\t%@ movhi
5754 mov%?\\t%0, %1\\t%@ movhi
5755 mvn%?\\t%0, #%B1\\t%@ movhi"
5756 [(set_attr "predicable" "yes")
5757 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5760 ;; We use a DImode scratch because we may occasionally need an additional
5761 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5762 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5763 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5764 ;; to be correctly handled in default_secondary_reload function.
5765 (define_expand "reload_outhi"
5766 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5767 (match_operand:HI 1 "s_register_operand" "r")
5768 (match_operand:DI 2 "s_register_operand" "=&l")])]
5771 arm_reload_out_hi (operands);
5773 thumb_reload_out_hi (operands);
5778 (define_expand "reload_inhi"
5779 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5780 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5781 (match_operand:DI 2 "s_register_operand" "=&r")])]
5785 arm_reload_in_hi (operands);
5787 thumb_reload_out_hi (operands);
5791 (define_expand "movqi"
5792 [(set (match_operand:QI 0 "general_operand")
5793 (match_operand:QI 1 "general_operand"))]
5796 /* Everything except mem = const or mem = mem can be done easily */
5798 if (can_create_pseudo_p ())
5800 if (CONST_INT_P (operands[1]))
5802 rtx reg = gen_reg_rtx (SImode);
5804 /* For thumb we want an unsigned immediate, then we are more likely
5805 to be able to use a movs insn. */
5807 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5809 emit_insn (gen_movsi (reg, operands[1]));
5810 operands[1] = gen_lowpart (QImode, reg);
5815 /* ??? We shouldn't really get invalid addresses here, but this can
5816 happen if we are passed a SP (never OK for HImode/QImode) or
5817 virtual register (also rejected as illegitimate for HImode/QImode)
5818 relative address. */
5819 /* ??? This should perhaps be fixed elsewhere, for instance, in
5820 fixup_stack_1, by checking for other kinds of invalid addresses,
5821 e.g. a bare reference to a virtual register. This may confuse the
5822 alpha though, which must handle this case differently. */
5823 if (MEM_P (operands[0])
5824 && !memory_address_p (GET_MODE (operands[0]),
5825 XEXP (operands[0], 0)))
5827 = replace_equiv_address (operands[0],
5828 copy_to_reg (XEXP (operands[0], 0)));
5829 if (MEM_P (operands[1])
5830 && !memory_address_p (GET_MODE (operands[1]),
5831 XEXP (operands[1], 0)))
5833 = replace_equiv_address (operands[1],
5834 copy_to_reg (XEXP (operands[1], 0)));
5837 if (MEM_P (operands[1]) && optimize > 0)
5839 rtx reg = gen_reg_rtx (SImode);
5841 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5842 operands[1] = gen_lowpart (QImode, reg);
5845 if (MEM_P (operands[0]))
5846 operands[1] = force_reg (QImode, operands[1]);
5848 else if (TARGET_THUMB
5849 && CONST_INT_P (operands[1])
5850 && !satisfies_constraint_I (operands[1]))
5852 /* Handle loading a large integer during reload. */
5854 /* Writing a constant to memory needs a scratch, which should
5855 be handled with SECONDARY_RELOADs. */
5856 gcc_assert (REG_P (operands[0]));
5858 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5859 emit_insn (gen_movsi (operands[0], operands[1]));
5865 (define_insn "*arm_movqi_insn"
5866 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5867 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5869 && ( register_operand (operands[0], QImode)
5870 || register_operand (operands[1], QImode))"
5881 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5882 (set_attr "predicable" "yes")
5883 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5884 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5885 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
5889 (define_expand "movhf"
5890 [(set (match_operand:HF 0 "general_operand")
5891 (match_operand:HF 1 "general_operand"))]
5894 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5895 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5898 if (MEM_P (operands[0]))
5899 operands[1] = force_reg (HFmode, operands[1]);
5901 else /* TARGET_THUMB1 */
5903 if (can_create_pseudo_p ())
5905 if (!REG_P (operands[0]))
5906 operands[1] = force_reg (HFmode, operands[1]);
5912 (define_insn "*arm32_movhf"
5913 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5914 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5915 "TARGET_32BIT && !TARGET_HARD_FLOAT
5916 && ( s_register_operand (operands[0], HFmode)
5917 || s_register_operand (operands[1], HFmode))"
5919 switch (which_alternative)
5921 case 0: /* ARM register from memory */
5922 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5923 case 1: /* memory from ARM register */
5924 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5925 case 2: /* ARM register from ARM register */
5926 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5927 case 3: /* ARM register from constant */
5932 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5934 ops[0] = operands[0];
5935 ops[1] = GEN_INT (bits);
5936 ops[2] = GEN_INT (bits & 0xff00);
5937 ops[3] = GEN_INT (bits & 0x00ff);
5939 if (arm_arch_thumb2)
5940 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5942 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5949 [(set_attr "conds" "unconditional")
5950 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5951 (set_attr "length" "4,4,4,8")
5952 (set_attr "predicable" "yes")]
5955 (define_expand "movsf"
5956 [(set (match_operand:SF 0 "general_operand")
5957 (match_operand:SF 1 "general_operand"))]
5960 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5961 gcc_checking_assert (aligned_operand (operands[1], SFmode));
5964 if (MEM_P (operands[0]))
5965 operands[1] = force_reg (SFmode, operands[1]);
5967 else /* TARGET_THUMB1 */
5969 if (can_create_pseudo_p ())
5971 if (!REG_P (operands[0]))
5972 operands[1] = force_reg (SFmode, operands[1]);
5976 /* Cannot load it directly, generate a load with clobber so that it can be
5977 loaded via GPR with MOV / MOVT. */
5978 if (arm_disable_literal_pool
5979 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
5980 && CONST_DOUBLE_P (operands[1])
5981 && TARGET_HARD_FLOAT
5982 && !vfp3_const_double_rtx (operands[1]))
5984 rtx clobreg = gen_reg_rtx (SFmode);
5985 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
5992 ;; Transform a floating-point move of a constant into a core register into
5993 ;; an SImode operation.
5995 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5996 (match_operand:SF 1 "immediate_operand" ""))]
5999 && CONST_DOUBLE_P (operands[1])"
6000 [(set (match_dup 2) (match_dup 3))]
6002 operands[2] = gen_lowpart (SImode, operands[0]);
6003 operands[3] = gen_lowpart (SImode, operands[1]);
6004 if (operands[2] == 0 || operands[3] == 0)
6009 (define_insn "*arm_movsf_soft_insn"
6010 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6011 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6013 && TARGET_SOFT_FLOAT
6014 && (!MEM_P (operands[0])
6015 || register_operand (operands[1], SFmode))"
6017 switch (which_alternative)
6019 case 0: return \"mov%?\\t%0, %1\";
6021 /* Cannot load it directly, split to load it via MOV / MOVT. */
6022 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6024 return \"ldr%?\\t%0, %1\\t%@ float\";
6025 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6026 default: gcc_unreachable ();
6029 [(set_attr "predicable" "yes")
6030 (set_attr "type" "mov_reg,load_4,store_4")
6031 (set_attr "arm_pool_range" "*,4096,*")
6032 (set_attr "thumb2_pool_range" "*,4094,*")
6033 (set_attr "arm_neg_pool_range" "*,4084,*")
6034 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6037 ;; Splitter for the above.
6039 [(set (match_operand:SF 0 "s_register_operand")
6040 (match_operand:SF 1 "const_double_operand"))]
6041 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6045 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6046 rtx cst = gen_int_mode (buf, SImode);
6047 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6052 (define_expand "movdf"
6053 [(set (match_operand:DF 0 "general_operand")
6054 (match_operand:DF 1 "general_operand"))]
6057 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6058 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6061 if (MEM_P (operands[0]))
6062 operands[1] = force_reg (DFmode, operands[1]);
6064 else /* TARGET_THUMB */
6066 if (can_create_pseudo_p ())
6068 if (!REG_P (operands[0]))
6069 operands[1] = force_reg (DFmode, operands[1]);
6073 /* Cannot load it directly, generate a load with clobber so that it can be
6074 loaded via GPR with MOV / MOVT. */
6075 if (arm_disable_literal_pool
6076 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6077 && CONSTANT_P (operands[1])
6078 && TARGET_HARD_FLOAT
6079 && !arm_const_double_rtx (operands[1])
6080 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6082 rtx clobreg = gen_reg_rtx (DFmode);
6083 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6090 ;; Reloading a df mode value stored in integer regs to memory can require a
6092 ;; Another reload_out<m> pattern that requires special constraints.
6093 (define_expand "reload_outdf"
6094 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6095 (match_operand:DF 1 "s_register_operand" "r")
6096 (match_operand:SI 2 "s_register_operand" "=&r")]
6100 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6103 operands[2] = XEXP (operands[0], 0);
6104 else if (code == POST_INC || code == PRE_DEC)
6106 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6107 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6108 emit_insn (gen_movdi (operands[0], operands[1]));
6111 else if (code == PRE_INC)
6113 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6115 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6118 else if (code == POST_DEC)
6119 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6121 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6122 XEXP (XEXP (operands[0], 0), 1)));
6124 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6127 if (code == POST_DEC)
6128 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6134 (define_insn "*movdf_soft_insn"
6135 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6136 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6137 "TARGET_32BIT && TARGET_SOFT_FLOAT
6138 && ( register_operand (operands[0], DFmode)
6139 || register_operand (operands[1], DFmode))"
6141 switch (which_alternative)
6148 /* Cannot load it directly, split to load it via MOV / MOVT. */
6149 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6153 return output_move_double (operands, true, NULL);
6156 [(set_attr "length" "8,12,16,8,8")
6157 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6158 (set_attr "arm_pool_range" "*,*,*,1020,*")
6159 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6160 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6161 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6164 ;; Splitter for the above.
6166 [(set (match_operand:DF 0 "s_register_operand")
6167 (match_operand:DF 1 "const_double_operand"))]
6168 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6172 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6173 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6174 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6175 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6176 rtx cst = gen_int_mode (ival, DImode);
6177 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6183 ;; load- and store-multiple insns
6184 ;; The arm can load/store any set of registers, provided that they are in
6185 ;; ascending order, but these expanders assume a contiguous set.
6187 (define_expand "load_multiple"
6188 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6189 (match_operand:SI 1 "" ""))
6190 (use (match_operand:SI 2 "" ""))])]
6193 HOST_WIDE_INT offset = 0;
6195 /* Support only fixed point registers. */
6196 if (!CONST_INT_P (operands[2])
6197 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6198 || INTVAL (operands[2]) < 2
6199 || !MEM_P (operands[1])
6200 || !REG_P (operands[0])
6201 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6202 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6206 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6207 INTVAL (operands[2]),
6208 force_reg (SImode, XEXP (operands[1], 0)),
6209 FALSE, operands[1], &offset);
6212 (define_expand "store_multiple"
6213 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6214 (match_operand:SI 1 "" ""))
6215 (use (match_operand:SI 2 "" ""))])]
6218 HOST_WIDE_INT offset = 0;
6220 /* Support only fixed point registers. */
6221 if (!CONST_INT_P (operands[2])
6222 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6223 || INTVAL (operands[2]) < 2
6224 || !REG_P (operands[1])
6225 || !MEM_P (operands[0])
6226 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6227 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6231 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6232 INTVAL (operands[2]),
6233 force_reg (SImode, XEXP (operands[0], 0)),
6234 FALSE, operands[0], &offset);
6238 (define_expand "setmemsi"
6239 [(match_operand:BLK 0 "general_operand")
6240 (match_operand:SI 1 "const_int_operand")
6241 (match_operand:SI 2 "const_int_operand")
6242 (match_operand:SI 3 "const_int_operand")]
6245 if (arm_gen_setmem (operands))
6252 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6253 ;; We could let this apply for blocks of less than this, but it clobbers so
6254 ;; many registers that there is then probably a better way.
6256 (define_expand "cpymemqi"
6257 [(match_operand:BLK 0 "general_operand")
6258 (match_operand:BLK 1 "general_operand")
6259 (match_operand:SI 2 "const_int_operand")
6260 (match_operand:SI 3 "const_int_operand")]
6265 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6266 && !optimize_function_for_size_p (cfun))
6268 if (gen_cpymem_ldrd_strd (operands))
6273 if (arm_gen_cpymemqi (operands))
6277 else /* TARGET_THUMB1 */
6279 if ( INTVAL (operands[3]) != 4
6280 || INTVAL (operands[2]) > 48)
6283 thumb_expand_cpymemqi (operands);
6290 ;; Compare & branch insns
6291 ;; The range calculations are based as follows:
6292 ;; For forward branches, the address calculation returns the address of
6293 ;; the next instruction. This is 2 beyond the branch instruction.
6294 ;; For backward branches, the address calculation returns the address of
6295 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6296 ;; instruction for the shortest sequence, and 4 before the branch instruction
6297 ;; if we have to jump around an unconditional branch.
6298 ;; To the basic branch range the PC offset must be added (this is +4).
6299 ;; So for forward branches we have
6300 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6301 ;; And for backward branches we have
6302 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6304 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6305 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6307 (define_expand "cbranchsi4"
6308 [(set (pc) (if_then_else
6309 (match_operator 0 "expandable_comparison_operator"
6310 [(match_operand:SI 1 "s_register_operand")
6311 (match_operand:SI 2 "nonmemory_operand")])
6312 (label_ref (match_operand 3 "" ""))
6318 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6320 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6324 if (thumb1_cmpneg_operand (operands[2], SImode))
6326 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6327 operands[3], operands[0]));
6330 if (!thumb1_cmp_operand (operands[2], SImode))
6331 operands[2] = force_reg (SImode, operands[2]);
6334 (define_expand "cbranchsf4"
6335 [(set (pc) (if_then_else
6336 (match_operator 0 "expandable_comparison_operator"
6337 [(match_operand:SF 1 "s_register_operand")
6338 (match_operand:SF 2 "vfp_compare_operand")])
6339 (label_ref (match_operand 3 "" ""))
6341 "TARGET_32BIT && TARGET_HARD_FLOAT"
6342 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6343 operands[3])); DONE;"
6346 (define_expand "cbranchdf4"
6347 [(set (pc) (if_then_else
6348 (match_operator 0 "expandable_comparison_operator"
6349 [(match_operand:DF 1 "s_register_operand")
6350 (match_operand:DF 2 "vfp_compare_operand")])
6351 (label_ref (match_operand 3 "" ""))
6353 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6354 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6355 operands[3])); DONE;"
6358 (define_expand "cbranchdi4"
6359 [(set (pc) (if_then_else
6360 (match_operator 0 "expandable_comparison_operator"
6361 [(match_operand:DI 1 "s_register_operand")
6362 (match_operand:DI 2 "cmpdi_operand")])
6363 (label_ref (match_operand 3 "" ""))
6367 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6369 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6375 ;; Comparison and test insns
6377 (define_insn "*arm_cmpsi_insn"
6378 [(set (reg:CC CC_REGNUM)
6379 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6380 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6388 [(set_attr "conds" "set")
6389 (set_attr "arch" "t2,t2,any,any,any")
6390 (set_attr "length" "2,2,4,4,4")
6391 (set_attr "predicable" "yes")
6392 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6393 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
6396 (define_insn "*cmpsi_shiftsi"
6397 [(set (reg:CC CC_REGNUM)
6398 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6399 (match_operator:SI 3 "shift_operator"
6400 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6401 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6404 [(set_attr "conds" "set")
6405 (set_attr "shift" "1")
6406 (set_attr "arch" "32,a,a")
6407 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6409 (define_insn "*cmpsi_shiftsi_swp"
6410 [(set (reg:CC_SWP CC_REGNUM)
6411 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6412 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6413 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6414 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6417 [(set_attr "conds" "set")
6418 (set_attr "shift" "1")
6419 (set_attr "arch" "32,a,a")
6420 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6422 (define_insn "*arm_cmpsi_negshiftsi_si"
6423 [(set (reg:CC_Z CC_REGNUM)
6425 (neg:SI (match_operator:SI 1 "shift_operator"
6426 [(match_operand:SI 2 "s_register_operand" "r")
6427 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6428 (match_operand:SI 0 "s_register_operand" "r")))]
6431 [(set_attr "conds" "set")
6432 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6433 (const_string "alus_shift_imm")
6434 (const_string "alus_shift_reg")))
6435 (set_attr "predicable" "yes")]
6438 ;; DImode comparisons. The generic code generates branches that
6439 ;; if-conversion cannot reduce to a conditional compare, so we do
6442 (define_insn "*arm_cmpdi_insn"
6443 [(set (reg:CC_NCV CC_REGNUM)
6444 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6445 (match_operand:DI 1 "arm_di_operand" "rDi")))
6446 (clobber (match_scratch:SI 2 "=r"))]
6448 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6449 [(set_attr "conds" "set")
6450 (set_attr "length" "8")
6451 (set_attr "type" "multiple")]
6454 (define_insn_and_split "*arm_cmpdi_unsigned"
6455 [(set (reg:CC_CZ CC_REGNUM)
6456 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6457 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6460 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6461 "&& reload_completed"
6462 [(set (reg:CC CC_REGNUM)
6463 (compare:CC (match_dup 2) (match_dup 3)))
6464 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6465 (set (reg:CC CC_REGNUM)
6466 (compare:CC (match_dup 0) (match_dup 1))))]
6468 operands[2] = gen_highpart (SImode, operands[0]);
6469 operands[0] = gen_lowpart (SImode, operands[0]);
6470 if (CONST_INT_P (operands[1]))
6471 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6473 operands[3] = gen_highpart (SImode, operands[1]);
6474 operands[1] = gen_lowpart (SImode, operands[1]);
6476 [(set_attr "conds" "set")
6477 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6478 (set_attr "arch" "t2,t2,t2,a")
6479 (set_attr "length" "6,6,10,8")
6480 (set_attr "type" "multiple")]
6483 (define_insn "*arm_cmpdi_zero"
6484 [(set (reg:CC_Z CC_REGNUM)
6485 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6487 (clobber (match_scratch:SI 1 "=r"))]
6489 "orrs%?\\t%1, %Q0, %R0"
6490 [(set_attr "conds" "set")
6491 (set_attr "type" "logics_reg")]
6494 ; This insn allows redundant compares to be removed by cse, nothing should
6495 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6496 ; is deleted later on. The match_dup will match the mode here, so that
6497 ; mode changes of the condition codes aren't lost by this even though we don't
6498 ; specify what they are.
6500 (define_insn "*deleted_compare"
6501 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6503 "\\t%@ deleted compare"
6504 [(set_attr "conds" "set")
6505 (set_attr "length" "0")
6506 (set_attr "type" "no_insn")]
6510 ;; Conditional branch insns
6512 (define_expand "cbranch_cc"
6514 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6515 (match_operand 2 "" "")])
6516 (label_ref (match_operand 3 "" ""))
6519 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6520 operands[1], operands[2], NULL_RTX);
6521 operands[2] = const0_rtx;"
6525 ;; Patterns to match conditional branch insns.
6528 (define_insn "arm_cond_branch"
6530 (if_then_else (match_operator 1 "arm_comparison_operator"
6531 [(match_operand 2 "cc_register" "") (const_int 0)])
6532 (label_ref (match_operand 0 "" ""))
6536 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6538 arm_ccfsm_state += 2;
6541 return \"b%d1\\t%l0\";
6543 [(set_attr "conds" "use")
6544 (set_attr "type" "branch")
6545 (set (attr "length")
6547 (and (match_test "TARGET_THUMB2")
6548 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6549 (le (minus (match_dup 0) (pc)) (const_int 256))))
6554 (define_insn "*arm_cond_branch_reversed"
6556 (if_then_else (match_operator 1 "arm_comparison_operator"
6557 [(match_operand 2 "cc_register" "") (const_int 0)])
6559 (label_ref (match_operand 0 "" ""))))]
6562 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6564 arm_ccfsm_state += 2;
6567 return \"b%D1\\t%l0\";
6569 [(set_attr "conds" "use")
6570 (set_attr "type" "branch")
6571 (set (attr "length")
6573 (and (match_test "TARGET_THUMB2")
6574 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6575 (le (minus (match_dup 0) (pc)) (const_int 256))))
6584 (define_expand "cstore_cc"
6585 [(set (match_operand:SI 0 "s_register_operand")
6586 (match_operator:SI 1 "" [(match_operand 2 "" "")
6587 (match_operand 3 "" "")]))]
6589 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6590 operands[2], operands[3], NULL_RTX);
6591 operands[3] = const0_rtx;"
6594 (define_insn_and_split "*mov_scc"
6595 [(set (match_operand:SI 0 "s_register_operand" "=r")
6596 (match_operator:SI 1 "arm_comparison_operator_mode"
6597 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6599 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6602 (if_then_else:SI (match_dup 1)
6606 [(set_attr "conds" "use")
6607 (set_attr "length" "8")
6608 (set_attr "type" "multiple")]
6611 (define_insn_and_split "*mov_negscc"
6612 [(set (match_operand:SI 0 "s_register_operand" "=r")
6613 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6614 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6616 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6619 (if_then_else:SI (match_dup 1)
6623 operands[3] = GEN_INT (~0);
6625 [(set_attr "conds" "use")
6626 (set_attr "length" "8")
6627 (set_attr "type" "multiple")]
6630 (define_insn_and_split "*mov_notscc"
6631 [(set (match_operand:SI 0 "s_register_operand" "=r")
6632 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6633 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6635 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6638 (if_then_else:SI (match_dup 1)
6642 operands[3] = GEN_INT (~1);
6643 operands[4] = GEN_INT (~0);
6645 [(set_attr "conds" "use")
6646 (set_attr "length" "8")
6647 (set_attr "type" "multiple")]
6650 (define_expand "cstoresi4"
6651 [(set (match_operand:SI 0 "s_register_operand")
6652 (match_operator:SI 1 "expandable_comparison_operator"
6653 [(match_operand:SI 2 "s_register_operand")
6654 (match_operand:SI 3 "reg_or_int_operand")]))]
6655 "TARGET_32BIT || TARGET_THUMB1"
6657 rtx op3, scratch, scratch2;
6661 if (!arm_add_operand (operands[3], SImode))
6662 operands[3] = force_reg (SImode, operands[3]);
6663 emit_insn (gen_cstore_cc (operands[0], operands[1],
6664 operands[2], operands[3]));
6668 if (operands[3] == const0_rtx)
6670 switch (GET_CODE (operands[1]))
6673 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6677 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6681 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6682 NULL_RTX, 0, OPTAB_WIDEN);
6683 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6684 NULL_RTX, 0, OPTAB_WIDEN);
6685 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6686 operands[0], 1, OPTAB_WIDEN);
6690 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6692 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6693 NULL_RTX, 1, OPTAB_WIDEN);
6697 scratch = expand_binop (SImode, ashr_optab, operands[2],
6698 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6699 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6700 NULL_RTX, 0, OPTAB_WIDEN);
6701 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6705 /* LT is handled by generic code. No need for unsigned with 0. */
6712 switch (GET_CODE (operands[1]))
6715 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6716 NULL_RTX, 0, OPTAB_WIDEN);
6717 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6721 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6722 NULL_RTX, 0, OPTAB_WIDEN);
6723 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6727 op3 = force_reg (SImode, operands[3]);
6729 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6730 NULL_RTX, 1, OPTAB_WIDEN);
6731 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6732 NULL_RTX, 0, OPTAB_WIDEN);
6733 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6739 if (!thumb1_cmp_operand (op3, SImode))
6740 op3 = force_reg (SImode, op3);
6741 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6742 NULL_RTX, 0, OPTAB_WIDEN);
6743 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6744 NULL_RTX, 1, OPTAB_WIDEN);
6745 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6750 op3 = force_reg (SImode, operands[3]);
6751 scratch = force_reg (SImode, const0_rtx);
6752 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6758 if (!thumb1_cmp_operand (op3, SImode))
6759 op3 = force_reg (SImode, op3);
6760 scratch = force_reg (SImode, const0_rtx);
6761 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6767 if (!thumb1_cmp_operand (op3, SImode))
6768 op3 = force_reg (SImode, op3);
6769 scratch = gen_reg_rtx (SImode);
6770 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6774 op3 = force_reg (SImode, operands[3]);
6775 scratch = gen_reg_rtx (SImode);
6776 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6779 /* No good sequences for GT, LT. */
6786 (define_expand "cstorehf4"
6787 [(set (match_operand:SI 0 "s_register_operand")
6788 (match_operator:SI 1 "expandable_comparison_operator"
6789 [(match_operand:HF 2 "s_register_operand")
6790 (match_operand:HF 3 "vfp_compare_operand")]))]
6791 "TARGET_VFP_FP16INST"
6793 if (!arm_validize_comparison (&operands[1],
6798 emit_insn (gen_cstore_cc (operands[0], operands[1],
6799 operands[2], operands[3]));
6804 (define_expand "cstoresf4"
6805 [(set (match_operand:SI 0 "s_register_operand")
6806 (match_operator:SI 1 "expandable_comparison_operator"
6807 [(match_operand:SF 2 "s_register_operand")
6808 (match_operand:SF 3 "vfp_compare_operand")]))]
6809 "TARGET_32BIT && TARGET_HARD_FLOAT"
6810 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6811 operands[2], operands[3])); DONE;"
6814 (define_expand "cstoredf4"
6815 [(set (match_operand:SI 0 "s_register_operand")
6816 (match_operator:SI 1 "expandable_comparison_operator"
6817 [(match_operand:DF 2 "s_register_operand")
6818 (match_operand:DF 3 "vfp_compare_operand")]))]
6819 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6820 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6821 operands[2], operands[3])); DONE;"
6824 (define_expand "cstoredi4"
6825 [(set (match_operand:SI 0 "s_register_operand")
6826 (match_operator:SI 1 "expandable_comparison_operator"
6827 [(match_operand:DI 2 "s_register_operand")
6828 (match_operand:DI 3 "cmpdi_operand")]))]
6831 if (!arm_validize_comparison (&operands[1],
6835 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6842 ;; Conditional move insns
6844 (define_expand "movsicc"
6845 [(set (match_operand:SI 0 "s_register_operand")
6846 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6847 (match_operand:SI 2 "arm_not_operand")
6848 (match_operand:SI 3 "arm_not_operand")))]
6855 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6856 &XEXP (operands[1], 1)))
6859 code = GET_CODE (operands[1]);
6860 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6861 XEXP (operands[1], 1), NULL_RTX);
6862 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6866 (define_expand "movhfcc"
6867 [(set (match_operand:HF 0 "s_register_operand")
6868 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6869 (match_operand:HF 2 "s_register_operand")
6870 (match_operand:HF 3 "s_register_operand")))]
6871 "TARGET_VFP_FP16INST"
6874 enum rtx_code code = GET_CODE (operands[1]);
6877 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6878 &XEXP (operands[1], 1)))
6881 code = GET_CODE (operands[1]);
6882 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6883 XEXP (operands[1], 1), NULL_RTX);
6884 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6888 (define_expand "movsfcc"
6889 [(set (match_operand:SF 0 "s_register_operand")
6890 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6891 (match_operand:SF 2 "s_register_operand")
6892 (match_operand:SF 3 "s_register_operand")))]
6893 "TARGET_32BIT && TARGET_HARD_FLOAT"
6896 enum rtx_code code = GET_CODE (operands[1]);
6899 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6900 &XEXP (operands[1], 1)))
6903 code = GET_CODE (operands[1]);
6904 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6905 XEXP (operands[1], 1), NULL_RTX);
6906 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6910 (define_expand "movdfcc"
6911 [(set (match_operand:DF 0 "s_register_operand")
6912 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6913 (match_operand:DF 2 "s_register_operand")
6914 (match_operand:DF 3 "s_register_operand")))]
6915 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6918 enum rtx_code code = GET_CODE (operands[1]);
6921 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6922 &XEXP (operands[1], 1)))
6924 code = GET_CODE (operands[1]);
6925 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6926 XEXP (operands[1], 1), NULL_RTX);
6927 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
6931 (define_insn "*cmov<mode>"
6932 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6933 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6934 [(match_operand 2 "cc_register" "") (const_int 0)])
6935 (match_operand:SDF 3 "s_register_operand"
6937 (match_operand:SDF 4 "s_register_operand"
6938 "<F_constraint>")))]
6939 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6942 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6949 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6954 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6960 [(set_attr "conds" "use")
6961 (set_attr "type" "fcsel")]
6964 (define_insn "*cmovhf"
6965 [(set (match_operand:HF 0 "s_register_operand" "=t")
6966 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
6967 [(match_operand 2 "cc_register" "") (const_int 0)])
6968 (match_operand:HF 3 "s_register_operand" "t")
6969 (match_operand:HF 4 "s_register_operand" "t")))]
6970 "TARGET_VFP_FP16INST"
6973 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6980 return \"vsel%d1.f16\\t%0, %3, %4\";
6985 return \"vsel%D1.f16\\t%0, %4, %3\";
6991 [(set_attr "conds" "use")
6992 (set_attr "type" "fcsel")]
6995 (define_insn_and_split "*movsicc_insn"
6996 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
6998 (match_operator 3 "arm_comparison_operator"
6999 [(match_operand 4 "cc_register" "") (const_int 0)])
7000 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7001 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7012 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7013 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7014 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7015 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7016 "&& reload_completed"
7019 enum rtx_code rev_code;
7023 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7025 gen_rtx_SET (operands[0], operands[1])));
7027 rev_code = GET_CODE (operands[3]);
7028 mode = GET_MODE (operands[4]);
7029 if (mode == CCFPmode || mode == CCFPEmode)
7030 rev_code = reverse_condition_maybe_unordered (rev_code);
7032 rev_code = reverse_condition (rev_code);
7034 rev_cond = gen_rtx_fmt_ee (rev_code,
7038 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7040 gen_rtx_SET (operands[0], operands[2])));
7043 [(set_attr "length" "4,4,4,4,8,8,8,8")
7044 (set_attr "conds" "use")
7045 (set_attr_alternative "type"
7046 [(if_then_else (match_operand 2 "const_int_operand" "")
7047 (const_string "mov_imm")
7048 (const_string "mov_reg"))
7049 (const_string "mvn_imm")
7050 (if_then_else (match_operand 1 "const_int_operand" "")
7051 (const_string "mov_imm")
7052 (const_string "mov_reg"))
7053 (const_string "mvn_imm")
7054 (const_string "multiple")
7055 (const_string "multiple")
7056 (const_string "multiple")
7057 (const_string "multiple")])]
7060 (define_insn "*movsfcc_soft_insn"
7061 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7062 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7063 [(match_operand 4 "cc_register" "") (const_int 0)])
7064 (match_operand:SF 1 "s_register_operand" "0,r")
7065 (match_operand:SF 2 "s_register_operand" "r,0")))]
7066 "TARGET_ARM && TARGET_SOFT_FLOAT"
7070 [(set_attr "conds" "use")
7071 (set_attr "type" "mov_reg")]
7075 ;; Jump and linkage insns
7077 (define_expand "jump"
7079 (label_ref (match_operand 0 "" "")))]
7084 (define_insn "*arm_jump"
7086 (label_ref (match_operand 0 "" "")))]
7090 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7092 arm_ccfsm_state += 2;
7095 return \"b%?\\t%l0\";
7098 [(set_attr "predicable" "yes")
7099 (set (attr "length")
7101 (and (match_test "TARGET_THUMB2")
7102 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7103 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7106 (set_attr "type" "branch")]
7109 (define_expand "call"
7110 [(parallel [(call (match_operand 0 "memory_operand")
7111 (match_operand 1 "general_operand"))
7112 (use (match_operand 2 "" ""))
7113 (clobber (reg:SI LR_REGNUM))])]
7118 tree addr = MEM_EXPR (operands[0]);
7120 /* In an untyped call, we can get NULL for operand 2. */
7121 if (operands[2] == NULL_RTX)
7122 operands[2] = const0_rtx;
7124 /* Decide if we should generate indirect calls by loading the
7125 32-bit address of the callee into a register before performing the
7127 callee = XEXP (operands[0], 0);
7128 if (GET_CODE (callee) == SYMBOL_REF
7129 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7131 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7133 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7134 /* Indirect call: set r9 with FDPIC value of callee. */
7135 XEXP (operands[0], 0)
7136 = arm_load_function_descriptor (XEXP (operands[0], 0));
7138 if (detect_cmse_nonsecure_call (addr))
7140 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7142 emit_call_insn (pat);
7146 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7147 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7150 /* Restore FDPIC register (r9) after call. */
7153 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7154 rtx initial_fdpic_reg
7155 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7157 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7158 initial_fdpic_reg));
7165 (define_insn "restore_pic_register_after_call"
7166 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7167 (unspec:SI [(match_dup 0)
7168 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7169 UNSPEC_PIC_RESTORE))]
7176 (define_expand "call_internal"
7177 [(parallel [(call (match_operand 0 "memory_operand")
7178 (match_operand 1 "general_operand"))
7179 (use (match_operand 2 "" ""))
7180 (clobber (reg:SI LR_REGNUM))])])
7182 (define_expand "nonsecure_call_internal"
7183 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7184 UNSPEC_NONSECURE_MEM)
7185 (match_operand 1 "general_operand"))
7186 (use (match_operand 2 "" ""))
7187 (clobber (reg:SI LR_REGNUM))])]
7192 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7193 gen_rtx_REG (SImode, R4_REGNUM),
7196 operands[0] = replace_equiv_address (operands[0], tmp);
7199 (define_insn "*call_reg_armv5"
7200 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7201 (match_operand 1 "" ""))
7202 (use (match_operand 2 "" ""))
7203 (clobber (reg:SI LR_REGNUM))]
7204 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7206 [(set_attr "type" "call")]
7209 (define_insn "*call_reg_arm"
7210 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7211 (match_operand 1 "" ""))
7212 (use (match_operand 2 "" ""))
7213 (clobber (reg:SI LR_REGNUM))]
7214 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7216 return output_call (operands);
7218 ;; length is worst case, normally it is only two
7219 [(set_attr "length" "12")
7220 (set_attr "type" "call")]
7224 (define_expand "call_value"
7225 [(parallel [(set (match_operand 0 "" "")
7226 (call (match_operand 1 "memory_operand")
7227 (match_operand 2 "general_operand")))
7228 (use (match_operand 3 "" ""))
7229 (clobber (reg:SI LR_REGNUM))])]
7234 tree addr = MEM_EXPR (operands[1]);
7236 /* In an untyped call, we can get NULL for operand 2. */
7237 if (operands[3] == 0)
7238 operands[3] = const0_rtx;
7240 /* Decide if we should generate indirect calls by loading the
7241 32-bit address of the callee into a register before performing the
7243 callee = XEXP (operands[1], 0);
7244 if (GET_CODE (callee) == SYMBOL_REF
7245 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7247 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7249 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7250 /* Indirect call: set r9 with FDPIC value of callee. */
7251 XEXP (operands[1], 0)
7252 = arm_load_function_descriptor (XEXP (operands[1], 0));
7254 if (detect_cmse_nonsecure_call (addr))
7256 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7257 operands[2], operands[3]);
7258 emit_call_insn (pat);
7262 pat = gen_call_value_internal (operands[0], operands[1],
7263 operands[2], operands[3]);
7264 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7267 /* Restore FDPIC register (r9) after call. */
7270 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7271 rtx initial_fdpic_reg
7272 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7274 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7275 initial_fdpic_reg));
7282 (define_expand "call_value_internal"
7283 [(parallel [(set (match_operand 0 "" "")
7284 (call (match_operand 1 "memory_operand")
7285 (match_operand 2 "general_operand")))
7286 (use (match_operand 3 "" ""))
7287 (clobber (reg:SI LR_REGNUM))])])
7289 (define_expand "nonsecure_call_value_internal"
7290 [(parallel [(set (match_operand 0 "" "")
7291 (call (unspec:SI [(match_operand 1 "memory_operand")]
7292 UNSPEC_NONSECURE_MEM)
7293 (match_operand 2 "general_operand")))
7294 (use (match_operand 3 "" ""))
7295 (clobber (reg:SI LR_REGNUM))])]
7300 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7301 gen_rtx_REG (SImode, R4_REGNUM),
7304 operands[1] = replace_equiv_address (operands[1], tmp);
7307 (define_insn "*call_value_reg_armv5"
7308 [(set (match_operand 0 "" "")
7309 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7310 (match_operand 2 "" "")))
7311 (use (match_operand 3 "" ""))
7312 (clobber (reg:SI LR_REGNUM))]
7313 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7315 [(set_attr "type" "call")]
7318 (define_insn "*call_value_reg_arm"
7319 [(set (match_operand 0 "" "")
7320 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7321 (match_operand 2 "" "")))
7322 (use (match_operand 3 "" ""))
7323 (clobber (reg:SI LR_REGNUM))]
7324 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7326 return output_call (&operands[1]);
7328 [(set_attr "length" "12")
7329 (set_attr "type" "call")]
7332 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7333 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
7335 (define_insn "*call_symbol"
7336 [(call (mem:SI (match_operand:SI 0 "" ""))
7337 (match_operand 1 "" ""))
7338 (use (match_operand 2 "" ""))
7339 (clobber (reg:SI LR_REGNUM))]
7341 && !SIBLING_CALL_P (insn)
7342 && (GET_CODE (operands[0]) == SYMBOL_REF)
7343 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7346 rtx op = operands[0];
7348 /* Switch mode now when possible. */
7349 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7350 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7351 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7353 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7355 [(set_attr "type" "call")]
7358 (define_insn "*call_value_symbol"
7359 [(set (match_operand 0 "" "")
7360 (call (mem:SI (match_operand:SI 1 "" ""))
7361 (match_operand:SI 2 "" "")))
7362 (use (match_operand 3 "" ""))
7363 (clobber (reg:SI LR_REGNUM))]
7365 && !SIBLING_CALL_P (insn)
7366 && (GET_CODE (operands[1]) == SYMBOL_REF)
7367 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7370 rtx op = operands[1];
7372 /* Switch mode now when possible. */
7373 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7374 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7375 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7377 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7379 [(set_attr "type" "call")]
7382 (define_expand "sibcall_internal"
7383 [(parallel [(call (match_operand 0 "memory_operand")
7384 (match_operand 1 "general_operand"))
7386 (use (match_operand 2 "" ""))])])
7388 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
7389 (define_expand "sibcall"
7390 [(parallel [(call (match_operand 0 "memory_operand")
7391 (match_operand 1 "general_operand"))
7393 (use (match_operand 2 "" ""))])]
7399 if ((!REG_P (XEXP (operands[0], 0))
7400 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7401 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7402 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7403 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7405 if (operands[2] == NULL_RTX)
7406 operands[2] = const0_rtx;
7408 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7409 arm_emit_call_insn (pat, operands[0], true);
7414 (define_expand "sibcall_value_internal"
7415 [(parallel [(set (match_operand 0 "" "")
7416 (call (match_operand 1 "memory_operand")
7417 (match_operand 2 "general_operand")))
7419 (use (match_operand 3 "" ""))])])
7421 (define_expand "sibcall_value"
7422 [(parallel [(set (match_operand 0 "" "")
7423 (call (match_operand 1 "memory_operand")
7424 (match_operand 2 "general_operand")))
7426 (use (match_operand 3 "" ""))])]
7432 if ((!REG_P (XEXP (operands[1], 0))
7433 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7434 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7435 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7436 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7438 if (operands[3] == NULL_RTX)
7439 operands[3] = const0_rtx;
7441 pat = gen_sibcall_value_internal (operands[0], operands[1],
7442 operands[2], operands[3]);
7443 arm_emit_call_insn (pat, operands[1], true);
7448 (define_insn "*sibcall_insn"
7449 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7450 (match_operand 1 "" ""))
7452 (use (match_operand 2 "" ""))]
7453 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7455 if (which_alternative == 1)
7456 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7459 if (arm_arch5t || arm_arch4t)
7460 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7462 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7465 [(set_attr "type" "call")]
7468 (define_insn "*sibcall_value_insn"
7469 [(set (match_operand 0 "" "")
7470 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7471 (match_operand 2 "" "")))
7473 (use (match_operand 3 "" ""))]
7474 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7476 if (which_alternative == 1)
7477 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7480 if (arm_arch5t || arm_arch4t)
7481 return \"bx%?\\t%1\";
7483 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7486 [(set_attr "type" "call")]
7489 (define_expand "<return_str>return"
7491 "(TARGET_ARM || (TARGET_THUMB2
7492 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7493 && !IS_STACKALIGN (arm_current_func_type ())))
7494 <return_cond_false>"
7499 thumb2_expand_return (<return_simple_p>);
7506 ;; Often the return insn will be the same as loading from memory, so set attr
7507 (define_insn "*arm_return"
7509 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7512 if (arm_ccfsm_state == 2)
7514 arm_ccfsm_state += 2;
7517 return output_return_instruction (const_true_rtx, true, false, false);
7519 [(set_attr "type" "load_4")
7520 (set_attr "length" "12")
7521 (set_attr "predicable" "yes")]
7524 (define_insn "*cond_<return_str>return"
7526 (if_then_else (match_operator 0 "arm_comparison_operator"
7527 [(match_operand 1 "cc_register" "") (const_int 0)])
7530 "TARGET_ARM <return_cond_true>"
7533 if (arm_ccfsm_state == 2)
7535 arm_ccfsm_state += 2;
7538 return output_return_instruction (operands[0], true, false,
7541 [(set_attr "conds" "use")
7542 (set_attr "length" "12")
7543 (set_attr "type" "load_4")]
7546 (define_insn "*cond_<return_str>return_inverted"
7548 (if_then_else (match_operator 0 "arm_comparison_operator"
7549 [(match_operand 1 "cc_register" "") (const_int 0)])
7552 "TARGET_ARM <return_cond_true>"
7555 if (arm_ccfsm_state == 2)
7557 arm_ccfsm_state += 2;
7560 return output_return_instruction (operands[0], true, true,
7563 [(set_attr "conds" "use")
7564 (set_attr "length" "12")
7565 (set_attr "type" "load_4")]
7568 (define_insn "*arm_simple_return"
7573 if (arm_ccfsm_state == 2)
7575 arm_ccfsm_state += 2;
7578 return output_return_instruction (const_true_rtx, true, false, true);
7580 [(set_attr "type" "branch")
7581 (set_attr "length" "4")
7582 (set_attr "predicable" "yes")]
7585 ;; Generate a sequence of instructions to determine if the processor is
7586 ;; in 26-bit or 32-bit mode, and return the appropriate return address
7589 (define_expand "return_addr_mask"
7591 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7593 (set (match_operand:SI 0 "s_register_operand")
7594 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7596 (const_int 67108860)))] ; 0x03fffffc
7599 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
7602 (define_insn "*check_arch2"
7603 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7604 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7607 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7608 [(set_attr "length" "8")
7609 (set_attr "conds" "set")
7610 (set_attr "type" "multiple")]
7613 ;; Call subroutine returning any type.
7615 (define_expand "untyped_call"
7616 [(parallel [(call (match_operand 0 "" "")
7618 (match_operand 1 "" "")
7619 (match_operand 2 "" "")])]
7620 "TARGET_EITHER && !TARGET_FDPIC"
7624 rtx par = gen_rtx_PARALLEL (VOIDmode,
7625 rtvec_alloc (XVECLEN (operands[2], 0)));
7626 rtx addr = gen_reg_rtx (Pmode);
7630 emit_move_insn (addr, XEXP (operands[1], 0));
7631 mem = change_address (operands[1], BLKmode, addr);
7633 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7635 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7637 /* Default code only uses r0 as a return value, but we could
7638 be using anything up to 4 registers. */
7639 if (REGNO (src) == R0_REGNUM)
7640 src = gen_rtx_REG (TImode, R0_REGNUM);
7642 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7644 size += GET_MODE_SIZE (GET_MODE (src));
7647 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7651 for (i = 0; i < XVECLEN (par, 0); i++)
7653 HOST_WIDE_INT offset = 0;
7654 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7657 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7659 mem = change_address (mem, GET_MODE (reg), NULL);
7660 if (REGNO (reg) == R0_REGNUM)
7662 /* On thumb we have to use a write-back instruction. */
7663 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7664 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7665 size = TARGET_ARM ? 16 : 0;
7669 emit_move_insn (mem, reg);
7670 size = GET_MODE_SIZE (GET_MODE (reg));
7674 /* The optimizer does not know that the call sets the function value
7675 registers we stored in the result block. We avoid problems by
7676 claiming that all hard registers are used and clobbered at this
7678 emit_insn (gen_blockage ());
7684 (define_expand "untyped_return"
7685 [(match_operand:BLK 0 "memory_operand")
7686 (match_operand 1 "" "")]
7687 "TARGET_EITHER && !TARGET_FDPIC"
7691 rtx addr = gen_reg_rtx (Pmode);
7695 emit_move_insn (addr, XEXP (operands[0], 0));
7696 mem = change_address (operands[0], BLKmode, addr);
7698 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7700 HOST_WIDE_INT offset = 0;
7701 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7704 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7706 mem = change_address (mem, GET_MODE (reg), NULL);
7707 if (REGNO (reg) == R0_REGNUM)
7709 /* On thumb we have to use a write-back instruction. */
7710 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7711 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7712 size = TARGET_ARM ? 16 : 0;
7716 emit_move_insn (reg, mem);
7717 size = GET_MODE_SIZE (GET_MODE (reg));
7721 /* Emit USE insns before the return. */
7722 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7723 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7725 /* Construct the return. */
7726 expand_naked_return ();
7732 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7733 ;; all of memory. This blocks insns from being moved across this point.
7735 (define_insn "blockage"
7736 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7739 [(set_attr "length" "0")
7740 (set_attr "type" "block")]
7743 ;; Since we hard code r0 here use the 'o' constraint to prevent
7744 ;; provoking undefined behaviour in the hardware with putting out
7745 ;; auto-increment operations with potentially r0 as the base register.
7746 (define_insn "probe_stack"
7747 [(set (match_operand:SI 0 "memory_operand" "=o")
7748 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7751 [(set_attr "type" "store_4")
7752 (set_attr "predicable" "yes")]
7755 (define_insn "probe_stack_range"
7756 [(set (match_operand:SI 0 "register_operand" "=r")
7757 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7758 (match_operand:SI 2 "register_operand" "r")]
7759 VUNSPEC_PROBE_STACK_RANGE))]
7762 return output_probe_stack_range (operands[0], operands[2]);
7764 [(set_attr "type" "multiple")
7765 (set_attr "conds" "clob")]
7768 ;; Named patterns for stack smashing protection.
7769 (define_expand "stack_protect_combined_set"
7771 [(set (match_operand:SI 0 "memory_operand")
7772 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7774 (clobber (match_scratch:SI 2 ""))
7775 (clobber (match_scratch:SI 3 ""))])]
7780 ;; Use a separate insn from the above expand to be able to have the mem outside
7781 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7782 ;; try to reload the guard since we need to control how PIC access is done in
7783 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7784 ;; legitimize_pic_address ()).
7785 (define_insn_and_split "*stack_protect_combined_set_insn"
7786 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7787 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7789 (clobber (match_scratch:SI 2 "=&l,&r"))
7790 (clobber (match_scratch:SI 3 "=&l,&r"))]
7794 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7796 (clobber (match_dup 2))])]
7804 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7806 pic_reg = operands[3];
7808 /* Forces recomputing of GOT base now. */
7809 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7810 true /*compute_now*/);
7814 if (address_operand (operands[1], SImode))
7815 operands[2] = operands[1];
7818 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7819 emit_move_insn (operands[2], mem);
7823 [(set_attr "arch" "t1,32")]
7826 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7827 ;; canary value does not live beyond the life of this sequence.
7828 (define_insn "*stack_protect_set_insn"
7829 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7830 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7832 (clobber (match_dup 1))]
7835 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7836 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7837 [(set_attr "length" "8,12")
7838 (set_attr "conds" "clob,nocond")
7839 (set_attr "type" "multiple")
7840 (set_attr "arch" "t1,32")]
7843 (define_expand "stack_protect_combined_test"
7847 (eq (match_operand:SI 0 "memory_operand")
7848 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7850 (label_ref (match_operand 2))
7852 (clobber (match_scratch:SI 3 ""))
7853 (clobber (match_scratch:SI 4 ""))
7854 (clobber (reg:CC CC_REGNUM))])]
7859 ;; Use a separate insn from the above expand to be able to have the mem outside
7860 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7861 ;; try to reload the guard since we need to control how PIC access is done in
7862 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7863 ;; legitimize_pic_address ()).
7864 (define_insn_and_split "*stack_protect_combined_test_insn"
7867 (eq (match_operand:SI 0 "memory_operand" "m,m")
7868 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7870 (label_ref (match_operand 2))
7872 (clobber (match_scratch:SI 3 "=&l,&r"))
7873 (clobber (match_scratch:SI 4 "=&l,&r"))
7874 (clobber (reg:CC CC_REGNUM))]
7887 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7889 pic_reg = operands[4];
7891 /* Forces recomputing of GOT base now. */
7892 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7893 true /*compute_now*/);
7897 if (address_operand (operands[1], SImode))
7898 operands[3] = operands[1];
7901 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7902 emit_move_insn (operands[3], mem);
7907 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7909 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7910 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7911 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7915 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7917 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7918 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7923 [(set_attr "arch" "t1,32")]
7926 (define_insn "arm_stack_protect_test_insn"
7927 [(set (reg:CC_Z CC_REGNUM)
7928 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7929 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7932 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7933 (clobber (match_dup 2))]
7935 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7936 [(set_attr "length" "8,12")
7937 (set_attr "conds" "set")
7938 (set_attr "type" "multiple")
7939 (set_attr "arch" "t,32")]
7942 (define_expand "casesi"
7943 [(match_operand:SI 0 "s_register_operand") ; index to jump on
7944 (match_operand:SI 1 "const_int_operand") ; lower bound
7945 (match_operand:SI 2 "const_int_operand") ; total range
7946 (match_operand:SI 3 "" "") ; table label
7947 (match_operand:SI 4 "" "")] ; Out of range label
7948 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7951 enum insn_code code;
7952 if (operands[1] != const0_rtx)
7954 rtx reg = gen_reg_rtx (SImode);
7956 emit_insn (gen_addsi3 (reg, operands[0],
7957 gen_int_mode (-INTVAL (operands[1]),
7963 code = CODE_FOR_arm_casesi_internal;
7964 else if (TARGET_THUMB1)
7965 code = CODE_FOR_thumb1_casesi_internal_pic;
7967 code = CODE_FOR_thumb2_casesi_internal_pic;
7969 code = CODE_FOR_thumb2_casesi_internal;
7971 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
7972 operands[2] = force_reg (SImode, operands[2]);
7974 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
7975 operands[3], operands[4]));
7980 ;; The USE in this pattern is needed to tell flow analysis that this is
7981 ;; a CASESI insn. It has no other purpose.
7982 (define_expand "arm_casesi_internal"
7983 [(parallel [(set (pc)
7985 (leu (match_operand:SI 0 "s_register_operand")
7986 (match_operand:SI 1 "arm_rhs_operand"))
7988 (label_ref:SI (match_operand 3 ""))))
7989 (clobber (reg:CC CC_REGNUM))
7990 (use (label_ref:SI (match_operand 2 "")))])]
7993 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
7994 operands[4] = gen_rtx_PLUS (SImode, operands[4],
7995 gen_rtx_LABEL_REF (SImode, operands[2]));
7996 operands[4] = gen_rtx_MEM (SImode, operands[4]);
7997 MEM_READONLY_P (operands[4]) = 1;
7998 MEM_NOTRAP_P (operands[4]) = 1;
8001 (define_insn "*arm_casesi_internal"
8002 [(parallel [(set (pc)
8004 (leu (match_operand:SI 0 "s_register_operand" "r")
8005 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8006 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8007 (label_ref:SI (match_operand 2 "" ""))))
8008 (label_ref:SI (match_operand 3 "" ""))))
8009 (clobber (reg:CC CC_REGNUM))
8010 (use (label_ref:SI (match_dup 2)))])]
8014 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8015 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8017 [(set_attr "conds" "clob")
8018 (set_attr "length" "12")
8019 (set_attr "type" "multiple")]
8022 (define_expand "indirect_jump"
8024 (match_operand:SI 0 "s_register_operand"))]
8027 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8028 address and use bx. */
8032 tmp = gen_reg_rtx (SImode);
8033 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8039 ;; NB Never uses BX.
8040 (define_insn "*arm_indirect_jump"
8042 (match_operand:SI 0 "s_register_operand" "r"))]
8044 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8045 [(set_attr "predicable" "yes")
8046 (set_attr "type" "branch")]
8049 (define_insn "*load_indirect_jump"
8051 (match_operand:SI 0 "memory_operand" "m"))]
8053 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8054 [(set_attr "type" "load_4")
8055 (set_attr "pool_range" "4096")
8056 (set_attr "neg_pool_range" "4084")
8057 (set_attr "predicable" "yes")]
8067 [(set (attr "length")
8068 (if_then_else (eq_attr "is_thumb" "yes")
8071 (set_attr "type" "mov_reg")]
8075 [(trap_if (const_int 1) (const_int 0))]
8079 return \".inst\\t0xe7f000f0\";
8081 return \".inst\\t0xdeff\";
8083 [(set (attr "length")
8084 (if_then_else (eq_attr "is_thumb" "yes")
8087 (set_attr "type" "trap")
8088 (set_attr "conds" "unconditional")]
8092 ;; Patterns to allow combination of arithmetic, cond code and shifts
8094 (define_insn "*<arith_shift_insn>_multsi"
8095 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8097 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8098 (match_operand:SI 3 "power_of_two_operand" ""))
8099 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8101 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8102 [(set_attr "predicable" "yes")
8103 (set_attr "shift" "2")
8104 (set_attr "arch" "a,t2")
8105 (set_attr "type" "alu_shift_imm")])
8107 (define_insn "*<arith_shift_insn>_shiftsi"
8108 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8110 (match_operator:SI 2 "shift_nomul_operator"
8111 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8112 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8113 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8114 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8115 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8116 [(set_attr "predicable" "yes")
8117 (set_attr "shift" "3")
8118 (set_attr "arch" "a,t2,a")
8119 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8122 [(set (match_operand:SI 0 "s_register_operand" "")
8123 (match_operator:SI 1 "shiftable_operator"
8124 [(match_operator:SI 2 "shiftable_operator"
8125 [(match_operator:SI 3 "shift_operator"
8126 [(match_operand:SI 4 "s_register_operand" "")
8127 (match_operand:SI 5 "reg_or_int_operand" "")])
8128 (match_operand:SI 6 "s_register_operand" "")])
8129 (match_operand:SI 7 "arm_rhs_operand" "")]))
8130 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8133 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8136 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8139 (define_insn "*arith_shiftsi_compare0"
8140 [(set (reg:CC_NOOV CC_REGNUM)
8142 (match_operator:SI 1 "shiftable_operator"
8143 [(match_operator:SI 3 "shift_operator"
8144 [(match_operand:SI 4 "s_register_operand" "r,r")
8145 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8146 (match_operand:SI 2 "s_register_operand" "r,r")])
8148 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8149 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8152 "%i1s%?\\t%0, %2, %4%S3"
8153 [(set_attr "conds" "set")
8154 (set_attr "shift" "4")
8155 (set_attr "arch" "32,a")
8156 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8158 (define_insn "*arith_shiftsi_compare0_scratch"
8159 [(set (reg:CC_NOOV CC_REGNUM)
8161 (match_operator:SI 1 "shiftable_operator"
8162 [(match_operator:SI 3 "shift_operator"
8163 [(match_operand:SI 4 "s_register_operand" "r,r")
8164 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8165 (match_operand:SI 2 "s_register_operand" "r,r")])
8167 (clobber (match_scratch:SI 0 "=r,r"))]
8169 "%i1s%?\\t%0, %2, %4%S3"
8170 [(set_attr "conds" "set")
8171 (set_attr "shift" "4")
8172 (set_attr "arch" "32,a")
8173 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8175 (define_insn "*sub_shiftsi"
8176 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8177 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8178 (match_operator:SI 2 "shift_operator"
8179 [(match_operand:SI 3 "s_register_operand" "r,r")
8180 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8182 "sub%?\\t%0, %1, %3%S2"
8183 [(set_attr "predicable" "yes")
8184 (set_attr "predicable_short_it" "no")
8185 (set_attr "shift" "3")
8186 (set_attr "arch" "32,a")
8187 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8189 (define_insn "*sub_shiftsi_compare0"
8190 [(set (reg:CC_NOOV CC_REGNUM)
8192 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8193 (match_operator:SI 2 "shift_operator"
8194 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8195 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8197 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8198 (minus:SI (match_dup 1)
8199 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8201 "subs%?\\t%0, %1, %3%S2"
8202 [(set_attr "conds" "set")
8203 (set_attr "shift" "3")
8204 (set_attr "arch" "32,a,a")
8205 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8207 (define_insn "*sub_shiftsi_compare0_scratch"
8208 [(set (reg:CC_NOOV CC_REGNUM)
8210 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8211 (match_operator:SI 2 "shift_operator"
8212 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8213 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8215 (clobber (match_scratch:SI 0 "=r,r,r"))]
8217 "subs%?\\t%0, %1, %3%S2"
8218 [(set_attr "conds" "set")
8219 (set_attr "shift" "3")
8220 (set_attr "arch" "32,a,a")
8221 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8224 (define_insn_and_split "*and_scc"
8225 [(set (match_operand:SI 0 "s_register_operand" "=r")
8226 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8227 [(match_operand 2 "cc_register" "") (const_int 0)])
8228 (match_operand:SI 3 "s_register_operand" "r")))]
8230 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8231 "&& reload_completed"
8232 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8233 (cond_exec (match_dup 4) (set (match_dup 0)
8234 (and:SI (match_dup 3) (const_int 1))))]
8236 machine_mode mode = GET_MODE (operands[2]);
8237 enum rtx_code rc = GET_CODE (operands[1]);
8239 /* Note that operands[4] is the same as operands[1],
8240 but with VOIDmode as the result. */
8241 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8242 if (mode == CCFPmode || mode == CCFPEmode)
8243 rc = reverse_condition_maybe_unordered (rc);
8245 rc = reverse_condition (rc);
8246 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8248 [(set_attr "conds" "use")
8249 (set_attr "type" "multiple")
8250 (set_attr "length" "8")]
8253 (define_insn_and_split "*ior_scc"
8254 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8255 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8256 [(match_operand 2 "cc_register" "") (const_int 0)])
8257 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8262 "&& reload_completed
8263 && REGNO (operands [0]) != REGNO (operands[3])"
8264 ;; && which_alternative == 1
8265 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8266 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8267 (cond_exec (match_dup 4) (set (match_dup 0)
8268 (ior:SI (match_dup 3) (const_int 1))))]
8270 machine_mode mode = GET_MODE (operands[2]);
8271 enum rtx_code rc = GET_CODE (operands[1]);
8273 /* Note that operands[4] is the same as operands[1],
8274 but with VOIDmode as the result. */
8275 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8276 if (mode == CCFPmode || mode == CCFPEmode)
8277 rc = reverse_condition_maybe_unordered (rc);
8279 rc = reverse_condition (rc);
8280 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8282 [(set_attr "conds" "use")
8283 (set_attr "length" "4,8")
8284 (set_attr "type" "logic_imm,multiple")]
8287 ; A series of splitters for the compare_scc pattern below. Note that
8288 ; order is important.
8290 [(set (match_operand:SI 0 "s_register_operand" "")
8291 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8293 (clobber (reg:CC CC_REGNUM))]
8294 "TARGET_32BIT && reload_completed"
8295 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8298 [(set (match_operand:SI 0 "s_register_operand" "")
8299 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8301 (clobber (reg:CC CC_REGNUM))]
8302 "TARGET_32BIT && reload_completed"
8303 [(set (match_dup 0) (not:SI (match_dup 1)))
8304 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8307 [(set (match_operand:SI 0 "s_register_operand" "")
8308 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8310 (clobber (reg:CC CC_REGNUM))]
8311 "arm_arch5t && TARGET_32BIT"
8312 [(set (match_dup 0) (clz:SI (match_dup 1)))
8313 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8317 [(set (match_operand:SI 0 "s_register_operand" "")
8318 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8320 (clobber (reg:CC CC_REGNUM))]
8321 "TARGET_32BIT && reload_completed"
8323 [(set (reg:CC CC_REGNUM)
8324 (compare:CC (const_int 1) (match_dup 1)))
8326 (minus:SI (const_int 1) (match_dup 1)))])
8327 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8328 (set (match_dup 0) (const_int 0)))])
8331 [(set (match_operand:SI 0 "s_register_operand" "")
8332 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8333 (match_operand:SI 2 "const_int_operand" "")))
8334 (clobber (reg:CC CC_REGNUM))]
8335 "TARGET_32BIT && reload_completed"
8337 [(set (reg:CC CC_REGNUM)
8338 (compare:CC (match_dup 1) (match_dup 2)))
8339 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8340 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8341 (set (match_dup 0) (const_int 1)))]
8343 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8347 [(set (match_operand:SI 0 "s_register_operand" "")
8348 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8349 (match_operand:SI 2 "arm_add_operand" "")))
8350 (clobber (reg:CC CC_REGNUM))]
8351 "TARGET_32BIT && reload_completed"
8353 [(set (reg:CC_NOOV CC_REGNUM)
8354 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8356 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8357 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8358 (set (match_dup 0) (const_int 1)))])
8360 (define_insn_and_split "*compare_scc"
8361 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8362 (match_operator:SI 1 "arm_comparison_operator"
8363 [(match_operand:SI 2 "s_register_operand" "r,r")
8364 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8365 (clobber (reg:CC CC_REGNUM))]
8368 "&& reload_completed"
8369 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8370 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8371 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8374 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8375 operands[2], operands[3]);
8376 enum rtx_code rc = GET_CODE (operands[1]);
8378 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8380 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8381 if (mode == CCFPmode || mode == CCFPEmode)
8382 rc = reverse_condition_maybe_unordered (rc);
8384 rc = reverse_condition (rc);
8385 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8387 [(set_attr "type" "multiple")]
8390 ;; Attempt to improve the sequence generated by the compare_scc splitters
8391 ;; not to use conditional execution.
8393 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
8397 [(set (reg:CC CC_REGNUM)
8398 (compare:CC (match_operand:SI 1 "register_operand" "")
8400 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8401 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8402 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8403 (set (match_dup 0) (const_int 1)))]
8404 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8405 [(set (match_dup 0) (clz:SI (match_dup 1)))
8406 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8409 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
8413 [(set (reg:CC CC_REGNUM)
8414 (compare:CC (match_operand:SI 1 "register_operand" "")
8416 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8417 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8418 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8419 (set (match_dup 0) (const_int 1)))
8420 (match_scratch:SI 2 "r")]
8421 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8423 [(set (reg:CC CC_REGNUM)
8424 (compare:CC (const_int 0) (match_dup 1)))
8425 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8427 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8428 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8431 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8432 ;; sub Rd, Reg1, reg2
8436 [(set (reg:CC CC_REGNUM)
8437 (compare:CC (match_operand:SI 1 "register_operand" "")
8438 (match_operand:SI 2 "arm_rhs_operand" "")))
8439 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8440 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8441 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8442 (set (match_dup 0) (const_int 1)))]
8443 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8444 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8445 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8446 (set (match_dup 0) (clz:SI (match_dup 0)))
8447 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8451 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8452 ;; sub T1, Reg1, reg2
8456 [(set (reg:CC CC_REGNUM)
8457 (compare:CC (match_operand:SI 1 "register_operand" "")
8458 (match_operand:SI 2 "arm_rhs_operand" "")))
8459 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8460 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8461 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8462 (set (match_dup 0) (const_int 1)))
8463 (match_scratch:SI 3 "r")]
8464 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8465 [(set (match_dup 3) (match_dup 4))
8467 [(set (reg:CC CC_REGNUM)
8468 (compare:CC (const_int 0) (match_dup 3)))
8469 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8471 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8472 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8474 if (CONST_INT_P (operands[2]))
8475 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8477 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
8480 (define_insn "*cond_move"
8481 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8482 (if_then_else:SI (match_operator 3 "equality_operator"
8483 [(match_operator 4 "arm_comparison_operator"
8484 [(match_operand 5 "cc_register" "") (const_int 0)])
8486 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8487 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8490 if (GET_CODE (operands[3]) == NE)
8492 if (which_alternative != 1)
8493 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8494 if (which_alternative != 0)
8495 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8498 if (which_alternative != 0)
8499 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8500 if (which_alternative != 1)
8501 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8504 [(set_attr "conds" "use")
8505 (set_attr_alternative "type"
8506 [(if_then_else (match_operand 2 "const_int_operand" "")
8507 (const_string "mov_imm")
8508 (const_string "mov_reg"))
8509 (if_then_else (match_operand 1 "const_int_operand" "")
8510 (const_string "mov_imm")
8511 (const_string "mov_reg"))
8512 (const_string "multiple")])
8513 (set_attr "length" "4,4,8")]
8516 (define_insn "*cond_arith"
8517 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8518 (match_operator:SI 5 "shiftable_operator"
8519 [(match_operator:SI 4 "arm_comparison_operator"
8520 [(match_operand:SI 2 "s_register_operand" "r,r")
8521 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8522 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8523 (clobber (reg:CC CC_REGNUM))]
8526 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8527 return \"%i5\\t%0, %1, %2, lsr #31\";
8529 output_asm_insn (\"cmp\\t%2, %3\", operands);
8530 if (GET_CODE (operands[5]) == AND)
8531 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8532 else if (GET_CODE (operands[5]) == MINUS)
8533 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8534 else if (which_alternative != 0)
8535 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8536 return \"%i5%d4\\t%0, %1, #1\";
8538 [(set_attr "conds" "clob")
8539 (set_attr "length" "12")
8540 (set_attr "type" "multiple")]
8543 (define_insn "*cond_sub"
8544 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8545 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8546 (match_operator:SI 4 "arm_comparison_operator"
8547 [(match_operand:SI 2 "s_register_operand" "r,r")
8548 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8549 (clobber (reg:CC CC_REGNUM))]
8552 output_asm_insn (\"cmp\\t%2, %3\", operands);
8553 if (which_alternative != 0)
8554 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8555 return \"sub%d4\\t%0, %1, #1\";
8557 [(set_attr "conds" "clob")
8558 (set_attr "length" "8,12")
8559 (set_attr "type" "multiple")]
8562 (define_insn "*cmp_ite0"
8563 [(set (match_operand 6 "dominant_cc_register" "")
8566 (match_operator 4 "arm_comparison_operator"
8567 [(match_operand:SI 0 "s_register_operand"
8568 "l,l,l,r,r,r,r,r,r")
8569 (match_operand:SI 1 "arm_add_operand"
8570 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8571 (match_operator:SI 5 "arm_comparison_operator"
8572 [(match_operand:SI 2 "s_register_operand"
8573 "l,r,r,l,l,r,r,r,r")
8574 (match_operand:SI 3 "arm_add_operand"
8575 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8581 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8583 {\"cmp%d5\\t%0, %1\",
8584 \"cmp%d4\\t%2, %3\"},
8585 {\"cmn%d5\\t%0, #%n1\",
8586 \"cmp%d4\\t%2, %3\"},
8587 {\"cmp%d5\\t%0, %1\",
8588 \"cmn%d4\\t%2, #%n3\"},
8589 {\"cmn%d5\\t%0, #%n1\",
8590 \"cmn%d4\\t%2, #%n3\"}
8592 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8597 \"cmn\\t%0, #%n1\"},
8598 {\"cmn\\t%2, #%n3\",
8600 {\"cmn\\t%2, #%n3\",
8603 static const char * const ite[2] =
8608 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8609 CMP_CMP, CMN_CMP, CMP_CMP,
8610 CMN_CMP, CMP_CMN, CMN_CMN};
8612 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8614 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8615 if (TARGET_THUMB2) {
8616 output_asm_insn (ite[swap], operands);
8618 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8621 [(set_attr "conds" "set")
8622 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8623 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8624 (set_attr "type" "multiple")
8625 (set_attr_alternative "length"
8631 (if_then_else (eq_attr "is_thumb" "no")
8634 (if_then_else (eq_attr "is_thumb" "no")
8637 (if_then_else (eq_attr "is_thumb" "no")
8640 (if_then_else (eq_attr "is_thumb" "no")
8645 (define_insn "*cmp_ite1"
8646 [(set (match_operand 6 "dominant_cc_register" "")
8649 (match_operator 4 "arm_comparison_operator"
8650 [(match_operand:SI 0 "s_register_operand"
8651 "l,l,l,r,r,r,r,r,r")
8652 (match_operand:SI 1 "arm_add_operand"
8653 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8654 (match_operator:SI 5 "arm_comparison_operator"
8655 [(match_operand:SI 2 "s_register_operand"
8656 "l,r,r,l,l,r,r,r,r")
8657 (match_operand:SI 3 "arm_add_operand"
8658 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8664 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8668 {\"cmn\\t%0, #%n1\",
8671 \"cmn\\t%2, #%n3\"},
8672 {\"cmn\\t%0, #%n1\",
8675 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8677 {\"cmp%d4\\t%2, %3\",
8678 \"cmp%D5\\t%0, %1\"},
8679 {\"cmp%d4\\t%2, %3\",
8680 \"cmn%D5\\t%0, #%n1\"},
8681 {\"cmn%d4\\t%2, #%n3\",
8682 \"cmp%D5\\t%0, %1\"},
8683 {\"cmn%d4\\t%2, #%n3\",
8684 \"cmn%D5\\t%0, #%n1\"}
8686 static const char * const ite[2] =
8691 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8692 CMP_CMP, CMN_CMP, CMP_CMP,
8693 CMN_CMP, CMP_CMN, CMN_CMN};
8695 comparison_dominates_p (GET_CODE (operands[5]),
8696 reverse_condition (GET_CODE (operands[4])));
8698 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8699 if (TARGET_THUMB2) {
8700 output_asm_insn (ite[swap], operands);
8702 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8705 [(set_attr "conds" "set")
8706 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8707 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8708 (set_attr_alternative "length"
8714 (if_then_else (eq_attr "is_thumb" "no")
8717 (if_then_else (eq_attr "is_thumb" "no")
8720 (if_then_else (eq_attr "is_thumb" "no")
8723 (if_then_else (eq_attr "is_thumb" "no")
8726 (set_attr "type" "multiple")]
8729 (define_insn "*cmp_and"
8730 [(set (match_operand 6 "dominant_cc_register" "")
8733 (match_operator 4 "arm_comparison_operator"
8734 [(match_operand:SI 0 "s_register_operand"
8735 "l,l,l,r,r,r,r,r,r,r")
8736 (match_operand:SI 1 "arm_add_operand"
8737 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8738 (match_operator:SI 5 "arm_comparison_operator"
8739 [(match_operand:SI 2 "s_register_operand"
8740 "l,r,r,l,l,r,r,r,r,r")
8741 (match_operand:SI 3 "arm_add_operand"
8742 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8747 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8749 {\"cmp%d5\\t%0, %1\",
8750 \"cmp%d4\\t%2, %3\"},
8751 {\"cmn%d5\\t%0, #%n1\",
8752 \"cmp%d4\\t%2, %3\"},
8753 {\"cmp%d5\\t%0, %1\",
8754 \"cmn%d4\\t%2, #%n3\"},
8755 {\"cmn%d5\\t%0, #%n1\",
8756 \"cmn%d4\\t%2, #%n3\"}
8758 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8763 \"cmn\\t%0, #%n1\"},
8764 {\"cmn\\t%2, #%n3\",
8766 {\"cmn\\t%2, #%n3\",
8769 static const char *const ite[2] =
8774 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8775 CMP_CMP, CMN_CMP, CMP_CMP,
8776 CMP_CMP, CMN_CMP, CMP_CMN,
8779 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8781 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8782 if (TARGET_THUMB2) {
8783 output_asm_insn (ite[swap], operands);
8785 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8788 [(set_attr "conds" "set")
8789 (set_attr "predicable" "no")
8790 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8791 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8792 (set_attr_alternative "length"
8799 (if_then_else (eq_attr "is_thumb" "no")
8802 (if_then_else (eq_attr "is_thumb" "no")
8805 (if_then_else (eq_attr "is_thumb" "no")
8808 (if_then_else (eq_attr "is_thumb" "no")
8811 (set_attr "type" "multiple")]
8814 (define_insn "*cmp_ior"
8815 [(set (match_operand 6 "dominant_cc_register" "")
8818 (match_operator 4 "arm_comparison_operator"
8819 [(match_operand:SI 0 "s_register_operand"
8820 "l,l,l,r,r,r,r,r,r,r")
8821 (match_operand:SI 1 "arm_add_operand"
8822 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8823 (match_operator:SI 5 "arm_comparison_operator"
8824 [(match_operand:SI 2 "s_register_operand"
8825 "l,r,r,l,l,r,r,r,r,r")
8826 (match_operand:SI 3 "arm_add_operand"
8827 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8832 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8836 {\"cmn\\t%0, #%n1\",
8839 \"cmn\\t%2, #%n3\"},
8840 {\"cmn\\t%0, #%n1\",
8843 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8845 {\"cmp%D4\\t%2, %3\",
8846 \"cmp%D5\\t%0, %1\"},
8847 {\"cmp%D4\\t%2, %3\",
8848 \"cmn%D5\\t%0, #%n1\"},
8849 {\"cmn%D4\\t%2, #%n3\",
8850 \"cmp%D5\\t%0, %1\"},
8851 {\"cmn%D4\\t%2, #%n3\",
8852 \"cmn%D5\\t%0, #%n1\"}
8854 static const char *const ite[2] =
8859 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8860 CMP_CMP, CMN_CMP, CMP_CMP,
8861 CMP_CMP, CMN_CMP, CMP_CMN,
8864 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8866 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8867 if (TARGET_THUMB2) {
8868 output_asm_insn (ite[swap], operands);
8870 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8874 [(set_attr "conds" "set")
8875 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8876 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8877 (set_attr_alternative "length"
8884 (if_then_else (eq_attr "is_thumb" "no")
8887 (if_then_else (eq_attr "is_thumb" "no")
8890 (if_then_else (eq_attr "is_thumb" "no")
8893 (if_then_else (eq_attr "is_thumb" "no")
8896 (set_attr "type" "multiple")]
8899 (define_insn_and_split "*ior_scc_scc"
8900 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8901 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8902 [(match_operand:SI 1 "s_register_operand" "l,r")
8903 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8904 (match_operator:SI 6 "arm_comparison_operator"
8905 [(match_operand:SI 4 "s_register_operand" "l,r")
8906 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8907 (clobber (reg:CC CC_REGNUM))]
8909 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8912 "TARGET_32BIT && reload_completed"
8916 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8917 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8919 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8921 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8924 [(set_attr "conds" "clob")
8925 (set_attr "enabled_for_short_it" "yes,no")
8926 (set_attr "length" "16")
8927 (set_attr "type" "multiple")]
8930 ; If the above pattern is followed by a CMP insn, then the compare is
8931 ; redundant, since we can rework the conditional instruction that follows.
8932 (define_insn_and_split "*ior_scc_scc_cmp"
8933 [(set (match_operand 0 "dominant_cc_register" "")
8934 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8935 [(match_operand:SI 1 "s_register_operand" "l,r")
8936 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8937 (match_operator:SI 6 "arm_comparison_operator"
8938 [(match_operand:SI 4 "s_register_operand" "l,r")
8939 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8941 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8942 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8943 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8946 "TARGET_32BIT && reload_completed"
8950 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8951 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8953 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8955 [(set_attr "conds" "set")
8956 (set_attr "enabled_for_short_it" "yes,no")
8957 (set_attr "length" "16")
8958 (set_attr "type" "multiple")]
8961 (define_insn_and_split "*and_scc_scc"
8962 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8963 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8964 [(match_operand:SI 1 "s_register_operand" "l,r")
8965 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8966 (match_operator:SI 6 "arm_comparison_operator"
8967 [(match_operand:SI 4 "s_register_operand" "l,r")
8968 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8969 (clobber (reg:CC CC_REGNUM))]
8971 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8974 "TARGET_32BIT && reload_completed
8975 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8980 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8981 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8983 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8985 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8988 [(set_attr "conds" "clob")
8989 (set_attr "enabled_for_short_it" "yes,no")
8990 (set_attr "length" "16")
8991 (set_attr "type" "multiple")]
8994 ; If the above pattern is followed by a CMP insn, then the compare is
8995 ; redundant, since we can rework the conditional instruction that follows.
8996 (define_insn_and_split "*and_scc_scc_cmp"
8997 [(set (match_operand 0 "dominant_cc_register" "")
8998 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
8999 [(match_operand:SI 1 "s_register_operand" "l,r")
9000 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9001 (match_operator:SI 6 "arm_comparison_operator"
9002 [(match_operand:SI 4 "s_register_operand" "l,r")
9003 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9005 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9006 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9007 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9010 "TARGET_32BIT && reload_completed"
9014 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9015 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9017 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9019 [(set_attr "conds" "set")
9020 (set_attr "enabled_for_short_it" "yes,no")
9021 (set_attr "length" "16")
9022 (set_attr "type" "multiple")]
9025 ;; If there is no dominance in the comparison, then we can still save an
9026 ;; instruction in the AND case, since we can know that the second compare
9027 ;; need only zero the value if false (if true, then the value is already
9029 (define_insn_and_split "*and_scc_scc_nodom"
9030 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9031 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9032 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9033 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9034 (match_operator:SI 6 "arm_comparison_operator"
9035 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9036 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9037 (clobber (reg:CC CC_REGNUM))]
9039 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9042 "TARGET_32BIT && reload_completed"
9043 [(parallel [(set (match_dup 0)
9044 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9045 (clobber (reg:CC CC_REGNUM))])
9046 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9048 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9051 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9052 operands[4], operands[5]),
9054 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9056 [(set_attr "conds" "clob")
9057 (set_attr "length" "20")
9058 (set_attr "type" "multiple")]
9062 [(set (reg:CC_NOOV CC_REGNUM)
9063 (compare:CC_NOOV (ior:SI
9064 (and:SI (match_operand:SI 0 "s_register_operand" "")
9066 (match_operator:SI 1 "arm_comparison_operator"
9067 [(match_operand:SI 2 "s_register_operand" "")
9068 (match_operand:SI 3 "arm_add_operand" "")]))
9070 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9073 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9075 (set (reg:CC_NOOV CC_REGNUM)
9076 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9081 [(set (reg:CC_NOOV CC_REGNUM)
9082 (compare:CC_NOOV (ior:SI
9083 (match_operator:SI 1 "arm_comparison_operator"
9084 [(match_operand:SI 2 "s_register_operand" "")
9085 (match_operand:SI 3 "arm_add_operand" "")])
9086 (and:SI (match_operand:SI 0 "s_register_operand" "")
9089 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9092 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9094 (set (reg:CC_NOOV CC_REGNUM)
9095 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9098 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9100 (define_insn_and_split "*negscc"
9101 [(set (match_operand:SI 0 "s_register_operand" "=r")
9102 (neg:SI (match_operator 3 "arm_comparison_operator"
9103 [(match_operand:SI 1 "s_register_operand" "r")
9104 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9105 (clobber (reg:CC CC_REGNUM))]
9108 "&& reload_completed"
9111 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9113 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9115 /* Emit mov\\t%0, %1, asr #31 */
9116 emit_insn (gen_rtx_SET (operands[0],
9117 gen_rtx_ASHIFTRT (SImode,
9122 else if (GET_CODE (operands[3]) == NE)
9124 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9125 if (CONST_INT_P (operands[2]))
9126 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9127 gen_int_mode (-INTVAL (operands[2]),
9130 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9132 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9136 gen_rtx_SET (operands[0],
9142 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9143 emit_insn (gen_rtx_SET (cc_reg,
9144 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9145 enum rtx_code rc = GET_CODE (operands[3]);
9147 rc = reverse_condition (rc);
9148 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9153 gen_rtx_SET (operands[0], const0_rtx)));
9154 rc = GET_CODE (operands[3]);
9155 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9160 gen_rtx_SET (operands[0],
9166 [(set_attr "conds" "clob")
9167 (set_attr "length" "12")
9168 (set_attr "type" "multiple")]
9171 (define_insn_and_split "movcond_addsi"
9172 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9174 (match_operator 5 "comparison_operator"
9175 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9176 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9178 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9179 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9180 (clobber (reg:CC CC_REGNUM))]
9183 "&& reload_completed"
9184 [(set (reg:CC_NOOV CC_REGNUM)
9186 (plus:SI (match_dup 3)
9189 (set (match_dup 0) (match_dup 1))
9190 (cond_exec (match_dup 6)
9191 (set (match_dup 0) (match_dup 2)))]
9194 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9195 operands[3], operands[4]);
9196 enum rtx_code rc = GET_CODE (operands[5]);
9197 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9198 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9199 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9200 rc = reverse_condition (rc);
9202 std::swap (operands[1], operands[2]);
9204 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9207 [(set_attr "conds" "clob")
9208 (set_attr "enabled_for_short_it" "no,yes,yes")
9209 (set_attr "type" "multiple")]
9212 (define_insn "movcond"
9213 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9215 (match_operator 5 "arm_comparison_operator"
9216 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9217 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9218 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9219 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9220 (clobber (reg:CC CC_REGNUM))]
9223 if (GET_CODE (operands[5]) == LT
9224 && (operands[4] == const0_rtx))
9226 if (which_alternative != 1 && REG_P (operands[1]))
9228 if (operands[2] == const0_rtx)
9229 return \"and\\t%0, %1, %3, asr #31\";
9230 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9232 else if (which_alternative != 0 && REG_P (operands[2]))
9234 if (operands[1] == const0_rtx)
9235 return \"bic\\t%0, %2, %3, asr #31\";
9236 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9238 /* The only case that falls through to here is when both ops 1 & 2
9242 if (GET_CODE (operands[5]) == GE
9243 && (operands[4] == const0_rtx))
9245 if (which_alternative != 1 && REG_P (operands[1]))
9247 if (operands[2] == const0_rtx)
9248 return \"bic\\t%0, %1, %3, asr #31\";
9249 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9251 else if (which_alternative != 0 && REG_P (operands[2]))
9253 if (operands[1] == const0_rtx)
9254 return \"and\\t%0, %2, %3, asr #31\";
9255 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9257 /* The only case that falls through to here is when both ops 1 & 2
9260 if (CONST_INT_P (operands[4])
9261 && !const_ok_for_arm (INTVAL (operands[4])))
9262 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9264 output_asm_insn (\"cmp\\t%3, %4\", operands);
9265 if (which_alternative != 0)
9266 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9267 if (which_alternative != 1)
9268 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9271 [(set_attr "conds" "clob")
9272 (set_attr "length" "8,8,12")
9273 (set_attr "type" "multiple")]
9276 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9278 (define_insn "*ifcompare_plus_move"
9279 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9280 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9281 [(match_operand:SI 4 "s_register_operand" "r,r")
9282 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9284 (match_operand:SI 2 "s_register_operand" "r,r")
9285 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9286 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9287 (clobber (reg:CC CC_REGNUM))]
9290 [(set_attr "conds" "clob")
9291 (set_attr "length" "8,12")
9292 (set_attr "type" "multiple")]
9295 (define_insn "*if_plus_move"
9296 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9298 (match_operator 4 "arm_comparison_operator"
9299 [(match_operand 5 "cc_register" "") (const_int 0)])
9301 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9302 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9303 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9307 sub%d4\\t%0, %2, #%n3
9308 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9309 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9310 [(set_attr "conds" "use")
9311 (set_attr "length" "4,4,8,8")
9312 (set_attr_alternative "type"
9313 [(if_then_else (match_operand 3 "const_int_operand" "")
9314 (const_string "alu_imm" )
9315 (const_string "alu_sreg"))
9316 (const_string "alu_imm")
9317 (const_string "multiple")
9318 (const_string "multiple")])]
9321 (define_insn "*ifcompare_move_plus"
9322 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9323 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9324 [(match_operand:SI 4 "s_register_operand" "r,r")
9325 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9326 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9328 (match_operand:SI 2 "s_register_operand" "r,r")
9329 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9330 (clobber (reg:CC CC_REGNUM))]
9333 [(set_attr "conds" "clob")
9334 (set_attr "length" "8,12")
9335 (set_attr "type" "multiple")]
9338 (define_insn "*if_move_plus"
9339 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9341 (match_operator 4 "arm_comparison_operator"
9342 [(match_operand 5 "cc_register" "") (const_int 0)])
9343 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9345 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9346 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9350 sub%D4\\t%0, %2, #%n3
9351 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9352 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9353 [(set_attr "conds" "use")
9354 (set_attr "length" "4,4,8,8")
9355 (set_attr_alternative "type"
9356 [(if_then_else (match_operand 3 "const_int_operand" "")
9357 (const_string "alu_imm" )
9358 (const_string "alu_sreg"))
9359 (const_string "alu_imm")
9360 (const_string "multiple")
9361 (const_string "multiple")])]
9364 (define_insn "*ifcompare_arith_arith"
9365 [(set (match_operand:SI 0 "s_register_operand" "=r")
9366 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9367 [(match_operand:SI 5 "s_register_operand" "r")
9368 (match_operand:SI 6 "arm_add_operand" "rIL")])
9369 (match_operator:SI 8 "shiftable_operator"
9370 [(match_operand:SI 1 "s_register_operand" "r")
9371 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9372 (match_operator:SI 7 "shiftable_operator"
9373 [(match_operand:SI 3 "s_register_operand" "r")
9374 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9375 (clobber (reg:CC CC_REGNUM))]
9378 [(set_attr "conds" "clob")
9379 (set_attr "length" "12")
9380 (set_attr "type" "multiple")]
9383 (define_insn "*if_arith_arith"
9384 [(set (match_operand:SI 0 "s_register_operand" "=r")
9385 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9386 [(match_operand 8 "cc_register" "") (const_int 0)])
9387 (match_operator:SI 6 "shiftable_operator"
9388 [(match_operand:SI 1 "s_register_operand" "r")
9389 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9390 (match_operator:SI 7 "shiftable_operator"
9391 [(match_operand:SI 3 "s_register_operand" "r")
9392 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9394 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9395 [(set_attr "conds" "use")
9396 (set_attr "length" "8")
9397 (set_attr "type" "multiple")]
9400 (define_insn "*ifcompare_arith_move"
9401 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9402 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9403 [(match_operand:SI 2 "s_register_operand" "r,r")
9404 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9405 (match_operator:SI 7 "shiftable_operator"
9406 [(match_operand:SI 4 "s_register_operand" "r,r")
9407 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9408 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9409 (clobber (reg:CC CC_REGNUM))]
9412 /* If we have an operation where (op x 0) is the identity operation and
9413 the conditional operator is LT or GE and we are comparing against zero and
9414 everything is in registers then we can do this in two instructions. */
9415 if (operands[3] == const0_rtx
9416 && GET_CODE (operands[7]) != AND
9417 && REG_P (operands[5])
9418 && REG_P (operands[1])
9419 && REGNO (operands[1]) == REGNO (operands[4])
9420 && REGNO (operands[4]) != REGNO (operands[0]))
9422 if (GET_CODE (operands[6]) == LT)
9423 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9424 else if (GET_CODE (operands[6]) == GE)
9425 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9427 if (CONST_INT_P (operands[3])
9428 && !const_ok_for_arm (INTVAL (operands[3])))
9429 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9431 output_asm_insn (\"cmp\\t%2, %3\", operands);
9432 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9433 if (which_alternative != 0)
9434 return \"mov%D6\\t%0, %1\";
9437 [(set_attr "conds" "clob")
9438 (set_attr "length" "8,12")
9439 (set_attr "type" "multiple")]
9442 (define_insn "*if_arith_move"
9443 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9444 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9445 [(match_operand 6 "cc_register" "") (const_int 0)])
9446 (match_operator:SI 5 "shiftable_operator"
9447 [(match_operand:SI 2 "s_register_operand" "r,r")
9448 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9449 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9453 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9454 [(set_attr "conds" "use")
9455 (set_attr "length" "4,8")
9456 (set_attr_alternative "type"
9457 [(if_then_else (match_operand 3 "const_int_operand" "")
9458 (const_string "alu_shift_imm" )
9459 (const_string "alu_shift_reg"))
9460 (const_string "multiple")])]
9463 (define_insn "*ifcompare_move_arith"
9464 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9465 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9466 [(match_operand:SI 4 "s_register_operand" "r,r")
9467 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9468 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9469 (match_operator:SI 7 "shiftable_operator"
9470 [(match_operand:SI 2 "s_register_operand" "r,r")
9471 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9472 (clobber (reg:CC CC_REGNUM))]
9475 /* If we have an operation where (op x 0) is the identity operation and
9476 the conditional operator is LT or GE and we are comparing against zero and
9477 everything is in registers then we can do this in two instructions */
9478 if (operands[5] == const0_rtx
9479 && GET_CODE (operands[7]) != AND
9480 && REG_P (operands[3])
9481 && REG_P (operands[1])
9482 && REGNO (operands[1]) == REGNO (operands[2])
9483 && REGNO (operands[2]) != REGNO (operands[0]))
9485 if (GET_CODE (operands[6]) == GE)
9486 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9487 else if (GET_CODE (operands[6]) == LT)
9488 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9491 if (CONST_INT_P (operands[5])
9492 && !const_ok_for_arm (INTVAL (operands[5])))
9493 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9495 output_asm_insn (\"cmp\\t%4, %5\", operands);
9497 if (which_alternative != 0)
9498 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9499 return \"%I7%D6\\t%0, %2, %3\";
9501 [(set_attr "conds" "clob")
9502 (set_attr "length" "8,12")
9503 (set_attr "type" "multiple")]
9506 (define_insn "*if_move_arith"
9507 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9509 (match_operator 4 "arm_comparison_operator"
9510 [(match_operand 6 "cc_register" "") (const_int 0)])
9511 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9512 (match_operator:SI 5 "shiftable_operator"
9513 [(match_operand:SI 2 "s_register_operand" "r,r")
9514 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9518 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9519 [(set_attr "conds" "use")
9520 (set_attr "length" "4,8")
9521 (set_attr_alternative "type"
9522 [(if_then_else (match_operand 3 "const_int_operand" "")
9523 (const_string "alu_shift_imm" )
9524 (const_string "alu_shift_reg"))
9525 (const_string "multiple")])]
9528 (define_insn "*ifcompare_move_not"
9529 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9531 (match_operator 5 "arm_comparison_operator"
9532 [(match_operand:SI 3 "s_register_operand" "r,r")
9533 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9534 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9536 (match_operand:SI 2 "s_register_operand" "r,r"))))
9537 (clobber (reg:CC CC_REGNUM))]
9540 [(set_attr "conds" "clob")
9541 (set_attr "length" "8,12")
9542 (set_attr "type" "multiple")]
9545 (define_insn "*if_move_not"
9546 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9548 (match_operator 4 "arm_comparison_operator"
9549 [(match_operand 3 "cc_register" "") (const_int 0)])
9550 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9551 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9555 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9556 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9557 [(set_attr "conds" "use")
9558 (set_attr "type" "mvn_reg")
9559 (set_attr "length" "4,8,8")
9560 (set_attr "type" "mvn_reg,multiple,multiple")]
9563 (define_insn "*ifcompare_not_move"
9564 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9566 (match_operator 5 "arm_comparison_operator"
9567 [(match_operand:SI 3 "s_register_operand" "r,r")
9568 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9570 (match_operand:SI 2 "s_register_operand" "r,r"))
9571 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9572 (clobber (reg:CC CC_REGNUM))]
9575 [(set_attr "conds" "clob")
9576 (set_attr "length" "8,12")
9577 (set_attr "type" "multiple")]
9580 (define_insn "*if_not_move"
9581 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9583 (match_operator 4 "arm_comparison_operator"
9584 [(match_operand 3 "cc_register" "") (const_int 0)])
9585 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9586 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9590 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9591 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9592 [(set_attr "conds" "use")
9593 (set_attr "type" "mvn_reg,multiple,multiple")
9594 (set_attr "length" "4,8,8")]
9597 (define_insn "*ifcompare_shift_move"
9598 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9600 (match_operator 6 "arm_comparison_operator"
9601 [(match_operand:SI 4 "s_register_operand" "r,r")
9602 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9603 (match_operator:SI 7 "shift_operator"
9604 [(match_operand:SI 2 "s_register_operand" "r,r")
9605 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9606 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9607 (clobber (reg:CC CC_REGNUM))]
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,12")
9612 (set_attr "type" "multiple")]
9615 (define_insn "*if_shift_move"
9616 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9618 (match_operator 5 "arm_comparison_operator"
9619 [(match_operand 6 "cc_register" "") (const_int 0)])
9620 (match_operator:SI 4 "shift_operator"
9621 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9622 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9623 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9627 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9628 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9629 [(set_attr "conds" "use")
9630 (set_attr "shift" "2")
9631 (set_attr "length" "4,8,8")
9632 (set_attr_alternative "type"
9633 [(if_then_else (match_operand 3 "const_int_operand" "")
9634 (const_string "mov_shift" )
9635 (const_string "mov_shift_reg"))
9636 (const_string "multiple")
9637 (const_string "multiple")])]
9640 (define_insn "*ifcompare_move_shift"
9641 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9643 (match_operator 6 "arm_comparison_operator"
9644 [(match_operand:SI 4 "s_register_operand" "r,r")
9645 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9646 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9647 (match_operator:SI 7 "shift_operator"
9648 [(match_operand:SI 2 "s_register_operand" "r,r")
9649 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9650 (clobber (reg:CC CC_REGNUM))]
9653 [(set_attr "conds" "clob")
9654 (set_attr "length" "8,12")
9655 (set_attr "type" "multiple")]
9658 (define_insn "*if_move_shift"
9659 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9661 (match_operator 5 "arm_comparison_operator"
9662 [(match_operand 6 "cc_register" "") (const_int 0)])
9663 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9664 (match_operator:SI 4 "shift_operator"
9665 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9666 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9670 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9671 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9672 [(set_attr "conds" "use")
9673 (set_attr "shift" "2")
9674 (set_attr "length" "4,8,8")
9675 (set_attr_alternative "type"
9676 [(if_then_else (match_operand 3 "const_int_operand" "")
9677 (const_string "mov_shift" )
9678 (const_string "mov_shift_reg"))
9679 (const_string "multiple")
9680 (const_string "multiple")])]
9683 (define_insn "*ifcompare_shift_shift"
9684 [(set (match_operand:SI 0 "s_register_operand" "=r")
9686 (match_operator 7 "arm_comparison_operator"
9687 [(match_operand:SI 5 "s_register_operand" "r")
9688 (match_operand:SI 6 "arm_add_operand" "rIL")])
9689 (match_operator:SI 8 "shift_operator"
9690 [(match_operand:SI 1 "s_register_operand" "r")
9691 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9692 (match_operator:SI 9 "shift_operator"
9693 [(match_operand:SI 3 "s_register_operand" "r")
9694 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9695 (clobber (reg:CC CC_REGNUM))]
9698 [(set_attr "conds" "clob")
9699 (set_attr "length" "12")
9700 (set_attr "type" "multiple")]
9703 (define_insn "*if_shift_shift"
9704 [(set (match_operand:SI 0 "s_register_operand" "=r")
9706 (match_operator 5 "arm_comparison_operator"
9707 [(match_operand 8 "cc_register" "") (const_int 0)])
9708 (match_operator:SI 6 "shift_operator"
9709 [(match_operand:SI 1 "s_register_operand" "r")
9710 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9711 (match_operator:SI 7 "shift_operator"
9712 [(match_operand:SI 3 "s_register_operand" "r")
9713 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9715 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9716 [(set_attr "conds" "use")
9717 (set_attr "shift" "1")
9718 (set_attr "length" "8")
9719 (set (attr "type") (if_then_else
9720 (and (match_operand 2 "const_int_operand" "")
9721 (match_operand 4 "const_int_operand" ""))
9722 (const_string "mov_shift")
9723 (const_string "mov_shift_reg")))]
9726 (define_insn "*ifcompare_not_arith"
9727 [(set (match_operand:SI 0 "s_register_operand" "=r")
9729 (match_operator 6 "arm_comparison_operator"
9730 [(match_operand:SI 4 "s_register_operand" "r")
9731 (match_operand:SI 5 "arm_add_operand" "rIL")])
9732 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9733 (match_operator:SI 7 "shiftable_operator"
9734 [(match_operand:SI 2 "s_register_operand" "r")
9735 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9736 (clobber (reg:CC CC_REGNUM))]
9739 [(set_attr "conds" "clob")
9740 (set_attr "length" "12")
9741 (set_attr "type" "multiple")]
9744 (define_insn "*if_not_arith"
9745 [(set (match_operand:SI 0 "s_register_operand" "=r")
9747 (match_operator 5 "arm_comparison_operator"
9748 [(match_operand 4 "cc_register" "") (const_int 0)])
9749 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9750 (match_operator:SI 6 "shiftable_operator"
9751 [(match_operand:SI 2 "s_register_operand" "r")
9752 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9754 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9755 [(set_attr "conds" "use")
9756 (set_attr "type" "mvn_reg")
9757 (set_attr "length" "8")]
9760 (define_insn "*ifcompare_arith_not"
9761 [(set (match_operand:SI 0 "s_register_operand" "=r")
9763 (match_operator 6 "arm_comparison_operator"
9764 [(match_operand:SI 4 "s_register_operand" "r")
9765 (match_operand:SI 5 "arm_add_operand" "rIL")])
9766 (match_operator:SI 7 "shiftable_operator"
9767 [(match_operand:SI 2 "s_register_operand" "r")
9768 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9769 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9770 (clobber (reg:CC CC_REGNUM))]
9773 [(set_attr "conds" "clob")
9774 (set_attr "length" "12")
9775 (set_attr "type" "multiple")]
9778 (define_insn "*if_arith_not"
9779 [(set (match_operand:SI 0 "s_register_operand" "=r")
9781 (match_operator 5 "arm_comparison_operator"
9782 [(match_operand 4 "cc_register" "") (const_int 0)])
9783 (match_operator:SI 6 "shiftable_operator"
9784 [(match_operand:SI 2 "s_register_operand" "r")
9785 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9786 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9788 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9789 [(set_attr "conds" "use")
9790 (set_attr "type" "multiple")
9791 (set_attr "length" "8")]
9794 (define_insn "*ifcompare_neg_move"
9795 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9797 (match_operator 5 "arm_comparison_operator"
9798 [(match_operand:SI 3 "s_register_operand" "r,r")
9799 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9800 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9801 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9802 (clobber (reg:CC CC_REGNUM))]
9805 [(set_attr "conds" "clob")
9806 (set_attr "length" "8,12")
9807 (set_attr "type" "multiple")]
9810 (define_insn_and_split "*if_neg_move"
9811 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9813 (match_operator 4 "arm_comparison_operator"
9814 [(match_operand 3 "cc_register" "") (const_int 0)])
9815 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9816 (match_operand:SI 1 "s_register_operand" "0,0")))]
9819 "&& reload_completed"
9820 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9821 (set (match_dup 0) (neg:SI (match_dup 2))))]
9823 [(set_attr "conds" "use")
9824 (set_attr "length" "4")
9825 (set_attr "arch" "t2,32")
9826 (set_attr "enabled_for_short_it" "yes,no")
9827 (set_attr "type" "logic_shift_imm")]
9830 (define_insn "*ifcompare_move_neg"
9831 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9833 (match_operator 5 "arm_comparison_operator"
9834 [(match_operand:SI 3 "s_register_operand" "r,r")
9835 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9836 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9837 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9838 (clobber (reg:CC CC_REGNUM))]
9841 [(set_attr "conds" "clob")
9842 (set_attr "length" "8,12")
9843 (set_attr "type" "multiple")]
9846 (define_insn_and_split "*if_move_neg"
9847 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9849 (match_operator 4 "arm_comparison_operator"
9850 [(match_operand 3 "cc_register" "") (const_int 0)])
9851 (match_operand:SI 1 "s_register_operand" "0,0")
9852 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9855 "&& reload_completed"
9856 [(cond_exec (match_dup 5)
9857 (set (match_dup 0) (neg:SI (match_dup 2))))]
9859 machine_mode mode = GET_MODE (operands[3]);
9860 rtx_code rc = GET_CODE (operands[4]);
9862 if (mode == CCFPmode || mode == CCFPEmode)
9863 rc = reverse_condition_maybe_unordered (rc);
9865 rc = reverse_condition (rc);
9867 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9869 [(set_attr "conds" "use")
9870 (set_attr "length" "4")
9871 (set_attr "arch" "t2,32")
9872 (set_attr "enabled_for_short_it" "yes,no")
9873 (set_attr "type" "logic_shift_imm")]
9876 (define_insn "*arith_adjacentmem"
9877 [(set (match_operand:SI 0 "s_register_operand" "=r")
9878 (match_operator:SI 1 "shiftable_operator"
9879 [(match_operand:SI 2 "memory_operand" "m")
9880 (match_operand:SI 3 "memory_operand" "m")]))
9881 (clobber (match_scratch:SI 4 "=r"))]
9882 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9888 HOST_WIDE_INT val1 = 0, val2 = 0;
9890 if (REGNO (operands[0]) > REGNO (operands[4]))
9892 ldm[1] = operands[4];
9893 ldm[2] = operands[0];
9897 ldm[1] = operands[0];
9898 ldm[2] = operands[4];
9901 base_reg = XEXP (operands[2], 0);
9903 if (!REG_P (base_reg))
9905 val1 = INTVAL (XEXP (base_reg, 1));
9906 base_reg = XEXP (base_reg, 0);
9909 if (!REG_P (XEXP (operands[3], 0)))
9910 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9912 arith[0] = operands[0];
9913 arith[3] = operands[1];
9927 if (val1 !=0 && val2 != 0)
9931 if (val1 == 4 || val2 == 4)
9932 /* Other val must be 8, since we know they are adjacent and neither
9934 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9935 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9937 ldm[0] = ops[0] = operands[4];
9939 ops[2] = GEN_INT (val1);
9940 output_add_immediate (ops);
9942 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9944 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9948 /* Offset is out of range for a single add, so use two ldr. */
9951 ops[2] = GEN_INT (val1);
9952 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9954 ops[2] = GEN_INT (val2);
9955 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9961 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9963 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9968 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9970 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9972 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9975 [(set_attr "length" "12")
9976 (set_attr "predicable" "yes")
9977 (set_attr "type" "load_4")]
9980 ; This pattern is never tried by combine, so do it as a peephole
9983 [(set (match_operand:SI 0 "arm_general_register_operand" "")
9984 (match_operand:SI 1 "arm_general_register_operand" ""))
9985 (set (reg:CC CC_REGNUM)
9986 (compare:CC (match_dup 1) (const_int 0)))]
9988 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
9989 (set (match_dup 0) (match_dup 1))])]
9994 [(set (match_operand:SI 0 "s_register_operand" "")
9995 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
9997 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
9998 [(match_operand:SI 3 "s_register_operand" "")
9999 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10000 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10002 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10003 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10008 ;; This split can be used because CC_Z mode implies that the following
10009 ;; branch will be an equality, or an unsigned inequality, so the sign
10010 ;; extension is not needed.
10013 [(set (reg:CC_Z CC_REGNUM)
10015 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10017 (match_operand 1 "const_int_operand" "")))
10018 (clobber (match_scratch:SI 2 ""))]
10020 && ((UINTVAL (operands[1]))
10021 == ((UINTVAL (operands[1])) >> 24) << 24)"
10022 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10023 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10025 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10028 ;; ??? Check the patterns above for Thumb-2 usefulness
10030 (define_expand "prologue"
10031 [(clobber (const_int 0))]
10034 arm_expand_prologue ();
10036 thumb1_expand_prologue ();
10041 (define_expand "epilogue"
10042 [(clobber (const_int 0))]
10045 if (crtl->calls_eh_return)
10046 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10049 thumb1_expand_epilogue ();
10050 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10051 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10053 else if (HAVE_return)
10055 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10056 no need for explicit testing again. */
10057 emit_jump_insn (gen_return ());
10059 else if (TARGET_32BIT)
10061 arm_expand_epilogue (true);
10067 ;; Note - although unspec_volatile's USE all hard registers,
10068 ;; USEs are ignored after relaod has completed. Thus we need
10069 ;; to add an unspec of the link register to ensure that flow
10070 ;; does not think that it is unused by the sibcall branch that
10071 ;; will replace the standard function epilogue.
10072 (define_expand "sibcall_epilogue"
10073 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10074 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10077 arm_expand_epilogue (false);
10082 (define_expand "eh_epilogue"
10083 [(use (match_operand:SI 0 "register_operand"))
10084 (use (match_operand:SI 1 "register_operand"))
10085 (use (match_operand:SI 2 "register_operand"))]
10089 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10090 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10092 rtx ra = gen_rtx_REG (Pmode, 2);
10094 emit_move_insn (ra, operands[2]);
10097 /* This is a hack -- we may have crystalized the function type too
10099 cfun->machine->func_type = 0;
10103 ;; This split is only used during output to reduce the number of patterns
10104 ;; that need assembler instructions adding to them. We allowed the setting
10105 ;; of the conditions to be implicit during rtl generation so that
10106 ;; the conditional compare patterns would work. However this conflicts to
10107 ;; some extent with the conditional data operations, so we have to split them
10110 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10111 ;; conditional execution sufficient?
10114 [(set (match_operand:SI 0 "s_register_operand" "")
10115 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10116 [(match_operand 2 "" "") (match_operand 3 "" "")])
10118 (match_operand 4 "" "")))
10119 (clobber (reg:CC CC_REGNUM))]
10120 "TARGET_ARM && reload_completed"
10121 [(set (match_dup 5) (match_dup 6))
10122 (cond_exec (match_dup 7)
10123 (set (match_dup 0) (match_dup 4)))]
10126 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10127 operands[2], operands[3]);
10128 enum rtx_code rc = GET_CODE (operands[1]);
10130 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10131 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10132 if (mode == CCFPmode || mode == CCFPEmode)
10133 rc = reverse_condition_maybe_unordered (rc);
10135 rc = reverse_condition (rc);
10137 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10142 [(set (match_operand:SI 0 "s_register_operand" "")
10143 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10144 [(match_operand 2 "" "") (match_operand 3 "" "")])
10145 (match_operand 4 "" "")
10147 (clobber (reg:CC CC_REGNUM))]
10148 "TARGET_ARM && reload_completed"
10149 [(set (match_dup 5) (match_dup 6))
10150 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10151 (set (match_dup 0) (match_dup 4)))]
10154 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10155 operands[2], operands[3]);
10157 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10158 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10163 [(set (match_operand:SI 0 "s_register_operand" "")
10164 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10165 [(match_operand 2 "" "") (match_operand 3 "" "")])
10166 (match_operand 4 "" "")
10167 (match_operand 5 "" "")))
10168 (clobber (reg:CC CC_REGNUM))]
10169 "TARGET_ARM && reload_completed"
10170 [(set (match_dup 6) (match_dup 7))
10171 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10172 (set (match_dup 0) (match_dup 4)))
10173 (cond_exec (match_dup 8)
10174 (set (match_dup 0) (match_dup 5)))]
10177 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10178 operands[2], operands[3]);
10179 enum rtx_code rc = GET_CODE (operands[1]);
10181 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10182 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10183 if (mode == CCFPmode || mode == CCFPEmode)
10184 rc = reverse_condition_maybe_unordered (rc);
10186 rc = reverse_condition (rc);
10188 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10193 [(set (match_operand:SI 0 "s_register_operand" "")
10194 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10195 [(match_operand:SI 2 "s_register_operand" "")
10196 (match_operand:SI 3 "arm_add_operand" "")])
10197 (match_operand:SI 4 "arm_rhs_operand" "")
10199 (match_operand:SI 5 "s_register_operand" ""))))
10200 (clobber (reg:CC CC_REGNUM))]
10201 "TARGET_ARM && reload_completed"
10202 [(set (match_dup 6) (match_dup 7))
10203 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10204 (set (match_dup 0) (match_dup 4)))
10205 (cond_exec (match_dup 8)
10206 (set (match_dup 0) (not:SI (match_dup 5))))]
10209 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10210 operands[2], operands[3]);
10211 enum rtx_code rc = GET_CODE (operands[1]);
10213 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10214 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10215 if (mode == CCFPmode || mode == CCFPEmode)
10216 rc = reverse_condition_maybe_unordered (rc);
10218 rc = reverse_condition (rc);
10220 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10224 (define_insn "*cond_move_not"
10225 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10226 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10227 [(match_operand 3 "cc_register" "") (const_int 0)])
10228 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10230 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10234 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10235 [(set_attr "conds" "use")
10236 (set_attr "type" "mvn_reg,multiple")
10237 (set_attr "length" "4,8")]
10240 ;; The next two patterns occur when an AND operation is followed by a
10241 ;; scc insn sequence
10243 (define_insn "*sign_extract_onebit"
10244 [(set (match_operand:SI 0 "s_register_operand" "=r")
10245 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10247 (match_operand:SI 2 "const_int_operand" "n")))
10248 (clobber (reg:CC CC_REGNUM))]
10251 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10252 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10253 return \"mvnne\\t%0, #0\";
10255 [(set_attr "conds" "clob")
10256 (set_attr "length" "8")
10257 (set_attr "type" "multiple")]
10260 (define_insn "*not_signextract_onebit"
10261 [(set (match_operand:SI 0 "s_register_operand" "=r")
10263 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10265 (match_operand:SI 2 "const_int_operand" "n"))))
10266 (clobber (reg:CC CC_REGNUM))]
10269 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10270 output_asm_insn (\"tst\\t%1, %2\", operands);
10271 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10272 return \"movne\\t%0, #0\";
10274 [(set_attr "conds" "clob")
10275 (set_attr "length" "12")
10276 (set_attr "type" "multiple")]
10278 ;; ??? The above patterns need auditing for Thumb-2
10280 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10281 ;; expressions. For simplicity, the first register is also in the unspec
10283 ;; To avoid the usage of GNU extension, the length attribute is computed
10284 ;; in a C function arm_attr_length_push_multi.
10285 (define_insn "*push_multi"
10286 [(match_parallel 2 "multi_register_push"
10287 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10288 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10289 UNSPEC_PUSH_MULT))])]
10293 int num_saves = XVECLEN (operands[2], 0);
10295 /* For the StrongARM at least it is faster to
10296 use STR to store only a single register.
10297 In Thumb mode always use push, and the assembler will pick
10298 something appropriate. */
10299 if (num_saves == 1 && TARGET_ARM)
10300 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10307 strcpy (pattern, \"push%?\\t{%1\");
10309 strcpy (pattern, \"push\\t{%1\");
10311 for (i = 1; i < num_saves; i++)
10313 strcat (pattern, \", %|\");
10315 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10318 strcat (pattern, \"}\");
10319 output_asm_insn (pattern, operands);
10324 [(set_attr "type" "store_16")
10325 (set (attr "length")
10326 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10329 (define_insn "stack_tie"
10330 [(set (mem:BLK (scratch))
10331 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10332 (match_operand:SI 1 "s_register_operand" "rk")]
10336 [(set_attr "length" "0")
10337 (set_attr "type" "block")]
10340 ;; Pop (as used in epilogue RTL)
10342 (define_insn "*load_multiple_with_writeback"
10343 [(match_parallel 0 "load_multiple_operation"
10344 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10345 (plus:SI (match_dup 1)
10346 (match_operand:SI 2 "const_int_I_operand" "I")))
10347 (set (match_operand:SI 3 "s_register_operand" "=rk")
10348 (mem:SI (match_dup 1)))
10350 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10353 arm_output_multireg_pop (operands, /*return_pc=*/false,
10354 /*cond=*/const_true_rtx,
10360 [(set_attr "type" "load_16")
10361 (set_attr "predicable" "yes")
10362 (set (attr "length")
10363 (symbol_ref "arm_attr_length_pop_multi (operands,
10364 /*return_pc=*/false,
10365 /*write_back_p=*/true)"))]
10368 ;; Pop with return (as used in epilogue RTL)
10370 ;; This instruction is generated when the registers are popped at the end of
10371 ;; epilogue. Here, instead of popping the value into LR and then generating
10372 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
10374 (define_insn "*pop_multiple_with_writeback_and_return"
10375 [(match_parallel 0 "pop_multiple_return"
10377 (set (match_operand:SI 1 "s_register_operand" "+rk")
10378 (plus:SI (match_dup 1)
10379 (match_operand:SI 2 "const_int_I_operand" "I")))
10380 (set (match_operand:SI 3 "s_register_operand" "=rk")
10381 (mem:SI (match_dup 1)))
10383 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10386 arm_output_multireg_pop (operands, /*return_pc=*/true,
10387 /*cond=*/const_true_rtx,
10393 [(set_attr "type" "load_16")
10394 (set_attr "predicable" "yes")
10395 (set (attr "length")
10396 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10397 /*write_back_p=*/true)"))]
10400 (define_insn "*pop_multiple_with_return"
10401 [(match_parallel 0 "pop_multiple_return"
10403 (set (match_operand:SI 2 "s_register_operand" "=rk")
10404 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10406 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10409 arm_output_multireg_pop (operands, /*return_pc=*/true,
10410 /*cond=*/const_true_rtx,
10416 [(set_attr "type" "load_16")
10417 (set_attr "predicable" "yes")
10418 (set (attr "length")
10419 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10420 /*write_back_p=*/false)"))]
10423 ;; Load into PC and return
10424 (define_insn "*ldr_with_return"
10426 (set (reg:SI PC_REGNUM)
10427 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10428 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10429 "ldr%?\t%|pc, [%0], #4"
10430 [(set_attr "type" "load_4")
10431 (set_attr "predicable" "yes")]
10433 ;; Pop for floating point registers (as used in epilogue RTL)
10434 (define_insn "*vfp_pop_multiple_with_writeback"
10435 [(match_parallel 0 "pop_multiple_fp"
10436 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10437 (plus:SI (match_dup 1)
10438 (match_operand:SI 2 "const_int_I_operand" "I")))
10439 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10440 (mem:DF (match_dup 1)))])]
10441 "TARGET_32BIT && TARGET_HARD_FLOAT"
10444 int num_regs = XVECLEN (operands[0], 0);
10447 strcpy (pattern, \"vldm\\t\");
10448 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10449 strcat (pattern, \"!, {\");
10450 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10451 strcat (pattern, \"%P0\");
10452 if ((num_regs - 1) > 1)
10454 strcat (pattern, \"-%P1\");
10455 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10458 strcat (pattern, \"}\");
10459 output_asm_insn (pattern, op_list);
10463 [(set_attr "type" "load_16")
10464 (set_attr "conds" "unconditional")
10465 (set_attr "predicable" "no")]
10468 ;; Special patterns for dealing with the constant pool
10470 (define_insn "align_4"
10471 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10474 assemble_align (32);
10477 [(set_attr "type" "no_insn")]
10480 (define_insn "align_8"
10481 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10484 assemble_align (64);
10487 [(set_attr "type" "no_insn")]
10490 (define_insn "consttable_end"
10491 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10494 making_const_table = FALSE;
10497 [(set_attr "type" "no_insn")]
10500 (define_insn "consttable_1"
10501 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10504 making_const_table = TRUE;
10505 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10506 assemble_zeros (3);
10509 [(set_attr "length" "4")
10510 (set_attr "type" "no_insn")]
10513 (define_insn "consttable_2"
10514 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10518 rtx x = operands[0];
10519 making_const_table = TRUE;
10520 switch (GET_MODE_CLASS (GET_MODE (x)))
10523 arm_emit_fp16_const (x);
10526 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10527 assemble_zeros (2);
10532 [(set_attr "length" "4")
10533 (set_attr "type" "no_insn")]
10536 (define_insn "consttable_4"
10537 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10541 rtx x = operands[0];
10542 making_const_table = TRUE;
10543 scalar_float_mode float_mode;
10544 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10545 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10548 /* XXX: Sometimes gcc does something really dumb and ends up with
10549 a HIGH in a constant pool entry, usually because it's trying to
10550 load into a VFP register. We know this will always be used in
10551 combination with a LO_SUM which ignores the high bits, so just
10552 strip off the HIGH. */
10553 if (GET_CODE (x) == HIGH)
10555 assemble_integer (x, 4, BITS_PER_WORD, 1);
10556 mark_symbol_refs_as_used (x);
10560 [(set_attr "length" "4")
10561 (set_attr "type" "no_insn")]
10564 (define_insn "consttable_8"
10565 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10569 making_const_table = TRUE;
10570 scalar_float_mode float_mode;
10571 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10572 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10573 float_mode, BITS_PER_WORD);
10575 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10578 [(set_attr "length" "8")
10579 (set_attr "type" "no_insn")]
10582 (define_insn "consttable_16"
10583 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10587 making_const_table = TRUE;
10588 scalar_float_mode float_mode;
10589 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10590 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10591 float_mode, BITS_PER_WORD);
10593 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10596 [(set_attr "length" "16")
10597 (set_attr "type" "no_insn")]
10600 ;; V5 Instructions,
10602 (define_insn "clzsi2"
10603 [(set (match_operand:SI 0 "s_register_operand" "=r")
10604 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10605 "TARGET_32BIT && arm_arch5t"
10607 [(set_attr "predicable" "yes")
10608 (set_attr "type" "clz")])
10610 (define_insn "rbitsi2"
10611 [(set (match_operand:SI 0 "s_register_operand" "=r")
10612 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10613 "TARGET_32BIT && arm_arch_thumb2"
10615 [(set_attr "predicable" "yes")
10616 (set_attr "type" "clz")])
10618 ;; Keep this as a CTZ expression until after reload and then split
10619 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10620 ;; to fold with any other expression.
10622 (define_insn_and_split "ctzsi2"
10623 [(set (match_operand:SI 0 "s_register_operand" "=r")
10624 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10625 "TARGET_32BIT && arm_arch_thumb2"
10627 "&& reload_completed"
10630 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10631 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10635 ;; V5E instructions.
10637 (define_insn "prefetch"
10638 [(prefetch (match_operand:SI 0 "address_operand" "p")
10639 (match_operand:SI 1 "" "")
10640 (match_operand:SI 2 "" ""))]
10641 "TARGET_32BIT && arm_arch5te"
10643 [(set_attr "type" "load_4")]
10646 ;; General predication pattern
10649 [(match_operator 0 "arm_comparison_operator"
10650 [(match_operand 1 "cc_register" "")
10653 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10655 [(set_attr "predicated" "yes")]
10658 (define_insn "force_register_use"
10659 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10662 [(set_attr "length" "0")
10663 (set_attr "type" "no_insn")]
10667 ;; Patterns for exception handling
10669 (define_expand "eh_return"
10670 [(use (match_operand 0 "general_operand"))]
10675 emit_insn (gen_arm_eh_return (operands[0]));
10677 emit_insn (gen_thumb_eh_return (operands[0]));
10682 ;; We can't expand this before we know where the link register is stored.
10683 (define_insn_and_split "arm_eh_return"
10684 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10686 (clobber (match_scratch:SI 1 "=&r"))]
10689 "&& reload_completed"
10693 arm_set_return_address (operands[0], operands[1]);
10701 (define_insn "load_tp_hard"
10702 [(set (match_operand:SI 0 "register_operand" "=r")
10703 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10705 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10706 [(set_attr "predicable" "yes")
10707 (set_attr "type" "mrs")]
10710 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10711 (define_insn "load_tp_soft_fdpic"
10712 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10713 (clobber (reg:SI FDPIC_REGNUM))
10714 (clobber (reg:SI LR_REGNUM))
10715 (clobber (reg:SI IP_REGNUM))
10716 (clobber (reg:CC CC_REGNUM))]
10717 "TARGET_SOFT_TP && TARGET_FDPIC"
10718 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10719 [(set_attr "conds" "clob")
10720 (set_attr "type" "branch")]
10723 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10724 (define_insn "load_tp_soft"
10725 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10726 (clobber (reg:SI LR_REGNUM))
10727 (clobber (reg:SI IP_REGNUM))
10728 (clobber (reg:CC CC_REGNUM))]
10729 "TARGET_SOFT_TP && !TARGET_FDPIC"
10730 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10731 [(set_attr "conds" "clob")
10732 (set_attr "type" "branch")]
10735 ;; tls descriptor call
10736 (define_insn "tlscall"
10737 [(set (reg:SI R0_REGNUM)
10738 (unspec:SI [(reg:SI R0_REGNUM)
10739 (match_operand:SI 0 "" "X")
10740 (match_operand 1 "" "")] UNSPEC_TLS))
10741 (clobber (reg:SI R1_REGNUM))
10742 (clobber (reg:SI LR_REGNUM))
10743 (clobber (reg:SI CC_REGNUM))]
10746 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10747 INTVAL (operands[1]));
10748 return "bl\\t%c0(tlscall)";
10750 [(set_attr "conds" "clob")
10751 (set_attr "length" "4")
10752 (set_attr "type" "branch")]
10755 ;; For thread pointer builtin
10756 (define_expand "get_thread_pointersi"
10757 [(match_operand:SI 0 "s_register_operand")]
10761 arm_load_tp (operands[0]);
10767 ;; We only care about the lower 16 bits of the constant
10768 ;; being inserted into the upper 16 bits of the register.
10769 (define_insn "*arm_movtas_ze"
10770 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10773 (match_operand:SI 1 "const_int_operand" ""))]
10778 [(set_attr "arch" "32,v8mb")
10779 (set_attr "predicable" "yes")
10780 (set_attr "length" "4")
10781 (set_attr "type" "alu_sreg")]
10784 (define_insn "*arm_rev"
10785 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10786 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10792 [(set_attr "arch" "t1,t2,32")
10793 (set_attr "length" "2,2,4")
10794 (set_attr "predicable" "no,yes,yes")
10795 (set_attr "type" "rev")]
10798 (define_expand "arm_legacy_rev"
10799 [(set (match_operand:SI 2 "s_register_operand")
10800 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10804 (lshiftrt:SI (match_dup 2)
10806 (set (match_operand:SI 3 "s_register_operand")
10807 (rotatert:SI (match_dup 1)
10810 (and:SI (match_dup 2)
10811 (const_int -65281)))
10812 (set (match_operand:SI 0 "s_register_operand")
10813 (xor:SI (match_dup 3)
10819 ;; Reuse temporaries to keep register pressure down.
10820 (define_expand "thumb_legacy_rev"
10821 [(set (match_operand:SI 2 "s_register_operand")
10822 (ashift:SI (match_operand:SI 1 "s_register_operand")
10824 (set (match_operand:SI 3 "s_register_operand")
10825 (lshiftrt:SI (match_dup 1)
10828 (ior:SI (match_dup 3)
10830 (set (match_operand:SI 4 "s_register_operand")
10832 (set (match_operand:SI 5 "s_register_operand")
10833 (rotatert:SI (match_dup 1)
10836 (ashift:SI (match_dup 5)
10839 (lshiftrt:SI (match_dup 5)
10842 (ior:SI (match_dup 5)
10845 (rotatert:SI (match_dup 5)
10847 (set (match_operand:SI 0 "s_register_operand")
10848 (ior:SI (match_dup 5)
10854 ;; ARM-specific expansion of signed mod by power of 2
10855 ;; using conditional negate.
10856 ;; For r0 % n where n is a power of 2 produce:
10858 ;; and r0, r0, #(n - 1)
10859 ;; and r1, r1, #(n - 1)
10860 ;; rsbpl r0, r1, #0
10862 (define_expand "modsi3"
10863 [(match_operand:SI 0 "register_operand")
10864 (match_operand:SI 1 "register_operand")
10865 (match_operand:SI 2 "const_int_operand")]
10868 HOST_WIDE_INT val = INTVAL (operands[2]);
10871 || exact_log2 (val) <= 0)
10874 rtx mask = GEN_INT (val - 1);
10876 /* In the special case of x0 % 2 we can do the even shorter:
10879 rsblt r0, r0, #0. */
10883 rtx cc_reg = arm_gen_compare_reg (LT,
10884 operands[1], const0_rtx, NULL_RTX);
10885 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10886 rtx masked = gen_reg_rtx (SImode);
10888 emit_insn (gen_andsi3 (masked, operands[1], mask));
10889 emit_move_insn (operands[0],
10890 gen_rtx_IF_THEN_ELSE (SImode, cond,
10891 gen_rtx_NEG (SImode,
10897 rtx neg_op = gen_reg_rtx (SImode);
10898 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10901 /* Extract the condition register and mode. */
10902 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10903 rtx cc_reg = SET_DEST (cmp);
10904 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10906 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10908 rtx masked_neg = gen_reg_rtx (SImode);
10909 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10911 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10912 during expand does not always work. Do an IF_THEN_ELSE instead. */
10913 emit_move_insn (operands[0],
10914 gen_rtx_IF_THEN_ELSE (SImode, cond,
10915 gen_rtx_NEG (SImode, masked_neg),
10923 (define_expand "bswapsi2"
10924 [(set (match_operand:SI 0 "s_register_operand")
10925 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10926 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10930 rtx op2 = gen_reg_rtx (SImode);
10931 rtx op3 = gen_reg_rtx (SImode);
10935 rtx op4 = gen_reg_rtx (SImode);
10936 rtx op5 = gen_reg_rtx (SImode);
10938 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10939 op2, op3, op4, op5));
10943 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10952 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10953 ;; and unsigned variants, respectively. For rev16, expose
10954 ;; byte-swapping in the lower 16 bits only.
10955 (define_insn "*arm_revsh"
10956 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10957 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
10963 [(set_attr "arch" "t1,t2,32")
10964 (set_attr "length" "2,2,4")
10965 (set_attr "type" "rev")]
10968 (define_insn "*arm_rev16"
10969 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
10970 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
10976 [(set_attr "arch" "t1,t2,32")
10977 (set_attr "length" "2,2,4")
10978 (set_attr "type" "rev")]
10981 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
10982 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
10983 ;; each valid permutation.
10985 (define_insn "arm_rev16si2"
10986 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
10987 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
10989 (match_operand:SI 3 "const_int_operand" "n,n,n"))
10990 (and:SI (lshiftrt:SI (match_dup 1)
10992 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
10994 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
10995 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
10997 [(set_attr "arch" "t1,t2,32")
10998 (set_attr "length" "2,2,4")
10999 (set_attr "type" "rev")]
11002 (define_insn "arm_rev16si2_alt"
11003 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11004 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11006 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11007 (and:SI (ashift:SI (match_dup 1)
11009 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11011 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11012 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11014 [(set_attr "arch" "t1,t2,32")
11015 (set_attr "length" "2,2,4")
11016 (set_attr "type" "rev")]
11019 (define_expand "bswaphi2"
11020 [(set (match_operand:HI 0 "s_register_operand")
11021 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11026 ;; Patterns for LDRD/STRD in Thumb2 mode
11028 (define_insn "*thumb2_ldrd"
11029 [(set (match_operand:SI 0 "s_register_operand" "=r")
11030 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11031 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11032 (set (match_operand:SI 3 "s_register_operand" "=r")
11033 (mem:SI (plus:SI (match_dup 1)
11034 (match_operand:SI 4 "const_int_operand" ""))))]
11035 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11036 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11037 && (operands_ok_ldrd_strd (operands[0], operands[3],
11038 operands[1], INTVAL (operands[2]),
11040 "ldrd%?\t%0, %3, [%1, %2]"
11041 [(set_attr "type" "load_8")
11042 (set_attr "predicable" "yes")])
11044 (define_insn "*thumb2_ldrd_base"
11045 [(set (match_operand:SI 0 "s_register_operand" "=r")
11046 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11047 (set (match_operand:SI 2 "s_register_operand" "=r")
11048 (mem:SI (plus:SI (match_dup 1)
11050 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11051 && (operands_ok_ldrd_strd (operands[0], operands[2],
11052 operands[1], 0, false, true))"
11053 "ldrd%?\t%0, %2, [%1]"
11054 [(set_attr "type" "load_8")
11055 (set_attr "predicable" "yes")])
11057 (define_insn "*thumb2_ldrd_base_neg"
11058 [(set (match_operand:SI 0 "s_register_operand" "=r")
11059 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11061 (set (match_operand:SI 2 "s_register_operand" "=r")
11062 (mem:SI (match_dup 1)))]
11063 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11064 && (operands_ok_ldrd_strd (operands[0], operands[2],
11065 operands[1], -4, false, true))"
11066 "ldrd%?\t%0, %2, [%1, #-4]"
11067 [(set_attr "type" "load_8")
11068 (set_attr "predicable" "yes")])
11070 (define_insn "*thumb2_strd"
11071 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11072 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11073 (match_operand:SI 2 "s_register_operand" "r"))
11074 (set (mem:SI (plus:SI (match_dup 0)
11075 (match_operand:SI 3 "const_int_operand" "")))
11076 (match_operand:SI 4 "s_register_operand" "r"))]
11077 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11078 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11079 && (operands_ok_ldrd_strd (operands[2], operands[4],
11080 operands[0], INTVAL (operands[1]),
11082 "strd%?\t%2, %4, [%0, %1]"
11083 [(set_attr "type" "store_8")
11084 (set_attr "predicable" "yes")])
11086 (define_insn "*thumb2_strd_base"
11087 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11088 (match_operand:SI 1 "s_register_operand" "r"))
11089 (set (mem:SI (plus:SI (match_dup 0)
11091 (match_operand:SI 2 "s_register_operand" "r"))]
11092 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11093 && (operands_ok_ldrd_strd (operands[1], operands[2],
11094 operands[0], 0, false, false))"
11095 "strd%?\t%1, %2, [%0]"
11096 [(set_attr "type" "store_8")
11097 (set_attr "predicable" "yes")])
11099 (define_insn "*thumb2_strd_base_neg"
11100 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11102 (match_operand:SI 1 "s_register_operand" "r"))
11103 (set (mem:SI (match_dup 0))
11104 (match_operand:SI 2 "s_register_operand" "r"))]
11105 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11106 && (operands_ok_ldrd_strd (operands[1], operands[2],
11107 operands[0], -4, false, false))"
11108 "strd%?\t%1, %2, [%0, #-4]"
11109 [(set_attr "type" "store_8")
11110 (set_attr "predicable" "yes")])
11112 ;; ARMv8 CRC32 instructions.
11113 (define_insn "arm_<crc_variant>"
11114 [(set (match_operand:SI 0 "s_register_operand" "=r")
11115 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11116 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11119 "<crc_variant>\\t%0, %1, %2"
11120 [(set_attr "type" "crc")
11121 (set_attr "conds" "unconditional")]
11124 ;; Load the load/store double peephole optimizations.
11125 (include "ldrdstrd.md")
11127 ;; Load the load/store multiple patterns
11128 (include "ldmstm.md")
11130 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11131 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11132 ;; The operands are validated through the load_multiple_operation
11133 ;; match_parallel predicate rather than through constraints so enable it only
11135 (define_insn "*load_multiple"
11136 [(match_parallel 0 "load_multiple_operation"
11137 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11138 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11140 "TARGET_32BIT && reload_completed"
11143 arm_output_multireg_pop (operands, /*return_pc=*/false,
11144 /*cond=*/const_true_rtx,
11150 [(set_attr "predicable" "yes")]
11153 (define_expand "copysignsf3"
11154 [(match_operand:SF 0 "register_operand")
11155 (match_operand:SF 1 "register_operand")
11156 (match_operand:SF 2 "register_operand")]
11157 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11159 emit_move_insn (operands[0], operands[2]);
11160 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11161 GEN_INT (31), GEN_INT (0),
11162 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11167 (define_expand "copysigndf3"
11168 [(match_operand:DF 0 "register_operand")
11169 (match_operand:DF 1 "register_operand")
11170 (match_operand:DF 2 "register_operand")]
11171 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11173 rtx op0_low = gen_lowpart (SImode, operands[0]);
11174 rtx op0_high = gen_highpart (SImode, operands[0]);
11175 rtx op1_low = gen_lowpart (SImode, operands[1]);
11176 rtx op1_high = gen_highpart (SImode, operands[1]);
11177 rtx op2_high = gen_highpart (SImode, operands[2]);
11179 rtx scratch1 = gen_reg_rtx (SImode);
11180 rtx scratch2 = gen_reg_rtx (SImode);
11181 emit_move_insn (scratch1, op2_high);
11182 emit_move_insn (scratch2, op1_high);
11184 emit_insn(gen_rtx_SET(scratch1,
11185 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11186 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11187 emit_move_insn (op0_low, op1_low);
11188 emit_move_insn (op0_high, scratch2);
11194 ;; movmisalign patterns for HImode and SImode.
11195 (define_expand "movmisalign<mode>"
11196 [(match_operand:HSI 0 "general_operand")
11197 (match_operand:HSI 1 "general_operand")]
11200 /* This pattern is not permitted to fail during expansion: if both arguments
11201 are non-registers (e.g. memory := constant), force operand 1 into a
11203 rtx (* gen_unaligned_load)(rtx, rtx);
11204 rtx tmp_dest = operands[0];
11205 if (!s_register_operand (operands[0], <MODE>mode)
11206 && !s_register_operand (operands[1], <MODE>mode))
11207 operands[1] = force_reg (<MODE>mode, operands[1]);
11209 if (<MODE>mode == HImode)
11211 gen_unaligned_load = gen_unaligned_loadhiu;
11212 tmp_dest = gen_reg_rtx (SImode);
11215 gen_unaligned_load = gen_unaligned_loadsi;
11217 if (MEM_P (operands[1]))
11219 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11220 if (<MODE>mode == HImode)
11221 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11224 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11229 (define_insn "arm_<cdp>"
11230 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11231 (match_operand:SI 1 "immediate_operand" "n")
11232 (match_operand:SI 2 "immediate_operand" "n")
11233 (match_operand:SI 3 "immediate_operand" "n")
11234 (match_operand:SI 4 "immediate_operand" "n")
11235 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11236 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11238 arm_const_bounds (operands[0], 0, 16);
11239 arm_const_bounds (operands[1], 0, 16);
11240 arm_const_bounds (operands[2], 0, (1 << 5));
11241 arm_const_bounds (operands[3], 0, (1 << 5));
11242 arm_const_bounds (operands[4], 0, (1 << 5));
11243 arm_const_bounds (operands[5], 0, 8);
11244 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11246 [(set_attr "length" "4")
11247 (set_attr "type" "coproc")])
11249 (define_insn "*ldc"
11250 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11251 (match_operand:SI 1 "immediate_operand" "n")
11252 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11253 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11255 arm_const_bounds (operands[0], 0, 16);
11256 arm_const_bounds (operands[1], 0, (1 << 5));
11257 return "<ldc>\\tp%c0, CR%c1, %2";
11259 [(set_attr "length" "4")
11260 (set_attr "type" "coproc")])
11262 (define_insn "*stc"
11263 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11264 (match_operand:SI 1 "immediate_operand" "n")
11265 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11266 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11268 arm_const_bounds (operands[0], 0, 16);
11269 arm_const_bounds (operands[1], 0, (1 << 5));
11270 return "<stc>\\tp%c0, CR%c1, %2";
11272 [(set_attr "length" "4")
11273 (set_attr "type" "coproc")])
11275 (define_expand "arm_<ldc>"
11276 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11277 (match_operand:SI 1 "immediate_operand")
11278 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11279 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11281 (define_expand "arm_<stc>"
11282 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11283 (match_operand:SI 1 "immediate_operand")
11284 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11285 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11287 (define_insn "arm_<mcr>"
11288 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11289 (match_operand:SI 1 "immediate_operand" "n")
11290 (match_operand:SI 2 "s_register_operand" "r")
11291 (match_operand:SI 3 "immediate_operand" "n")
11292 (match_operand:SI 4 "immediate_operand" "n")
11293 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11294 (use (match_dup 2))]
11295 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11297 arm_const_bounds (operands[0], 0, 16);
11298 arm_const_bounds (operands[1], 0, 8);
11299 arm_const_bounds (operands[3], 0, (1 << 5));
11300 arm_const_bounds (operands[4], 0, (1 << 5));
11301 arm_const_bounds (operands[5], 0, 8);
11302 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11304 [(set_attr "length" "4")
11305 (set_attr "type" "coproc")])
11307 (define_insn "arm_<mrc>"
11308 [(set (match_operand:SI 0 "s_register_operand" "=r")
11309 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11310 (match_operand:SI 2 "immediate_operand" "n")
11311 (match_operand:SI 3 "immediate_operand" "n")
11312 (match_operand:SI 4 "immediate_operand" "n")
11313 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11314 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11316 arm_const_bounds (operands[1], 0, 16);
11317 arm_const_bounds (operands[2], 0, 8);
11318 arm_const_bounds (operands[3], 0, (1 << 5));
11319 arm_const_bounds (operands[4], 0, (1 << 5));
11320 arm_const_bounds (operands[5], 0, 8);
11321 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11323 [(set_attr "length" "4")
11324 (set_attr "type" "coproc")])
11326 (define_insn "arm_<mcrr>"
11327 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11328 (match_operand:SI 1 "immediate_operand" "n")
11329 (match_operand:DI 2 "s_register_operand" "r")
11330 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11331 (use (match_dup 2))]
11332 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11334 arm_const_bounds (operands[0], 0, 16);
11335 arm_const_bounds (operands[1], 0, 8);
11336 arm_const_bounds (operands[3], 0, (1 << 5));
11337 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11339 [(set_attr "length" "4")
11340 (set_attr "type" "coproc")])
11342 (define_insn "arm_<mrrc>"
11343 [(set (match_operand:DI 0 "s_register_operand" "=r")
11344 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11345 (match_operand:SI 2 "immediate_operand" "n")
11346 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11347 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11349 arm_const_bounds (operands[1], 0, 16);
11350 arm_const_bounds (operands[2], 0, 8);
11351 arm_const_bounds (operands[3], 0, (1 << 5));
11352 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11354 [(set_attr "length" "4")
11355 (set_attr "type" "coproc")])
11357 (define_expand "speculation_barrier"
11358 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11361 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11362 have a usable barrier (and probably don't need one in practice).
11363 But to be safe if such code is run on later architectures, call a
11364 helper function in libgcc that will do the thing for the active
11366 if (!(arm_arch7 || arm_arch8))
11368 arm_emit_speculation_barrier_function ();
11374 ;; Generate a hard speculation barrier when we have not enabled speculation
11376 (define_insn "*speculation_barrier_insn"
11377 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11378 "arm_arch7 || arm_arch8"
11380 [(set_attr "type" "block")
11381 (set_attr "length" "8")]
11384 ;; Vector bits common to IWMMXT and Neon
11385 (include "vec-common.md")
11386 ;; Load the Intel Wireless Multimedia Extension patterns
11387 (include "iwmmxt.md")
11388 ;; Load the VFP co-processor patterns
11390 ;; Thumb-1 patterns
11391 (include "thumb1.md")
11392 ;; Thumb-2 patterns
11393 (include "thumb2.md")
11395 (include "neon.md")
11397 (include "crypto.md")
11398 ;; Synchronization Primitives
11399 (include "sync.md")
11400 ;; Fixed-point patterns
11401 (include "arm-fixed.md")