1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
; The (const ...) wrapper marks the value as fixed for the whole compilation
; of a function (it depends only on the target state, not on the insn), so
; genattrtab can evaluate it once rather than per-insn.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
; Like "is_thumb" above but true only for Thumb-1 (16-bit only) targets;
; again wrapped in (const ...) since TARGET_THUMB1 is per-function state.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
; The value comes from the global arm_fpu_attr variable (set up from the
; command-line FPU selection); (const ...) fixes it per compilation.
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
; Per-alternative architecture gate; "any" (the default) leaves the
; alternative enabled everywhere.  Consumed by "arch_enabled" below.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
; Per-alternative optimization gate: restrict an alternative to functions
; optimized for speed or for size.  Defaults to "any" (no restriction).
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
; "yes" for VFP single/double loads (f_loads/f_loadd) whose source operand
; is a constant, i.e. loads that must be satisfied from the literal pool.
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
; Integer-valued range attributes (see comment above); the ARM/Thumb-2
; pair for each direction is merged by "pool_range"/"neg_pool_range"
; below.  Default 0 = the insn never references the constant pool.
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Forward pool range: select the Thumb variant when compiling Thumb code,
; otherwise fall back to the ARM value.
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
; Backward pool range, selected the same way as "pool_range" above.
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
; Defaults applied to inline-asm statements: assume the condition codes
; are clobbered and use a small pool_range (250 here appears to be a
; conservative Thumb-safe value -- see the rounding note above; confirm
; against <max_range> - 2 if changing it).
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
; "single" for the insn types listed below (simple ALU/shift/logic forms
; and the iWMMXt ops); every other type is classified "multi".  Keep this
; list in sync with the type definitions in types.md when adding types.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compareC (lo_dest, lo_op1, lo_op2));
474 if (hi_op2 == const0_rtx)
475 emit_insn (gen_add0si3_carryin_ltu (hi_dest, hi_op1));
477 emit_insn (gen_addsi3_carryin_ltu (hi_dest, hi_op1, hi_op2));
480 if (lo_result != lo_dest)
481 emit_move_insn (lo_result, lo_dest);
482 if (hi_result != hi_dest)
483 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
489 (define_expand "addv<mode>4"
490 [(match_operand:SIDI 0 "register_operand")
491 (match_operand:SIDI 1 "register_operand")
492 (match_operand:SIDI 2 "register_operand")
493 (match_operand 3 "")]
496 emit_insn (gen_add<mode>3_compareV (operands[0], operands[1], operands[2]));
497 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
502 (define_expand "uaddv<mode>4"
503 [(match_operand:SIDI 0 "register_operand")
504 (match_operand:SIDI 1 "register_operand")
505 (match_operand:SIDI 2 "register_operand")
506 (match_operand 3 "")]
509 emit_insn (gen_add<mode>3_compareC (operands[0], operands[1], operands[2]));
510 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
515 (define_expand "addsi3"
516 [(set (match_operand:SI 0 "s_register_operand")
517 (plus:SI (match_operand:SI 1 "s_register_operand")
518 (match_operand:SI 2 "reg_or_int_operand")))]
521 if (TARGET_32BIT && CONST_INT_P (operands[2]))
523 arm_split_constant (PLUS, SImode, NULL_RTX,
524 INTVAL (operands[2]), operands[0], operands[1],
525 optimize && can_create_pseudo_p ());
531 ; If there is a scratch available, this will be faster than synthesizing the
534 [(match_scratch:SI 3 "r")
535 (set (match_operand:SI 0 "arm_general_register_operand" "")
536 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
537 (match_operand:SI 2 "const_int_operand" "")))]
539 !(const_ok_for_arm (INTVAL (operands[2]))
540 || const_ok_for_arm (-INTVAL (operands[2])))
541 && const_ok_for_arm (~INTVAL (operands[2]))"
542 [(set (match_dup 3) (match_dup 2))
543 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
547 ;; The r/r/k alternative is required when reloading the address
548 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
549 ;; put the duplicated register first, and not try the commutative version.
550 (define_insn_and_split "*arm_addsi3"
551 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
552 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
553 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
569 subw%?\\t%0, %1, #%n2
570 subw%?\\t%0, %1, #%n2
573 && CONST_INT_P (operands[2])
574 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
575 && (reload_completed || !arm_eliminable_register (operands[1]))"
576 [(clobber (const_int 0))]
578 arm_split_constant (PLUS, SImode, curr_insn,
579 INTVAL (operands[2]), operands[0],
583 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
584 (set_attr "predicable" "yes")
585 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
586 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
587 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
588 (const_string "alu_imm")
589 (const_string "alu_sreg")))
593 (define_insn "adddi3_compareV"
594 [(set (reg:CC_V CC_REGNUM)
597 (sign_extend:TI (match_operand:DI 1 "s_register_operand" "r"))
598 (sign_extend:TI (match_operand:DI 2 "s_register_operand" "r")))
599 (sign_extend:TI (plus:DI (match_dup 1) (match_dup 2)))))
600 (set (match_operand:DI 0 "s_register_operand" "=&r")
601 (plus:DI (match_dup 1) (match_dup 2)))]
603 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
604 [(set_attr "conds" "set")
605 (set_attr "length" "8")
606 (set_attr "type" "multiple")]
609 (define_insn "addsi3_compareV"
610 [(set (reg:CC_V CC_REGNUM)
613 (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
614 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
615 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
616 (set (match_operand:SI 0 "register_operand" "=r")
617 (plus:SI (match_dup 1) (match_dup 2)))]
619 "adds%?\\t%0, %1, %2"
620 [(set_attr "conds" "set")
621 (set_attr "type" "alus_sreg")]
624 (define_insn "adddi3_compareC"
625 [(set (reg:CC_C CC_REGNUM)
628 (match_operand:DI 1 "register_operand" "r")
629 (match_operand:DI 2 "register_operand" "r"))
631 (set (match_operand:DI 0 "register_operand" "=&r")
632 (plus:DI (match_dup 1) (match_dup 2)))]
634 "adds\\t%Q0, %Q1, %Q2;adcs\\t%R0, %R1, %R2"
635 [(set_attr "conds" "set")
636 (set_attr "length" "8")
637 (set_attr "type" "multiple")]
640 (define_insn "addsi3_compareC"
641 [(set (reg:CC_C CC_REGNUM)
642 (compare:CC_C (plus:SI (match_operand:SI 1 "register_operand" "r")
643 (match_operand:SI 2 "register_operand" "r"))
645 (set (match_operand:SI 0 "register_operand" "=r")
646 (plus:SI (match_dup 1) (match_dup 2)))]
648 "adds%?\\t%0, %1, %2"
649 [(set_attr "conds" "set")
650 (set_attr "type" "alus_sreg")]
653 (define_insn "addsi3_compare0"
654 [(set (reg:CC_NOOV CC_REGNUM)
656 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
657 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
659 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
660 (plus:SI (match_dup 1) (match_dup 2)))]
664 subs%?\\t%0, %1, #%n2
666 [(set_attr "conds" "set")
667 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
670 (define_insn "*addsi3_compare0_scratch"
671 [(set (reg:CC_NOOV CC_REGNUM)
673 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
674 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
681 [(set_attr "conds" "set")
682 (set_attr "predicable" "yes")
683 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
686 (define_insn "*compare_negsi_si"
687 [(set (reg:CC_Z CC_REGNUM)
689 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
690 (match_operand:SI 1 "s_register_operand" "l,r")))]
693 [(set_attr "conds" "set")
694 (set_attr "predicable" "yes")
695 (set_attr "arch" "t2,*")
696 (set_attr "length" "2,4")
697 (set_attr "predicable_short_it" "yes,no")
698 (set_attr "type" "alus_sreg")]
701 ;; This is the canonicalization of subsi3_compare when the
702 ;; addend is a constant.
703 (define_insn "cmpsi2_addneg"
704 [(set (reg:CC CC_REGNUM)
706 (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
708 (set (match_operand:SI 0 "s_register_operand" "=r,r")
709 (plus:SI (match_dup 1)
710 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
712 && (INTVAL (operands[2])
713 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
715 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
716 in different condition codes (like cmn rather than like cmp), so that
717 alternative comes first. Both alternatives can match for any 0x??000000
718 where except for 0 and INT_MIN it doesn't matter what we choose, and also
719 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
721 if (which_alternative == 0 && operands[3] != const1_rtx)
722 return "subs%?\\t%0, %1, #%n3";
724 return "adds%?\\t%0, %1, %3";
726 [(set_attr "conds" "set")
727 (set_attr "type" "alus_sreg")]
730 ;; Convert the sequence
732 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
736 ;; bcs dest ((unsigned)rn >= 1)
737 ;; similarly for the beq variant using bcc.
738 ;; This is a common looping idiom (while (n--))
740 [(set (match_operand:SI 0 "arm_general_register_operand" "")
741 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
743 (set (match_operand 2 "cc_register" "")
744 (compare (match_dup 0) (const_int -1)))
746 (if_then_else (match_operator 3 "equality_operator"
747 [(match_dup 2) (const_int 0)])
748 (match_operand 4 "" "")
749 (match_operand 5 "" "")))]
750 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
754 (match_dup 1) (const_int 1)))
755 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
757 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
760 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
761 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
764 operands[2], const0_rtx);"
767 ;; The next four insns work because they compare the result with one of
768 ;; the operands, and we know that the use of the condition code is
769 ;; either GEU or LTU, so we can use the carry flag from the addition
770 ;; instead of doing the compare a second time.
771 (define_insn "*addsi3_compare_op1"
772 [(set (reg:CC_C CC_REGNUM)
774 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
775 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
777 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
778 (plus:SI (match_dup 1) (match_dup 2)))]
783 subs%?\\t%0, %1, #%n2
784 subs%?\\t%0, %0, #%n2
786 subs%?\\t%0, %1, #%n2
788 [(set_attr "conds" "set")
789 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
790 (set_attr "length" "2,2,2,2,4,4,4")
792 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
795 (define_insn "*addsi3_compare_op2"
796 [(set (reg:CC_C CC_REGNUM)
798 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r,r")
799 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,I,L,r"))
801 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r,r")
802 (plus:SI (match_dup 1) (match_dup 2)))]
807 subs%?\\t%0, %1, #%n2
808 subs%?\\t%0, %0, #%n2
810 subs%?\\t%0, %1, #%n2
812 [(set_attr "conds" "set")
813 (set_attr "arch" "t2,t2,t2,t2,*,*,*")
814 (set_attr "length" "2,2,2,2,4,4,4")
816 "alus_sreg,alus_imm,alus_sreg,alus_imm,alus_imm,alus_imm,alus_sreg")]
819 (define_insn "*compare_addsi2_op0"
820 [(set (reg:CC_C CC_REGNUM)
822 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
823 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
832 [(set_attr "conds" "set")
833 (set_attr "predicable" "yes")
834 (set_attr "arch" "t2,t2,*,*,*")
835 (set_attr "predicable_short_it" "yes,yes,no,no,no")
836 (set_attr "length" "2,2,4,4,4")
837 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
840 (define_insn "*compare_addsi2_op1"
841 [(set (reg:CC_C CC_REGNUM)
843 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r,r")
844 (match_operand:SI 1 "arm_add_operand" "Pv,l,I,L,r"))
853 [(set_attr "conds" "set")
854 (set_attr "predicable" "yes")
855 (set_attr "arch" "t2,t2,*,*,*")
856 (set_attr "predicable_short_it" "yes,yes,no,no,no")
857 (set_attr "length" "2,2,4,4,4")
858 (set_attr "type" "alus_imm,alus_sreg,alus_imm,alus_imm,alus_sreg")]
861 (define_insn "addsi3_carryin_<optab>"
862 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
863 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
864 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
865 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
870 sbc%?\\t%0, %1, #%B2"
871 [(set_attr "conds" "use")
872 (set_attr "predicable" "yes")
873 (set_attr "arch" "t2,*,*")
874 (set_attr "length" "4")
875 (set_attr "predicable_short_it" "yes,no,no")
876 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
879 ;; Canonicalization of the above when the immediate is zero.
880 (define_insn "add0si3_carryin_<optab>"
881 [(set (match_operand:SI 0 "s_register_operand" "=r")
882 (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
883 (match_operand:SI 1 "arm_not_operand" "r")))]
886 [(set_attr "conds" "use")
887 (set_attr "predicable" "yes")
888 (set_attr "length" "4")
889 (set_attr "type" "adc_imm")]
892 (define_insn "*addsi3_carryin_alt2_<optab>"
893 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
894 (plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
895 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
896 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
901 sbc%?\\t%0, %1, #%B2"
902 [(set_attr "conds" "use")
903 (set_attr "predicable" "yes")
904 (set_attr "arch" "t2,*,*")
905 (set_attr "length" "4")
906 (set_attr "predicable_short_it" "yes,no,no")
907 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
910 (define_insn "*addsi3_carryin_shift_<optab>"
911 [(set (match_operand:SI 0 "s_register_operand" "=r")
913 (match_operator:SI 2 "shift_operator"
914 [(match_operand:SI 3 "s_register_operand" "r")
915 (match_operand:SI 4 "reg_or_int_operand" "rM")])
916 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0)))
917 (match_operand:SI 1 "s_register_operand" "r")))]
919 "adc%?\\t%0, %1, %3%S2"
920 [(set_attr "conds" "use")
921 (set_attr "predicable" "yes")
922 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
923 (const_string "alu_shift_imm")
924 (const_string "alu_shift_reg")))]
927 (define_insn "*addsi3_carryin_clobercc_<optab>"
928 [(set (match_operand:SI 0 "s_register_operand" "=r")
929 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
930 (match_operand:SI 2 "arm_rhs_operand" "rI"))
931 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
932 (clobber (reg:CC CC_REGNUM))]
934 "adcs%?\\t%0, %1, %2"
935 [(set_attr "conds" "set")
936 (set_attr "type" "adcs_reg")]
939 (define_expand "subv<mode>4"
940 [(match_operand:SIDI 0 "register_operand")
941 (match_operand:SIDI 1 "register_operand")
942 (match_operand:SIDI 2 "register_operand")
943 (match_operand 3 "")]
946 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
947 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
952 (define_expand "usubv<mode>4"
953 [(match_operand:SIDI 0 "register_operand")
954 (match_operand:SIDI 1 "register_operand")
955 (match_operand:SIDI 2 "register_operand")
956 (match_operand 3 "")]
959 emit_insn (gen_sub<mode>3_compare1 (operands[0], operands[1], operands[2]));
960 arm_gen_unlikely_cbranch (LTU, CCmode, operands[3]);
965 (define_insn "subdi3_compare1"
966 [(set (reg:CC CC_REGNUM)
968 (match_operand:DI 1 "s_register_operand" "r")
969 (match_operand:DI 2 "s_register_operand" "r")))
970 (set (match_operand:DI 0 "s_register_operand" "=&r")
971 (minus:DI (match_dup 1) (match_dup 2)))]
973 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
974 [(set_attr "conds" "set")
975 (set_attr "length" "8")
976 (set_attr "type" "multiple")]
979 (define_insn "subsi3_compare1"
980 [(set (reg:CC CC_REGNUM)
982 (match_operand:SI 1 "register_operand" "r")
983 (match_operand:SI 2 "register_operand" "r")))
984 (set (match_operand:SI 0 "register_operand" "=r")
985 (minus:SI (match_dup 1) (match_dup 2)))]
987 "subs%?\\t%0, %1, %2"
988 [(set_attr "conds" "set")
989 (set_attr "type" "alus_sreg")]
992 (define_insn "*subsi3_carryin"
993 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
994 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
995 (match_operand:SI 2 "s_register_operand" "r,r,r"))
996 (match_operand:SI 3 "arm_borrow_operation" "")))]
1001 sbc%?\\t%0, %2, %2, lsl #1"
1002 [(set_attr "conds" "use")
1003 (set_attr "arch" "*,a,t2")
1004 (set_attr "predicable" "yes")
1005 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1008 (define_insn "*subsi3_carryin_const"
1009 [(set (match_operand:SI 0 "s_register_operand" "=r")
1011 (match_operand:SI 1 "s_register_operand" "r")
1012 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1013 (match_operand:SI 3 "arm_borrow_operation" "")))]
1015 "sbc\\t%0, %1, #%n2"
1016 [(set_attr "conds" "use")
1017 (set_attr "type" "adc_imm")]
1020 (define_insn "*subsi3_carryin_const0"
1021 [(set (match_operand:SI 0 "s_register_operand" "=r")
1022 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1023 (match_operand:SI 2 "arm_borrow_operation" "")))]
1026 [(set_attr "conds" "use")
1027 (set_attr "type" "adc_imm")]
1030 (define_insn "*subsi3_carryin_shift"
1031 [(set (match_operand:SI 0 "s_register_operand" "=r")
1033 (match_operand:SI 1 "s_register_operand" "r")
1034 (match_operator:SI 2 "shift_operator"
1035 [(match_operand:SI 3 "s_register_operand" "r")
1036 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1037 (match_operand:SI 5 "arm_borrow_operation" "")))]
1039 "sbc%?\\t%0, %1, %3%S2"
1040 [(set_attr "conds" "use")
1041 (set_attr "predicable" "yes")
1042 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1043 (const_string "alu_shift_imm")
1044 (const_string "alu_shift_reg")))]
1047 (define_insn "*rsbsi3_carryin_shift"
1048 [(set (match_operand:SI 0 "s_register_operand" "=r")
1050 (match_operator:SI 2 "shift_operator"
1051 [(match_operand:SI 3 "s_register_operand" "r")
1052 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1053 (match_operand:SI 1 "s_register_operand" "r"))
1054 (match_operand:SI 5 "arm_borrow_operation" "")))]
1056 "rsc%?\\t%0, %1, %3%S2"
1057 [(set_attr "conds" "use")
1058 (set_attr "predicable" "yes")
1059 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1060 (const_string "alu_shift_imm")
1061 (const_string "alu_shift_reg")))]
1064 ; Transform ((x << y) - 1) to ~(~(x - 1) << y) where x is a constant.
1066 [(set (match_operand:SI 0 "s_register_operand" "")
1067 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1068 (match_operand:SI 2 "s_register_operand" ""))
1070 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1072 [(set (match_dup 3) (match_dup 1))
1073 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1075 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1078 (define_expand "addsf3"
1079 [(set (match_operand:SF 0 "s_register_operand")
1080 (plus:SF (match_operand:SF 1 "s_register_operand")
1081 (match_operand:SF 2 "s_register_operand")))]
1082 "TARGET_32BIT && TARGET_HARD_FLOAT"
1086 (define_expand "adddf3"
1087 [(set (match_operand:DF 0 "s_register_operand")
1088 (plus:DF (match_operand:DF 1 "s_register_operand")
1089 (match_operand:DF 2 "s_register_operand")))]
1090 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1094 (define_expand "subdi3"
1096 [(set (match_operand:DI 0 "s_register_operand")
1097 (minus:DI (match_operand:DI 1 "s_register_operand")
1098 (match_operand:DI 2 "s_register_operand")))
1099 (clobber (reg:CC CC_REGNUM))])]
1104 (define_insn "*arm_subdi3"
1105 [(set (match_operand:DI 0 "arm_general_register_operand" "=&r,&r,&r")
1106 (minus:DI (match_operand:DI 1 "arm_general_register_operand" "0,r,0")
1107 (match_operand:DI 2 "arm_general_register_operand" "r,0,0")))
1108 (clobber (reg:CC CC_REGNUM))]
1110 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1111 [(set_attr "conds" "clob")
1112 (set_attr "length" "8")
1113 (set_attr "type" "multiple")]
1116 (define_expand "subsi3"
1117 [(set (match_operand:SI 0 "s_register_operand")
1118 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1119 (match_operand:SI 2 "s_register_operand")))]
1122 if (CONST_INT_P (operands[1]))
1126 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1127 operands[1] = force_reg (SImode, operands[1]);
1130 arm_split_constant (MINUS, SImode, NULL_RTX,
1131 INTVAL (operands[1]), operands[0],
1133 optimize && can_create_pseudo_p ());
1137 else /* TARGET_THUMB1 */
1138 operands[1] = force_reg (SImode, operands[1]);
1143 ; ??? Check Thumb-2 split length
1144 (define_insn_and_split "*arm_subsi3_insn"
1145 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1146 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1147 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1159 "&& (CONST_INT_P (operands[1])
1160 && !const_ok_for_arm (INTVAL (operands[1])))"
1161 [(clobber (const_int 0))]
1163 arm_split_constant (MINUS, SImode, curr_insn,
1164 INTVAL (operands[1]), operands[0], operands[2], 0);
1167 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1168 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1169 (set_attr "predicable" "yes")
1170 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1171 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1175 [(match_scratch:SI 3 "r")
1176 (set (match_operand:SI 0 "arm_general_register_operand" "")
1177 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1178 (match_operand:SI 2 "arm_general_register_operand" "")))]
1180 && !const_ok_for_arm (INTVAL (operands[1]))
1181 && const_ok_for_arm (~INTVAL (operands[1]))"
1182 [(set (match_dup 3) (match_dup 1))
1183 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1187 (define_insn "subsi3_compare0"
1188 [(set (reg:CC_NOOV CC_REGNUM)
1190 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1191 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1193 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1194 (minus:SI (match_dup 1) (match_dup 2)))]
1199 rsbs%?\\t%0, %2, %1"
1200 [(set_attr "conds" "set")
1201 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1204 (define_insn "subsi3_compare"
1205 [(set (reg:CC CC_REGNUM)
1206 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1207 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1208 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1209 (minus:SI (match_dup 1) (match_dup 2)))]
1214 rsbs%?\\t%0, %2, %1"
1215 [(set_attr "conds" "set")
1216 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1219 (define_expand "subsf3"
1220 [(set (match_operand:SF 0 "s_register_operand")
1221 (minus:SF (match_operand:SF 1 "s_register_operand")
1222 (match_operand:SF 2 "s_register_operand")))]
1223 "TARGET_32BIT && TARGET_HARD_FLOAT"
1227 (define_expand "subdf3"
1228 [(set (match_operand:DF 0 "s_register_operand")
1229 (minus:DF (match_operand:DF 1 "s_register_operand")
1230 (match_operand:DF 2 "s_register_operand")))]
1231 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1236 ;; Multiplication insns
1238 (define_expand "mulhi3"
1239 [(set (match_operand:HI 0 "s_register_operand")
1240 (mult:HI (match_operand:HI 1 "s_register_operand")
1241 (match_operand:HI 2 "s_register_operand")))]
1242 "TARGET_DSP_MULTIPLY"
1245 rtx result = gen_reg_rtx (SImode);
1246 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
1247 emit_move_insn (operands[0], gen_lowpart (HImode, result));
1252 (define_expand "mulsi3"
1253 [(set (match_operand:SI 0 "s_register_operand")
1254 (mult:SI (match_operand:SI 2 "s_register_operand")
1255 (match_operand:SI 1 "s_register_operand")))]
1260 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
1262 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
1263 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
1264 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
1266 "mul%?\\t%0, %2, %1"
1267 [(set_attr "type" "mul")
1268 (set_attr "predicable" "yes")
1269 (set_attr "arch" "t2,v6,nov6,nov6")
1270 (set_attr "length" "4")
1271 (set_attr "predicable_short_it" "yes,no,*,*")]
1274 ;; MLA and MLS instructions.  Use operand 1 for the accumulator to prefer
1275 ;; reusing the same register.
1278 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
1280 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
1281 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
1282 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
1284 "mla%?\\t%0, %3, %2, %1"
1285 [(set_attr "type" "mla")
1286 (set_attr "predicable" "yes")
1287 (set_attr "arch" "v6,nov6,nov6,nov6")]
1291 [(set (match_operand:SI 0 "s_register_operand" "=r")
1293 (match_operand:SI 1 "s_register_operand" "r")
1294 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
1295 (match_operand:SI 2 "s_register_operand" "r"))))]
1296 "TARGET_32BIT && arm_arch_thumb2"
1297 "mls%?\\t%0, %3, %2, %1"
1298 [(set_attr "type" "mla")
1299 (set_attr "predicable" "yes")]
1302 (define_insn "*mulsi3_compare0"
1303 [(set (reg:CC_NOOV CC_REGNUM)
1304 (compare:CC_NOOV (mult:SI
1305 (match_operand:SI 2 "s_register_operand" "r,r")
1306 (match_operand:SI 1 "s_register_operand" "%0,r"))
1308 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1309 (mult:SI (match_dup 2) (match_dup 1)))]
1310 "TARGET_ARM && !arm_arch6"
1311 "muls%?\\t%0, %2, %1"
1312 [(set_attr "conds" "set")
1313 (set_attr "type" "muls")]
1316 (define_insn "*mulsi3_compare0_v6"
1317 [(set (reg:CC_NOOV CC_REGNUM)
1318 (compare:CC_NOOV (mult:SI
1319 (match_operand:SI 2 "s_register_operand" "r")
1320 (match_operand:SI 1 "s_register_operand" "r"))
1322 (set (match_operand:SI 0 "s_register_operand" "=r")
1323 (mult:SI (match_dup 2) (match_dup 1)))]
1324 "TARGET_ARM && arm_arch6 && optimize_size"
1325 "muls%?\\t%0, %2, %1"
1326 [(set_attr "conds" "set")
1327 (set_attr "type" "muls")]
1330 (define_insn "*mulsi_compare0_scratch"
1331 [(set (reg:CC_NOOV CC_REGNUM)
1332 (compare:CC_NOOV (mult:SI
1333 (match_operand:SI 2 "s_register_operand" "r,r")
1334 (match_operand:SI 1 "s_register_operand" "%0,r"))
1336 (clobber (match_scratch:SI 0 "=&r,&r"))]
1337 "TARGET_ARM && !arm_arch6"
1338 "muls%?\\t%0, %2, %1"
1339 [(set_attr "conds" "set")
1340 (set_attr "type" "muls")]
1343 (define_insn "*mulsi_compare0_scratch_v6"
1344 [(set (reg:CC_NOOV CC_REGNUM)
1345 (compare:CC_NOOV (mult:SI
1346 (match_operand:SI 2 "s_register_operand" "r")
1347 (match_operand:SI 1 "s_register_operand" "r"))
1349 (clobber (match_scratch:SI 0 "=r"))]
1350 "TARGET_ARM && arm_arch6 && optimize_size"
1351 "muls%?\\t%0, %2, %1"
1352 [(set_attr "conds" "set")
1353 (set_attr "type" "muls")]
1356 (define_insn "*mulsi3addsi_compare0"
1357 [(set (reg:CC_NOOV CC_REGNUM)
1360 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1361 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1362 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1364 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1365 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1367 "TARGET_ARM && arm_arch6"
1368 "mlas%?\\t%0, %2, %1, %3"
1369 [(set_attr "conds" "set")
1370 (set_attr "type" "mlas")]
1373 (define_insn "*mulsi3addsi_compare0_v6"
1374 [(set (reg:CC_NOOV CC_REGNUM)
1377 (match_operand:SI 2 "s_register_operand" "r")
1378 (match_operand:SI 1 "s_register_operand" "r"))
1379 (match_operand:SI 3 "s_register_operand" "r"))
1381 (set (match_operand:SI 0 "s_register_operand" "=r")
1382 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1384 "TARGET_ARM && arm_arch6 && optimize_size"
1385 "mlas%?\\t%0, %2, %1, %3"
1386 [(set_attr "conds" "set")
1387 (set_attr "type" "mlas")]
1390 (define_insn "*mulsi3addsi_compare0_scratch"
1391 [(set (reg:CC_NOOV CC_REGNUM)
1394 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1395 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1396 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1398 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1399 "TARGET_ARM && !arm_arch6"
1400 "mlas%?\\t%0, %2, %1, %3"
1401 [(set_attr "conds" "set")
1402 (set_attr "type" "mlas")]
1405 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1406 [(set (reg:CC_NOOV CC_REGNUM)
1409 (match_operand:SI 2 "s_register_operand" "r")
1410 (match_operand:SI 1 "s_register_operand" "r"))
1411 (match_operand:SI 3 "s_register_operand" "r"))
1413 (clobber (match_scratch:SI 0 "=r"))]
1414 "TARGET_ARM && arm_arch6 && optimize_size"
1415 "mlas%?\\t%0, %2, %1, %3"
1416 [(set_attr "conds" "set")
1417 (set_attr "type" "mlas")]
1420 ;; 32x32->64 widening multiply.
1421 ;; The only difference between the v3-5 and v6+ versions is the requirement
1422 ;; that the output does not overlap with either input.
1424 (define_expand "<Us>mulsidi3"
1425 [(set (match_operand:DI 0 "s_register_operand")
1427 (SE:DI (match_operand:SI 1 "s_register_operand"))
1428 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
1431 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
1432 gen_highpart (SImode, operands[0]),
1433 operands[1], operands[2]));
1438 (define_insn "<US>mull"
1439 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1441 (match_operand:SI 2 "s_register_operand" "%r,r")
1442 (match_operand:SI 3 "s_register_operand" "r,r")))
1443 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
1446 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
1449 "<US>mull%?\\t%0, %1, %2, %3"
1450 [(set_attr "type" "umull")
1451 (set_attr "predicable" "yes")
1452 (set_attr "arch" "v6,nov6")]
1455 (define_expand "<Us>maddsidi4"
1456 [(set (match_operand:DI 0 "s_register_operand")
1459 (SE:DI (match_operand:SI 1 "s_register_operand"))
1460 (SE:DI (match_operand:SI 2 "s_register_operand")))
1461 (match_operand:DI 3 "s_register_operand")))]
1464 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
1465 gen_lowpart (SImode, operands[3]),
1466 gen_highpart (SImode, operands[0]),
1467 gen_highpart (SImode, operands[3]),
1468 operands[1], operands[2]));
1473 (define_insn "<US>mlal"
1474 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
1477 (match_operand:SI 4 "s_register_operand" "%r,r")
1478 (match_operand:SI 5 "s_register_operand" "r,r"))
1479 (match_operand:SI 1 "s_register_operand" "0,0")))
1480 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
1485 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
1486 (zero_extend:DI (match_dup 1)))
1488 (match_operand:SI 3 "s_register_operand" "2,2")))]
1490 "<US>mlal%?\\t%0, %2, %4, %5"
1491 [(set_attr "type" "umlal")
1492 (set_attr "predicable" "yes")
1493 (set_attr "arch" "v6,nov6")]
1496 (define_expand "<US>mulsi3_highpart"
1498 [(set (match_operand:SI 0 "s_register_operand")
1502 (SE:DI (match_operand:SI 1 "s_register_operand"))
1503 (SE:DI (match_operand:SI 2 "s_register_operand")))
1505 (clobber (match_scratch:SI 3 ""))])]
1510 (define_insn "*<US>mull_high"
1511 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
1515 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
1516 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
1518 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
1520 "<US>mull%?\\t%3, %0, %2, %1"
1521 [(set_attr "type" "umull")
1522 (set_attr "predicable" "yes")
1523 (set_attr "arch" "v6,nov6,nov6")]
1526 (define_insn "mulhisi3"
1527 [(set (match_operand:SI 0 "s_register_operand" "=r")
1528 (mult:SI (sign_extend:SI
1529 (match_operand:HI 1 "s_register_operand" "%r"))
1531 (match_operand:HI 2 "s_register_operand" "r"))))]
1532 "TARGET_DSP_MULTIPLY"
1533 "smulbb%?\\t%0, %1, %2"
1534 [(set_attr "type" "smulxy")
1535 (set_attr "predicable" "yes")]
1538 (define_insn "*mulhisi3tb"
1539 [(set (match_operand:SI 0 "s_register_operand" "=r")
1540 (mult:SI (ashiftrt:SI
1541 (match_operand:SI 1 "s_register_operand" "r")
1544 (match_operand:HI 2 "s_register_operand" "r"))))]
1545 "TARGET_DSP_MULTIPLY"
1546 "smultb%?\\t%0, %1, %2"
1547 [(set_attr "type" "smulxy")
1548 (set_attr "predicable" "yes")]
1551 (define_insn "*mulhisi3bt"
1552 [(set (match_operand:SI 0 "s_register_operand" "=r")
1553 (mult:SI (sign_extend:SI
1554 (match_operand:HI 1 "s_register_operand" "r"))
1556 (match_operand:SI 2 "s_register_operand" "r")
1558 "TARGET_DSP_MULTIPLY"
1559 "smulbt%?\\t%0, %1, %2"
1560 [(set_attr "type" "smulxy")
1561 (set_attr "predicable" "yes")]
1564 (define_insn "*mulhisi3tt"
1565 [(set (match_operand:SI 0 "s_register_operand" "=r")
1566 (mult:SI (ashiftrt:SI
1567 (match_operand:SI 1 "s_register_operand" "r")
1570 (match_operand:SI 2 "s_register_operand" "r")
1572 "TARGET_DSP_MULTIPLY"
1573 "smultt%?\\t%0, %1, %2"
1574 [(set_attr "type" "smulxy")
1575 (set_attr "predicable" "yes")]
1578 (define_insn "maddhisi4"
1579 [(set (match_operand:SI 0 "s_register_operand" "=r")
1580 (plus:SI (mult:SI (sign_extend:SI
1581 (match_operand:HI 1 "s_register_operand" "r"))
1583 (match_operand:HI 2 "s_register_operand" "r")))
1584 (match_operand:SI 3 "s_register_operand" "r")))]
1585 "TARGET_DSP_MULTIPLY"
1586 "smlabb%?\\t%0, %1, %2, %3"
1587 [(set_attr "type" "smlaxy")
1588 (set_attr "predicable" "yes")]
1591 ;; Note: there is no maddhisi4ibt because this one is canonical form
1592 (define_insn "*maddhisi4tb"
1593 [(set (match_operand:SI 0 "s_register_operand" "=r")
1594 (plus:SI (mult:SI (ashiftrt:SI
1595 (match_operand:SI 1 "s_register_operand" "r")
1598 (match_operand:HI 2 "s_register_operand" "r")))
1599 (match_operand:SI 3 "s_register_operand" "r")))]
1600 "TARGET_DSP_MULTIPLY"
1601 "smlatb%?\\t%0, %1, %2, %3"
1602 [(set_attr "type" "smlaxy")
1603 (set_attr "predicable" "yes")]
1606 (define_insn "*maddhisi4tt"
1607 [(set (match_operand:SI 0 "s_register_operand" "=r")
1608 (plus:SI (mult:SI (ashiftrt:SI
1609 (match_operand:SI 1 "s_register_operand" "r")
1612 (match_operand:SI 2 "s_register_operand" "r")
1614 (match_operand:SI 3 "s_register_operand" "r")))]
1615 "TARGET_DSP_MULTIPLY"
1616 "smlatt%?\\t%0, %1, %2, %3"
1617 [(set_attr "type" "smlaxy")
1618 (set_attr "predicable" "yes")]
1621 (define_insn "maddhidi4"
1622 [(set (match_operand:DI 0 "s_register_operand" "=r")
1624 (mult:DI (sign_extend:DI
1625 (match_operand:HI 1 "s_register_operand" "r"))
1627 (match_operand:HI 2 "s_register_operand" "r")))
1628 (match_operand:DI 3 "s_register_operand" "0")))]
1629 "TARGET_DSP_MULTIPLY"
1630 "smlalbb%?\\t%Q0, %R0, %1, %2"
1631 [(set_attr "type" "smlalxy")
1632 (set_attr "predicable" "yes")])
1634 ;; Note: there is no maddhidi4ibt because this one is canonical form
1635 (define_insn "*maddhidi4tb"
1636 [(set (match_operand:DI 0 "s_register_operand" "=r")
1638 (mult:DI (sign_extend:DI
1640 (match_operand:SI 1 "s_register_operand" "r")
1643 (match_operand:HI 2 "s_register_operand" "r")))
1644 (match_operand:DI 3 "s_register_operand" "0")))]
1645 "TARGET_DSP_MULTIPLY"
1646 "smlaltb%?\\t%Q0, %R0, %1, %2"
1647 [(set_attr "type" "smlalxy")
1648 (set_attr "predicable" "yes")])
1650 (define_insn "*maddhidi4tt"
1651 [(set (match_operand:DI 0 "s_register_operand" "=r")
1653 (mult:DI (sign_extend:DI
1655 (match_operand:SI 1 "s_register_operand" "r")
1659 (match_operand:SI 2 "s_register_operand" "r")
1661 (match_operand:DI 3 "s_register_operand" "0")))]
1662 "TARGET_DSP_MULTIPLY"
1663 "smlaltt%?\\t%Q0, %R0, %1, %2"
1664 [(set_attr "type" "smlalxy")
1665 (set_attr "predicable" "yes")])
1667 (define_expand "mulsf3"
1668 [(set (match_operand:SF 0 "s_register_operand")
1669 (mult:SF (match_operand:SF 1 "s_register_operand")
1670 (match_operand:SF 2 "s_register_operand")))]
1671 "TARGET_32BIT && TARGET_HARD_FLOAT"
1675 (define_expand "muldf3"
1676 [(set (match_operand:DF 0 "s_register_operand")
1677 (mult:DF (match_operand:DF 1 "s_register_operand")
1678 (match_operand:DF 2 "s_register_operand")))]
1679 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1685 (define_expand "divsf3"
1686 [(set (match_operand:SF 0 "s_register_operand")
1687 (div:SF (match_operand:SF 1 "s_register_operand")
1688 (match_operand:SF 2 "s_register_operand")))]
1689 "TARGET_32BIT && TARGET_HARD_FLOAT"
1692 (define_expand "divdf3"
1693 [(set (match_operand:DF 0 "s_register_operand")
1694 (div:DF (match_operand:DF 1 "s_register_operand")
1695 (match_operand:DF 2 "s_register_operand")))]
1696 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1700 ; Expand logical operations. The mid-end expander does not split off memory
1701 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
1702 ; So an explicit expander is needed to generate better code.
1704 (define_expand "<LOGICAL:optab>di3"
1705 [(set (match_operand:DI 0 "s_register_operand")
1706 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
1707 (match_operand:DI 2 "arm_<optab>di_operand")))]
1710 rtx low = simplify_gen_binary (<CODE>, SImode,
1711 gen_lowpart (SImode, operands[1]),
1712 gen_lowpart (SImode, operands[2]));
1713 rtx high = simplify_gen_binary (<CODE>, SImode,
1714 gen_highpart (SImode, operands[1]),
1715 gen_highpart_mode (SImode, DImode,
1718 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1719 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1724 (define_expand "one_cmpldi2"
1725 [(set (match_operand:DI 0 "s_register_operand")
1726 (not:DI (match_operand:DI 1 "s_register_operand")))]
1729 rtx low = simplify_gen_unary (NOT, SImode,
1730 gen_lowpart (SImode, operands[1]),
1732 rtx high = simplify_gen_unary (NOT, SImode,
1733 gen_highpart_mode (SImode, DImode,
1737 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
1738 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
1743 ;; Split DImode and, ior, xor operations. Simply perform the logical
1744 ;; operation on the upper and lower halves of the registers.
1745 ;; This is needed for atomic operations in arm_split_atomic_op.
1746 ;; Avoid splitting IWMMXT instructions.
1748 [(set (match_operand:DI 0 "s_register_operand" "")
1749 (match_operator:DI 6 "logical_binary_operator"
1750 [(match_operand:DI 1 "s_register_operand" "")
1751 (match_operand:DI 2 "s_register_operand" "")]))]
1752 "TARGET_32BIT && reload_completed
1753 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1754 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1755 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1758 operands[3] = gen_highpart (SImode, operands[0]);
1759 operands[0] = gen_lowpart (SImode, operands[0]);
1760 operands[4] = gen_highpart (SImode, operands[1]);
1761 operands[1] = gen_lowpart (SImode, operands[1]);
1762 operands[5] = gen_highpart (SImode, operands[2]);
1763 operands[2] = gen_lowpart (SImode, operands[2]);
1767 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
1768 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
1770 [(set (match_operand:DI 0 "s_register_operand")
1771 (not:DI (match_operand:DI 1 "s_register_operand")))]
1773 [(set (match_dup 0) (not:SI (match_dup 1)))
1774 (set (match_dup 2) (not:SI (match_dup 3)))]
1777 operands[2] = gen_highpart (SImode, operands[0]);
1778 operands[0] = gen_lowpart (SImode, operands[0]);
1779 operands[3] = gen_highpart (SImode, operands[1]);
1780 operands[1] = gen_lowpart (SImode, operands[1]);
1784 (define_expand "andsi3"
1785 [(set (match_operand:SI 0 "s_register_operand")
1786 (and:SI (match_operand:SI 1 "s_register_operand")
1787 (match_operand:SI 2 "reg_or_int_operand")))]
1792 if (CONST_INT_P (operands[2]))
1794 if (INTVAL (operands[2]) == 255 && arm_arch6)
1796 operands[1] = convert_to_mode (QImode, operands[1], 1);
1797 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
1801 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
1802 operands[2] = force_reg (SImode, operands[2]);
1805 arm_split_constant (AND, SImode, NULL_RTX,
1806 INTVAL (operands[2]), operands[0],
1808 optimize && can_create_pseudo_p ());
1814 else /* TARGET_THUMB1 */
1816 if (!CONST_INT_P (operands[2]))
1818 rtx tmp = force_reg (SImode, operands[2]);
1819 if (rtx_equal_p (operands[0], operands[1]))
1823 operands[2] = operands[1];
1831 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1833 operands[2] = force_reg (SImode,
1834 GEN_INT (~INTVAL (operands[2])));
1836 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
1841 for (i = 9; i <= 31; i++)
1843 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
1845 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1849 else if ((HOST_WIDE_INT_1 << i) - 1
1850 == ~INTVAL (operands[2]))
1852 rtx shift = GEN_INT (i);
1853 rtx reg = gen_reg_rtx (SImode);
1855 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1856 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1862 operands[2] = force_reg (SImode, operands[2]);
1868 ; ??? Check split length for Thumb-2
1869 (define_insn_and_split "*arm_andsi3_insn"
1870 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
1871 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
1872 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
1877 bic%?\\t%0, %1, #%B2
1881 && CONST_INT_P (operands[2])
1882 && !(const_ok_for_arm (INTVAL (operands[2]))
1883 || const_ok_for_arm (~INTVAL (operands[2])))"
1884 [(clobber (const_int 0))]
1886 arm_split_constant (AND, SImode, curr_insn,
1887 INTVAL (operands[2]), operands[0], operands[1], 0);
1890 [(set_attr "length" "4,4,4,4,16")
1891 (set_attr "predicable" "yes")
1892 (set_attr "predicable_short_it" "no,yes,no,no,no")
1893 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
1896 (define_insn "*andsi3_compare0"
1897 [(set (reg:CC_NOOV CC_REGNUM)
1899 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1900 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
1902 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1903 (and:SI (match_dup 1) (match_dup 2)))]
1907 bics%?\\t%0, %1, #%B2
1908 ands%?\\t%0, %1, %2"
1909 [(set_attr "conds" "set")
1910 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1913 (define_insn "*andsi3_compare0_scratch"
1914 [(set (reg:CC_NOOV CC_REGNUM)
1916 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
1917 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
1919 (clobber (match_scratch:SI 2 "=X,r,X"))]
1923 bics%?\\t%2, %0, #%B1
1925 [(set_attr "conds" "set")
1926 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
1929 (define_insn "*zeroextractsi_compare0_scratch"
1930 [(set (reg:CC_NOOV CC_REGNUM)
1931 (compare:CC_NOOV (zero_extract:SI
1932 (match_operand:SI 0 "s_register_operand" "r")
1933 (match_operand 1 "const_int_operand" "n")
1934 (match_operand 2 "const_int_operand" "n"))
1937 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1938 && INTVAL (operands[1]) > 0
1939 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1940 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1942 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1943 << INTVAL (operands[2]));
1944 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1947 [(set_attr "conds" "set")
1948 (set_attr "predicable" "yes")
1949 (set_attr "type" "logics_imm")]
1952 (define_insn_and_split "*ne_zeroextractsi"
1953 [(set (match_operand:SI 0 "s_register_operand" "=r")
1954 (ne:SI (zero_extract:SI
1955 (match_operand:SI 1 "s_register_operand" "r")
1956 (match_operand:SI 2 "const_int_operand" "n")
1957 (match_operand:SI 3 "const_int_operand" "n"))
1959 (clobber (reg:CC CC_REGNUM))]
1961 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1962 && INTVAL (operands[2]) > 0
1963 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1964 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1967 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1968 && INTVAL (operands[2]) > 0
1969 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1970 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1971 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1972 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1974 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1976 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1977 (match_dup 0) (const_int 1)))]
1979 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1980 << INTVAL (operands[3]));
1982 [(set_attr "conds" "clob")
1983 (set (attr "length")
1984 (if_then_else (eq_attr "is_thumb" "yes")
1987 (set_attr "type" "multiple")]
1990 (define_insn_and_split "*ne_zeroextractsi_shifted"
1991 [(set (match_operand:SI 0 "s_register_operand" "=r")
1992 (ne:SI (zero_extract:SI
1993 (match_operand:SI 1 "s_register_operand" "r")
1994 (match_operand:SI 2 "const_int_operand" "n")
1997 (clobber (reg:CC CC_REGNUM))]
2001 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2002 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2004 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2006 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2007 (match_dup 0) (const_int 1)))]
2009 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2011 [(set_attr "conds" "clob")
2012 (set_attr "length" "8")
2013 (set_attr "type" "multiple")]
2016 (define_insn_and_split "*ite_ne_zeroextractsi"
2017 [(set (match_operand:SI 0 "s_register_operand" "=r")
2018 (if_then_else:SI (ne (zero_extract:SI
2019 (match_operand:SI 1 "s_register_operand" "r")
2020 (match_operand:SI 2 "const_int_operand" "n")
2021 (match_operand:SI 3 "const_int_operand" "n"))
2023 (match_operand:SI 4 "arm_not_operand" "rIK")
2025 (clobber (reg:CC CC_REGNUM))]
2027 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2028 && INTVAL (operands[2]) > 0
2029 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2030 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2031 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2034 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2035 && INTVAL (operands[2]) > 0
2036 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2037 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2038 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2039 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2040 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2042 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2044 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2045 (match_dup 0) (match_dup 4)))]
2047 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2048 << INTVAL (operands[3]));
2050 [(set_attr "conds" "clob")
2051 (set_attr "length" "8")
2052 (set_attr "type" "multiple")]
2055 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2056 [(set (match_operand:SI 0 "s_register_operand" "=r")
2057 (if_then_else:SI (ne (zero_extract:SI
2058 (match_operand:SI 1 "s_register_operand" "r")
2059 (match_operand:SI 2 "const_int_operand" "n")
2062 (match_operand:SI 3 "arm_not_operand" "rIK")
2064 (clobber (reg:CC CC_REGNUM))]
2065 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2067 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2068 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2069 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2071 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2073 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2074 (match_dup 0) (match_dup 3)))]
2076 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2078 [(set_attr "conds" "clob")
2079 (set_attr "length" "8")
2080 (set_attr "type" "multiple")]
2083 ;; ??? Use the Thumb-2 bitfield insert/extract instructions.
2085 [(set (match_operand:SI 0 "s_register_operand" "")
2086 (match_operator:SI 1 "shiftable_operator"
2087 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2088 (match_operand:SI 3 "const_int_operand" "")
2089 (match_operand:SI 4 "const_int_operand" ""))
2090 (match_operand:SI 5 "s_register_operand" "")]))
2091 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2093 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2096 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2099 HOST_WIDE_INT temp = INTVAL (operands[3]);
2101 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2102 operands[4] = GEN_INT (32 - temp);
2107 [(set (match_operand:SI 0 "s_register_operand" "")
2108 (match_operator:SI 1 "shiftable_operator"
2109 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2110 (match_operand:SI 3 "const_int_operand" "")
2111 (match_operand:SI 4 "const_int_operand" ""))
2112 (match_operand:SI 5 "s_register_operand" "")]))
2113 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2115 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2118 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2121 HOST_WIDE_INT temp = INTVAL (operands[3]);
2123 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2124 operands[4] = GEN_INT (32 - temp);
2128 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2129 ;;; represented by the bitfield, then this will produce incorrect results.
2130 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2131 ;;; which have a real bit-field insert instruction, the truncation happens
2132 ;;; in the bit-field insert instruction itself. Since arm does not have a
2133 ;;; bit-field insert instruction, we would have to emit code here to truncate
2134 ;;; the value before we insert. This loses some of the advantage of having
2135 ;;; this insv pattern, so this pattern needs to be reevaluated.
2137 (define_expand "insv"
2138 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2139 (match_operand 1 "general_operand")
2140 (match_operand 2 "general_operand"))
2141 (match_operand 3 "reg_or_int_operand"))]
2142 "TARGET_ARM || arm_arch_thumb2"
2145 int start_bit = INTVAL (operands[2]);
2146 int width = INTVAL (operands[1]);
2147 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2148 rtx target, subtarget;
2150 if (arm_arch_thumb2)
2152 if (unaligned_access && MEM_P (operands[0])
2153 && s_register_operand (operands[3], GET_MODE (operands[3]))
2154 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2158 if (BYTES_BIG_ENDIAN)
2159 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2164 base_addr = adjust_address (operands[0], SImode,
2165 start_bit / BITS_PER_UNIT);
2166 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2170 rtx tmp = gen_reg_rtx (HImode);
2172 base_addr = adjust_address (operands[0], HImode,
2173 start_bit / BITS_PER_UNIT);
2174 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2175 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2179 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2181 bool use_bfi = TRUE;
2183 if (CONST_INT_P (operands[3]))
2185 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2189 emit_insn (gen_insv_zero (operands[0], operands[1],
2194 /* See if the set can be done with a single orr instruction. */
2195 if (val == mask && const_ok_for_arm (val << start_bit))
2201 if (!REG_P (operands[3]))
2202 operands[3] = force_reg (SImode, operands[3]);
2204 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2213 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2216 target = copy_rtx (operands[0]);
2217 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2218 subreg as the final target. */
2219 if (GET_CODE (target) == SUBREG)
2221 subtarget = gen_reg_rtx (SImode);
2222 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2223 < GET_MODE_SIZE (SImode))
2224 target = SUBREG_REG (target);
2229 if (CONST_INT_P (operands[3]))
2231 /* Since we are inserting a known constant, we may be able to
2232 reduce the number of bits that we have to clear so that
2233 the mask becomes simple. */
2234 /* ??? This code does not check to see if the new mask is actually
2235 simpler. It may not be. */
2236 rtx op1 = gen_reg_rtx (SImode);
2237 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2238 start of this pattern. */
2239 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2240 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2242 emit_insn (gen_andsi3 (op1, operands[0],
2243 gen_int_mode (~mask2, SImode)));
2244 emit_insn (gen_iorsi3 (subtarget, op1,
2245 gen_int_mode (op3_value << start_bit, SImode)));
2247 else if (start_bit == 0
2248 && !(const_ok_for_arm (mask)
2249 || const_ok_for_arm (~mask)))
2251 /* A Trick, since we are setting the bottom bits in the word,
2252 we can shift operand[3] up, operand[0] down, OR them together
2253 and rotate the result back again. This takes 3 insns, and
2254 the third might be mergeable into another op. */
2255 /* The shift up copes with the possibility that operand[3] is
2256 wider than the bitfield. */
2257 rtx op0 = gen_reg_rtx (SImode);
2258 rtx op1 = gen_reg_rtx (SImode);
2260 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2261 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2262 emit_insn (gen_iorsi3 (op1, op1, op0));
2263 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2265 else if ((width + start_bit == 32)
2266 && !(const_ok_for_arm (mask)
2267 || const_ok_for_arm (~mask)))
2269 /* Similar trick, but slightly less efficient. */
2271 rtx op0 = gen_reg_rtx (SImode);
2272 rtx op1 = gen_reg_rtx (SImode);
2274 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2275 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2276 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2277 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2281 rtx op0 = gen_int_mode (mask, SImode);
2282 rtx op1 = gen_reg_rtx (SImode);
2283 rtx op2 = gen_reg_rtx (SImode);
2285 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2287 rtx tmp = gen_reg_rtx (SImode);
2289 emit_insn (gen_movsi (tmp, op0));
2293 /* Mask out any bits in operand[3] that are not needed. */
2294 emit_insn (gen_andsi3 (op1, operands[3], op0));
2296 if (CONST_INT_P (op0)
2297 && (const_ok_for_arm (mask << start_bit)
2298 || const_ok_for_arm (~(mask << start_bit))))
2300 op0 = gen_int_mode (~(mask << start_bit), SImode);
2301 emit_insn (gen_andsi3 (op2, operands[0], op0));
2305 if (CONST_INT_P (op0))
2307 rtx tmp = gen_reg_rtx (SImode);
2309 emit_insn (gen_movsi (tmp, op0));
2314 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2316 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2320 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2322 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2325 if (subtarget != target)
2327 /* If TARGET is still a SUBREG, then it must be wider than a word,
2328 so we must be careful only to set the subword we were asked to. */
2329 if (GET_CODE (target) == SUBREG)
2330 emit_move_insn (target, subtarget);
2332 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2339 (define_insn "insv_zero"
2340 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2341 (match_operand:SI 1 "const_int_M_operand" "M")
2342 (match_operand:SI 2 "const_int_M_operand" "M"))
2346 [(set_attr "length" "4")
2347 (set_attr "predicable" "yes")
2348 (set_attr "type" "bfm")]
2351 (define_insn "insv_t2"
2352 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2353 (match_operand:SI 1 "const_int_M_operand" "M")
2354 (match_operand:SI 2 "const_int_M_operand" "M"))
2355 (match_operand:SI 3 "s_register_operand" "r"))]
2357 "bfi%?\t%0, %3, %2, %1"
2358 [(set_attr "length" "4")
2359 (set_attr "predicable" "yes")
2360 (set_attr "type" "bfm")]
2363 (define_insn "andsi_notsi_si"
2364 [(set (match_operand:SI 0 "s_register_operand" "=r")
2365 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2366 (match_operand:SI 1 "s_register_operand" "r")))]
2368 "bic%?\\t%0, %1, %2"
2369 [(set_attr "predicable" "yes")
2370 (set_attr "type" "logic_reg")]
2373 (define_insn "andsi_not_shiftsi_si"
2374 [(set (match_operand:SI 0 "s_register_operand" "=r")
2375 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2376 [(match_operand:SI 2 "s_register_operand" "r")
2377 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2378 (match_operand:SI 1 "s_register_operand" "r")))]
2380 "bic%?\\t%0, %1, %2%S4"
2381 [(set_attr "predicable" "yes")
2382 (set_attr "shift" "2")
2383 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2384 (const_string "logic_shift_imm")
2385 (const_string "logic_shift_reg")))]
2388 ;; Shifted bics pattern used to set up CC status register and not reusing
2389 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
2390 ;; does not support shift by register.
2391 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
2392 [(set (reg:CC_NOOV CC_REGNUM)
2394 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2395 [(match_operand:SI 1 "s_register_operand" "r")
2396 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2397 (match_operand:SI 3 "s_register_operand" "r"))
2399 (clobber (match_scratch:SI 4 "=r"))]
2400 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2401 "bics%?\\t%4, %3, %1%S0"
2402 [(set_attr "predicable" "yes")
2403 (set_attr "conds" "set")
2404 (set_attr "shift" "1")
2405 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2406 (const_string "logic_shift_imm")
2407 (const_string "logic_shift_reg")))]
2410 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
2411 ;; getting reused later.
2412 (define_insn "andsi_not_shiftsi_si_scc"
2413 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2415 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
2416 [(match_operand:SI 1 "s_register_operand" "r")
2417 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
2418 (match_operand:SI 3 "s_register_operand" "r"))
2420 (set (match_operand:SI 4 "s_register_operand" "=r")
2421 (and:SI (not:SI (match_op_dup 0
2425 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
2426 "bics%?\\t%4, %3, %1%S0"
2427 [(set_attr "predicable" "yes")
2428 (set_attr "conds" "set")
2429 (set_attr "shift" "1")
2430 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
2431 (const_string "logic_shift_imm")
2432 (const_string "logic_shift_reg")))]
2435 (define_insn "*andsi_notsi_si_compare0"
2436 [(set (reg:CC_NOOV CC_REGNUM)
2438 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2439 (match_operand:SI 1 "s_register_operand" "r"))
2441 (set (match_operand:SI 0 "s_register_operand" "=r")
2442 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2445 [(set_attr "conds" "set")
2446 (set_attr "type" "logics_shift_reg")]
2449 (define_insn "*andsi_notsi_si_compare0_scratch"
2450 [(set (reg:CC_NOOV CC_REGNUM)
2452 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2453 (match_operand:SI 1 "s_register_operand" "r"))
2455 (clobber (match_scratch:SI 0 "=r"))]
2458 [(set_attr "conds" "set")
2459 (set_attr "type" "logics_shift_reg")]
2462 (define_expand "iorsi3"
2463 [(set (match_operand:SI 0 "s_register_operand")
2464 (ior:SI (match_operand:SI 1 "s_register_operand")
2465 (match_operand:SI 2 "reg_or_int_operand")))]
2468 if (CONST_INT_P (operands[2]))
2472 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
2473 operands[2] = force_reg (SImode, operands[2]);
2476 arm_split_constant (IOR, SImode, NULL_RTX,
2477 INTVAL (operands[2]), operands[0],
2479 optimize && can_create_pseudo_p ());
2483 else /* TARGET_THUMB1 */
2485 rtx tmp = force_reg (SImode, operands[2]);
2486 if (rtx_equal_p (operands[0], operands[1]))
2490 operands[2] = operands[1];
2498 (define_insn_and_split "*iorsi3_insn"
2499 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2500 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2501 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2506 orn%?\\t%0, %1, #%B2
2510 && CONST_INT_P (operands[2])
2511 && !(const_ok_for_arm (INTVAL (operands[2]))
2512 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2513 [(clobber (const_int 0))]
2515 arm_split_constant (IOR, SImode, curr_insn,
2516 INTVAL (operands[2]), operands[0], operands[1], 0);
2519 [(set_attr "length" "4,4,4,4,16")
2520 (set_attr "arch" "32,t2,t2,32,32")
2521 (set_attr "predicable" "yes")
2522 (set_attr "predicable_short_it" "no,yes,no,no,no")
2523 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
2527 [(match_scratch:SI 3 "r")
2528 (set (match_operand:SI 0 "arm_general_register_operand" "")
2529 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2530 (match_operand:SI 2 "const_int_operand" "")))]
2532 && !const_ok_for_arm (INTVAL (operands[2]))
2533 && const_ok_for_arm (~INTVAL (operands[2]))"
2534 [(set (match_dup 3) (match_dup 2))
2535 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2539 (define_insn "*iorsi3_compare0"
2540 [(set (reg:CC_NOOV CC_REGNUM)
2542 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2543 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2545 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
2546 (ior:SI (match_dup 1) (match_dup 2)))]
2548 "orrs%?\\t%0, %1, %2"
2549 [(set_attr "conds" "set")
2550 (set_attr "arch" "*,t2,*")
2551 (set_attr "length" "4,2,4")
2552 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2555 (define_insn "*iorsi3_compare0_scratch"
2556 [(set (reg:CC_NOOV CC_REGNUM)
2558 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
2559 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
2561 (clobber (match_scratch:SI 0 "=r,l,r"))]
2563 "orrs%?\\t%0, %1, %2"
2564 [(set_attr "conds" "set")
2565 (set_attr "arch" "*,t2,*")
2566 (set_attr "length" "4,2,4")
2567 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
2570 (define_expand "xorsi3"
2571 [(set (match_operand:SI 0 "s_register_operand")
2572 (xor:SI (match_operand:SI 1 "s_register_operand")
2573 (match_operand:SI 2 "reg_or_int_operand")))]
2575 "if (CONST_INT_P (operands[2]))
2579 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
2580 operands[2] = force_reg (SImode, operands[2]);
2583 arm_split_constant (XOR, SImode, NULL_RTX,
2584 INTVAL (operands[2]), operands[0],
2586 optimize && can_create_pseudo_p ());
2590 else /* TARGET_THUMB1 */
2592 rtx tmp = force_reg (SImode, operands[2]);
2593 if (rtx_equal_p (operands[0], operands[1]))
2597 operands[2] = operands[1];
2604 (define_insn_and_split "*arm_xorsi3"
2605 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
2606 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
2607 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
2615 && CONST_INT_P (operands[2])
2616 && !const_ok_for_arm (INTVAL (operands[2]))"
2617 [(clobber (const_int 0))]
2619 arm_split_constant (XOR, SImode, curr_insn,
2620 INTVAL (operands[2]), operands[0], operands[1], 0);
2623 [(set_attr "length" "4,4,4,16")
2624 (set_attr "predicable" "yes")
2625 (set_attr "predicable_short_it" "no,yes,no,no")
2626 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
2629 (define_insn "*xorsi3_compare0"
2630 [(set (reg:CC_NOOV CC_REGNUM)
2631 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
2632 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2634 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2635 (xor:SI (match_dup 1) (match_dup 2)))]
2637 "eors%?\\t%0, %1, %2"
2638 [(set_attr "conds" "set")
2639 (set_attr "type" "logics_imm,logics_reg")]
2642 (define_insn "*xorsi3_compare0_scratch"
2643 [(set (reg:CC_NOOV CC_REGNUM)
2644 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
2645 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
2649 [(set_attr "conds" "set")
2650 (set_attr "type" "logics_imm,logics_reg")]
2653 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2654 ; (NOT D) we can sometimes merge the final NOT into one of the following
2658 [(set (match_operand:SI 0 "s_register_operand" "")
2659 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2660 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2661 (match_operand:SI 3 "arm_rhs_operand" "")))
2662 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2664 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2665 (not:SI (match_dup 3))))
2666 (set (match_dup 0) (not:SI (match_dup 4)))]
2670 (define_insn_and_split "*andsi_iorsi3_notsi"
2671 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2672 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2673 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2674 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2676 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2677 "&& reload_completed"
2678 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
2679 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
2681 /* If operands[3] is a constant make sure to fold the NOT into it
2682 to avoid creating a NOT of a CONST_INT. */
2683 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
2684 if (CONST_INT_P (not_rtx))
2686 operands[4] = operands[0];
2687 operands[5] = not_rtx;
2691 operands[5] = operands[0];
2692 operands[4] = not_rtx;
2695 [(set_attr "length" "8")
2696 (set_attr "ce_count" "2")
2697 (set_attr "predicable" "yes")
2698 (set_attr "type" "multiple")]
2701 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2702 ; insns are available?
2704 [(set (match_operand:SI 0 "s_register_operand" "")
2705 (match_operator:SI 1 "logical_binary_operator"
2706 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2707 (match_operand:SI 3 "const_int_operand" "")
2708 (match_operand:SI 4 "const_int_operand" ""))
2709 (match_operator:SI 9 "logical_binary_operator"
2710 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2711 (match_operand:SI 6 "const_int_operand" ""))
2712 (match_operand:SI 7 "s_register_operand" "")])]))
2713 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2715 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2716 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2719 [(ashift:SI (match_dup 2) (match_dup 4))
2723 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2726 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2730 [(set (match_operand:SI 0 "s_register_operand" "")
2731 (match_operator:SI 1 "logical_binary_operator"
2732 [(match_operator:SI 9 "logical_binary_operator"
2733 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2734 (match_operand:SI 6 "const_int_operand" ""))
2735 (match_operand:SI 7 "s_register_operand" "")])
2736 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2737 (match_operand:SI 3 "const_int_operand" "")
2738 (match_operand:SI 4 "const_int_operand" ""))]))
2739 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2741 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2742 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2745 [(ashift:SI (match_dup 2) (match_dup 4))
2749 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2752 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2756 [(set (match_operand:SI 0 "s_register_operand" "")
2757 (match_operator:SI 1 "logical_binary_operator"
2758 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2759 (match_operand:SI 3 "const_int_operand" "")
2760 (match_operand:SI 4 "const_int_operand" ""))
2761 (match_operator:SI 9 "logical_binary_operator"
2762 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2763 (match_operand:SI 6 "const_int_operand" ""))
2764 (match_operand:SI 7 "s_register_operand" "")])]))
2765 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2767 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2768 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2771 [(ashift:SI (match_dup 2) (match_dup 4))
2775 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2778 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2782 [(set (match_operand:SI 0 "s_register_operand" "")
2783 (match_operator:SI 1 "logical_binary_operator"
2784 [(match_operator:SI 9 "logical_binary_operator"
2785 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2786 (match_operand:SI 6 "const_int_operand" ""))
2787 (match_operand:SI 7 "s_register_operand" "")])
2788 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2789 (match_operand:SI 3 "const_int_operand" "")
2790 (match_operand:SI 4 "const_int_operand" ""))]))
2791 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2793 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2794 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2797 [(ashift:SI (match_dup 2) (match_dup 4))
2801 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2804 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2808 ;; Minimum and maximum insns
2810 (define_expand "smaxsi3"
2812 (set (match_operand:SI 0 "s_register_operand")
2813 (smax:SI (match_operand:SI 1 "s_register_operand")
2814 (match_operand:SI 2 "arm_rhs_operand")))
2815 (clobber (reg:CC CC_REGNUM))])]
2818 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2820 /* No need for a clobber of the condition code register here. */
2821 emit_insn (gen_rtx_SET (operands[0],
2822 gen_rtx_SMAX (SImode, operands[1],
2828 (define_insn "*smax_0"
2829 [(set (match_operand:SI 0 "s_register_operand" "=r")
2830 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2833 "bic%?\\t%0, %1, %1, asr #31"
2834 [(set_attr "predicable" "yes")
2835 (set_attr "type" "logic_shift_reg")]
2838 (define_insn "*smax_m1"
2839 [(set (match_operand:SI 0 "s_register_operand" "=r")
2840 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2843 "orr%?\\t%0, %1, %1, asr #31"
2844 [(set_attr "predicable" "yes")
2845 (set_attr "type" "logic_shift_reg")]
2848 (define_insn_and_split "*arm_smax_insn"
2849 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2850 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2851 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2852 (clobber (reg:CC CC_REGNUM))]
2855 ; cmp\\t%1, %2\;movlt\\t%0, %2
2856 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2858 [(set (reg:CC CC_REGNUM)
2859 (compare:CC (match_dup 1) (match_dup 2)))
2861 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
2865 [(set_attr "conds" "clob")
2866 (set_attr "length" "8,12")
2867 (set_attr "type" "multiple")]
2870 (define_expand "sminsi3"
2872 (set (match_operand:SI 0 "s_register_operand")
2873 (smin:SI (match_operand:SI 1 "s_register_operand")
2874 (match_operand:SI 2 "arm_rhs_operand")))
2875 (clobber (reg:CC CC_REGNUM))])]
2878 if (operands[2] == const0_rtx)
2880 /* No need for a clobber of the condition code register here. */
2881 emit_insn (gen_rtx_SET (operands[0],
2882 gen_rtx_SMIN (SImode, operands[1],
2888 (define_insn "*smin_0"
2889 [(set (match_operand:SI 0 "s_register_operand" "=r")
2890 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2893 "and%?\\t%0, %1, %1, asr #31"
2894 [(set_attr "predicable" "yes")
2895 (set_attr "type" "logic_shift_reg")]
2898 (define_insn_and_split "*arm_smin_insn"
2899 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2900 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2901 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2902 (clobber (reg:CC CC_REGNUM))]
2905 ; cmp\\t%1, %2\;movge\\t%0, %2
2906 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2908 [(set (reg:CC CC_REGNUM)
2909 (compare:CC (match_dup 1) (match_dup 2)))
2911 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
2915 [(set_attr "conds" "clob")
2916 (set_attr "length" "8,12")
2917 (set_attr "type" "multiple,multiple")]
2920 (define_expand "umaxsi3"
2922 (set (match_operand:SI 0 "s_register_operand")
2923 (umax:SI (match_operand:SI 1 "s_register_operand")
2924 (match_operand:SI 2 "arm_rhs_operand")))
2925 (clobber (reg:CC CC_REGNUM))])]
2930 (define_insn_and_split "*arm_umaxsi3"
2931 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2932 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2933 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2934 (clobber (reg:CC CC_REGNUM))]
2937 ; cmp\\t%1, %2\;movcc\\t%0, %2
2938 ; cmp\\t%1, %2\;movcs\\t%0, %1
2939 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2941 [(set (reg:CC CC_REGNUM)
2942 (compare:CC (match_dup 1) (match_dup 2)))
2944 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
2948 [(set_attr "conds" "clob")
2949 (set_attr "length" "8,8,12")
2950 (set_attr "type" "store_4")]
2953 (define_expand "uminsi3"
2955 (set (match_operand:SI 0 "s_register_operand")
2956 (umin:SI (match_operand:SI 1 "s_register_operand")
2957 (match_operand:SI 2 "arm_rhs_operand")))
2958 (clobber (reg:CC CC_REGNUM))])]
2963 (define_insn_and_split "*arm_uminsi3"
2964 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2965 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2966 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2967 (clobber (reg:CC CC_REGNUM))]
2970 ; cmp\\t%1, %2\;movcs\\t%0, %2
2971 ; cmp\\t%1, %2\;movcc\\t%0, %1
2972 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2974 [(set (reg:CC CC_REGNUM)
2975 (compare:CC (match_dup 1) (match_dup 2)))
2977 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
2981 [(set_attr "conds" "clob")
2982 (set_attr "length" "8,8,12")
2983 (set_attr "type" "store_4")]
2986 (define_insn "*store_minmaxsi"
2987 [(set (match_operand:SI 0 "memory_operand" "=m")
2988 (match_operator:SI 3 "minmax_operator"
2989 [(match_operand:SI 1 "s_register_operand" "r")
2990 (match_operand:SI 2 "s_register_operand" "r")]))
2991 (clobber (reg:CC CC_REGNUM))]
2992 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
2994 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2995 operands[1], operands[2]);
2996 output_asm_insn (\"cmp\\t%1, %2\", operands);
2998 output_asm_insn (\"ite\t%d3\", operands);
2999 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3000 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3003 [(set_attr "conds" "clob")
3004 (set (attr "length")
3005 (if_then_else (eq_attr "is_thumb" "yes")
3008 (set_attr "type" "store_4")]
3011 ; Reject the frame pointer in operand[1], since reloading this after
3012 ; it has been eliminated can cause carnage.
3013 (define_insn "*minmax_arithsi"
3014 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3015 (match_operator:SI 4 "shiftable_operator"
3016 [(match_operator:SI 5 "minmax_operator"
3017 [(match_operand:SI 2 "s_register_operand" "r,r")
3018 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3019 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3020 (clobber (reg:CC CC_REGNUM))]
3021 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3024 enum rtx_code code = GET_CODE (operands[4]);
3027 if (which_alternative != 0 || operands[3] != const0_rtx
3028 || (code != PLUS && code != IOR && code != XOR))
3033 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3034 operands[2], operands[3]);
3035 output_asm_insn (\"cmp\\t%2, %3\", operands);
3039 output_asm_insn (\"ite\\t%d5\", operands);
3041 output_asm_insn (\"it\\t%d5\", operands);
3043 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3045 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3048 [(set_attr "conds" "clob")
3049 (set (attr "length")
3050 (if_then_else (eq_attr "is_thumb" "yes")
3053 (set_attr "type" "multiple")]
3056 ; Reject the frame pointer in operand[1], since reloading this after
3057 ; it has been eliminated can cause carnage.
3058 (define_insn_and_split "*minmax_arithsi_non_canon"
3059 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3061 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3062 (match_operator:SI 4 "minmax_operator"
3063 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3064 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3065 (clobber (reg:CC CC_REGNUM))]
3066 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3067 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3069 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3070 [(set (reg:CC CC_REGNUM)
3071 (compare:CC (match_dup 2) (match_dup 3)))
3073 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3075 (minus:SI (match_dup 1)
3077 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3081 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3082 operands[2], operands[3]);
3083 enum rtx_code rc = minmax_code (operands[4]);
3084 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3085 operands[2], operands[3]);
3087 if (mode == CCFPmode || mode == CCFPEmode)
3088 rc = reverse_condition_maybe_unordered (rc);
3090 rc = reverse_condition (rc);
3091 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3092 if (CONST_INT_P (operands[3]))
3093 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3095 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3097 [(set_attr "conds" "clob")
3098 (set (attr "length")
3099 (if_then_else (eq_attr "is_thumb" "yes")
3102 (set_attr "type" "multiple")]
3105 (define_code_iterator SAT [smin smax])
3106 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3107 (define_code_attr SATlo [(smin "1") (smax "2")])
3108 (define_code_attr SAThi [(smin "2") (smax "1")])
3110 (define_insn "*satsi_<SAT:code>"
3111 [(set (match_operand:SI 0 "s_register_operand" "=r")
3112 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3113 (match_operand:SI 1 "const_int_operand" "i"))
3114 (match_operand:SI 2 "const_int_operand" "i")))]
3115 "TARGET_32BIT && arm_arch6
3116 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3120 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3121 &mask, &signed_sat))
3124 operands[1] = GEN_INT (mask);
3126 return "ssat%?\t%0, %1, %3";
3128 return "usat%?\t%0, %1, %3";
3130 [(set_attr "predicable" "yes")
3131 (set_attr "type" "alus_imm")]
3134 (define_insn "*satsi_<SAT:code>_shift"
3135 [(set (match_operand:SI 0 "s_register_operand" "=r")
3136 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3137 [(match_operand:SI 4 "s_register_operand" "r")
3138 (match_operand:SI 5 "const_int_operand" "i")])
3139 (match_operand:SI 1 "const_int_operand" "i"))
3140 (match_operand:SI 2 "const_int_operand" "i")))]
3141 "TARGET_32BIT && arm_arch6
3142 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3146 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3147 &mask, &signed_sat))
3150 operands[1] = GEN_INT (mask);
3152 return "ssat%?\t%0, %1, %4%S3";
3154 return "usat%?\t%0, %1, %4%S3";
3156 [(set_attr "predicable" "yes")
3157 (set_attr "shift" "3")
3158 (set_attr "type" "logic_shift_reg")])
3160 ;; Shift and rotation insns
3162 (define_expand "ashldi3"
3163 [(set (match_operand:DI 0 "s_register_operand")
3164 (ashift:DI (match_operand:DI 1 "s_register_operand")
3165 (match_operand:SI 2 "reg_or_int_operand")))]
3168 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3169 operands[2], gen_reg_rtx (SImode),
3170 gen_reg_rtx (SImode));
3174 (define_expand "ashlsi3"
3175 [(set (match_operand:SI 0 "s_register_operand")
3176 (ashift:SI (match_operand:SI 1 "s_register_operand")
3177 (match_operand:SI 2 "arm_rhs_operand")))]
3180 if (CONST_INT_P (operands[2])
3181 && (UINTVAL (operands[2])) > 31)
3183 emit_insn (gen_movsi (operands[0], const0_rtx));
3189 (define_expand "ashrdi3"
3190 [(set (match_operand:DI 0 "s_register_operand")
3191 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3192 (match_operand:SI 2 "reg_or_int_operand")))]
3195 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3196 operands[2], gen_reg_rtx (SImode),
3197 gen_reg_rtx (SImode));
3201 (define_expand "ashrsi3"
3202 [(set (match_operand:SI 0 "s_register_operand")
3203 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
3204 (match_operand:SI 2 "arm_rhs_operand")))]
3207 if (CONST_INT_P (operands[2])
3208 && UINTVAL (operands[2]) > 31)
3209 operands[2] = GEN_INT (31);
3213 (define_expand "lshrdi3"
3214 [(set (match_operand:DI 0 "s_register_operand")
3215 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
3216 (match_operand:SI 2 "reg_or_int_operand")))]
3219 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3220 operands[2], gen_reg_rtx (SImode),
3221 gen_reg_rtx (SImode));
3225 (define_expand "lshrsi3"
3226 [(set (match_operand:SI 0 "s_register_operand")
3227 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
3228 (match_operand:SI 2 "arm_rhs_operand")))]
3231 if (CONST_INT_P (operands[2])
3232 && (UINTVAL (operands[2])) > 31)
3234 emit_insn (gen_movsi (operands[0], const0_rtx));
3240 (define_expand "rotlsi3"
3241 [(set (match_operand:SI 0 "s_register_operand")
3242 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3243 (match_operand:SI 2 "reg_or_int_operand")))]
3246 if (CONST_INT_P (operands[2]))
3247 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3250 rtx reg = gen_reg_rtx (SImode);
3251 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3257 (define_expand "rotrsi3"
3258 [(set (match_operand:SI 0 "s_register_operand")
3259 (rotatert:SI (match_operand:SI 1 "s_register_operand")
3260 (match_operand:SI 2 "arm_rhs_operand")))]
3265 if (CONST_INT_P (operands[2])
3266 && UINTVAL (operands[2]) > 31)
3267 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3269 else /* TARGET_THUMB1 */
3271 if (CONST_INT_P (operands [2]))
3272 operands [2] = force_reg (SImode, operands[2]);
;; Generic SImode shift instruction.  Operand 3 is the shift operator
;; (shift_operator predicate); assembly is produced by arm_output_shift.
;; Alternatives cover Thumb-2 short forms (l registers) and full ARM forms;
;; see the "arch" and "predicable_short_it" attributes below.
3277 (define_insn "*arm_shiftsi3"
3278 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
3279 (match_operator:SI 3 "shift_operator"
3280 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
3281 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
3283 "* return arm_output_shift(operands, 0);"
3284 [(set_attr "predicable" "yes")
3285 (set_attr "arch" "t2,t2,*,*")
3286 (set_attr "predicable_short_it" "yes,yes,no,no")
3287 (set_attr "length" "4")
3288 (set_attr "shift" "1")
3289 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
;; Flag-setting variant of the shift: same operation, but CC_NOOV flags are
;; set from the result as well (arm_output_shift called with set_flags = 1).
3292 (define_insn "*shiftsi3_compare0"
3293 [(set (reg:CC_NOOV CC_REGNUM)
3294 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3295 [(match_operand:SI 1 "s_register_operand" "r,r")
3296 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3298 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3299 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3301 "* return arm_output_shift(operands, 1);"
3302 [(set_attr "conds" "set")
3303 (set_attr "shift" "1")
3304 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; As above but the shifted value itself is discarded (scratch destination);
;; only the condition codes are wanted.
3307 (define_insn "*shiftsi3_compare0_scratch"
3308 [(set (reg:CC_NOOV CC_REGNUM)
3309 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3310 [(match_operand:SI 1 "s_register_operand" "r,r")
3311 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
3313 (clobber (match_scratch:SI 0 "=r,r"))]
3315 "* return arm_output_shift(operands, 1);"
3316 [(set_attr "conds" "set")
3317 (set_attr "shift" "1")
3318 (set_attr "type" "shift_imm,shift_reg")]
;; MVN with a shifted source operand (not (shift ...)).
;; NOTE(review): the output template line for this pattern is not visible
;; in this excerpt -- confirm against upstream arm.md.
3321 (define_insn "*not_shiftsi"
3322 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3323 (not:SI (match_operator:SI 3 "shift_operator"
3324 [(match_operand:SI 1 "s_register_operand" "r,r")
3325 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3328 [(set_attr "predicable" "yes")
3329 (set_attr "shift" "1")
3330 (set_attr "arch" "32,a")
3331 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS: as *not_shiftsi, but also sets the condition codes from the result
;; (mvns output template, "conds" "set").
3333 (define_insn "*not_shiftsi_compare0"
3334 [(set (reg:CC_NOOV CC_REGNUM)
3336 (not:SI (match_operator:SI 3 "shift_operator"
3337 [(match_operand:SI 1 "s_register_operand" "r,r")
3338 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3340 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3341 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3343 "mvns%?\\t%0, %1%S3"
3344 [(set_attr "conds" "set")
3345 (set_attr "shift" "1")
3346 (set_attr "arch" "32,a")
3347 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS for flags only: result register is a scratch, just the CC matters.
3349 (define_insn "*not_shiftsi_compare0_scratch"
3350 [(set (reg:CC_NOOV CC_REGNUM)
3352 (not:SI (match_operator:SI 3 "shift_operator"
3353 [(match_operand:SI 1 "s_register_operand" "r,r")
3354 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3356 (clobber (match_scratch:SI 0 "=r,r"))]
3358 "mvns%?\\t%0, %1%S3"
3359 [(set_attr "conds" "set")
3360 (set_attr "shift" "1")
3361 (set_attr "arch" "32,a")
3362 (set_attr "type" "mvn_shift,mvn_shift_reg")])
3364 ;; We don't really have extzv, but defining this using shifts helps
3365 ;; to reduce register pressure later on.
;;
;; Zero-extract expander.  Operand 1 is the source, operand 2 the field
;; width, operand 3 the bit position (both constants).  Strategy visible
;; below: on Thumb-2, aligned 16/32-bit fields in memory use the unaligned
;; load patterns; register sources use the ubfx pattern (extzv_t2);
;; otherwise the field is synthesized with a left shift then a logical
;; right shift (extzv_t1).  Several branch bodies are truncated in this
;; copy -- verify control flow against upstream arm.md.
3367 (define_expand "extzv"
3368 [(set (match_operand 0 "s_register_operand")
3369 (zero_extract (match_operand 1 "nonimmediate_operand")
3370 (match_operand 2 "const_int_operand")
3371 (match_operand 3 "const_int_operand")))]
3372 "TARGET_THUMB1 || arm_arch_thumb2"
3375 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3376 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3378 if (arm_arch_thumb2)
3380 HOST_WIDE_INT width = INTVAL (operands[2]);
3381 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3383 if (unaligned_access && MEM_P (operands[1])
3384 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3388 if (BYTES_BIG_ENDIAN)
3389 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3394 base_addr = adjust_address (operands[1], SImode,
3395 bitpos / BITS_PER_UNIT);
3396 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3400 rtx dest = operands[0];
3401 rtx tmp = gen_reg_rtx (SImode);
3403 /* We may get a paradoxical subreg here. Strip it off. */
3404 if (GET_CODE (dest) == SUBREG
3405 && GET_MODE (dest) == SImode
3406 && GET_MODE (SUBREG_REG (dest)) == HImode)
3407 dest = SUBREG_REG (dest);
3409 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3412 base_addr = adjust_address (operands[1], HImode,
3413 bitpos / BITS_PER_UNIT);
3414 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3415 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3419 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3421 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3429 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3432 operands[3] = GEN_INT (rshift);
3436 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3440 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3441 operands[3], gen_reg_rtx (SImode)));
3446 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; Two-step extraction: shift left by operand 2 into scratch operand 4,
;; then logical-shift right by operand 3 into the destination.
3448 (define_expand "extzv_t1"
3449 [(set (match_operand:SI 4 "s_register_operand")
3450 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
3451 (match_operand:SI 2 "const_int_operand")))
3452 (set (match_operand:SI 0 "s_register_operand")
3453 (lshiftrt:SI (match_dup 4)
3454 (match_operand:SI 3 "const_int_operand")))]
;; Sign-extract expander, structured like extzv above: aligned 16/32-bit
;; memory fields go through the unaligned (signed) load patterns; SImode
;; register sources use extv_regsi (sbfx).  Condition string and several
;; branch bodies are truncated in this copy -- verify against upstream.
3458 (define_expand "extv"
3459 [(set (match_operand 0 "s_register_operand")
3460 (sign_extract (match_operand 1 "nonimmediate_operand")
3461 (match_operand 2 "const_int_operand")
3462 (match_operand 3 "const_int_operand")))]
3465 HOST_WIDE_INT width = INTVAL (operands[2]);
3466 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3468 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3469 && (bitpos % BITS_PER_UNIT) == 0)
3473 if (BYTES_BIG_ENDIAN)
3474 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3478 base_addr = adjust_address (operands[1], SImode,
3479 bitpos / BITS_PER_UNIT);
3480 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3484 rtx dest = operands[0];
3485 rtx tmp = gen_reg_rtx (SImode);
3487 /* We may get a paradoxical subreg here. Strip it off. */
3488 if (GET_CODE (dest) == SUBREG
3489 && GET_MODE (dest) == SImode
3490 && GET_MODE (SUBREG_REG (dest)) == HImode)
3491 dest = SUBREG_REG (dest);
3493 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3496 base_addr = adjust_address (operands[1], HImode,
3497 bitpos / BITS_PER_UNIT);
3498 emit_insn (gen_unaligned_loadhis (tmp, base_addr))
3499 emit_move_insn (gen_lowpart (SImode, dest), tmp);
3504 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3506 else if (GET_MODE (operands[0]) == SImode
3507 && GET_MODE (operands[1]) == SImode)
3509 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
3517 ; Helper to expand register forms of extv with the proper modes.
3519 (define_expand "extv_regsi"
3520 [(set (match_operand:SI 0 "s_register_operand")
3521 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
3522 (match_operand 2 "const_int_operand")
3523 (match_operand 3 "const_int_operand")))]
3528 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;;
;; 64-bit unaligned load, wrapped in an unspec so the optimizers never see a
;; plain DImode memory access; emitted as a register pair move by
;; output_move_double (8 bytes, i.e. two instructions).
3530 (define_insn "unaligned_loaddi"
3531 [(set (match_operand:DI 0 "s_register_operand" "=r")
3532 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
3533 UNSPEC_UNALIGNED_LOAD))]
3534 "TARGET_32BIT && TARGET_LDRD"
3536 return output_move_double (operands, true, NULL);
3538 [(set_attr "length" "8")
3539 (set_attr "type" "load_8")])
;; 32-bit unaligned load: plain ldr in three forms -- 16-bit Thumb-1,
;; 16-bit Thumb-2 (predicable short form), and full 32-bit encoding.
3541 (define_insn "unaligned_loadsi"
3542 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3543 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
3544 UNSPEC_UNALIGNED_LOAD))]
3547 ldr\t%0, %1\t@ unaligned
3548 ldr%?\t%0, %1\t@ unaligned
3549 ldr%?\t%0, %1\t@ unaligned"
3550 [(set_attr "arch" "t1,t2,32")
3551 (set_attr "length" "2,2,4")
3552 (set_attr "predicable" "no,yes,yes")
3553 (set_attr "predicable_short_it" "no,yes,no")
3554 (set_attr "type" "load_4")])
3556 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
3557 ;; address (there's no immediate format). That's tricky to support
3558 ;; here and we don't really need this pattern for that case, so only
3559 ;; enable for 32-bit ISAs.
;; Sign-extending unaligned halfword load (ldrsh).
3560 (define_insn "unaligned_loadhis"
3561 [(set (match_operand:SI 0 "s_register_operand" "=r")
3563 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
3564 UNSPEC_UNALIGNED_LOAD)))]
3565 "unaligned_access && TARGET_32BIT"
3566 "ldrsh%?\t%0, %1\t@ unaligned"
3567 [(set_attr "predicable" "yes")
3568 (set_attr "type" "load_byte")])
;; Zero-extending unaligned halfword load (ldrh), same three-alternative
;; structure as unaligned_loadsi.
3570 (define_insn "unaligned_loadhiu"
3571 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
3573 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
3574 UNSPEC_UNALIGNED_LOAD)))]
3577 ldrh\t%0, %1\t@ unaligned
3578 ldrh%?\t%0, %1\t@ unaligned
3579 ldrh%?\t%0, %1\t@ unaligned"
3580 [(set_attr "arch" "t1,t2,32")
3581 (set_attr "length" "2,2,4")
3582 (set_attr "predicable" "no,yes,yes")
3583 (set_attr "predicable_short_it" "no,yes,no")
3584 (set_attr "type" "load_byte")])
;; 64-bit unaligned store, mirror of unaligned_loaddi.
3586 (define_insn "unaligned_storedi"
3587 [(set (match_operand:DI 0 "memory_operand" "=m")
3588 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
3589 UNSPEC_UNALIGNED_STORE))]
3590 "TARGET_32BIT && TARGET_LDRD"
3592 return output_move_double (operands, true, NULL);
3594 [(set_attr "length" "8")
3595 (set_attr "type" "store_8")])
;; 32-bit unaligned store (str), mirror of unaligned_loadsi.
3597 (define_insn "unaligned_storesi"
3598 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
3599 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
3600 UNSPEC_UNALIGNED_STORE))]
3603 str\t%1, %0\t@ unaligned
3604 str%?\t%1, %0\t@ unaligned
3605 str%?\t%1, %0\t@ unaligned"
3606 [(set_attr "arch" "t1,t2,32")
3607 (set_attr "length" "2,2,4")
3608 (set_attr "predicable" "no,yes,yes")
3609 (set_attr "predicable_short_it" "no,yes,no")
3610 (set_attr "type" "store_4")])
;; Halfword unaligned store (strh), mirror of unaligned_loadhiu.
3612 (define_insn "unaligned_storehi"
3613 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
3614 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
3615 UNSPEC_UNALIGNED_STORE))]
3618 strh\t%1, %0\t@ unaligned
3619 strh%?\t%1, %0\t@ unaligned
3620 strh%?\t%1, %0\t@ unaligned"
3621 [(set_attr "arch" "t1,t2,32")
3622 (set_attr "length" "2,2,4")
3623 (set_attr "predicable" "no,yes,yes")
3624 (set_attr "predicable_short_it" "no,yes,no")
3625 (set_attr "type" "store_4")])
;; Signed bitfield extract from a register: sbfx dst, src, lsb, width.
;; The condition checks lsb in [0,31] and width in [1, 32-lsb], matching
;; the instruction's encoding constraints.
3628 (define_insn "*extv_reg"
3629 [(set (match_operand:SI 0 "s_register_operand" "=r")
3630 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3631 (match_operand:SI 2 "const_int_operand" "n")
3632 (match_operand:SI 3 "const_int_operand" "n")))]
3634 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3635 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3636 "sbfx%?\t%0, %1, %3, %2"
3637 [(set_attr "length" "4")
3638 (set_attr "predicable" "yes")
3639 (set_attr "type" "bfm")]
;; Unsigned bitfield extract: ubfx, same operand layout and range checks
;; as *extv_reg above.
3642 (define_insn "extzv_t2"
3643 [(set (match_operand:SI 0 "s_register_operand" "=r")
3644 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3645 (match_operand:SI 2 "const_int_operand" "n")
3646 (match_operand:SI 3 "const_int_operand" "n")))]
3648 && IN_RANGE (INTVAL (operands[3]), 0, 31)
3649 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
3650 "ubfx%?\t%0, %1, %3, %2"
3651 [(set_attr "length" "4")
3652 (set_attr "predicable" "yes")
3653 (set_attr "type" "bfm")]
3657 ;; Division instructions
;; Hardware signed divide; two alternatives selected by the "arch"
;; attribute (32-bit ISAs vs. ARMv8-M Baseline).  Output template not
;; visible in this excerpt.
3658 (define_insn "divsi3"
3659 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3660 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
3661 (match_operand:SI 2 "s_register_operand" "r,r")))]
3666 [(set_attr "arch" "32,v8mb")
3667 (set_attr "predicable" "yes")
3668 (set_attr "type" "sdiv")]
;; Hardware unsigned divide, structured exactly like divsi3.
3671 (define_insn "udivsi3"
3672 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3673 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
3674 (match_operand:SI 2 "s_register_operand" "r,r")))]
3679 [(set_attr "arch" "32,v8mb")
3680 (set_attr "predicable" "yes")
3681 (set_attr "type" "udiv")]
3685 ;; Unary arithmetic insns
;;
;; Overflow-detecting SImode negate: computes 0 - op1 with flags, then
;; branches to label operand 2 on signed overflow (CC_V mode, NE).
3687 (define_expand "negvsi3"
3688 [(match_operand:SI 0 "register_operand")
3689 (match_operand:SI 1 "register_operand")
3690 (match_operand 2 "")]
3693 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
3694 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; Overflow-detecting DImode negate, via negdi2_compare plus the same
;; unlikely overflow branch.
3699 (define_expand "negvdi3"
3700 [(match_operand:DI 0 "s_register_operand")
3701 (match_operand:DI 1 "s_register_operand")
3702 (match_operand 2 "")]
3705 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
3706 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; DImode negate that also sets the flags: rsbs/rscs pair on ARM, or
;; rsbs/sbcs on Thumb-2 (which has no rsc).  Earlyclobber (&r) keeps the
;; output from overlapping the input across the two instructions.
3712 (define_insn "negdi2_compare"
3713 [(set (reg:CC CC_REGNUM)
3716 (match_operand:DI 1 "register_operand" "r,r")))
3717 (set (match_operand:DI 0 "register_operand" "=&r,&r")
3718 (minus:DI (const_int 0) (match_dup 1)))]
3721 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
3722 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
3723 [(set_attr "conds" "set")
3724 (set_attr "arch" "a,t2")
3725 (set_attr "length" "8")
3726 (set_attr "type" "multiple")]
;; Plain DImode negate expander: emits the parallel (neg + CC clobber)
;; matched by *negdi2_insn below.
3729 (define_expand "negdi2"
3731 [(set (match_operand:DI 0 "s_register_operand")
3732 (neg:DI (match_operand:DI 1 "s_register_operand")))
3733 (clobber (reg:CC CC_REGNUM))])]
3737 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3738 (define_insn "*negdi2_insn"
3739 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3740 (neg:DI (match_operand:DI 1 "s_register_operand" "r,r")))
3741 (clobber (reg:CC CC_REGNUM))]
3744 rsbs\\t%Q0, %Q1, #0; rsc\\t%R0, %R1, #0
3745 negs\\t%Q0, %Q1; sbc\\t%R0, %R1, %R1, lsl #1"
3746 [(set_attr "conds" "clob")
3747 (set_attr "arch" "a,t2")
3748 (set_attr "length" "8")
3749 (set_attr "type" "multiple")]
;; SImode negate expander; condition string not visible in this excerpt.
3752 (define_expand "negsi2"
3753 [(set (match_operand:SI 0 "s_register_operand")
3754 (neg:SI (match_operand:SI 1 "s_register_operand")))]
;; SImode negate insn: rsb dst, src, #0.  The l/l alternative is the
;; Thumb-2 predicable short form.
3759 (define_insn "*arm_negsi2"
3760 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
3761 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
3763 "rsb%?\\t%0, %1, #0"
3764 [(set_attr "predicable" "yes")
3765 (set_attr "predicable_short_it" "yes,no")
3766 (set_attr "arch" "t2,*")
3767 (set_attr "length" "4")
3768 (set_attr "type" "alu_sreg")]
;; Single-precision FP negate; concrete insn presumably provided by the
;; VFP patterns elsewhere -- this expander body is not visible here.
3771 (define_expand "negsf2"
3772 [(set (match_operand:SF 0 "s_register_operand")
3773 (neg:SF (match_operand:SF 1 "s_register_operand")))]
3774 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision FP negate, requires double-precision VFP.
3778 (define_expand "negdf2"
3779 [(set (match_operand:DF 0 "s_register_operand")
3780 (neg:DF (match_operand:DF 1 "s_register_operand")))]
3781 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; zero_extend(neg:SI) to DImode: split after reload into a SImode negate
;; of the low part (operand 2) and, per the lowpart/highpart setup below,
;; independent handling of the high part (operand 3).
3784 (define_insn_and_split "*zextendsidi_negsi"
3785 [(set (match_operand:DI 0 "s_register_operand" "=r")
3786 (zero_extend:DI (neg:SI (match_operand:SI 1 "s_register_operand" "r"))))]
3791 (neg:SI (match_dup 1)))
3795 operands[2] = gen_lowpart (SImode, operands[0]);
3796 operands[3] = gen_highpart (SImode, operands[0]);
3798 [(set_attr "length" "8")
3799 (set_attr "type" "multiple")]
3802 ;; Negate an extended 32-bit value.
;; Split after reload.  Two strategies, chosen by whether the input
;; register overlaps the low word of the output:
;;  - overlap: asr high first, then rsbs low + carry-propagating subtract
;;    into high (built from raw MINUS/LTU rtl below);
;;  - no overlap: negate into low, compute high as (~input & (2*high... ))
;;    then arithmetic-shift -- flags are not needed for this sequence.
;; Several lines of each arm are missing from this copy; treat the exact
;; rtl as unverified and check against upstream arm.md.
3803 (define_insn_and_split "*negdi_extendsidi"
3804 [(set (match_operand:DI 0 "s_register_operand" "=l,r")
3805 (neg:DI (sign_extend:DI
3806 (match_operand:SI 1 "s_register_operand" "l,r"))))
3807 (clobber (reg:CC CC_REGNUM))]
3810 "&& reload_completed"
3813 rtx low = gen_lowpart (SImode, operands[0]);
3814 rtx high = gen_highpart (SImode, operands[0]);
3816 if (reg_overlap_mentioned_p (low, operands[1]))
3818 /* Input overlaps the low word of the output. Use:
3821 rsc Rhi, Rhi, #0 (thumb2: sbc Rhi, Rhi, Rhi, lsl #1). */
3822 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
3824 emit_insn (gen_rtx_SET (high,
3825 gen_rtx_ASHIFTRT (SImode, operands[1],
3828 emit_insn (gen_subsi3_compare (low, const0_rtx, operands[1]));
3830 emit_insn (gen_rtx_SET (high,
3831 gen_rtx_MINUS (SImode,
3832 gen_rtx_MINUS (SImode,
3835 gen_rtx_LTU (SImode,
3840 rtx two_x = gen_rtx_ASHIFT (SImode, high, GEN_INT (1));
3841 emit_insn (gen_rtx_SET (high,
3842 gen_rtx_MINUS (SImode,
3843 gen_rtx_MINUS (SImode,
3846 gen_rtx_LTU (SImode,
3853 /* No overlap, or overlap on high word. Use:
3857 Flags not needed for this sequence. */
3858 emit_insn (gen_rtx_SET (low, gen_rtx_NEG (SImode, operands[1])));
3859 emit_insn (gen_rtx_SET (high,
3860 gen_rtx_AND (SImode,
3861 gen_rtx_NOT (SImode, operands[1]),
3863 emit_insn (gen_rtx_SET (high,
3864 gen_rtx_ASHIFTRT (SImode, high,
3869 [(set_attr "length" "12")
3870 (set_attr "arch" "t2,*")
3871 (set_attr "type" "multiple")]
3874 ;; abssi2 doesn't really have to clobber the condition codes if a different register
3875 ;; is being set. To keep things simple, assume during rtl manipulations that
3876 ;; it does, but tell the final scan operator the truth. Similarly for
;; SImode absolute value expander: emits a parallel of the abs and a
;; clobber of operand 2, which is either a harmless SCRATCH or the real
;; CC register depending on the (truncated) condition around lines
;; 3887-3889 below.
3879 (define_expand "abssi2"
3881 [(set (match_operand:SI 0 "s_register_operand")
3882 (abs:SI (match_operand:SI 1 "s_register_operand")))
3883 (clobber (match_dup 2))]
3887 operands[2] = gen_rtx_SCRATCH (SImode);
3889 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; Split after reload into one of two sequences:
;;  - dst == src: cmp #0 + conditional rsblt (uses the flags);
;;  - otherwise: eor dst, src, src asr #31 then sub dst, dst, src asr #31
;;    (branch-free, flags untouched -- hence "conds" "clob,*").
3892 (define_insn_and_split "*arm_abssi2"
3893 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3894 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3895 (clobber (reg:CC CC_REGNUM))]
3898 "&& reload_completed"
3901 /* if (which_alternative == 0) */
3902 if (REGNO(operands[0]) == REGNO(operands[1]))
3904 /* Emit the pattern:
3905 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3906 [(set (reg:CC CC_REGNUM)
3907 (compare:CC (match_dup 0) (const_int 0)))
3908 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
3909 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
3911 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3912 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3913 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3914 (gen_rtx_LT (SImode,
3915 gen_rtx_REG (CCmode, CC_REGNUM),
3917 (gen_rtx_SET (operands[0],
3918 (gen_rtx_MINUS (SImode,
3925 /* Emit the pattern:
3926 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
3928 (xor:SI (match_dup 1)
3929 (ashiftrt:SI (match_dup 1) (const_int 31))))
3931 (minus:SI (match_dup 0)
3932 (ashiftrt:SI (match_dup 1) (const_int 31))))]
3934 emit_insn (gen_rtx_SET (operands[0],
3935 gen_rtx_XOR (SImode,
3936 gen_rtx_ASHIFTRT (SImode,
3940 emit_insn (gen_rtx_SET (operands[0],
3941 gen_rtx_MINUS (SImode,
3943 gen_rtx_ASHIFTRT (SImode,
3949 [(set_attr "conds" "clob,*")
3950 (set_attr "shift" "1")
3951 (set_attr "predicable" "no, yes")
3952 (set_attr "length" "8")
3953 (set_attr "type" "multiple")]
;; Negated absolute value (-|x|), mirroring *arm_abssi2:
;;  - dst == src: cmp #0 + conditional rsbgt;
;;  - otherwise: eor with asr #31 then rsb against asr #31.
;; Some rtl-construction lines are truncated in this copy.
3956 (define_insn_and_split "*arm_neg_abssi2"
3957 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3958 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3959 (clobber (reg:CC CC_REGNUM))]
3962 "&& reload_completed"
3965 /* if (which_alternative == 0) */
3966 if (REGNO (operands[0]) == REGNO (operands[1]))
3968 /* Emit the pattern:
3969 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3971 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
3972 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
3973 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
3975 gen_rtx_REG (CCmode, CC_REGNUM),
3977 gen_rtx_SET (operands[0],
3978 (gen_rtx_MINUS (SImode,
3984 /* Emit the pattern:
3985 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
3987 emit_insn (gen_rtx_SET (operands[0],
3988 gen_rtx_XOR (SImode,
3989 gen_rtx_ASHIFTRT (SImode,
3993 emit_insn (gen_rtx_SET (operands[0],
3994 gen_rtx_MINUS (SImode,
3995 gen_rtx_ASHIFTRT (SImode,
4002 [(set_attr "conds" "clob,*")
4003 (set_attr "shift" "1")
4004 (set_attr "predicable" "no, yes")
4005 (set_attr "length" "8")
4006 (set_attr "type" "multiple")]
;; FP absolute value and square root expanders.  These only gate on the
;; FP configuration; the concrete insns presumably live in the VFP
;; machine description -- no expander bodies are visible here.
4009 (define_expand "abssf2"
4010 [(set (match_operand:SF 0 "s_register_operand")
4011 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4012 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision abs requires full double-precision VFP.
4015 (define_expand "absdf2"
4016 [(set (match_operand:DF 0 "s_register_operand")
4017 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4018 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; Single-precision square root.
4021 (define_expand "sqrtsf2"
4022 [(set (match_operand:SF 0 "s_register_operand")
4023 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4024 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision square root.
4027 (define_expand "sqrtdf2"
4028 [(set (match_operand:DF 0 "s_register_operand")
4029 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4030 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; Bitwise NOT expander; condition string not visible in this excerpt.
4033 (define_expand "one_cmplsi2"
4034 [(set (match_operand:SI 0 "s_register_operand")
4035 (not:SI (match_operand:SI 1 "s_register_operand")))]
;; Bitwise NOT insn (mvn); l/l alternative is the Thumb-2 short form.
;; Output template line not visible in this excerpt.
4040 (define_insn "*arm_one_cmplsi2"
4041 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4042 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4045 [(set_attr "predicable" "yes")
4046 (set_attr "predicable_short_it" "yes,no")
4047 (set_attr "arch" "t2,*")
4048 (set_attr "length" "4")
4049 (set_attr "type" "mvn_reg")]
;; NOT that also sets the condition codes from the result (mvns).
4052 (define_insn "*notsi_compare0"
4053 [(set (reg:CC_NOOV CC_REGNUM)
4054 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4056 (set (match_operand:SI 0 "s_register_operand" "=r")
4057 (not:SI (match_dup 1)))]
4060 [(set_attr "conds" "set")
4061 (set_attr "type" "mvn_reg")]
;; Flags-only variant: the NOT result goes to a scratch register.
4064 (define_insn "*notsi_compare0_scratch"
4065 [(set (reg:CC_NOOV CC_REGNUM)
4066 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4068 (clobber (match_scratch:SI 0 "=r"))]
4071 [(set_attr "conds" "set")
4072 (set_attr "type" "mvn_reg")]
4075 ;; Fixed <--> Floating conversion insns
;;
;; int -> half float: no direct path, so go int -> SFmode (expand_float)
;; then SFmode -> HFmode (convert_to_mode) and move the result into place.
4077 (define_expand "floatsihf2"
4078 [(set (match_operand:HF 0 "general_operand")
4079 (float:HF (match_operand:SI 1 "general_operand")))]
4083 rtx op1 = gen_reg_rtx (SFmode);
4084 expand_float (op1, operands[1], 0);
4085 op1 = convert_to_mode (HFmode, op1, 0);
4086 emit_move_insn (operands[0], op1);
;; DImode -> half float, same two-step route through SFmode.
;; NOTE(review): going via SFmode double-rounds for DI inputs wider than
;; SF precision -- presumably accepted upstream; confirm against arm.md.
4091 (define_expand "floatdihf2"
4092 [(set (match_operand:HF 0 "general_operand")
4093 (float:HF (match_operand:DI 1 "general_operand")))]
4097 rtx op1 = gen_reg_rtx (SFmode);
4098 expand_float (op1, operands[1], 0);
4099 op1 = convert_to_mode (HFmode, op1, 0);
4100 emit_move_insn (operands[0], op1);
;; int -> float/double: direct hardware conversions, gated on HARD_FLOAT.
4105 (define_expand "floatsisf2"
4106 [(set (match_operand:SF 0 "s_register_operand")
4107 (float:SF (match_operand:SI 1 "s_register_operand")))]
4108 "TARGET_32BIT && TARGET_HARD_FLOAT"
4112 (define_expand "floatsidf2"
4113 [(set (match_operand:DF 0 "s_register_operand")
4114 (float:DF (match_operand:SI 1 "s_register_operand")))]
4115 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; half float -> int: widen HF to SF, then use the generic fix expansion.
4119 (define_expand "fix_trunchfsi2"
4120 [(set (match_operand:SI 0 "general_operand")
4121 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4125 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4126 expand_fix (operands[0], op1, 0);
;; half float -> DImode, same route.
4131 (define_expand "fix_trunchfdi2"
4132 [(set (match_operand:DI 0 "general_operand")
4133 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4137 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4138 expand_fix (operands[0], op1, 0);
;; float/double -> int: direct hardware truncating conversions.
4143 (define_expand "fix_truncsfsi2"
4144 [(set (match_operand:SI 0 "s_register_operand")
4145 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4146 "TARGET_32BIT && TARGET_HARD_FLOAT"
4150 (define_expand "fix_truncdfsi2"
4151 [(set (match_operand:SI 0 "s_register_operand")
4152 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4153 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; double -> float narrowing conversion.
4159 (define_expand "truncdfsf2"
4160 [(set (match_operand:SF 0 "s_register_operand")
4162 (match_operand:DF 1 "s_register_operand")))]
4163 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4167 ;; DFmode to HFmode conversions on targets without a single-step hardware
4168 ;; instruction for it would have to go through SFmode. This is dangerous
4169 ;; as it introduces double rounding.
4171 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4172 ;; a single-step instruction.
;; double -> half float.  When no single-step vcvtb.f16.f64 is available
;; (i.e. !TARGET_FP16_TO_DOUBLE), fall back to DF->SF->HF, which is only
;; permitted under -ffast-math per the condition above.
4174 (define_expand "truncdfhf2"
4175 [(set (match_operand:HF 0 "s_register_operand")
4177 (match_operand:DF 1 "s_register_operand")))]
4178 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4179 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4181 /* We don't have a direct instruction for this, so we must be in
4182 an unsafe math mode, and going via SFmode. */
4184 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4187 op1 = convert_to_mode (SFmode, operands[1], 0);
4188 op1 = convert_to_mode (HFmode, op1, 0);
4189 emit_move_insn (operands[0], op1);
4192 /* Otherwise, we will pick this up as a single instruction with
4193 no intermediary rounding. */
4197 ;; Zero and sign extension instructions.
;;
;; QI/HI/SI -> DI zero extension.  Low word: zero-extend the input (or move
;; it directly for SImode).  High word: constant zero.  Fresh pseudos are
;; used when available so the final moves can be coalesced; otherwise the
;; destination's own subwords serve as temporaries.
4199 (define_expand "zero_extend<mode>di2"
4200 [(set (match_operand:DI 0 "s_register_operand" "")
4201 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4202 "TARGET_32BIT <qhs_zextenddi_cond>"
4204 rtx res_lo, res_hi, op0_lo, op0_hi;
4205 res_lo = gen_lowpart (SImode, operands[0]);
4206 res_hi = gen_highpart (SImode, operands[0]);
4207 if (can_create_pseudo_p ())
4209 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4210 op0_hi = gen_reg_rtx (SImode);
4214 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4217 if (<MODE>mode != SImode)
4218 emit_insn (gen_rtx_SET (op0_lo,
4219 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4220 emit_insn (gen_movsi (op0_hi, const0_rtx));
4221 if (res_lo != op0_lo)
4222 emit_move_insn (res_lo, op0_lo);
4223 if (res_hi != op0_hi)
4224 emit_move_insn (res_hi, op0_hi);
;; QI/HI/SI -> DI sign extension: identical structure, except the high
;; word is the low word arithmetically shifted right by 31 (sign fill).
4229 (define_expand "extend<mode>di2"
4230 [(set (match_operand:DI 0 "s_register_operand" "")
4231 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4232 "TARGET_32BIT <qhs_sextenddi_cond>"
4234 rtx res_lo, res_hi, op0_lo, op0_hi;
4235 res_lo = gen_lowpart (SImode, operands[0]);
4236 res_hi = gen_highpart (SImode, operands[0]);
4237 if (can_create_pseudo_p ())
4239 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4240 op0_hi = gen_reg_rtx (SImode);
4244 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4247 if (<MODE>mode != SImode)
4248 emit_insn (gen_rtx_SET (op0_lo,
4249 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4250 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4251 if (res_lo != op0_lo)
4252 emit_move_insn (res_lo, op0_lo);
4253 if (res_hi != op0_hi)
4254 emit_move_insn (res_hi, op0_hi);
4259 ;; Splits for all extensions to DImode
;; Anonymous split: zero extension to DI becomes a lowpart move/extend
;; plus a store of zero into the high part (operands rewritten below so
;; the emitted pattern sets highpart := 0).
4261 [(set (match_operand:DI 0 "s_register_operand" "")
4262 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4264 [(set (match_dup 0) (match_dup 1))]
4266 rtx lo_part = gen_lowpart (SImode, operands[0]);
4267 machine_mode src_mode = GET_MODE (operands[1]);
4269 if (src_mode == SImode)
4270 emit_move_insn (lo_part, operands[1]);
4272 emit_insn (gen_rtx_SET (lo_part,
4273 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4274 operands[0] = gen_highpart (SImode, operands[0]);
4275 operands[1] = const0_rtx;
;; Anonymous split: sign extension to DI becomes a lowpart move/extend
;; plus highpart := lowpart >> 31 (arithmetic), per the new pattern.
4279 [(set (match_operand:DI 0 "s_register_operand" "")
4280 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4282 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4284 rtx lo_part = gen_lowpart (SImode, operands[0]);
4285 machine_mode src_mode = GET_MODE (operands[1]);
4287 if (src_mode == SImode)
4288 emit_move_insn (lo_part, operands[1]);
4290 emit_insn (gen_rtx_SET (lo_part,
4291 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4292 operands[1] = lo_part;
4293 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero extension.  Pre-ARMv4 ARM has no ldrh, so memory sources
;; go through movhi_bytes; pre-ARMv6 register sources (no uxth) use a
;; shift-left-16 / logical-shift-right-16 pair.
4296 (define_expand "zero_extendhisi2"
4297 [(set (match_operand:SI 0 "s_register_operand")
4298 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4301 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4303 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4306 if (!arm_arch6 && !MEM_P (operands[1]))
4308 rtx t = gen_lowpart (SImode, operands[1]);
4309 rtx tmp = gen_reg_rtx (SImode);
4310 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4311 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; Anonymous split: same shl-16/lshr-16 decomposition for pre-v6 non-T2.
4317 [(set (match_operand:SI 0 "s_register_operand" "")
4318 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4319 "!TARGET_THUMB2 && !arm_arch6"
4320 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4321 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4323 operands[2] = gen_lowpart (SImode, operands[1]);
;; ARMv4..v5 insn: register alternative (shift pair) or ldrh from memory.
;; Output templates not visible in this excerpt.
4326 (define_insn "*arm_zero_extendhisi2"
4327 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4328 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4329 "TARGET_ARM && arm_arch4 && !arm_arch6"
4333 [(set_attr "type" "alu_shift_reg,load_byte")
4334 (set_attr "predicable" "yes")]
;; ARMv6+ insn: uxth for registers / ldrh for memory (templates truncated).
4337 (define_insn "*arm_zero_extendhisi2_v6"
4338 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4339 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4340 "TARGET_ARM && arm_arch6"
4344 [(set_attr "predicable" "yes")
4345 (set_attr "type" "extend,load_byte")]
;; Fused zero-extend-halfword-and-add: uxtah dst, addend, src.
4348 (define_insn "*arm_zero_extendhisi2addsi"
4349 [(set (match_operand:SI 0 "s_register_operand" "=r")
4350 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4351 (match_operand:SI 2 "s_register_operand" "r")))]
4353 "uxtah%?\\t%0, %2, %1"
4354 [(set_attr "type" "alu_shift_reg")
4355 (set_attr "predicable" "yes")]
;; QI -> SI zero extension.  Pre-v6 ARM register sources: AND with 255
;; (constant is on the truncated continuation of line 4366's call); other
;; pre-v6 register cases: shl-24 / lshr-24 pair.  Memory sources use ldrb
;; via the insns below.
4358 (define_expand "zero_extendqisi2"
4359 [(set (match_operand:SI 0 "s_register_operand")
4360 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
4363 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4365 emit_insn (gen_andsi3 (operands[0],
4366 gen_lowpart (SImode, operands[1]),
4370 if (!arm_arch6 && !MEM_P (operands[1]))
4372 rtx t = gen_lowpart (SImode, operands[1]);
4373 rtx tmp = gen_reg_rtx (SImode);
4374 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4375 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; Anonymous split: byte zero-extend as shl-24/lshr-24, with an AND #255
;; fallback emitted in the (truncated) preparation code.
4381 [(set (match_operand:SI 0 "s_register_operand" "")
4382 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4384 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4385 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4387 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4390 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Pre-v6 insn: two-instruction register form (length 8) or single ldrb.
4395 (define_insn "*arm_zero_extendqisi2"
4396 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4397 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4398 "TARGET_ARM && !arm_arch6"
4401 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4402 [(set_attr "length" "8,4")
4403 (set_attr "type" "alu_shift_reg,load_byte")
4404 (set_attr "predicable" "yes")]
;; ARMv6+ insn: uxtb for registers (template truncated) / ldrb for memory.
4407 (define_insn "*arm_zero_extendqisi2_v6"
4408 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4409 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4410 "TARGET_ARM && arm_arch6"
4413 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
4414 [(set_attr "type" "extend,load_byte")
4415 (set_attr "predicable" "yes")]
;; Fused zero-extend-byte-and-add: uxtab dst, addend, src.
4418 (define_insn "*arm_zero_extendqisi2addsi"
4419 [(set (match_operand:SI 0 "s_register_operand" "=r")
4420 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4421 (match_operand:SI 2 "s_register_operand" "r")))]
4423 "uxtab%?\\t%0, %2, %1"
4424 [(set_attr "predicable" "yes")
4425 (set_attr "type" "alu_shift_reg")]
4429 [(set (match_operand:SI 0 "s_register_operand" "")
4430 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4431 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4432 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4433 [(set (match_dup 2) (match_dup 1))
4434 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4439 [(set (match_operand:SI 0 "s_register_operand" "")
4440 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4441 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4442 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4443 [(set (match_dup 2) (match_dup 1))
4444 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4450 [(set (match_operand:SI 0 "s_register_operand" "")
4451 (IOR_XOR:SI (and:SI (ashift:SI
4452 (match_operand:SI 1 "s_register_operand" "")
4453 (match_operand:SI 2 "const_int_operand" ""))
4454 (match_operand:SI 3 "const_int_operand" ""))
4456 (match_operator 5 "subreg_lowpart_operator"
4457 [(match_operand:SI 4 "s_register_operand" "")]))))]
4459 && (UINTVAL (operands[3])
4460 == (GET_MODE_MASK (GET_MODE (operands[5]))
4461 & (GET_MODE_MASK (GET_MODE (operands[5]))
4462 << (INTVAL (operands[2])))))"
4463 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
4465 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4466 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode register against zero, setting only the Z flag
;; (CC_Z mode on CC_REGNUM).  The asm template and the second compare
;; operand are not visible in this fragment; the "logic_imm" type suggests
;; a TST-style immediate test — confirm against the full file.
4469 (define_insn "*compareqi_eq0"
4470 [(set (reg:CC_Z CC_REGNUM)
4471 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4475 [(set_attr "conds" "set")
4476 (set_attr "predicable" "yes")
4477 (set_attr "type" "logic_imm")]
4480 (define_expand "extendhisi2"
4481 [(set (match_operand:SI 0 "s_register_operand")
4482 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
4487 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4490 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4492 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4496 if (!arm_arch6 && !MEM_P (operands[1]))
4498 rtx t = gen_lowpart (SImode, operands[1]);
4499 rtx tmp = gen_reg_rtx (SImode);
4500 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4501 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4508 [(set (match_operand:SI 0 "register_operand" "")
4509 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4510 (clobber (match_scratch:SI 2 ""))])]
4512 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4513 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4515 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4518 ;; This pattern will only be used when ldsh is not available
4519 (define_expand "extendhisi2_mem"
4520 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4522 (zero_extend:SI (match_dup 7)))
4523 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4524 (set (match_operand:SI 0 "" "")
4525 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4530 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4532 mem1 = change_address (operands[1], QImode, addr);
4533 mem2 = change_address (operands[1], QImode,
4534 plus_constant (Pmode, addr, 1));
4535 operands[0] = gen_lowpart (SImode, operands[0]);
4537 operands[2] = gen_reg_rtx (SImode);
4538 operands[3] = gen_reg_rtx (SImode);
4539 operands[6] = gen_reg_rtx (SImode);
4542 if (BYTES_BIG_ENDIAN)
4544 operands[4] = operands[2];
4545 operands[5] = operands[3];
4549 operands[4] = operands[3];
4550 operands[5] = operands[2];
4556 [(set (match_operand:SI 0 "register_operand" "")
4557 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4559 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4560 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4562 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; Sign-extend HImode to SImode for pre-v6 ARM with ARMv4 halfword loads.
;; Alternative 1 (register source) costs 8 bytes — per the shift/arith
;; type it is a two-instruction shift sequence; alternative 2 is a 4-byte
;; halfword load (LDRSH).  Asm template lines are missing from this fragment.
4565 (define_insn "*arm_extendhisi2"
4566 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4567 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4568 "TARGET_ARM && arm_arch4 && !arm_arch6"
4572 [(set_attr "length" "8,4")
4573 (set_attr "type" "alu_shift_reg,load_byte")
4574 (set_attr "predicable" "yes")]
4577 ;; ??? Check Thumb-2 pool range
4578 (define_insn "*arm_extendhisi2_v6"
4579 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4580 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4581 "TARGET_32BIT && arm_arch6"
4585 [(set_attr "type" "extend,load_byte")
4586 (set_attr "predicable" "yes")]
;; Fused sign-extend-halfword-and-add: SXTAH rd, rn, rm computes
;; operands[2] + sign_extend(operands[1]) in one instruction.
;; NOTE(review): the enabling condition line is not visible in this
;; fragment (SXTAH requires the DSP/media extensions) — confirm upstream.
4589 (define_insn "*arm_extendhisi2addsi"
4590 [(set (match_operand:SI 0 "s_register_operand" "=r")
4591 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4592 (match_operand:SI 2 "s_register_operand" "r")))]
4594 "sxtah%?\\t%0, %2, %1"
4595 [(set_attr "type" "alu_shift_reg")]
4598 (define_expand "extendqihi2"
4600 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
4602 (set (match_operand:HI 0 "s_register_operand")
4603 (ashiftrt:SI (match_dup 2)
4608 if (arm_arch4 && MEM_P (operands[1]))
4610 emit_insn (gen_rtx_SET (operands[0],
4611 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4614 if (!s_register_operand (operands[1], QImode))
4615 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4616 operands[0] = gen_lowpart (SImode, operands[0]);
4617 operands[1] = gen_lowpart (SImode, operands[1]);
4618 operands[2] = gen_reg_rtx (SImode);
;; Sign-extend a byte loaded from memory directly into an HImode register
;; on ARMv4+ ARM state.  Constraint "Uq" restricts the address to those
;; valid for LDRSB.  The asm template line is missing from this fragment.
4622 (define_insn "*arm_extendqihi_insn"
4623 [(set (match_operand:HI 0 "s_register_operand" "=r")
4624 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4625 "TARGET_ARM && arm_arch4"
4627 [(set_attr "type" "load_byte")
4628 (set_attr "predicable" "yes")]
4631 (define_expand "extendqisi2"
4632 [(set (match_operand:SI 0 "s_register_operand")
4633 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
4636 if (!arm_arch4 && MEM_P (operands[1]))
4637 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4639 if (!arm_arch6 && !MEM_P (operands[1]))
4641 rtx t = gen_lowpart (SImode, operands[1]);
4642 rtx tmp = gen_reg_rtx (SImode);
4643 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4644 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
4650 [(set (match_operand:SI 0 "register_operand" "")
4651 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
4653 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4654 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
4656 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; Sign-extend QImode to SImode for pre-v6 ARM with ARMv4 byte loads.
;; Alternative 1 (register source) is an 8-byte shift sequence;
;; alternative 2 is a 4-byte LDRSB from a "Uq" address.  Asm template
;; lines are missing from this fragment.
4659 (define_insn "*arm_extendqisi"
4660 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4661 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4662 "TARGET_ARM && arm_arch4 && !arm_arch6"
4666 [(set_attr "length" "8,4")
4667 (set_attr "type" "alu_shift_reg,load_byte")
4668 (set_attr "predicable" "yes")]
4671 (define_insn "*arm_extendqisi_v6"
4672 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4674 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4675 "TARGET_ARM && arm_arch6"
4679 [(set_attr "type" "extend,load_byte")
4680 (set_attr "predicable" "yes")]
;; Fused sign-extend-byte-and-add: SXTAB rd, rn, rm computes
;; operands[2] + sign_extend(operands[1]) in one instruction.
;; NOTE(review): the enabling condition line is not visible in this
;; fragment — confirm against the full file.
4683 (define_insn "*arm_extendqisi2addsi"
4684 [(set (match_operand:SI 0 "s_register_operand" "=r")
4685 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4686 (match_operand:SI 2 "s_register_operand" "r")))]
4688 "sxtab%?\\t%0, %2, %1"
4689 [(set_attr "type" "alu_shift_reg")
4690 (set_attr "predicable" "yes")]
;; UXTB16 / SXTB16: dual-halfword byte extension, emitted via the USXTB16
;; unspec iterator; <sup> expands to the signedness prefix ("s"/"u").
;; The unspec wrapper line and enabling condition are missing from this
;; fragment (these are ARMv6 SIMD/DSP instructions — confirm upstream).
4693 (define_insn "arm_<sup>xtb16"
4694 [(set (match_operand:SI 0 "s_register_operand" "=r")
4696 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
4698 "<sup>xtb16%?\\t%0, %1"
4699 [(set_attr "predicable" "yes")
4700 (set_attr "type" "alu_dsp_reg")])
4702 (define_insn "arm_<simd32_op>"
4703 [(set (match_operand:SI 0 "s_register_operand" "=r")
4705 [(match_operand:SI 1 "s_register_operand" "r")
4706 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
4708 "<simd32_op>%?\\t%0, %1, %2"
4709 [(set_attr "predicable" "yes")
4710 (set_attr "type" "alu_dsp_reg")])
;; USADA8: unsigned sum of absolute byte differences of operands 1 and 2,
;; accumulated into operand 3, modelled as an opaque UNSPEC_USADA8.
;; The unspec wrapper line and enabling condition are missing from this
;; fragment (USADA8 is an ARMv6 media instruction — confirm upstream).
4712 (define_insn "arm_usada8"
4713 [(set (match_operand:SI 0 "s_register_operand" "=r")
4715 [(match_operand:SI 1 "s_register_operand" "r")
4716 (match_operand:SI 2 "s_register_operand" "r")
4717 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
4719 "usada8%?\\t%0, %1, %2, %3"
4720 [(set_attr "predicable" "yes")
4721 (set_attr "type" "alu_dsp_reg")])
4723 (define_insn "arm_<simd32_op>"
4724 [(set (match_operand:DI 0 "s_register_operand" "=r")
4726 [(match_operand:SI 1 "s_register_operand" "r")
4727 (match_operand:SI 2 "s_register_operand" "r")
4728 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
4730 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
4731 [(set_attr "predicable" "yes")
4732 (set_attr "type" "smlald")])
4734 (define_expand "extendsfdf2"
4735 [(set (match_operand:DF 0 "s_register_operand")
4736 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
4737 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4741 ;; HFmode -> DFmode conversions where we don't have an instruction for it
4742 ;; must go through SFmode.
4744 ;; This is always safe for an extend.
4746 (define_expand "extendhfdf2"
4747 [(set (match_operand:DF 0 "s_register_operand")
4748 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
4751 /* We don't have a direct instruction for this, so go via SFmode. */
4752 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4755 op1 = convert_to_mode (SFmode, operands[1], 0);
4756 op1 = convert_to_mode (DFmode, op1, 0);
4757 emit_insn (gen_movdf (operands[0], op1));
4760 /* Otherwise, we're done producing RTL and will pick up the correct
4761 pattern to do this with one rounding-step in a single instruction. */
4765 ;; Move insns (including loads and stores)
4767 ;; XXX Just some ideas about movti.
4768 ;; I don't think these are a good idea on the arm, there just aren't enough
4770 ;;(define_expand "loadti"
4771 ;; [(set (match_operand:TI 0 "s_register_operand")
4772 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
4775 ;;(define_expand "storeti"
4776 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
4777 ;; (match_operand:TI 1 "s_register_operand"))]
4780 ;;(define_expand "movti"
4781 ;; [(set (match_operand:TI 0 "general_operand")
4782 ;; (match_operand:TI 1 "general_operand"))]
4788 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
4789 ;; operands[1] = copy_to_reg (operands[1]);
4790 ;; if (MEM_P (operands[0]))
4791 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4792 ;; else if (MEM_P (operands[1]))
4793 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4797 ;; emit_insn (insn);
4801 ;; Recognize garbage generated above.
4804 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4805 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4809 ;; register mem = (which_alternative < 3);
4810 ;; register const char *template;
4812 ;; operands[mem] = XEXP (operands[mem], 0);
4813 ;; switch (which_alternative)
4815 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4816 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4817 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4818 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4819 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4820 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4822 ;; output_asm_insn (template, operands);
4826 (define_expand "movdi"
4827 [(set (match_operand:DI 0 "general_operand")
4828 (match_operand:DI 1 "general_operand"))]
4831 gcc_checking_assert (aligned_operand (operands[0], DImode));
4832 gcc_checking_assert (aligned_operand (operands[1], DImode));
4833 if (can_create_pseudo_p ())
4835 if (!REG_P (operands[0]))
4836 operands[1] = force_reg (DImode, operands[1]);
4838 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
4839 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
4841 /* Avoid LDRD's into an odd-numbered register pair in ARM state
4842 when expanding function calls. */
4843 gcc_assert (can_create_pseudo_p ());
4844 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
4846 /* Perform load into legal reg pair first, then move. */
4847 rtx reg = gen_reg_rtx (DImode);
4848 emit_insn (gen_movdi (reg, operands[1]));
4851 emit_move_insn (gen_lowpart (SImode, operands[0]),
4852 gen_lowpart (SImode, operands[1]));
4853 emit_move_insn (gen_highpart (SImode, operands[0]),
4854 gen_highpart (SImode, operands[1]));
4857 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
4858 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
4860 /* Avoid STRD's from an odd-numbered register pair in ARM state
4861 when expanding function prologue. */
4862 gcc_assert (can_create_pseudo_p ());
4863 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
4864 ? gen_reg_rtx (DImode)
4866 emit_move_insn (gen_lowpart (SImode, split_dest),
4867 gen_lowpart (SImode, operands[1]));
4868 emit_move_insn (gen_highpart (SImode, split_dest),
4869 gen_highpart (SImode, operands[1]));
4870 if (split_dest != operands[0])
4871 emit_insn (gen_movdi (operands[0], split_dest));
4877 (define_insn "*arm_movdi"
4878 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4879 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4881 && !(TARGET_HARD_FLOAT)
4883 && ( register_operand (operands[0], DImode)
4884 || register_operand (operands[1], DImode))"
4886 switch (which_alternative)
4893 /* Cannot load it directly, split to load it via MOV / MOVT. */
4894 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
4898 return output_move_double (operands, true, NULL);
4901 [(set_attr "length" "8,12,16,8,8")
4902 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
4903 (set_attr "arm_pool_range" "*,*,*,1020,*")
4904 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
4905 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
4906 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
4910 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4911 (match_operand:ANY64 1 "immediate_operand" ""))]
4914 && (arm_disable_literal_pool
4915 || (arm_const_double_inline_cost (operands[1])
4916 <= arm_max_const_double_inline_cost ()))"
4919 arm_split_constant (SET, SImode, curr_insn,
4920 INTVAL (gen_lowpart (SImode, operands[1])),
4921 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4922 arm_split_constant (SET, SImode, curr_insn,
4923 INTVAL (gen_highpart_mode (SImode,
4924 GET_MODE (operands[0]),
4926 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4931 ; If optimizing for size, or if we have load delay slots, then
4932 ; we want to split the constant into two separate operations.
4933 ; In both cases this may split a trivial part into a single data op
4934 ; leaving a single complex constant to load. We can also get longer
4935 ; offsets in a LDR which means we get better chances of sharing the pool
4936 ; entries. Finally, we can normally do a better job of scheduling
4937 ; LDR instructions than we can with LDM.
4938 ; This pattern will only match if the one above did not.
4940 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4941 (match_operand:ANY64 1 "const_double_operand" ""))]
4942 "TARGET_ARM && reload_completed
4943 && arm_const_double_by_parts (operands[1])"
4944 [(set (match_dup 0) (match_dup 1))
4945 (set (match_dup 2) (match_dup 3))]
4947 operands[2] = gen_highpart (SImode, operands[0]);
4948 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4950 operands[0] = gen_lowpart (SImode, operands[0]);
4951 operands[1] = gen_lowpart (SImode, operands[1]);
4956 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4957 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4958 "TARGET_EITHER && reload_completed"
4959 [(set (match_dup 0) (match_dup 1))
4960 (set (match_dup 2) (match_dup 3))]
4962 operands[2] = gen_highpart (SImode, operands[0]);
4963 operands[3] = gen_highpart (SImode, operands[1]);
4964 operands[0] = gen_lowpart (SImode, operands[0]);
4965 operands[1] = gen_lowpart (SImode, operands[1]);
4967 /* Handle a partial overlap. */
4968 if (rtx_equal_p (operands[0], operands[3]))
4970 rtx tmp0 = operands[0];
4971 rtx tmp1 = operands[1];
4973 operands[0] = operands[2];
4974 operands[1] = operands[3];
4981 ;; We can't actually do base+index doubleword loads if the index and
4982 ;; destination overlap. Split here so that we at least have chance to
4985 [(set (match_operand:DI 0 "s_register_operand" "")
4986 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4987 (match_operand:SI 2 "s_register_operand" ""))))]
4989 && reg_overlap_mentioned_p (operands[0], operands[1])
4990 && reg_overlap_mentioned_p (operands[0], operands[2])"
4992 (plus:SI (match_dup 1)
4995 (mem:DI (match_dup 4)))]
4997 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5001 (define_expand "movsi"
5002 [(set (match_operand:SI 0 "general_operand")
5003 (match_operand:SI 1 "general_operand"))]
5007 rtx base, offset, tmp;
5009 gcc_checking_assert (aligned_operand (operands[0], SImode));
5010 gcc_checking_assert (aligned_operand (operands[1], SImode));
5011 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5013 /* Everything except mem = const or mem = mem can be done easily. */
5014 if (MEM_P (operands[0]))
5015 operands[1] = force_reg (SImode, operands[1]);
5016 if (arm_general_register_operand (operands[0], SImode)
5017 && CONST_INT_P (operands[1])
5018 && !(const_ok_for_arm (INTVAL (operands[1]))
5019 || const_ok_for_arm (~INTVAL (operands[1]))))
5021 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5023 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5028 arm_split_constant (SET, SImode, NULL_RTX,
5029 INTVAL (operands[1]), operands[0], NULL_RTX,
5030 optimize && can_create_pseudo_p ());
5035 else /* Target doesn't have MOVT... */
5037 if (can_create_pseudo_p ())
5039 if (!REG_P (operands[0]))
5040 operands[1] = force_reg (SImode, operands[1]);
5044 split_const (operands[1], &base, &offset);
5045 if (INTVAL (offset) != 0
5046 && targetm.cannot_force_const_mem (SImode, operands[1]))
5048 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5049 emit_move_insn (tmp, base);
5050 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5054 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5056 /* Recognize the case where operand[1] is a reference to thread-local
5057 data and load its address to a register. Offsets have been split off
5059 if (arm_tls_referenced_p (operands[1]))
5060 operands[1] = legitimize_tls_address (operands[1], tmp);
5062 && (CONSTANT_P (operands[1])
5063 || symbol_mentioned_p (operands[1])
5064 || label_mentioned_p (operands[1])))
5066 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5071 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5072 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5073 ;; so this does not matter.
;; MOVT: write the top 16 bits of a symbolic address into a register that
;; already holds the low half (operand 1 tied to operand 0 via "0,0").
;; Modelled as lo_sum because, per the comment above, ARM's HIGH/LO_SUM
;; convention is inverted.  Alternative 1 is 32-bit ARM/Thumb-2
;; (predicated form), alternative 2 is ARMv8-M Baseline.
5074 (define_insn "*arm_movt"
5075 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5076 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5077 (match_operand:SI 2 "general_operand" "i,i")))]
5078 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5080 movt%?\t%0, #:upper16:%c2
5081 movt\t%0, #:upper16:%c2"
5082 [(set_attr "arch" "32,v8mb")
5083 (set_attr "predicable" "yes")
5084 (set_attr "length" "4")
5085 (set_attr "type" "alu_sreg")]
5088 (define_insn "*arm_movsi_insn"
5089 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5090 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5091 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5092 && ( register_operand (operands[0], SImode)
5093 || register_operand (operands[1], SImode))"
5101 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5102 (set_attr "predicable" "yes")
5103 (set_attr "arch" "*,*,*,v6t2,*,*")
5104 (set_attr "pool_range" "*,*,*,*,4096,*")
5105 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5109 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5110 (match_operand:SI 1 "const_int_operand" ""))]
5111 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5112 && (!(const_ok_for_arm (INTVAL (operands[1]))
5113 || const_ok_for_arm (~INTVAL (operands[1]))))"
5114 [(clobber (const_int 0))]
5116 arm_split_constant (SET, SImode, NULL_RTX,
5117 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5122 ;; A normal way to do (symbol + offset) requires three instructions at least
5123 ;; (depends on how big the offset is) as below:
5124 ;; movw r0, #:lower16:g
5125 ;; movt r0, #:upper16:g
5128 ;; A better way would be:
5129 ;; movw r0, #:lower16:g+4
5130 ;; movt r0, #:upper16:g+4
5132 ;; The limitation of this way is that the length of offset should be a 16-bit
5133 ;; signed value, because current assembler only supports REL type relocation for
5134 ;; such case. If the more powerful RELA type is supported in future, we should
5135 ;; update this pattern to go with better way.
5137 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5138 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5139 (match_operand:SI 2 "const_int_operand" ""))))]
5142 && arm_disable_literal_pool
5144 && GET_CODE (operands[1]) == SYMBOL_REF"
5145 [(clobber (const_int 0))]
5147 int offset = INTVAL (operands[2]);
5149 if (offset < -0x8000 || offset > 0x7fff)
5151 arm_emit_movpair (operands[0], operands[1]);
5152 emit_insn (gen_rtx_SET (operands[0],
5153 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5157 rtx op = gen_rtx_CONST (SImode,
5158 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5159 arm_emit_movpair (operands[0], op);
5164 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5165 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5166 ;; and lo_sum would be merged back into memory load at cprop. However,
5167 ;; if the default is to prefer movt/movw rather than a load from the constant
5168 ;; pool, the performance is better.
5170 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5171 (match_operand:SI 1 "general_operand" ""))]
5172 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5173 && !target_word_relocations
5174 && !arm_tls_referenced_p (operands[1])"
5175 [(clobber (const_int 0))]
5177 arm_emit_movpair (operands[0], operands[1]);
5181 ;; When generating pic, we need to load the symbol offset into a register.
5182 ;; So that the optimizer does not confuse this with a normal symbol load
5183 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5184 ;; since that is the only type of relocation we can use.
5186 ;; Wrap calculation of the whole PIC address in a single pattern for the
5187 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5188 ;; a PIC address involves two loads from memory, so we want to CSE it
5189 ;; as often as possible.
5190 ;; This pattern will be split into one of the pic_load_addr_* patterns
5191 ;; and a move after GCSE optimizations.
5193 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5194 (define_expand "calculate_pic_address"
5195 [(set (match_operand:SI 0 "register_operand")
5196 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5197 (unspec:SI [(match_operand:SI 2 "" "")]
5202 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5204 [(set (match_operand:SI 0 "register_operand" "")
5205 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5206 (unspec:SI [(match_operand:SI 2 "" "")]
5209 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5210 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5211 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5214 ;; operand1 is the memory address to go into
5215 ;; pic_load_addr_32bit.
5216 ;; operand2 is the PIC label to be emitted
5217 ;; from pic_add_dot_plus_eight.
5218 ;; We do this to allow hoisting of the entire insn.
5219 (define_insn_and_split "pic_load_addr_unified"
5220 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5221 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5222 (match_operand:SI 2 "" "")]
5223 UNSPEC_PIC_UNIFIED))]
5226 "&& reload_completed"
5227 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5228 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5229 (match_dup 2)] UNSPEC_PIC_BASE))]
5230 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5231 [(set_attr "type" "load_4,load_4,load_4")
5232 (set_attr "pool_range" "4096,4094,1022")
5233 (set_attr "neg_pool_range" "4084,0,0")
5234 (set_attr "arch" "a,t2,t1")
5235 (set_attr "length" "8,6,4")]
5238 ;; The rather odd constraints on the following are to force reload to leave
5239 ;; the insn alone, and to force the minipool generation pass to then move
5240 ;; the GOT symbol to memory.
;; Load a PIC symbol address from the constant pool (UNSPEC_PIC_SYM keeps
;; the optimizers from treating it as an ordinary symbol load; see the
;; comment above about forcing the GOT symbol to memory).  pool_range /
;; neg_pool_range differ between ARM and Thumb-2 via the is_thumb test;
;; the actual range constants are missing from this fragment.
5242 (define_insn "pic_load_addr_32bit"
5243 [(set (match_operand:SI 0 "s_register_operand" "=r")
5244 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5245 "TARGET_32BIT && flag_pic"
5247 [(set_attr "type" "load_4")
5248 (set (attr "pool_range")
5249 (if_then_else (eq_attr "is_thumb" "no")
5252 (set (attr "neg_pool_range")
5253 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant of the PIC symbol load: destination restricted to a
;; low register ("=l"), with the smaller Thumb-1 literal-pool range of
;; 1018 bytes.  The asm template line is missing from this fragment.
5258 (define_insn "pic_load_addr_thumb1"
5259 [(set (match_operand:SI 0 "s_register_operand" "=l")
5260 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5261 "TARGET_THUMB1 && flag_pic"
5263 [(set_attr "type" "load_4")
5264 (set (attr "pool_range") (const_int 1018))]
;; Thumb form of the PIC base fix-up: emit a local "LPIC<n>" label (n is
;; operand 2) then ADD the PC into operand 0, which already holds the
;; pool value (operand 1 tied via "0").  2-byte encoding, hence the name
;; "plus four" — the Thumb PC reads as dot + 4.
5267 (define_insn "pic_add_dot_plus_four"
5268 [(set (match_operand:SI 0 "register_operand" "=r")
5269 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5271 (match_operand 2 "" "")]
5275 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5276 INTVAL (operands[2]));
5277 return \"add\\t%0, %|pc\";
5279 [(set_attr "length" "2")
5280 (set_attr "type" "alu_sreg")]
;; ARM form of the PIC base fix-up: emit the "LPIC<n>" label then
;; ADD %0, pc, %1 — in ARM state the PC reads as dot + 8, hence the name.
;; Predicable, unlike the 2-byte Thumb variant above.
5283 (define_insn "pic_add_dot_plus_eight"
5284 [(set (match_operand:SI 0 "register_operand" "=r")
5285 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5287 (match_operand 2 "" "")]
5291 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5292 INTVAL (operands[2]));
5293 return \"add%?\\t%0, %|pc, %1\";
5295 [(set_attr "predicable" "yes")
5296 (set_attr "type" "alu_sreg")]
;; Fused form of pic_add_dot_plus_eight followed by a load (see the
;; peephole comment below): emit the "LPIC<n>" label, then load from
;; [pc, %1] in one LDR instead of an ADD + LDR pair.
5299 (define_insn "tls_load_dot_plus_eight"
5300 [(set (match_operand:SI 0 "register_operand" "=r")
5301 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5303 (match_operand 2 "" "")]
5307 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5308 INTVAL (operands[2]));
5309 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5311 [(set_attr "predicable" "yes")
5312 (set_attr "type" "load_4")]
5315 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5316 ;; followed by a load. These sequences can be crunched down to
5317 ;; tls_load_dot_plus_eight by a peephole.
5320 [(set (match_operand:SI 0 "register_operand" "")
5321 (unspec:SI [(match_operand:SI 3 "register_operand" "")
5323 (match_operand 1 "" "")]
5325 (set (match_operand:SI 2 "arm_general_register_operand" "")
5326 (mem:SI (match_dup 0)))]
5327 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5329 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load a word at base register + an opaque
;; UNSPEC_PIC_OFFSET displacement with a single register-offset LDR.
;; ARM state only.
5336 (define_insn "pic_offset_arm"
5337 [(set (match_operand:SI 0 "register_operand" "=r")
5338 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5339 (unspec:SI [(match_operand:SI 2 "" "X")]
5340 UNSPEC_PIC_OFFSET))))]
5341 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5342 "ldr%?\\t%0, [%1,%2]"
5343 [(set_attr "type" "load_4")]
5346 (define_expand "builtin_setjmp_receiver"
5347 [(label_ref (match_operand 0 "" ""))]
5351 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5353 if (arm_pic_register != INVALID_REGNUM)
5354 arm_load_pic_register (1UL << 3, NULL_RTX);
5358 ;; If copying one reg to another we can set the condition codes according to
5359 ;; its value. Such a move is common after a return from subroutine and the
5360 ;; result is being tested against zero.
5362 (define_insn "*movsi_compare0"
5363 [(set (reg:CC CC_REGNUM)
5364 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5366 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5371 subs%?\\t%0, %1, #0"
5372 [(set_attr "conds" "set")
5373 (set_attr "type" "alus_imm,alus_imm")]
5376 ;; Subroutine to store a half word from a register into memory.
5377 ;; Operand 0 is the source register (HImode)
5378 ;; Operand 1 is the destination address in a register (SImode)
5380 ;; In both this routine and the next, we must be careful not to spill
5381 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5382 ;; can generate unrecognizable rtl.
5384 (define_expand "storehi"
5385 [;; store the low byte
5386 (set (match_operand 1 "" "") (match_dup 3))
5387 ;; extract the high byte
5389 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5390 ;; store the high byte
5391 (set (match_dup 4) (match_dup 5))]
5395 rtx op1 = operands[1];
5396 rtx addr = XEXP (op1, 0);
5397 enum rtx_code code = GET_CODE (addr);
5399 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5401 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5403 operands[4] = adjust_address (op1, QImode, 1);
5404 operands[1] = adjust_address (operands[1], QImode, 0);
5405 operands[3] = gen_lowpart (QImode, operands[0]);
5406 operands[0] = gen_lowpart (SImode, operands[0]);
5407 operands[2] = gen_reg_rtx (SImode);
5408 operands[5] = gen_lowpart (QImode, operands[2]);
5412 (define_expand "storehi_bigend"
5413 [(set (match_dup 4) (match_dup 3))
5415 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5416 (set (match_operand 1 "" "") (match_dup 5))]
5420 rtx op1 = operands[1];
5421 rtx addr = XEXP (op1, 0);
5422 enum rtx_code code = GET_CODE (addr);
5424 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5426 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5428 operands[4] = adjust_address (op1, QImode, 1);
5429 operands[1] = adjust_address (operands[1], QImode, 0);
5430 operands[3] = gen_lowpart (QImode, operands[0]);
5431 operands[0] = gen_lowpart (SImode, operands[0]);
5432 operands[2] = gen_reg_rtx (SImode);
5433 operands[5] = gen_lowpart (QImode, operands[2]);
5437 ;; Subroutine to store a half word integer constant into memory.
5438 (define_expand "storeinthi"
5439 [(set (match_operand 0 "" "")
5440 (match_operand 1 "" ""))
5441 (set (match_dup 3) (match_dup 2))]
5445 HOST_WIDE_INT value = INTVAL (operands[1]);
5446 rtx addr = XEXP (operands[0], 0);
5447 rtx op0 = operands[0];
5448 enum rtx_code code = GET_CODE (addr);
5450 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5452 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5454 operands[1] = gen_reg_rtx (SImode);
5455 if (BYTES_BIG_ENDIAN)
5457 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5458 if ((value & 255) == ((value >> 8) & 255))
5459 operands[2] = operands[1];
5462 operands[2] = gen_reg_rtx (SImode);
5463 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5468 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5469 if ((value & 255) == ((value >> 8) & 255))
5470 operands[2] = operands[1];
5473 operands[2] = gen_reg_rtx (SImode);
5474 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5478 operands[3] = adjust_address (op0, QImode, 1);
5479 operands[0] = adjust_address (operands[0], QImode, 0);
5480 operands[2] = gen_lowpart (QImode, operands[2]);
5481 operands[1] = gen_lowpart (QImode, operands[1]);
5485 (define_expand "storehi_single_op"
5486 [(set (match_operand:HI 0 "memory_operand")
5487 (match_operand:HI 1 "general_operand"))]
5488 "TARGET_32BIT && arm_arch4"
5490 if (!s_register_operand (operands[1], HImode))
5491 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; Expand HImode moves.  The preparation code dispatches on the target
;; variant (ARM / Thumb-2 / Thumb-1 branches are visible below) and on
;; the operand kinds: stores go through storehi* helpers, constants are
;; legitimized into an SImode register, and pre-ARMv4 loads (no ldrh)
;; are synthesized from wider or byte accesses.
;; NOTE(review): this listing omits a number of original lines
;; (numbering gaps, e.g. 5497->5500, 5504->5506), including braces and
;; some emitted-insn lines, so the control flow shown here is partial.
5495 (define_expand "movhi"
5496 [(set (match_operand:HI 0 "general_operand")
5497 (match_operand:HI 1 "general_operand"))]
5500 gcc_checking_assert (aligned_operand (operands[0], HImode));
5501 gcc_checking_assert (aligned_operand (operands[1], HImode));
5504 if (can_create_pseudo_p ())
5506 if (MEM_P (operands[0]))
5510 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5513 if (CONST_INT_P (operands[1]))
5514 emit_insn (gen_storeinthi (operands[0], operands[1]));
5517 if (MEM_P (operands[1]))
5518 operands[1] = force_reg (HImode, operands[1]);
5519 if (BYTES_BIG_ENDIAN)
5520 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5522 emit_insn (gen_storehi (operands[1], operands[0]));
5526 /* Sign extend a constant, and keep it in an SImode reg. */
5527 else if (CONST_INT_P (operands[1]))
5529 rtx reg = gen_reg_rtx (SImode);
5530 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5532 /* If the constant is already valid, leave it alone. */
5533 if (!const_ok_for_arm (val))
5535 /* If setting all the top bits will make the constant
5536 loadable in a single instruction, then set them.
5537 Otherwise, sign extend the number. */
5539 if (const_ok_for_arm (~(val | ~0xffff)))
5541 else if (val & 0x8000)
5545 emit_insn (gen_movsi (reg, GEN_INT (val)));
5546 operands[1] = gen_lowpart (HImode, reg);
5548 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5549 && MEM_P (operands[1]))
5551 rtx reg = gen_reg_rtx (SImode);
5553 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5554 operands[1] = gen_lowpart (HImode, reg);
5556 else if (!arm_arch4)
5558 if (MEM_P (operands[1]))
5561 rtx offset = const0_rtx;
5562 rtx reg = gen_reg_rtx (SImode);
5564 if ((REG_P (base = XEXP (operands[1], 0))
5565 || (GET_CODE (base) == PLUS
5566 && (CONST_INT_P (offset = XEXP (base, 1)))
5567 && ((INTVAL(offset) & 1) != 1)
5568 && REG_P (base = XEXP (base, 0))))
5569 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5573 new_rtx = widen_memory_access (operands[1], SImode,
5574 ((INTVAL (offset) & ~3)
5575 - INTVAL (offset)));
5576 emit_insn (gen_movsi (reg, new_rtx));
5577 if (((INTVAL (offset) & 2) != 0)
5578 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5580 rtx reg2 = gen_reg_rtx (SImode);
5582 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5587 emit_insn (gen_movhi_bytes (reg, operands[1]));
5589 operands[1] = gen_lowpart (HImode, reg);
5593 /* Handle loading a large integer during reload. */
5594 else if (CONST_INT_P (operands[1])
5595 && !const_ok_for_arm (INTVAL (operands[1]))
5596 && !const_ok_for_arm (~INTVAL (operands[1])))
5598 /* Writing a constant to memory needs a scratch, which should
5599 be handled with SECONDARY_RELOADs. */
5600 gcc_assert (REG_P (operands[0]));
5602 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5603 emit_insn (gen_movsi (operands[0], operands[1]));
5607 else if (TARGET_THUMB2)
5609 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5610 if (can_create_pseudo_p ())
5612 if (!REG_P (operands[0]))
5613 operands[1] = force_reg (HImode, operands[1]);
5614 /* Zero extend a constant, and keep it in an SImode reg. */
5615 else if (CONST_INT_P (operands[1]))
5617 rtx reg = gen_reg_rtx (SImode);
5618 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5620 emit_insn (gen_movsi (reg, GEN_INT (val)));
5621 operands[1] = gen_lowpart (HImode, reg);
5625 else /* TARGET_THUMB1 */
5627 if (can_create_pseudo_p ())
5629 if (CONST_INT_P (operands[1]))
5631 rtx reg = gen_reg_rtx (SImode);
5633 emit_insn (gen_movsi (reg, operands[1]));
5634 operands[1] = gen_lowpart (HImode, reg);
5637 /* ??? We shouldn't really get invalid addresses here, but this can
5638 happen if we are passed a SP (never OK for HImode/QImode) or
5639 virtual register (also rejected as illegitimate for HImode/QImode)
5640 relative address. */
5641 /* ??? This should perhaps be fixed elsewhere, for instance, in
5642 fixup_stack_1, by checking for other kinds of invalid addresses,
5643 e.g. a bare reference to a virtual register. This may confuse the
5644 alpha though, which must handle this case differently. */
5645 if (MEM_P (operands[0])
5646 && !memory_address_p (GET_MODE (operands[0]),
5647 XEXP (operands[0], 0)))
5649 = replace_equiv_address (operands[0],
5650 copy_to_reg (XEXP (operands[0], 0)));
5652 if (MEM_P (operands[1])
5653 && !memory_address_p (GET_MODE (operands[1]),
5654 XEXP (operands[1], 0)))
5656 = replace_equiv_address (operands[1],
5657 copy_to_reg (XEXP (operands[1], 0)));
5659 if (MEM_P (operands[1]) && optimize > 0)
5661 rtx reg = gen_reg_rtx (SImode);
5663 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5664 operands[1] = gen_lowpart (HImode, reg);
5667 if (MEM_P (operands[0]))
5668 operands[1] = force_reg (HImode, operands[1]);
5670 else if (CONST_INT_P (operands[1])
5671 && !satisfies_constraint_I (operands[1]))
5673 /* Handle loading a large integer during reload. */
5675 /* Writing a constant to memory needs a scratch, which should
5676 be handled with SECONDARY_RELOADs. */
5677 gcc_assert (REG_P (operands[0]));
5679 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5680 emit_insn (gen_movsi (operands[0], operands[1]));
;; movhi_bytes: load an HImode value as two separate QImode loads that
;; are zero-extended and combined with (high << 8) | low.  Used when no
;; halfword load instruction is available (pre-ARMv4).  operands[4]/[5]
;; are swapped below according to endianness so the high byte lands in
;; the shifted position.
;; NOTE(review): several lines of the RTL template and braces are
;; missing from this listing (gaps around 5689, 5693-5696).
5687 (define_expand "movhi_bytes"
5688 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5690 (zero_extend:SI (match_dup 6)))
5691 (set (match_operand:SI 0 "" "")
5692 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5697 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5699 mem1 = change_address (operands[1], QImode, addr);
5700 mem2 = change_address (operands[1], QImode,
5701 plus_constant (Pmode, addr, 1));
5702 operands[0] = gen_lowpart (SImode, operands[0]);
5704 operands[2] = gen_reg_rtx (SImode);
5705 operands[3] = gen_reg_rtx (SImode);
5708 if (BYTES_BIG_ENDIAN)
5710 operands[4] = operands[2];
5711 operands[5] = operands[3];
5715 operands[4] = operands[3];
5716 operands[5] = operands[2];
;; movhi_bigend: big-endian HImode load implemented as an SImode load
;; rotated/shifted so the wanted halfword ends up in the low bits
;; (arithmetic shift right by 16 of the rotated word).
5721 (define_expand "movhi_bigend"
5723 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
5726 (ashiftrt:SI (match_dup 2) (const_int 16)))
5727 (set (match_operand:HI 0 "s_register_operand")
5731 operands[2] = gen_reg_rtx (SImode);
5732 operands[3] = gen_reg_rtx (SImode);
5733 operands[4] = gen_lowpart (HImode, operands[3]);
5737 ;; Pattern to recognize insn generated default case above
;; *movhi_insn_arch4: the real HImode move insn for ARMv4+ soft-float.
;; Alternatives: reg<-reg (mov), reg<-inverted-imm (mvn), reg<-imm16
;; (movw, v6t2 only per the "arch" attr), mem<-reg (strh), reg<-mem
;; (ldrh).  pool_range/neg_pool_range bound literal-pool offsets for
;; the load alternative.
5738 (define_insn "*movhi_insn_arch4"
5739 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
5740 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
5742 && arm_arch4 && !TARGET_HARD_FLOAT
5743 && (register_operand (operands[0], HImode)
5744 || register_operand (operands[1], HImode))"
5746 mov%?\\t%0, %1\\t%@ movhi
5747 mvn%?\\t%0, #%B1\\t%@ movhi
5748 movw%?\\t%0, %L1\\t%@ movhi
5749 strh%?\\t%1, %0\\t%@ movhi
5750 ldrh%?\\t%0, %1\\t%@ movhi"
5751 [(set_attr "predicable" "yes")
5752 (set_attr "pool_range" "*,*,*,*,256")
5753 (set_attr "neg_pool_range" "*,*,*,*,244")
5754 (set_attr "arch" "*,*,v6t2,*,*")
5755 (set_attr_alternative "type"
5756 [(if_then_else (match_operand 1 "const_int_operand" "")
5757 (const_string "mov_imm" )
5758 (const_string "mov_reg"))
5759 (const_string "mvn_imm")
5760 (const_string "mov_imm")
5761 (const_string "store_4")
5762 (const_string "load_4")])]
;; *movhi_bytes: HImode register/immediate moves for ARM soft-float
;; (no memory alternatives) — plain mov or mvn of the inverted value.
5765 (define_insn "*movhi_bytes"
5766 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
5767 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
5768 "TARGET_ARM && !TARGET_HARD_FLOAT"
5770 mov%?\\t%0, %1\\t%@ movhi
5771 mov%?\\t%0, %1\\t%@ movhi
5772 mvn%?\\t%0, #%B1\\t%@ movhi"
5773 [(set_attr "predicable" "yes")
5774 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
5777 ;; We use a DImode scratch because we may occasionally need an additional
5778 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5779 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5780 ;; The reload_in<m> and reload_out<m> patterns require special constraints
5781 ;; to be correctly handled in default_secondary_reload function.
;; reload_outhi: secondary-reload expander for storing HImode to a
;; non-offsettable address; dispatches to the ARM or Thumb helper in C.
5782 (define_expand "reload_outhi"
5783 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5784 (match_operand:HI 1 "s_register_operand" "r")
5785 (match_operand:DI 2 "s_register_operand" "=&l")])]
5788 arm_reload_out_hi (operands);
5790 thumb_reload_out_hi (operands);
;; reload_inhi: the matching secondary-reload expander for HImode loads.
;; NOTE(review): the Thumb path calls thumb_reload_out_hi here as well —
;; presumably intentional (shared helper), but worth confirming upstream.
5795 (define_expand "reload_inhi"
5796 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5797 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5798 (match_operand:DI 2 "s_register_operand" "=&r")])]
5802 arm_reload_in_hi (operands);
5804 thumb_reload_out_hi (operands);
;; Expand QImode moves.  Constants are first moved into an SImode
;; pseudo (masked to 8 bits for Thumb so a movs encoding is likely),
;; invalid addresses are legitimized by copying the address to a
;; register, and mem=mem / mem=const forms are broken up.
;; NOTE(review): lines are missing from this listing (numbering gaps),
;; including braces and at least one target test around 5823.
5808 (define_expand "movqi"
5809 [(set (match_operand:QI 0 "general_operand")
5810 (match_operand:QI 1 "general_operand"))]
5813 /* Everything except mem = const or mem = mem can be done easily */
5815 if (can_create_pseudo_p ())
5817 if (CONST_INT_P (operands[1]))
5819 rtx reg = gen_reg_rtx (SImode);
5821 /* For thumb we want an unsigned immediate, then we are more likely
5822 to be able to use a movs insn. */
5824 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5826 emit_insn (gen_movsi (reg, operands[1]));
5827 operands[1] = gen_lowpart (QImode, reg);
5832 /* ??? We shouldn't really get invalid addresses here, but this can
5833 happen if we are passed a SP (never OK for HImode/QImode) or
5834 virtual register (also rejected as illegitimate for HImode/QImode)
5835 relative address. */
5836 /* ??? This should perhaps be fixed elsewhere, for instance, in
5837 fixup_stack_1, by checking for other kinds of invalid addresses,
5838 e.g. a bare reference to a virtual register. This may confuse the
5839 alpha though, which must handle this case differently. */
5840 if (MEM_P (operands[0])
5841 && !memory_address_p (GET_MODE (operands[0]),
5842 XEXP (operands[0], 0)))
5844 = replace_equiv_address (operands[0],
5845 copy_to_reg (XEXP (operands[0], 0)));
5846 if (MEM_P (operands[1])
5847 && !memory_address_p (GET_MODE (operands[1]),
5848 XEXP (operands[1], 0)))
5850 = replace_equiv_address (operands[1],
5851 copy_to_reg (XEXP (operands[1], 0)));
5854 if (MEM_P (operands[1]) && optimize > 0)
5856 rtx reg = gen_reg_rtx (SImode);
5858 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5859 operands[1] = gen_lowpart (QImode, reg);
5862 if (MEM_P (operands[0]))
5863 operands[1] = force_reg (QImode, operands[1]);
5865 else if (TARGET_THUMB
5866 && CONST_INT_P (operands[1])
5867 && !satisfies_constraint_I (operands[1]))
5869 /* Handle loading a large integer during reload. */
5871 /* Writing a constant to memory needs a scratch, which should
5872 be handled with SECONDARY_RELOADs. */
5873 gcc_assert (REG_P (operands[0]));
5875 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5876 emit_insn (gen_movsi (operands[0], operands[1]));
;; *arm_movqi_insn: QImode move insn with 16-bit Thumb-2 (t2)
;; alternatives alongside the universal 32-bit ones; the "length" attr
;; records the 2- vs 4-byte encodings accordingly.
;; NOTE(review): the assembler output template (original lines
;; 5888-5897) is missing from this listing.
5882 (define_insn "*arm_movqi_insn"
5883 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
5884 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
5886 && ( register_operand (operands[0], QImode)
5887 || register_operand (operands[1], QImode))"
5898 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
5899 (set_attr "predicable" "yes")
5900 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
5901 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
5902 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; Expand HFmode (half-precision float) moves.  Stores force the source
;; into a register; on the Thumb-1 path a non-register destination also
;; forces the source into a register.
;; NOTE(review): braces and the enclosing target test between 5916 and
;; 5918 are missing from this listing.
5906 (define_expand "movhf"
5907 [(set (match_operand:HF 0 "general_operand")
5908 (match_operand:HF 1 "general_operand"))]
5911 gcc_checking_assert (aligned_operand (operands[0], HFmode));
5912 gcc_checking_assert (aligned_operand (operands[1], HFmode));
5915 if (MEM_P (operands[0]))
5916 operands[1] = force_reg (HFmode, operands[1]);
5918 else /* TARGET_THUMB1 */
5920 if (can_create_pseudo_p ())
5922 if (!REG_P (operands[0]))
5923 operands[1] = force_reg (HFmode, operands[1]);
;; *arm32_movhf: HFmode moves without hardware FP (__fp16 values kept
;; in core registers).  Loads/stores use ldrh/strh; a constant is
;; materialized from its target bit pattern — movw when Thumb-2-era
;; encodings are available, otherwise mov of the high byte + orr of the
;; low byte (hence the 8-byte "length" for alternative 3).
5929 (define_insn "*arm32_movhf"
5930 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5931 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
5932 "TARGET_32BIT && !TARGET_HARD_FLOAT
5933 && ( s_register_operand (operands[0], HFmode)
5934 || s_register_operand (operands[1], HFmode))"
5936 switch (which_alternative)
5938 case 0: /* ARM register from memory */
5939 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
5940 case 1: /* memory from ARM register */
5941 return \"strh%?\\t%1, %0\\t%@ __fp16\";
5942 case 2: /* ARM register from ARM register */
5943 return \"mov%?\\t%0, %1\\t%@ __fp16\";
5944 case 3: /* ARM register from constant */
5949 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
5951 ops[0] = operands[0];
5952 ops[1] = GEN_INT (bits);
5953 ops[2] = GEN_INT (bits & 0xff00);
5954 ops[3] = GEN_INT (bits & 0x00ff);
5956 if (arm_arch_thumb2)
5957 output_asm_insn (\"movw%?\\t%0, %1\", ops);
5959 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
5966 [(set_attr "conds" "unconditional")
5967 (set_attr "type" "load_4,store_4,mov_reg,multiple")
5968 (set_attr "length" "4,4,4,8")
5969 (set_attr "predicable" "yes")]
;; Expand SFmode moves.  Besides forcing store sources into registers,
;; when the literal pool is disabled a hard-float constant that is not
;; encodable as a VFP immediate is loaded through a clobber pattern so
;; it can be built in a GPR with MOV/MOVT.
;; NOTE(review): braces, target tests and the tail of the final
;; emit_insn call are missing from this listing (gaps after 6002).
5972 (define_expand "movsf"
5973 [(set (match_operand:SF 0 "general_operand")
5974 (match_operand:SF 1 "general_operand"))]
5977 gcc_checking_assert (aligned_operand (operands[0], SFmode));
5978 gcc_checking_assert (aligned_operand (operands[1], SFmode));
5981 if (MEM_P (operands[0]))
5982 operands[1] = force_reg (SFmode, operands[1]);
5984 else /* TARGET_THUMB1 */
5986 if (can_create_pseudo_p ())
5988 if (!REG_P (operands[0]))
5989 operands[1] = force_reg (SFmode, operands[1]);
5993 /* Cannot load it directly, generate a load with clobber so that it can be
5994 loaded via GPR with MOV / MOVT. */
5995 if (arm_disable_literal_pool
5996 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
5997 && CONST_DOUBLE_P (operands[1])
5998 && TARGET_HARD_FLOAT
5999 && !vfp3_const_double_rtx (operands[1]))
6001 rtx clobreg = gen_reg_rtx (SFmode);
6002 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6009 ;; Transform a floating-point move of a constant into a core register into
6010 ;; an SImode operation.
;; Splitter: rewrite (set SF-GPR SF-const) as the equivalent SImode
;; set of the same bits; bails out (FAIL, presumably — line missing)
;; if either lowpart cannot be taken.
6012 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6013 (match_operand:SF 1 "immediate_operand" ""))]
6016 && CONST_DOUBLE_P (operands[1])"
6017 [(set (match_dup 2) (match_dup 3))]
6019 operands[2] = gen_lowpart (SImode, operands[0]);
6020 operands[3] = gen_lowpart (SImode, operands[1]);
6021 if (operands[2] == 0 || operands[3] == 0)
;; *arm_movsf_soft_insn: SFmode moves through core registers for
;; soft-float — mov / ldr / str.  The ldr alternative defers to a
;; split when the literal pool is disabled (case 1 below).
6026 (define_insn "*arm_movsf_soft_insn"
6027 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6028 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6030 && TARGET_SOFT_FLOAT
6031 && (!MEM_P (operands[0])
6032 || register_operand (operands[1], SFmode))"
6034 switch (which_alternative)
6036 case 0: return \"mov%?\\t%0, %1\";
6038 /* Cannot load it directly, split to load it via MOV / MOVT. */
6039 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6041 return \"ldr%?\\t%0, %1\\t%@ float\";
6042 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6043 default: gcc_unreachable ();
6046 [(set_attr "predicable" "yes")
6047 (set_attr "type" "mov_reg,load_4,store_4")
6048 (set_attr "arm_pool_range" "*,4096,*")
6049 (set_attr "thumb2_pool_range" "*,4094,*")
6050 (set_attr "arm_neg_pool_range" "*,4084,*")
6051 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6054 ;; Splitter for the above.
;; With the literal pool disabled, materialize an SF constant by moving
;; its 32-bit target image into the SImode view of the destination.
6056 [(set (match_operand:SF 0 "s_register_operand")
6057 (match_operand:SF 1 "const_double_operand"))]
6058 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6062 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6063 rtx cst = gen_int_mode (buf, SImode);
6064 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; Expand DFmode moves; structure parallels movsf above.  When the
;; literal pool is disabled, hard-float DF constants not encodable as
;; immediates are loaded via a clobber pattern (MOV/MOVT through GPRs).
;; NOTE(review): braces, target tests and the tail of the final
;; emit_insn call are missing from this listing (gap after 6100).
6069 (define_expand "movdf"
6070 [(set (match_operand:DF 0 "general_operand")
6071 (match_operand:DF 1 "general_operand"))]
6074 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6075 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6078 if (MEM_P (operands[0]))
6079 operands[1] = force_reg (DFmode, operands[1]);
6081 else /* TARGET_THUMB */
6083 if (can_create_pseudo_p ())
6085 if (!REG_P (operands[0]))
6086 operands[1] = force_reg (DFmode, operands[1]);
6090 /* Cannot load it directly, generate a load with clobber so that it can be
6091 loaded via GPR with MOV / MOVT. */
6092 if (arm_disable_literal_pool
6093 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6094 && CONSTANT_P (operands[1])
6095 && TARGET_HARD_FLOAT
6096 && !arm_const_double_rtx (operands[1])
6097 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6099 rtx clobreg = gen_reg_rtx (DFmode);
6100 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6107 ;; Reloading a df mode value stored in integer regs to memory can require a
6109 ;; Another reload_out<m> pattern that requires special constraints.
;; reload_outdf: secondary reload for storing a DFmode value (held in
;; core registers) to memory.  Dispatches on the address form: simple
;; reg and auto-inc/dec addresses go out as a DImode move; otherwise
;; the address is computed into the scratch (operands[2]) and the store
;; is rewritten against it, with a compensating subtract for POST_DEC.
6110 (define_expand "reload_outdf"
6111 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6112 (match_operand:DF 1 "s_register_operand" "r")
6113 (match_operand:SI 2 "s_register_operand" "=&r")]
6117 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6120 operands[2] = XEXP (operands[0], 0);
6121 else if (code == POST_INC || code == PRE_DEC)
6123 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6124 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6125 emit_insn (gen_movdi (operands[0], operands[1]));
6128 else if (code == PRE_INC)
6130 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6132 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6135 else if (code == POST_DEC)
6136 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6138 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6139 XEXP (XEXP (operands[0], 0), 1)));
6141 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6144 if (code == POST_DEC)
6145 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; *movdf_soft_insn: DFmode moves through core register pairs for
;; soft-float.  Register/constant alternatives (Da/Db/Dc classes) are
;; emitted by output_move_double; the mF alternative is a literal-pool
;; load that splits when the pool is disabled.
;; NOTE(review): the early switch cases (6159-6164) are missing from
;; this listing.
6151 (define_insn "*movdf_soft_insn"
6152 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6153 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6154 "TARGET_32BIT && TARGET_SOFT_FLOAT
6155 && ( register_operand (operands[0], DFmode)
6156 || register_operand (operands[1], DFmode))"
6158 switch (which_alternative)
6165 /* Cannot load it directly, split to load it via MOV / MOVT. */
6166 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6170 return output_move_double (operands, true, NULL);
6173 [(set_attr "length" "8,12,16,8,8")
6174 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6175 (set_attr "arm_pool_range" "*,*,*,1020,*")
6176 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6177 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6178 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6181 ;; Splitter for the above.
;; Build the 64-bit target image of the DF constant (word order fixed
;; up for endianness) and move it as one DImode constant.
6183 [(set (match_operand:DF 0 "s_register_operand")
6184 (match_operand:DF 1 "const_double_operand"))]
6185 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6189 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6190 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6191 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6192 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6193 rtx cst = gen_int_mode (ival, DImode);
6194 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6200 ;; load- and store-multiple insns
6201 ;; The arm can load/store any set of registers, provided that they are in
6202 ;; ascending order, but these expanders assume a contiguous set.
;; load_multiple: standard named pattern; FAILs (tail missing from this
;; listing) unless the request is a contiguous run of 2..MAX_LDM_STM_OPS
;; core registers that stays at or below LAST_ARM_REGNUM (r15), then
;; emits an LDM-style parallel via arm_gen_load_multiple.
6204 (define_expand "load_multiple"
6205 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6206 (match_operand:SI 1 "" ""))
6207 (use (match_operand:SI 2 "" ""))])]
6210 HOST_WIDE_INT offset = 0;
6212 /* Support only fixed point registers. */
6213 if (!CONST_INT_P (operands[2])
6214 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6215 || INTVAL (operands[2]) < 2
6216 || !MEM_P (operands[1])
6217 || !REG_P (operands[0])
6218 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6219 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6223 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6224 INTVAL (operands[2]),
6225 force_reg (SImode, XEXP (operands[1], 0)),
6226 FALSE, operands[1], &offset);
;; store_multiple: mirror of load_multiple for STM, with operands 0/1
;; swapped (0 = memory destination, 1 = first source register).
6229 (define_expand "store_multiple"
6230 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6231 (match_operand:SI 1 "" ""))
6232 (use (match_operand:SI 2 "" ""))])]
6235 HOST_WIDE_INT offset = 0;
6237 /* Support only fixed point registers. */
6238 if (!CONST_INT_P (operands[2])
6239 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6240 || INTVAL (operands[2]) < 2
6241 || !REG_P (operands[1])
6242 || !MEM_P (operands[0])
6243 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6244 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6248 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6249 INTVAL (operands[2]),
6250 force_reg (SImode, XEXP (operands[0], 0)),
6251 FALSE, operands[0], &offset);
;; setmemsi: standard block-set pattern; succeeds only when
;; arm_gen_setmem can expand it inline (otherwise FAILs — tail lines
;; missing from this listing).
6255 (define_expand "setmemsi"
6256 [(match_operand:BLK 0 "general_operand")
6257 (match_operand:SI 1 "const_int_operand")
6258 (match_operand:SI 2 "const_int_operand")
6259 (match_operand:SI 3 "const_int_operand")]
6262 if (arm_gen_setmem (operands))
6269 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6270 ;; We could let this apply for blocks of less than this, but it clobbers so
6271 ;; many registers that there is then probably a better way.
;; cpymemqi: block copy.  32-bit targets try the ldrd/strd expansion
;; first (when tuned for it and not optimizing for size), then the
;; generic inline copy; Thumb-1 only handles word-aligned copies of at
;; most 48 bytes.
6273 (define_expand "cpymemqi"
6274 [(match_operand:BLK 0 "general_operand")
6275 (match_operand:BLK 1 "general_operand")
6276 (match_operand:SI 2 "const_int_operand")
6277 (match_operand:SI 3 "const_int_operand")]
6282 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
6283 && !optimize_function_for_size_p (cfun))
6285 if (gen_cpymem_ldrd_strd (operands))
6290 if (arm_gen_cpymemqi (operands))
6294 else /* TARGET_THUMB1 */
6296 if ( INTVAL (operands[3]) != 4
6297 || INTVAL (operands[2]) > 48)
6300 thumb_expand_cpymemqi (operands);
6307 ;; Compare & branch insns
6308 ;; The range calculations are based as follows:
6309 ;; For forward branches, the address calculation returns the address of
6310 ;; the next instruction. This is 2 beyond the branch instruction.
6311 ;; For backward branches, the address calculation returns the address of
6312 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6313 ;; instruction for the shortest sequence, and 4 before the branch instruction
6314 ;; if we have to jump around an unconditional branch.
6315 ;; To the basic branch range the PC offset must be added (this is +4).
6316 ;; So for forward branches we have
6317 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6318 ;; And for backward branches we have
6319 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6321 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6322 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; cbranchsi4: standard SImode compare-and-branch.  The 32-bit path
;; validates the comparison and emits a cbranch_cc; the Thumb-1 path
;; (lines partially missing from this listing) prefers the negated-
;; constant scratch form, else forces op2 into a cmp-compatible form.
6324 (define_expand "cbranchsi4"
6325 [(set (pc) (if_then_else
6326 (match_operator 0 "expandable_comparison_operator"
6327 [(match_operand:SI 1 "s_register_operand")
6328 (match_operand:SI 2 "nonmemory_operand")])
6329 (label_ref (match_operand 3 "" ""))
6335 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6337 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6341 if (thumb1_cmpneg_operand (operands[2], SImode))
6343 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6344 operands[3], operands[0]))
6347 if (!thumb1_cmp_operand (operands[2], SImode))
6348 operands[2] = force_reg (SImode, operands[2]);
;; cbranchsf4: SFmode compare-and-branch for hard-float targets;
;; delegates directly to cbranch_cc.
6351 (define_expand "cbranchsf4"
6352 [(set (pc) (if_then_else
6353 (match_operator 0 "expandable_comparison_operator"
6354 [(match_operand:SF 1 "s_register_operand")
6355 (match_operand:SF 2 "vfp_compare_operand")])
6356 (label_ref (match_operand 3 "" ""))
6358 "TARGET_32BIT && TARGET_HARD_FLOAT"
6359 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6360 operands[3])); DONE;"
;; cbranchdf4: DFmode variant; additionally requires double-precision
;; VFP (!TARGET_VFP_SINGLE).
6363 (define_expand "cbranchdf4"
6364 [(set (pc) (if_then_else
6365 (match_operator 0 "expandable_comparison_operator"
6366 [(match_operand:DF 1 "s_register_operand")
6367 (match_operand:DF 2 "vfp_compare_operand")])
6368 (label_ref (match_operand 3 "" ""))
6370 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6371 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6372 operands[3])); DONE;"
;; cbranchdi4: DImode variant; validates the comparison first (tail of
;; the preparation code is missing from this listing).
6375 (define_expand "cbranchdi4"
6376 [(set (pc) (if_then_else
6377 (match_operator 0 "expandable_comparison_operator"
6378 [(match_operand:DI 1 "s_register_operand")
6379 (match_operand:DI 2 "cmpdi_operand")])
6380 (label_ref (match_operand 3 "" ""))
6384 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6386 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6392 ;; Comparison and test insns
;; *arm_cmpsi_insn: SImode compare setting the CC register.  The first
;; two alternatives are 16-bit Thumb-2 encodings (length 2); constants
;; use cmp (I) or the negated form (L).  NOTE(review): the output
;; template (6398-6404) is missing from this listing.
6394 (define_insn "*arm_cmpsi_insn"
6395 [(set (reg:CC CC_REGNUM)
6396 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
6397 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
6405 [(set_attr "conds" "set")
6406 (set_attr "arch" "t2,t2,any,any,any")
6407 (set_attr "length" "2,2,4,4,4")
6408 (set_attr "predicable" "yes")
6409 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
6410 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; *cmpsi_shiftsi: compare a register against a shifted register
;; (cmp rN, rM, <shift>).  Output template lines (6419-6420) are
;; missing from this listing.
6413 (define_insn "*cmpsi_shiftsi"
6414 [(set (reg:CC CC_REGNUM)
6415 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
6416 (match_operator:SI 3 "shift_operator"
6417 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6418 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
6421 [(set_attr "conds" "set")
6422 (set_attr "shift" "1")
6423 (set_attr "arch" "32,a,a")
6424 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
6426 (define_insn "*cmpsi_shiftsi_swp"
;; ...as above but with the operands of the compare swapped, using the
;; CC_SWP mode so the condition is interpreted reversed.
6427 [(set (reg:CC_SWP CC_REGNUM)
6428 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
6429 [(match_operand:SI 1 "s_register_operand" "r,r,r")
6430 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
6431 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
6434 [(set_attr "conds" "set")
6435 (set_attr "shift" "1")
6436 (set_attr "arch" "32,a,a")
6437 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; *arm_cmpsi_negshiftsi_si: compare a register against the negation of
;; a shifted register, producing only the Z flag (CC_Z mode).  The
;; "type" attr distinguishes immediate vs register shift amounts.
;; NOTE(review): the output template line(s) are missing from this
;; listing (gap at 6446-6447).
6439 (define_insn "*arm_cmpsi_negshiftsi_si"
6440 [(set (reg:CC_Z CC_REGNUM)
6442 (neg:SI (match_operator:SI 1 "shift_operator"
6443 [(match_operand:SI 2 "s_register_operand" "r")
6444 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
6445 (match_operand:SI 0 "s_register_operand" "r")))]
6448 [(set_attr "conds" "set")
6449 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
6450 (const_string "alus_shift_imm")
6451 (const_string "alus_shift_reg")))
6452 (set_attr "predicable" "yes")]
6455 ;; DImode comparisons. The generic code generates branches that
6456 ;; if-conversion cannot reduce to a conditional compare, so we do
;; *arm_cmpdi_insn: full DImode compare as cmp-low + sbcs-high into a
;; scratch, setting N/C/V-usable flags (CC_NCV mode).
6459 (define_insn "*arm_cmpdi_insn"
6460 [(set (reg:CC_NCV CC_REGNUM)
6461 (compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
6462 (match_operand:DI 1 "arm_di_operand" "rDi")))
6463 (clobber (match_scratch:SI 2 "=r"))]
6465 "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
6466 [(set_attr "conds" "set")
6467 (set_attr "length" "8")
6468 (set_attr "type" "multiple")]
;; *arm_cmpdi_unsigned: unsigned DImode compare (C and Z flags only).
;; Emitted as "#" and split after reload into: compare the high words,
;; then a conditional (eq) compare of the low words.
6471 (define_insn_and_split "*arm_cmpdi_unsigned"
6472 [(set (reg:CC_CZ CC_REGNUM)
6473 (compare:CC_CZ (match_operand:DI 0 "s_register_operand" "l,r,r,r")
6474 (match_operand:DI 1 "arm_di_operand" "Py,r,Di,rDi")))]
6477 "#" ; "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
6478 "&& reload_completed"
6479 [(set (reg:CC CC_REGNUM)
6480 (compare:CC (match_dup 2) (match_dup 3)))
6481 (cond_exec (eq:SI (reg:CC CC_REGNUM) (const_int 0))
6482 (set (reg:CC CC_REGNUM)
6483 (compare:CC (match_dup 0) (match_dup 1))))]
6485 operands[2] = gen_highpart (SImode, operands[0]);
6486 operands[0] = gen_lowpart (SImode, operands[0]);
6487 if (CONST_INT_P (operands[1]))
6488 operands[3] = gen_highpart_mode (SImode, DImode, operands[1]);
6490 operands[3] = gen_highpart (SImode, operands[1]);
6491 operands[1] = gen_lowpart (SImode, operands[1]);
6493 [(set_attr "conds" "set")
6494 (set_attr "enabled_for_short_it" "yes,yes,no,*")
6495 (set_attr "arch" "t2,t2,t2,a")
6496 (set_attr "length" "6,6,10,8")
6497 (set_attr "type" "multiple")]
;; *arm_cmpdi_zero: test a DImode register against zero by ORRing its
;; two halves into a scratch and reading the Z flag.
6500 (define_insn "*arm_cmpdi_zero"
6501 [(set (reg:CC_Z CC_REGNUM)
6502 (compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
6504 (clobber (match_scratch:SI 1 "=r"))]
6506 "orrs%?\\t%1, %Q0, %R0"
6507 [(set_attr "conds" "set")
6508 (set_attr "type" "logics_reg")]
6511 ; This insn allows redundant compares to be removed by cse, nothing should
6512 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
6513 ; is deleted later on. The match_dup will match the mode here, so that
6514 ; mode changes of the condition codes aren't lost by this even though we don't
6515 ; specify what they are.
;; *deleted_compare: zero-length placeholder; only emits a comment.
6517 (define_insn "*deleted_compare"
6518 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
6520 "\\t%@ deleted compare"
6521 [(set_attr "conds" "set")
6522 (set_attr "length" "0")
6523 (set_attr "type" "no_insn")]
6527 ;; Conditional branch insns
;; cbranch_cc: internal helper pattern — materialize the comparison
;; into the CC register via arm_gen_compare_reg, then branch on it.
6529 (define_expand "cbranch_cc"
6531 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
6532 (match_operand 2 "" "")])
6533 (label_ref (match_operand 3 "" ""))
6536 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
6537 operands[1], operands[2], NULL_RTX);
6538 operands[2] = const0_rtx;"
6542 ;; Patterns to match conditional branch insns.
;; arm_cond_branch: branch to %l0 when condition %d1 (read from the CC
;; register) holds.  The ccfsm state machine may absorb this branch
;; into conditionalized code (state 1/2 path); the "length" attr picks
;; the short encoding when a Thumb-2 narrow branch is in range.
6545 (define_insn "arm_cond_branch"
6547 (if_then_else (match_operator 1 "arm_comparison_operator"
6548 [(match_operand 2 "cc_register" "") (const_int 0)])
6549 (label_ref (match_operand 0 "" ""))
6553 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6555 arm_ccfsm_state += 2;
6558 return \"b%d1\\t%l0\";
6560 [(set_attr "conds" "use")
6561 (set_attr "type" "branch")
6562 (set (attr "length")
6564 (and (match_test "TARGET_THUMB2")
6565 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6566 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; *arm_cond_branch_reversed: same, but the label is in the else-arm of
;; the if_then_else, so the inverse condition code (%D1) is emitted.
6571 (define_insn "*arm_cond_branch_reversed"
6573 (if_then_else (match_operator 1 "arm_comparison_operator"
6574 [(match_operand 2 "cc_register" "") (const_int 0)])
6576 (label_ref (match_operand 0 "" ""))))]
6579 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
6581 arm_ccfsm_state += 2;
6584 return \"b%D1\\t%l0\";
6586 [(set_attr "conds" "use")
6587 (set_attr "type" "branch")
6588 (set (attr "length")
6590 (and (match_test "TARGET_THUMB2")
6591 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
6592 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; cstore_cc: internal helper pattern — materialize the comparison into
;; the CC register, then store the truth value of the condition into an
;; SImode register.
6601 (define_expand "cstore_cc"
6602 [(set (match_operand:SI 0 "s_register_operand")
6603 (match_operator:SI 1 "" [(match_operand 2 "" "")
6604 (match_operand 3 "" "")]))]
6606 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
6607 operands[2], operands[3], NULL_RTX);
6608 operands[3] = const0_rtx;"
;; *mov_scc: set a register to 1/0 from a CC-register condition.
;; Emitted as "#" and split (split condition line missing from this
;; listing) into a conditional SImode set.
6611 (define_insn_and_split "*mov_scc"
6612 [(set (match_operand:SI 0 "s_register_operand" "=r")
6613 (match_operator:SI 1 "arm_comparison_operator_mode"
6614 [(match_operand 2 "cc_register" "") (const_int 0)]))]
6616 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
6619 (if_then_else:SI (match_dup 1)
6623 [(set_attr "conds" "use")
6624 (set_attr "length" "8")
6625 (set_attr "type" "multiple")]
;; *mov_negscc: as above but the stored values are -1/0 (mvn of #0 when
;; the condition holds); operands[3] supplies the ~0 constant.
6628 (define_insn_and_split "*mov_negscc"
6629 [(set (match_operand:SI 0 "s_register_operand" "=r")
6630 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
6631 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6633 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
6636 (if_then_else:SI (match_dup 1)
6640 operands[3] = GEN_INT (~0);
6642 [(set_attr "conds" "use")
6643 (set_attr "length" "8")
6644 (set_attr "type" "multiple")]
;; *mov_notscc: stores ~1/~0 (bitwise NOT of the scc values);
;; operands[3]/[4] carry the two constants for the split.
6647 (define_insn_and_split "*mov_notscc"
6648 [(set (match_operand:SI 0 "s_register_operand" "=r")
6649 (not:SI (match_operator:SI 1 "arm_comparison_operator"
6650 [(match_operand 2 "cc_register" "") (const_int 0)])))]
6652 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
6655 (if_then_else:SI (match_dup 1)
6659 operands[3] = GEN_INT (~1);
6660 operands[4] = GEN_INT (~0);
6662 [(set_attr "conds" "use")
6663 (set_attr "length" "8")
6664 (set_attr "type" "multiple")]
;; cstoresi4: set operand 0 to the SImode result of comparison operand 1
;; applied to operands 2 and 3.  On TARGET_32BIT this funnels into
;; gen_cstore_cc; the remaining paths open-code Thumb-1 sequences using
;; shifts/adds (no usable conditional-set instruction there).
;; NOTE(review): many interior lines (case labels, braces, conditions) are
;; missing from this extract; the visible C fragments are not contiguous.
6667 (define_expand "cstoresi4"
6668 [(set (match_operand:SI 0 "s_register_operand")
6669 (match_operator:SI 1 "expandable_comparison_operator"
6670 [(match_operand:SI 2 "s_register_operand")
6671 (match_operand:SI 3 "reg_or_int_operand")]))]
6672 "TARGET_32BIT || TARGET_THUMB1"
6674 rtx op3, scratch, scratch2;
6678 if (!arm_add_operand (operands[3], SImode))
6679 operands[3] = force_reg (SImode, operands[3]);
6680 emit_insn (gen_cstore_cc (operands[0], operands[1],
6681 operands[2], operands[3]));
6685 if (operands[3] == const0_rtx)
6687 switch (GET_CODE (operands[1]))
6690 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
6694 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
6698 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
6699 NULL_RTX, 0, OPTAB_WIDEN);
6700 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
6701 NULL_RTX, 0, OPTAB_WIDEN);
6702 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6703 operands[0], 1, OPTAB_WIDEN);
6707 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
6709 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
6710 NULL_RTX, 1, OPTAB_WIDEN);
6714 scratch = expand_binop (SImode, ashr_optab, operands[2],
6715 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
6716 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
6717 NULL_RTX, 0, OPTAB_WIDEN);
6718 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
6722 /* LT is handled by generic code. No need for unsigned with 0. */
6729 switch (GET_CODE (operands[1]))
6732 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6733 NULL_RTX, 0, OPTAB_WIDEN);
6734 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
6738 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
6739 NULL_RTX, 0, OPTAB_WIDEN);
6740 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
6744 op3 = force_reg (SImode, operands[3]);
6746 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
6747 NULL_RTX, 1, OPTAB_WIDEN);
6748 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
6749 NULL_RTX, 0, OPTAB_WIDEN);
6750 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6756 if (!thumb1_cmp_operand (op3, SImode))
6757 op3 = force_reg (SImode, op3);
6758 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
6759 NULL_RTX, 0, OPTAB_WIDEN);
6760 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
6761 NULL_RTX, 1, OPTAB_WIDEN);
6762 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
6767 op3 = force_reg (SImode, operands[3]);
6768 scratch = force_reg (SImode, const0_rtx);
6769 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6775 if (!thumb1_cmp_operand (op3, SImode))
6776 op3 = force_reg (SImode, op3);
6777 scratch = force_reg (SImode, const0_rtx);
6778 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
6784 if (!thumb1_cmp_operand (op3, SImode))
6785 op3 = force_reg (SImode, op3);
6786 scratch = gen_reg_rtx (SImode);
6787 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
6791 op3 = force_reg (SImode, operands[3]);
6792 scratch = gen_reg_rtx (SImode);
6793 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
6796 /* No good sequences for GT, LT. */
;; cstorehf4: HFmode compare-and-store; requires FP16 scalar instructions
;; and validates the comparison before emitting cstore_cc.
6803 (define_expand "cstorehf4"
6804 [(set (match_operand:SI 0 "s_register_operand")
6805 (match_operator:SI 1 "expandable_comparison_operator"
6806 [(match_operand:HF 2 "s_register_operand")
6807 (match_operand:HF 3 "vfp_compare_operand")]))]
6808 "TARGET_VFP_FP16INST"
6810 if (!arm_validize_comparison (&operands[1],
6815 emit_insn (gen_cstore_cc (operands[0], operands[1],
6816 operands[2], operands[3]));
;; cstoresf4: SFmode compare-and-store via cstore_cc (hard-float only).
6821 (define_expand "cstoresf4"
6822 [(set (match_operand:SI 0 "s_register_operand")
6823 (match_operator:SI 1 "expandable_comparison_operator"
6824 [(match_operand:SF 2 "s_register_operand")
6825 (match_operand:SF 3 "vfp_compare_operand")]))]
6826 "TARGET_32BIT && TARGET_HARD_FLOAT"
6827 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6828 operands[2], operands[3])); DONE;"
;; cstoredf4: DFmode variant; additionally excluded on single-precision-only
;; VFP units (!TARGET_VFP_SINGLE).
6831 (define_expand "cstoredf4"
6832 [(set (match_operand:SI 0 "s_register_operand")
6833 (match_operator:SI 1 "expandable_comparison_operator"
6834 [(match_operand:DF 2 "s_register_operand")
6835 (match_operand:DF 3 "vfp_compare_operand")]))]
6836 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6837 "emit_insn (gen_cstore_cc (operands[0], operands[1],
6838 operands[2], operands[3])); DONE;"
;; cstoredi4: DImode compare-and-store; validates the comparison first.
6841 (define_expand "cstoredi4"
6842 [(set (match_operand:SI 0 "s_register_operand")
6843 (match_operator:SI 1 "expandable_comparison_operator"
6844 [(match_operand:DI 2 "s_register_operand")
6845 (match_operand:DI 3 "cmpdi_operand")]))]
6848 if (!arm_validize_comparison (&operands[1],
6852 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
6859 ;; Conditional move insns
;; All four expanders below share the same shape: validate the comparison,
;; materialize it into a CC register with arm_gen_compare_reg, then rewrite
;; operand 1 as (code CC 0) so the matching *mov*cc insn can use it.
;; NOTE(review): FAIL paths and closing braces fall on lines missing from
;; this extract.
6861 (define_expand "movsicc"
6862 [(set (match_operand:SI 0 "s_register_operand")
6863 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
6864 (match_operand:SI 2 "arm_not_operand")
6865 (match_operand:SI 3 "arm_not_operand")))]
6872 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6873 &XEXP (operands[1], 1)))
6876 code = GET_CODE (operands[1]);
6877 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6878 XEXP (operands[1], 1), NULL_RTX);
6879 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; HFmode conditional move; requires the FP16 scalar instruction set.
6883 (define_expand "movhfcc"
6884 [(set (match_operand:HF 0 "s_register_operand")
6885 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
6886 (match_operand:HF 2 "s_register_operand")
6887 (match_operand:HF 3 "s_register_operand")))]
6888 "TARGET_VFP_FP16INST"
6891 enum rtx_code code = GET_CODE (operands[1]);
6894 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6895 &XEXP (operands[1], 1)))
6898 code = GET_CODE (operands[1]);
6899 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6900 XEXP (operands[1], 1), NULL_RTX);
6901 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SFmode conditional move (hard-float).
6905 (define_expand "movsfcc"
6906 [(set (match_operand:SF 0 "s_register_operand")
6907 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
6908 (match_operand:SF 2 "s_register_operand")
6909 (match_operand:SF 3 "s_register_operand")))]
6910 "TARGET_32BIT && TARGET_HARD_FLOAT"
6913 enum rtx_code code = GET_CODE (operands[1]);
6916 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6917 &XEXP (operands[1], 1)))
6920 code = GET_CODE (operands[1]);
6921 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6922 XEXP (operands[1], 1), NULL_RTX);
6923 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; DFmode conditional move; needs double-precision VFP.
6927 (define_expand "movdfcc"
6928 [(set (match_operand:DF 0 "s_register_operand")
6929 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
6930 (match_operand:DF 2 "s_register_operand")
6931 (match_operand:DF 3 "s_register_operand")))]
6932 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
6935 enum rtx_code code = GET_CODE (operands[1]);
6938 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
6939 &XEXP (operands[1], 1)))
6941 code = GET_CODE (operands[1]);
6942 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
6943 XEXP (operands[1], 1), NULL_RTX);
6944 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; SF/DF conditional move using the ARMv8 VSEL instruction (TARGET_VFP5).
;; Emits vsel with either the direct or the reversed condition, swapping
;; operands 3/4 accordingly (maybe_get_arm_condition_code decides).
6948 (define_insn "*cmov<mode>"
6949 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
6950 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
6951 [(match_operand 2 "cc_register" "") (const_int 0)])
6952 (match_operand:SDF 3 "s_register_operand"
6954 (match_operand:SDF 4 "s_register_operand"
6955 "<F_constraint>")))]
6956 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
6959 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6966 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
6971 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
6977 [(set_attr "conds" "use")
6978 (set_attr "type" "fcsel")]
;; HFmode counterpart of the above, using vsel.f16.
6981 (define_insn "*cmovhf"
6982 [(set (match_operand:HF 0 "s_register_operand" "=t")
6983 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
6984 [(match_operand 2 "cc_register" "") (const_int 0)])
6985 (match_operand:HF 3 "s_register_operand" "t")
6986 (match_operand:HF 4 "s_register_operand" "t")))]
6987 "TARGET_VFP_FP16INST"
6990 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
6997 return \"vsel%d1.f16\\t%0, %3, %4\";
7002 return \"vsel%D1.f16\\t%0, %4, %3\";
7008 [(set_attr "conds" "use")
7009 (set_attr "type" "fcsel")]
;; Integer conditional move; alternatives cover reg/immediate/negated-
;; immediate sources.  After reload the two-instruction alternatives are
;; split into a pair of COND_EXEC sets, reversing the condition for the
;; else-arm (with the maybe-unordered reversal for FP CC modes).
;; NOTE(review): several alternative templates and the split pattern header
;; are on lines missing from this extract.
7012 (define_insn_and_split "*movsicc_insn"
7013 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7015 (match_operator 3 "arm_comparison_operator"
7016 [(match_operand 4 "cc_register" "") (const_int 0)])
7017 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7018 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7029 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7030 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7031 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7032 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7033 "&& reload_completed"
7036 enum rtx_code rev_code;
7040 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7042 gen_rtx_SET (operands[0], operands[1])));
7044 rev_code = GET_CODE (operands[3]);
7045 mode = GET_MODE (operands[4]);
7046 if (mode == CCFPmode || mode == CCFPEmode)
7047 rev_code = reverse_condition_maybe_unordered (rev_code);
7049 rev_code = reverse_condition (rev_code);
7051 rev_cond = gen_rtx_fmt_ee (rev_code,
7055 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7057 gen_rtx_SET (operands[0], operands[2])));
7060 [(set_attr "length" "4,4,4,4,8,8,8,8")
7061 (set_attr "conds" "use")
7062 (set_attr_alternative "type"
7063 [(if_then_else (match_operand 2 "const_int_operand" "")
7064 (const_string "mov_imm")
7065 (const_string "mov_reg"))
7066 (const_string "mvn_imm")
7067 (if_then_else (match_operand 1 "const_int_operand" "")
7068 (const_string "mov_imm")
7069 (const_string "mov_reg"))
7070 (const_string "mvn_imm")
7071 (const_string "multiple")
7072 (const_string "multiple")
7073 (const_string "multiple")
7074 (const_string "multiple")])]
;; Soft-float SFmode conditional move: values live in core registers, so
;; a predicated core-register mov suffices.
7077 (define_insn "*movsfcc_soft_insn"
7078 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7079 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7080 [(match_operand 4 "cc_register" "") (const_int 0)])
7081 (match_operand:SF 1 "s_register_operand" "0,r")
7082 (match_operand:SF 2 "s_register_operand" "r,0")))]
7083 "TARGET_ARM && TARGET_SOFT_FLOAT"
7087 [(set_attr "conds" "use")
7088 (set_attr "type" "mov_reg")]
7092 ;; Jump and linkage insns
;; Generic unconditional jump expander.
7094 (define_expand "jump"
7096 (label_ref (match_operand 0 "" "")))]
;; ARM-state unconditional branch.  Cooperates with the ccfsm conditional-
;; execution state machine (arm_ccfsm_state); the length attribute picks a
;; short encoding for in-range Thumb-2 branches.
7101 (define_insn "*arm_jump"
7103 (label_ref (match_operand 0 "" "")))]
7107 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7109 arm_ccfsm_state += 2;
7112 return \"b%?\\t%l0\";
7115 [(set_attr "predicable" "yes")
7116 (set (attr "length")
7118 (and (match_test "TARGET_THUMB2")
7119 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7120 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7123 (set_attr "type" "branch")]
;; Subroutine call (no return value).  Handles long-call forcing into a
;; register, FDPIC function-descriptor loading for indirect calls, CMSE
;; non-secure calls, and restoring r9 (FDPIC register) afterwards.
;; NOTE(review): closing braces and some conditions fall on lines missing
;; from this extract.
7126 (define_expand "call"
7127 [(parallel [(call (match_operand 0 "memory_operand")
7128 (match_operand 1 "general_operand"))
7129 (use (match_operand 2 "" ""))
7130 (clobber (reg:SI LR_REGNUM))])]
7135 tree addr = MEM_EXPR (operands[0]);
7137 /* In an untyped call, we can get NULL for operand 2. */
7138 if (operands[2] == NULL_RTX)
7139 operands[2] = const0_rtx;
7141 /* Decide if we should generate indirect calls by loading the
7142 32-bit address of the callee into a register before performing the
7144 callee = XEXP (operands[0], 0);
7145 if (GET_CODE (callee) == SYMBOL_REF
7146 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7148 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7150 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7151 /* Indirect call: set r9 with FDPIC value of callee. */
7152 XEXP (operands[0], 0)
7153 = arm_load_function_descriptor (XEXP (operands[0], 0));
7155 if (detect_cmse_nonsecure_call (addr))
7157 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7159 emit_call_insn (pat);
7163 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7164 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7167 /* Restore FDPIC register (r9) after call. */
7170 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7171 rtx initial_fdpic_reg
7172 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7174 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7175 initial_fdpic_reg));
;; Reload the PIC/FDPIC register (wrapped in an unspec so it is not
;; optimized away) from a register or memory source.
7182 (define_insn "restore_pic_register_after_call"
7183 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7184 (unspec:SI [(match_dup 0)
7185 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7186 UNSPEC_PIC_RESTORE))]
;; Plain call skeleton matched by the *call_* insns below.
7193 (define_expand "call_internal"
7194 [(parallel [(call (match_operand 0 "memory_operand")
7195 (match_operand 1 "general_operand"))
7196 (use (match_operand 2 "" ""))
7197 (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure call: callee address is copied into r4 and the call
;; target is wrapped in UNSPEC_NONSECURE_MEM.
7199 (define_expand "nonsecure_call_internal"
7200 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7201 UNSPEC_NONSECURE_MEM)
7202 (match_operand 1 "general_operand"))
7203 (use (match_operand 2 "" ""))
7204 (clobber (reg:SI LR_REGNUM))])]
7209 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7210 gen_rtx_REG (SImode, R4_REGNUM),
7213 operands[0] = replace_equiv_address (operands[0], tmp);
;; Indirect call on ARMv5T+ (blx available).
7216 (define_insn "*call_reg_armv5"
7217 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7218 (match_operand 1 "" ""))
7219 (use (match_operand 2 "" ""))
7220 (clobber (reg:SI LR_REGNUM))]
7221 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7223 [(set_attr "type" "call")]
;; Indirect call on pre-v5T ARM; output_call emits the mov-lr sequence.
7226 (define_insn "*call_reg_arm"
7227 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7228 (match_operand 1 "" ""))
7229 (use (match_operand 2 "" ""))
7230 (clobber (reg:SI LR_REGNUM))]
7231 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7233 return output_call (operands);
7235 ;; length is worst case, normally it is only two
7236 [(set_attr "length" "12")
7237 (set_attr "type" "call")]
;; Call returning a value; mirrors the "call" expander above with the
;; callee mem in operand 1 and the use-operand in operand 3.
7241 (define_expand "call_value"
7242 [(parallel [(set (match_operand 0 "" "")
7243 (call (match_operand 1 "memory_operand")
7244 (match_operand 2 "general_operand")))
7245 (use (match_operand 3 "" ""))
7246 (clobber (reg:SI LR_REGNUM))])]
7251 tree addr = MEM_EXPR (operands[1]);
7253 /* In an untyped call, we can get NULL for operand 3. */
7254 if (operands[3] == 0)
7255 operands[3] = const0_rtx;
7257 /* Decide if we should generate indirect calls by loading the
7258 32-bit address of the callee into a register before performing the
7260 callee = XEXP (operands[1], 0);
7261 if (GET_CODE (callee) == SYMBOL_REF
7262 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7264 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7266 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7267 /* Indirect call: set r9 with FDPIC value of callee. */
7268 XEXP (operands[1], 0)
7269 = arm_load_function_descriptor (XEXP (operands[1], 0));
7271 if (detect_cmse_nonsecure_call (addr))
7273 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7274 operands[2], operands[3]);
7275 emit_call_insn (pat);
7279 pat = gen_call_value_internal (operands[0], operands[1],
7280 operands[2], operands[3]);
7281 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7284 /* Restore FDPIC register (r9) after call. */
7287 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7288 rtx initial_fdpic_reg
7289 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7291 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7292 initial_fdpic_reg));
;; Value-returning call skeleton matched by the *call_value_* insns.
7299 (define_expand "call_value_internal"
7300 [(parallel [(set (match_operand 0 "" "")
7301 (call (match_operand 1 "memory_operand")
7302 (match_operand 2 "general_operand")))
7303 (use (match_operand 3 "" ""))
7304 (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure value-returning call; callee address forced into r4.
7306 (define_expand "nonsecure_call_value_internal"
7307 [(parallel [(set (match_operand 0 "" "")
7308 (call (unspec:SI [(match_operand 1 "memory_operand")]
7309 UNSPEC_NONSECURE_MEM)
7310 (match_operand 2 "general_operand")))
7311 (use (match_operand 3 "" ""))
7312 (clobber (reg:SI LR_REGNUM))])]
7317 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7318 gen_rtx_REG (SImode, R4_REGNUM),
7321 operands[1] = replace_equiv_address (operands[1], tmp);
;; Indirect value-returning call, ARMv5T+ (blx).
7324 (define_insn "*call_value_reg_armv5"
7325 [(set (match_operand 0 "" "")
7326 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7327 (match_operand 2 "" "")))
7328 (use (match_operand 3 "" ""))
7329 (clobber (reg:SI LR_REGNUM))]
7330 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7332 [(set_attr "type" "call")]
;; Indirect value-returning call, pre-v5T; note output_call is handed
;; &operands[1] so the callee register appears as its operand 0.
7335 (define_insn "*call_value_reg_arm"
7336 [(set (match_operand 0 "" "")
7337 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
7338 (match_operand 2 "" "")))
7339 (use (match_operand 3 "" ""))
7340 (clobber (reg:SI LR_REGNUM))]
7341 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7343 return output_call (&operands[1]);
7345 [(set_attr "length" "12")
7346 (set_attr "type" "call")]
7349 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
7350 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct call to a symbol (bl/blx, with optional PLT relocation).  The blx
;; form is used for local functions when an ARM/Thumb mode switch is allowed.
7352 (define_insn "*call_symbol"
7353 [(call (mem:SI (match_operand:SI 0 "" ""))
7354 (match_operand 1 "" ""))
7355 (use (match_operand 2 "" ""))
7356 (clobber (reg:SI LR_REGNUM))]
7358 && !SIBLING_CALL_P (insn)
7359 && (GET_CODE (operands[0]) == SYMBOL_REF)
7360 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
7363 rtx op = operands[0];
7365 /* Switch mode now when possible. */
7366 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7367 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7368 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
7370 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
7372 [(set_attr "type" "call")]
;; Value-returning direct call to a symbol; same mode-switch logic as above.
7375 (define_insn "*call_value_symbol"
7376 [(set (match_operand 0 "" "")
7377 (call (mem:SI (match_operand:SI 1 "" ""))
7378 (match_operand:SI 2 "" "")))
7379 (use (match_operand 3 "" ""))
7380 (clobber (reg:SI LR_REGNUM))]
7382 && !SIBLING_CALL_P (insn)
7383 && (GET_CODE (operands[1]) == SYMBOL_REF)
7384 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
7387 rtx op = operands[1];
7389 /* Switch mode now when possible. */
7390 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
7391 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
7392 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
7394 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
7396 [(set_attr "type" "call")]
;; Sibling-call (tail-call) skeleton matched by *sibcall_insn.
7399 (define_expand "sibcall_internal"
7400 [(parallel [(call (match_operand 0 "memory_operand")
7401 (match_operand 1 "general_operand"))
7403 (use (match_operand 2 "" ""))])])
7405 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; Sibling call: force the callee into a register unless it is a SYMBOL_REF
;; that does not require a long call.
7406 (define_expand "sibcall"
7407 [(parallel [(call (match_operand 0 "memory_operand")
7408 (match_operand 1 "general_operand"))
7410 (use (match_operand 2 "" ""))])]
7416 if ((!REG_P (XEXP (operands[0], 0))
7417 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
7418 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
7419 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
7420 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
7422 if (operands[2] == NULL_RTX)
7423 operands[2] = const0_rtx;
7425 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
7426 arm_emit_call_insn (pat, operands[0], true);
;; Value-returning sibling-call skeleton.
7431 (define_expand "sibcall_value_internal"
7432 [(parallel [(set (match_operand 0 "" "")
7433 (call (match_operand 1 "memory_operand")
7434 (match_operand 2 "general_operand")))
7436 (use (match_operand 3 "" ""))])])
;; Value-returning sibling call; same callee-legitimization as "sibcall".
7438 (define_expand "sibcall_value"
7439 [(parallel [(set (match_operand 0 "" "")
7440 (call (match_operand 1 "memory_operand")
7441 (match_operand 2 "general_operand")))
7443 (use (match_operand 3 "" ""))])]
7449 if ((!REG_P (XEXP (operands[1], 0))
7450 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
7451 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
7452 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
7453 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
7455 if (operands[3] == NULL_RTX)
7456 operands[3] = const0_rtx;
7458 pat = gen_sibcall_value_internal (operands[0], operands[1],
7459 operands[2], operands[3]);
7460 arm_emit_call_insn (pat, operands[1], true);
;; Tail-call insn: alt 1 is a direct branch (b, optionally PLT); the
;; register alternative uses bx when available, else mov pc.
7465 (define_insn "*sibcall_insn"
7466 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
7467 (match_operand 1 "" ""))
7469 (use (match_operand 2 "" ""))]
7470 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7472 if (which_alternative == 1)
7473 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
7476 if (arm_arch5t || arm_arch4t)
7477 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
7479 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
7482 [(set_attr "type" "call")]
;; Value-returning tail-call insn; same alternatives as *sibcall_insn.
7485 (define_insn "*sibcall_value_insn"
7486 [(set (match_operand 0 "" "")
7487 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
7488 (match_operand 2 "" "")))
7490 (use (match_operand 3 "" ""))]
7491 "TARGET_32BIT && SIBLING_CALL_P (insn)"
7493 if (which_alternative == 1)
7494 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
7497 if (arm_arch5t || arm_arch4t)
7498 return \"bx%?\\t%1\";
7500 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
7503 [(set_attr "type" "call")]
;; Function return expander (iterated over plain/simple return via the
;; <return_str>/<return_cond_*> mode attributes); Thumb-2 may expand
;; through thumb2_expand_return.
7506 (define_expand "<return_str>return"
7508 "(TARGET_ARM || (TARGET_THUMB2
7509 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
7510 && !IS_STACKALIGN (arm_current_func_type ())))
7511 <return_cond_false>"
7516 thumb2_expand_return (<return_simple_p>);
7523 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional ARM return; interacts with the ccfsm state machine and
;; defers the actual text to output_return_instruction.
7524 (define_insn "*arm_return"
7526 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
7529 if (arm_ccfsm_state == 2)
7531 arm_ccfsm_state += 2;
7534 return output_return_instruction (const_true_rtx, true, false, false);
7536 [(set_attr "type" "load_4")
7537 (set_attr "length" "12")
7538 (set_attr "predicable" "yes")]
;; Conditional return: return when the CC test holds.
7541 (define_insn "*cond_<return_str>return"
7543 (if_then_else (match_operator 0 "arm_comparison_operator"
7544 [(match_operand 1 "cc_register" "") (const_int 0)])
7547 "TARGET_ARM <return_cond_true>"
7550 if (arm_ccfsm_state == 2)
7552 arm_ccfsm_state += 2;
7555 return output_return_instruction (operands[0], true, false,
7558 [(set_attr "conds" "use")
7559 (set_attr "length" "12")
7560 (set_attr "type" "load_4")]
;; Conditional return with the condition inverted (third arg of
;; output_return_instruction is 'true' here).
7563 (define_insn "*cond_<return_str>return_inverted"
7565 (if_then_else (match_operator 0 "arm_comparison_operator"
7566 [(match_operand 1 "cc_register" "") (const_int 0)])
7569 "TARGET_ARM <return_cond_true>"
7572 if (arm_ccfsm_state == 2)
7574 arm_ccfsm_state += 2;
7577 return output_return_instruction (operands[0], true, true,
7580 [(set_attr "conds" "use")
7581 (set_attr "length" "12")
7582 (set_attr "type" "load_4")]
;; Simple return (no register restores); 4-byte branch-class insn.
7585 (define_insn "*arm_simple_return"
7590 if (arm_ccfsm_state == 2)
7592 arm_ccfsm_state += 2;
7595 return output_return_instruction (const_true_rtx, true, false, true);
7597 [(set_attr "type" "branch")
7598 (set_attr "length" "4")
7599 (set_attr "predicable" "yes")]
7602 ;; Generate a sequence of instructions to determine if the processor is
7603 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; Produce the return-address mask: all-ones in 32-bit mode, 0x03fffffc
;; in 26-bit mode, selected by the UNSPEC_CHECK_ARCH compare below.
7606 (define_expand "return_addr_mask"
7608 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7610 (set (match_operand:SI 0 "s_register_operand")
7611 (if_then_else:SI (eq (match_dup 1) (const_int 0))
7613 (const_int 67108860)))] ; 0x03fffffc
7616 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; Architecture check: the teq pc,pc trick sets flags differently on
;; 26-bit vs 32-bit configurations.
7619 (define_insn "*check_arch2"
7620 [(set (match_operand:CC_NOOV 0 "cc_register" "")
7621 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
7624 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
7625 [(set_attr "length" "8")
7626 (set_attr "conds" "set")
7627 (set_attr "type" "multiple")]
7630 ;; Call subroutine returning any type.
;; untyped_call: call operand 0 and store every possible return register
;; into the result block (operand 1), driven by the parallel of sets in
;; operand 2.  r0 is widened to TImode so up to four core registers are
;; captured with a store-multiple.
;; NOTE(review): loop/brace structure lines are missing from this extract.
7632 (define_expand "untyped_call"
7633 [(parallel [(call (match_operand 0 "" "")
7635 (match_operand 1 "" "")
7636 (match_operand 2 "" "")])]
7637 "TARGET_EITHER && !TARGET_FDPIC"
7641 rtx par = gen_rtx_PARALLEL (VOIDmode,
7642 rtvec_alloc (XVECLEN (operands[2], 0)));
7643 rtx addr = gen_reg_rtx (Pmode);
7647 emit_move_insn (addr, XEXP (operands[1], 0));
7648 mem = change_address (operands[1], BLKmode, addr);
7650 for (i = 0; i < XVECLEN (operands[2], 0); i++)
7652 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
7654 /* Default code only uses r0 as a return value, but we could
7655 be using anything up to 4 registers. */
7656 if (REGNO (src) == R0_REGNUM)
7657 src = gen_rtx_REG (TImode, R0_REGNUM);
7659 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
7661 size += GET_MODE_SIZE (GET_MODE (src));
7664 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
7668 for (i = 0; i < XVECLEN (par, 0); i++)
7670 HOST_WIDE_INT offset = 0;
7671 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
7674 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7676 mem = change_address (mem, GET_MODE (reg), NULL);
7677 if (REGNO (reg) == R0_REGNUM)
7679 /* On thumb we have to use a write-back instruction. */
7680 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
7681 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7682 size = TARGET_ARM ? 16 : 0;
7686 emit_move_insn (mem, reg);
7687 size = GET_MODE_SIZE (GET_MODE (reg));
7691 /* The optimizer does not know that the call sets the function value
7692 registers we stored in the result block. We avoid problems by
7693 claiming that all hard registers are used and clobbered at this
7695 emit_insn (gen_blockage ());
;; untyped_return: reload every return register from the result block
;; (operand 0) per the sets in operand 1, emit USEs so the values are
;; live at the return, then build the return itself.
7701 (define_expand "untyped_return"
7702 [(match_operand:BLK 0 "memory_operand")
7703 (match_operand 1 "" "")]
7704 "TARGET_EITHER && !TARGET_FDPIC"
7708 rtx addr = gen_reg_rtx (Pmode);
7712 emit_move_insn (addr, XEXP (operands[0], 0));
7713 mem = change_address (operands[0], BLKmode, addr);
7715 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7717 HOST_WIDE_INT offset = 0;
7718 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
7721 emit_move_insn (addr, plus_constant (Pmode, addr, size));
7723 mem = change_address (mem, GET_MODE (reg), NULL);
7724 if (REGNO (reg) == R0_REGNUM)
7726 /* On thumb we have to use a write-back instruction. */
7727 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
7728 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
7729 size = TARGET_ARM ? 16 : 0;
7733 emit_move_insn (reg, mem);
7734 size = GET_MODE_SIZE (GET_MODE (reg));
7738 /* Emit USE insns before the return. */
7739 for (i = 0; i < XVECLEN (operands[1], 0); i++)
7740 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
7742 /* Construct the return. */
7743 expand_naked_return ();
7749 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
7750 ;; all of memory. This blocks insns from being moved across this point.
;; Zero-length scheduling barrier.
7752 (define_insn "blockage"
7753 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
7756 [(set_attr "length" "0")
7757 (set_attr "type" "block")]
7760 ;; Since we hard code r0 here use the 'o' constraint to prevent
7761 ;; provoking undefined behaviour in the hardware with putting out
7762 ;; auto-increment operations with potentially r0 as the base register.
;; Single stack probe: a predicable store to the probed address.
7763 (define_insn "probe_stack"
7764 [(set (match_operand:SI 0 "memory_operand" "=o")
7765 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
7768 [(set_attr "type" "store_4")
7769 (set_attr "predicable" "yes")]
;; Probe a whole stack range [operand 1, operand 2]; the emitted loop is
;; produced by output_probe_stack_range and clobbers the condition codes.
7772 (define_insn "probe_stack_range"
7773 [(set (match_operand:SI 0 "register_operand" "=r")
7774 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
7775 (match_operand:SI 2 "register_operand" "r")]
7776 VUNSPEC_PROBE_STACK_RANGE))]
7779 return output_probe_stack_range (operands[0], operands[2]);
7781 [(set_attr "type" "multiple")
7782 (set_attr "conds" "clob")]
7785 ;; Named patterns for stack smashing protection.
;; Combined canary-set expander; the real work happens in the insn below
;; after register allocation.
7786 (define_expand "stack_protect_combined_set"
7788 [(set (match_operand:SI 0 "memory_operand")
7789 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7791 (clobber (match_scratch:SI 2 ""))
7792 (clobber (match_scratch:SI 3 ""))])]
7797 ;; Use a separate insn from the above expand to be able to have the mem outside
7798 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7799 ;; try to reload the guard since we need to control how PIC access is done in
7800 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7801 ;; legitimize_pic_address ()).
;; Splits into the guard-address computation (PIC-aware) plus the
;; non-splittable *stack_protect_set_insn that copies the canary.
7802 (define_insn_and_split "*stack_protect_combined_set_insn"
7803 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7804 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7806 (clobber (match_scratch:SI 2 "=&l,&r"))
7807 (clobber (match_scratch:SI 3 "=&l,&r"))]
7811 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
7813 (clobber (match_dup 2))])]
7821 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7823 pic_reg = operands[3];
7825 /* Forces recomputing of GOT base now. */
7826 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
7827 true /*compute_now*/);
7831 if (address_operand (operands[1], SImode))
7832 operands[2] = operands[1];
7835 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7836 emit_move_insn (operands[2], mem);
7840 [(set_attr "arch" "t1,32")]
7843 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
7844 ;; canary value does not live beyond the life of this sequence.
;; Copy the canary into the stack slot and immediately wipe the scratch
;; register (movs/mov #0) so the value cannot leak.
7845 (define_insn "*stack_protect_set_insn"
7846 [(set (match_operand:SI 0 "memory_operand" "=m,m")
7847 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
7849 (clobber (match_dup 1))]
7852 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
7853 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
7854 [(set_attr "length" "8,12")
7855 (set_attr "conds" "clob,nocond")
7856 (set_attr "type" "multiple")
7857 (set_attr "arch" "t1,32")]
;; Combined canary-test expander: compare the stored canary with the guard
;; and branch to operand 2 on equality.
7860 (define_expand "stack_protect_combined_test"
7864 (eq (match_operand:SI 0 "memory_operand")
7865 (unspec:SI [(match_operand:SI 1 "guard_operand")]
7867 (label_ref (match_operand 2))
7869 (clobber (match_scratch:SI 3 ""))
7870 (clobber (match_scratch:SI 4 ""))
7871 (clobber (reg:CC CC_REGNUM))])]
7876 ;; Use a separate insn from the above expand to be able to have the mem outside
7877 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
7878 ;; try to reload the guard since we need to control how PIC access is done in
7879 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
7880 ;; legitimize_pic_address ()).
;; Splits into the PIC-aware guard-address computation plus either the ARM
;; (CC-setting eors + conditional branch) or Thumb-1 (xor + cbranch)
;; comparison sequence.
7881 (define_insn_and_split "*stack_protect_combined_test_insn"
7884 (eq (match_operand:SI 0 "memory_operand" "m,m")
7885 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
7887 (label_ref (match_operand 2))
7889 (clobber (match_scratch:SI 3 "=&l,&r"))
7890 (clobber (match_scratch:SI 4 "=&l,&r"))
7891 (clobber (reg:CC CC_REGNUM))]
7904 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7906 pic_reg = operands[4];
7908 /* Forces recomputing of GOT base now. */
7909 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
7910 true /*compute_now*/);
7914 if (address_operand (operands[1], SImode))
7915 operands[3] = operands[1];
7918 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
7919 emit_move_insn (operands[3], mem);
7924 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
7926 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
7927 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
7928 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
7932 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
7934 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
7935 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
7940 [(set_attr "arch" "t1,32")]
;; Load guard and canary, XOR them with flag-setting eors; Z set means
;; the canary is intact.  Clobbers both inputs so neither value survives.
7943 (define_insn "arm_stack_protect_test_insn"
7944 [(set (reg:CC_Z CC_REGNUM)
7945 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
7946 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
7949 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
7950 (clobber (match_dup 2))]
7952 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
7953 [(set_attr "length" "8,12")
7954 (set_attr "conds" "set")
7955 (set_attr "type" "multiple")
7956 (set_attr "arch" "t,32")]
;; Expand a switch/case dispatch.  Operand 0 is the index, 1 the lower
;; bound, 2 the total range, 3 the jump-table label, 4 the out-of-range
;; label.  If the lower bound is non-zero it is first subtracted from the
;; index; then one of the internal casesi patterns is chosen (ARM,
;; Thumb-1 PIC, Thumb-2 PIC, or plain Thumb-2) and the range operand is
;; forced into a register when the chosen pattern's predicate demands it.
7959 (define_expand "casesi"
7960 [(match_operand:SI 0 "s_register_operand") ; index to jump on
7961 (match_operand:SI 1 "const_int_operand") ; lower bound
7962 (match_operand:SI 2 "const_int_operand") ; total range
7963 (match_operand:SI 3 "" "") ; table label
7964 (match_operand:SI 4 "" "")] ; Out of range label
7965 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
7968 enum insn_code code;
7969 if (operands[1] != const0_rtx)
7971 rtx reg = gen_reg_rtx (SImode);
7973 emit_insn (gen_addsi3 (reg, operands[0],
7974 gen_int_mode (-INTVAL (operands[1]),
7980 code = CODE_FOR_arm_casesi_internal;
7981 else if (TARGET_THUMB1)
7982 code = CODE_FOR_thumb1_casesi_internal_pic;
7984 code = CODE_FOR_thumb2_casesi_internal_pic;
7986 code = CODE_FOR_thumb2_casesi_internal;
7988 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
7989 operands[2] = force_reg (SImode, operands[2]);
7991 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
7992 operands[3], operands[4]));
7997 ;; The USE in this pattern is needed to tell flow analysis that this is
7998 ;; a CASESI insn. It has no other purpose.
;; The preparation code below builds operand 4, the dispatch-table load:
;; a MEM at (index * 4 + table label), marked read-only and non-trapping
;; so the optimizers may move it freely.
7999 (define_expand "arm_casesi_internal"
8000 [(parallel [(set (pc)
8002 (leu (match_operand:SI 0 "s_register_operand")
8003 (match_operand:SI 1 "arm_rhs_operand"))
8005 (label_ref:SI (match_operand 3 ""))))
8006 (clobber (reg:CC CC_REGNUM))
8007 (use (label_ref:SI (match_operand 2 "")))])]
8010 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8011 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8012 gen_rtx_LABEL_REF (SImode, operands[2]));
8013 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8014 MEM_READONLY_P (operands[4]) = 1;
8015 MEM_NOTRAP_P (operands[4]) = 1;
;; ARM-mode case dispatch.  Emits "cmp index, range" then either
;; "addls pc, pc, index, asl #2" (branch into an inline table) or
;; "ldrls pc, [pc, index, asl #2]" (load target from the table), falling
;; through to "b out-of-range" when the index is above the range.
;; 12 bytes; clobbers the condition codes.
8018 (define_insn "*arm_casesi_internal"
8019 [(parallel [(set (pc)
8021 (leu (match_operand:SI 0 "s_register_operand" "r")
8022 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8023 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8024 (label_ref:SI (match_operand 2 "" ""))))
8025 (label_ref:SI (match_operand 3 "" ""))))
8026 (clobber (reg:CC CC_REGNUM))
8027 (use (label_ref:SI (match_dup 2)))])]
8031 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8032 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8034 [(set_attr "conds" "clob")
8035 (set_attr "length" "12")
8036 (set_attr "type" "multiple")]
;; Expand an indirect jump through a register.  For Thumb-2 (per the
;; comment below) the low bit of the address is forced to 1 in a fresh
;; register so a BX can be used, since "mov pc, reg" is unavailable.
8039 (define_expand "indirect_jump"
8041 (match_operand:SI 0 "s_register_operand"))]
8044 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8045 address and use bx. */
8049 tmp = gen_reg_rtx (SImode);
8050 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
;; ARM-mode register-indirect jump: "mov pc, reg".  Predicable.
8056 ;; NB Never uses BX.
8057 (define_insn "*arm_indirect_jump"
8059 (match_operand:SI 0 "s_register_operand" "r"))]
8061 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8062 [(set_attr "predicable" "yes")
8063 (set_attr "type" "branch")]
;; Indirect jump whose target is loaded straight from memory:
;; "ldr pc, <mem>".  Predicable; pool ranges limit the literal offset.
8066 (define_insn "*load_indirect_jump"
8068 (match_operand:SI 0 "memory_operand" "m"))]
8070 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8071 [(set_attr "type" "load_4")
8072 (set_attr "pool_range" "4096")
8073 (set_attr "neg_pool_range" "4084")
8074 (set_attr "predicable" "yes")]
8084 [(set (attr "length")
8085 (if_then_else (eq_attr "is_thumb" "yes")
8088 (set_attr "type" "mov_reg")]
8092 [(trap_if (const_int 1) (const_int 0))]
8096 return \".inst\\t0xe7f000f0\";
8098 return \".inst\\t0xdeff\";
8100 [(set (attr "length")
8101 (if_then_else (eq_attr "is_thumb" "yes")
8104 (set_attr "type" "trap")
8105 (set_attr "conds" "unconditional")]
8109 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; Arithmetic op combined with a multiply by a power of two, emitted as
;; the op with an LSL-shifted operand.  %b3 presumably prints the log2
;; of the power-of-two constant -- TODO confirm against arm_print_operand.
8111 (define_insn "*<arith_shift_insn>_multsi"
8112 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8114 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8115 (match_operand:SI 3 "power_of_two_operand" ""))
8116 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8118 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8119 [(set_attr "predicable" "yes")
8120 (set_attr "shift" "2")
8121 (set_attr "arch" "a,t2")
8122 (set_attr "type" "alu_shift_imm")])
;; Arithmetic op combined with an explicit shift of one operand
;; (%S2 prints the shift).  MULT is excluded here; the *_multsi pattern
;; above handles that case.  Third alternative allows a register shift
;; amount (ARM mode only, type alu_shift_reg).
8124 (define_insn "*<arith_shift_insn>_shiftsi"
8125 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8127 (match_operator:SI 2 "shift_nomul_operator"
8128 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8129 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8130 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8131 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8132 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8133 [(set_attr "predicable" "yes")
8134 (set_attr "shift" "3")
8135 (set_attr "arch" "a,t2,a")
8136 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8139 [(set (match_operand:SI 0 "s_register_operand" "")
8140 (match_operator:SI 1 "shiftable_operator"
8141 [(match_operator:SI 2 "shiftable_operator"
8142 [(match_operator:SI 3 "shift_operator"
8143 [(match_operand:SI 4 "s_register_operand" "")
8144 (match_operand:SI 5 "reg_or_int_operand" "")])
8145 (match_operand:SI 6 "s_register_operand" "")])
8146 (match_operand:SI 7 "arm_rhs_operand" "")]))
8147 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8150 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8153 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting form of arith+shift: performs the operation with the S
;; suffix ("%i1s"), comparing the result against zero (CC_NOOV) while
;; also writing the destination register.
8156 (define_insn "*arith_shiftsi_compare0"
8157 [(set (reg:CC_NOOV CC_REGNUM)
8159 (match_operator:SI 1 "shiftable_operator"
8160 [(match_operator:SI 3 "shift_operator"
8161 [(match_operand:SI 4 "s_register_operand" "r,r")
8162 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8163 (match_operand:SI 2 "s_register_operand" "r,r")])
8165 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8166 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8169 "%i1s%?\\t%0, %2, %4%S3"
8170 [(set_attr "conds" "set")
8171 (set_attr "shift" "4")
8172 (set_attr "arch" "32,a")
8173 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As *arith_shiftsi_compare0 but only the condition codes are wanted:
;; the arithmetic result goes to a scratch register (operand 0).
8175 (define_insn "*arith_shiftsi_compare0_scratch"
8176 [(set (reg:CC_NOOV CC_REGNUM)
8178 (match_operator:SI 1 "shiftable_operator"
8179 [(match_operator:SI 3 "shift_operator"
8180 [(match_operand:SI 4 "s_register_operand" "r,r")
8181 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8182 (match_operand:SI 2 "s_register_operand" "r,r")])
8184 (clobber (match_scratch:SI 0 "=r,r"))]
8186 "%i1s%?\\t%0, %2, %4%S3"
8187 [(set_attr "conds" "set")
8188 (set_attr "shift" "4")
8189 (set_attr "arch" "32,a")
8190 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Subtract with a shifted subtrahend: "sub rd, rn, rm <shift>".
;; Predicable, but not in a 16-bit IT block (predicable_short_it no).
8192 (define_insn "*sub_shiftsi"
8193 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8194 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8195 (match_operator:SI 2 "shift_operator"
8196 [(match_operand:SI 3 "s_register_operand" "r,r")
8197 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8199 "sub%?\\t%0, %1, %3%S2"
8200 [(set_attr "predicable" "yes")
8201 (set_attr "predicable_short_it" "no")
8202 (set_attr "shift" "3")
8203 (set_attr "arch" "32,a")
8204 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting subtract with shifted subtrahend ("subs"), writing both
;; CC_NOOV and the destination register.
8206 (define_insn "*sub_shiftsi_compare0"
8207 [(set (reg:CC_NOOV CC_REGNUM)
8209 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8210 (match_operator:SI 2 "shift_operator"
8211 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8212 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8214 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8215 (minus:SI (match_dup 1)
8216 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8218 "subs%?\\t%0, %1, %3%S2"
8219 [(set_attr "conds" "set")
8220 (set_attr "shift" "3")
8221 (set_attr "arch" "32,a,a")
8222 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; As *sub_shiftsi_compare0 but only the flags are wanted; the
;; subtraction result lands in a scratch register.
8224 (define_insn "*sub_shiftsi_compare0_scratch"
8225 [(set (reg:CC_NOOV CC_REGNUM)
8227 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8228 (match_operator:SI 2 "shift_operator"
8229 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8230 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8232 (clobber (match_scratch:SI 0 "=r,r,r"))]
8234 "subs%?\\t%0, %1, %3%S2"
8235 [(set_attr "conds" "set")
8236 (set_attr "shift" "3")
8237 (set_attr "arch" "32,a,a")
8238 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; AND of a condition (0/1) with a register.  After reload, split into
;; two conditional moves: "mov 0" under the inverse condition and
;; "and #1" under the condition itself.  operands[4] is the original
;; comparison rebuilt with VOIDmode, operands[5] its reverse (using
;; reverse_condition_maybe_unordered for the FP CC modes).
8241 (define_insn_and_split "*and_scc"
8242 [(set (match_operand:SI 0 "s_register_operand" "=r")
8243 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8244 [(match_operand 2 "cc_register" "") (const_int 0)])
8245 (match_operand:SI 3 "s_register_operand" "r")))]
8247 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8248 "&& reload_completed"
8249 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8250 (cond_exec (match_dup 4) (set (match_dup 0)
8251 (and:SI (match_dup 3) (const_int 1))))]
8253 machine_mode mode = GET_MODE (operands[2]);
8254 enum rtx_code rc = GET_CODE (operands[1]);
8256 /* Note that operands[4] is the same as operands[1],
8257 but with VOIDmode as the result. */
8258 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8259 if (mode == CCFPmode || mode == CCFPEmode)
8260 rc = reverse_condition_maybe_unordered (rc);
8262 rc = reverse_condition (rc);
8263 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8265 [(set_attr "conds" "use")
8266 (set_attr "type" "multiple")
8267 (set_attr "length" "8")]
;; OR of a condition (0/1) with a register.  Alternative 0 ties the
;; destination to operand 3 so a single conditional "orr" suffices
;; (length 4); alternative 1 splits after reload into a conditional
;; move plus a conditional "orr #1".  operands[4]/[5] are the condition
;; and its reverse, as in *and_scc above.
8270 (define_insn_and_split "*ior_scc"
8271 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8272 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8273 [(match_operand 2 "cc_register" "") (const_int 0)])
8274 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8279 "&& reload_completed
8280 && REGNO (operands [0]) != REGNO (operands[3])"
8281 ;; && which_alternative == 1
8282 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8283 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8284 (cond_exec (match_dup 4) (set (match_dup 0)
8285 (ior:SI (match_dup 3) (const_int 1))))]
8287 machine_mode mode = GET_MODE (operands[2]);
8288 enum rtx_code rc = GET_CODE (operands[1]);
8290 /* Note that operands[4] is the same as operands[1],
8291 but with VOIDmode as the result. */
8292 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8293 if (mode == CCFPmode || mode == CCFPEmode)
8294 rc = reverse_condition_maybe_unordered (rc);
8296 rc = reverse_condition (rc);
8297 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8299 [(set_attr "conds" "use")
8300 (set_attr "length" "4,8")
8301 (set_attr "type" "logic_imm,multiple")]
8304 ; A series of splitters for the compare_scc pattern below. Note that
8305 ; order is important.
8307 [(set (match_operand:SI 0 "s_register_operand" "")
8308 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8310 (clobber (reg:CC CC_REGNUM))]
8311 "TARGET_32BIT && reload_completed"
8312 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8315 [(set (match_operand:SI 0 "s_register_operand" "")
8316 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8318 (clobber (reg:CC CC_REGNUM))]
8319 "TARGET_32BIT && reload_completed"
8320 [(set (match_dup 0) (not:SI (match_dup 1)))
8321 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8324 [(set (match_operand:SI 0 "s_register_operand" "")
8325 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8327 (clobber (reg:CC CC_REGNUM))]
8328 "arm_arch5t && TARGET_32BIT"
8329 [(set (match_dup 0) (clz:SI (match_dup 1)))
8330 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8334 [(set (match_operand:SI 0 "s_register_operand" "")
8335 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8337 (clobber (reg:CC CC_REGNUM))]
8338 "TARGET_32BIT && reload_completed"
8340 [(set (reg:CC CC_REGNUM)
8341 (compare:CC (const_int 1) (match_dup 1)))
8343 (minus:SI (const_int 1) (match_dup 1)))])
8344 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
8345 (set (match_dup 0) (const_int 0)))])
8348 [(set (match_operand:SI 0 "s_register_operand" "")
8349 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8350 (match_operand:SI 2 "const_int_operand" "")))
8351 (clobber (reg:CC CC_REGNUM))]
8352 "TARGET_32BIT && reload_completed"
8354 [(set (reg:CC CC_REGNUM)
8355 (compare:CC (match_dup 1) (match_dup 2)))
8356 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
8357 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
8358 (set (match_dup 0) (const_int 1)))]
8360 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
8364 [(set (match_operand:SI 0 "s_register_operand" "")
8365 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8366 (match_operand:SI 2 "arm_add_operand" "")))
8367 (clobber (reg:CC CC_REGNUM))]
8368 "TARGET_32BIT && reload_completed"
8370 [(set (reg:CC_NOOV CC_REGNUM)
8371 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
8373 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
8374 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
8375 (set (match_dup 0) (const_int 1)))])
;; Generic scc: set operand 0 to 1 if the comparison holds, else 0.
;; After reload, split into a compare followed by two cond_exec sets
;; (0 under the reversed condition in operands[4], 1 under the original
;; condition in operands[5]), using the CC mode SELECT_CC_MODE picks.
8377 (define_insn_and_split "*compare_scc"
8378 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8379 (match_operator:SI 1 "arm_comparison_operator"
8380 [(match_operand:SI 2 "s_register_operand" "r,r")
8381 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
8382 (clobber (reg:CC CC_REGNUM))]
8385 "&& reload_completed"
8386 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
8387 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
8388 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
8391 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
8392 operands[2], operands[3]);
8393 enum rtx_code rc = GET_CODE (operands[1]);
8395 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
8397 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8398 if (mode == CCFPmode || mode == CCFPEmode)
8399 rc = reverse_condition_maybe_unordered (rc);
8401 rc = reverse_condition (rc);
8402 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
8404 [(set_attr "type" "multiple")]
8407 ;; Attempt to improve the sequence generated by the compare_scc splitters
8408 ;; not to use conditional execution.
8410 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
8414 [(set (reg:CC CC_REGNUM)
8415 (compare:CC (match_operand:SI 1 "register_operand" "")
8417 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8418 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8419 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8420 (set (match_dup 0) (const_int 1)))]
8421 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8422 [(set (match_dup 0) (clz:SI (match_dup 1)))
8423 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8426 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
8430 [(set (reg:CC CC_REGNUM)
8431 (compare:CC (match_operand:SI 1 "register_operand" "")
8433 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8434 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8435 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8436 (set (match_dup 0) (const_int 1)))
8437 (match_scratch:SI 2 "r")]
8438 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8440 [(set (reg:CC CC_REGNUM)
8441 (compare:CC (const_int 0) (match_dup 1)))
8442 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
8444 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
8445 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8448 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
8449 ;; sub Rd, Reg1, reg2
8453 [(set (reg:CC CC_REGNUM)
8454 (compare:CC (match_operand:SI 1 "register_operand" "")
8455 (match_operand:SI 2 "arm_rhs_operand" "")))
8456 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8457 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8458 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8459 (set (match_dup 0) (const_int 1)))]
8460 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
8461 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
8462 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
8463 (set (match_dup 0) (clz:SI (match_dup 0)))
8464 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
8468 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
8469 ;; sub T1, Reg1, reg2
8473 [(set (reg:CC CC_REGNUM)
8474 (compare:CC (match_operand:SI 1 "register_operand" "")
8475 (match_operand:SI 2 "arm_rhs_operand" "")))
8476 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
8477 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
8478 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
8479 (set (match_dup 0) (const_int 1)))
8480 (match_scratch:SI 3 "r")]
8481 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
8482 [(set (match_dup 3) (match_dup 4))
8484 [(set (reg:CC CC_REGNUM)
8485 (compare:CC (const_int 0) (match_dup 3)))
8486 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
8488 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
8489 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
8491 if (CONST_INT_P (operands[2]))
8492 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
8494 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move driven by an equality test on an existing CC-based
;; comparison (operand 4 against cc register operand 5).  The C output
;; code emits one or two conditional "mov"s depending on which source is
;; tied to the destination (alternatives 0/1) and whether operand 3 is
;; NE or EQ, which swaps the roles of %d4/%D4.
8497 (define_insn "*cond_move"
8498 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8499 (if_then_else:SI (match_operator 3 "equality_operator"
8500 [(match_operator 4 "arm_comparison_operator"
8501 [(match_operand 5 "cc_register" "") (const_int 0)])
8503 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
8504 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
8507 if (GET_CODE (operands[3]) == NE)
8509 if (which_alternative != 1)
8510 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
8511 if (which_alternative != 0)
8512 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
8515 if (which_alternative != 0)
8516 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8517 if (which_alternative != 1)
8518 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
8521 [(set_attr "conds" "use")
8522 (set_attr_alternative "type"
8523 [(if_then_else (match_operand 2 "const_int_operand" "")
8524 (const_string "mov_imm")
8525 (const_string "mov_reg"))
8526 (if_then_else (match_operand 1 "const_int_operand" "")
8527 (const_string "mov_imm")
8528 (const_string "mov_reg"))
8529 (const_string "multiple")])
8530 (set_attr "length" "4,4,8")]
;; Shiftable arithmetic op whose first input is a comparison result.
;; Special-cases LT against zero as a sign-bit extract ("op rd, rn,
;; rm, lsr #31"); otherwise emits a cmp and conditional fixups chosen
;; by the operator (AND zeroes, MINUS negates, others move), finishing
;; with a conditional "op rd, rn, #1".  Clobbers the condition codes.
8533 (define_insn "*cond_arith"
8534 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8535 (match_operator:SI 5 "shiftable_operator"
8536 [(match_operator:SI 4 "arm_comparison_operator"
8537 [(match_operand:SI 2 "s_register_operand" "r,r")
8538 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
8539 (match_operand:SI 1 "s_register_operand" "0,?r")]))
8540 (clobber (reg:CC CC_REGNUM))]
8543 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
8544 return \"%i5\\t%0, %1, %2, lsr #31\";
8546 output_asm_insn (\"cmp\\t%2, %3\", operands);
8547 if (GET_CODE (operands[5]) == AND)
8548 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
8549 else if (GET_CODE (operands[5]) == MINUS)
8550 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
8551 else if (which_alternative != 0)
8552 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8553 return \"%i5%d4\\t%0, %1, #1\";
8555 [(set_attr "conds" "clob")
8556 (set_attr "length" "12")
8557 (set_attr "type" "multiple")]
;; reg minus comparison-result (0 or 1): emit the cmp, optionally move
;; operand 1 into the destination (alternative 1), then conditionally
;; "sub rd, rn, #1" when the comparison holds.  Clobbers the flags.
8560 (define_insn "*cond_sub"
8561 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8562 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
8563 (match_operator:SI 4 "arm_comparison_operator"
8564 [(match_operand:SI 2 "s_register_operand" "r,r")
8565 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
8566 (clobber (reg:CC CC_REGNUM))]
8569 output_asm_insn (\"cmp\\t%2, %3\", operands);
8570 if (which_alternative != 0)
8571 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
8572 return \"sub%d4\\t%0, %1, #1\";
8574 [(set_attr "conds" "clob")
8575 (set_attr "length" "8,12")
8576 (set_attr "type" "multiple")]
;; Combined compare pair feeding a dominance CC register.  The cmp1/cmp2
;; tables select "cmp" vs "cmn #-imm" per operand polarity (CMP_CMP etc.
;; indices via cmp_idx); "swap" records whether operand 5's condition
;; dominates operand 4's, choosing which compare is conditionalised.
;; Thumb-2 additionally emits an IT instruction between the compares.
8579 (define_insn "*cmp_ite0"
8580 [(set (match_operand 6 "dominant_cc_register" "")
8583 (match_operator 4 "arm_comparison_operator"
8584 [(match_operand:SI 0 "s_register_operand"
8585 "l,l,l,r,r,r,r,r,r")
8586 (match_operand:SI 1 "arm_add_operand"
8587 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8588 (match_operator:SI 5 "arm_comparison_operator"
8589 [(match_operand:SI 2 "s_register_operand"
8590 "l,r,r,l,l,r,r,r,r")
8591 (match_operand:SI 3 "arm_add_operand"
8592 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8598 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8600 {\"cmp%d5\\t%0, %1\",
8601 \"cmp%d4\\t%2, %3\"},
8602 {\"cmn%d5\\t%0, #%n1\",
8603 \"cmp%d4\\t%2, %3\"},
8604 {\"cmp%d5\\t%0, %1\",
8605 \"cmn%d4\\t%2, #%n3\"},
8606 {\"cmn%d5\\t%0, #%n1\",
8607 \"cmn%d4\\t%2, #%n3\"}
8609 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8614 \"cmn\\t%0, #%n1\"},
8615 {\"cmn\\t%2, #%n3\",
8617 {\"cmn\\t%2, #%n3\",
8620 static const char * const ite[2] =
8625 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8626 CMP_CMP, CMN_CMP, CMP_CMP,
8627 CMN_CMP, CMP_CMN, CMN_CMN};
8629 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8631 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8632 if (TARGET_THUMB2) {
8633 output_asm_insn (ite[swap], operands);
8635 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8638 [(set_attr "conds" "set")
8639 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8640 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8641 (set_attr "type" "multiple")
8642 (set_attr_alternative "length"
8644 (if_then_else (eq_attr "is_thumb" "no")
8648 (if_then_else (eq_attr "is_thumb" "no")
8651 (if_then_else (eq_attr "is_thumb" "no")
8654 (if_then_else (eq_attr "is_thumb" "no")
8657 (if_then_else (eq_attr "is_thumb" "no")
8662 (define_insn "*cmp_ite1"
8662 (define_insn "*cmp_ite1"
8663 [(set (match_operand 6 "dominant_cc_register" "")
8666 (match_operator 4 "arm_comparison_operator"
8667 [(match_operand:SI 0 "s_register_operand"
8668 "l,l,l,r,r,r,r,r,r")
8669 (match_operand:SI 1 "arm_add_operand"
8670 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
8671 (match_operator:SI 5 "arm_comparison_operator"
8672 [(match_operand:SI 2 "s_register_operand"
8673 "l,r,r,l,l,r,r,r,r")
8674 (match_operand:SI 3 "arm_add_operand"
8675 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
8681 static const char * const cmp1[NUM_OF_COND_CMP][2] =
8685 {\"cmn\\t%0, #%n1\",
8688 \"cmn\\t%2, #%n3\"},
8689 {\"cmn\\t%0, #%n1\",
8692 static const char * const cmp2[NUM_OF_COND_CMP][2] =
8694 {\"cmp%d4\\t%2, %3\",
8695 \"cmp%D5\\t%0, %1\"},
8696 {\"cmp%d4\\t%2, %3\",
8697 \"cmn%D5\\t%0, #%n1\"},
8698 {\"cmn%d4\\t%2, #%n3\",
8699 \"cmp%D5\\t%0, %1\"},
8700 {\"cmn%d4\\t%2, #%n3\",
8701 \"cmn%D5\\t%0, #%n1\"}
8703 static const char * const ite[2] =
8708 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
8709 CMP_CMP, CMN_CMP, CMP_CMP,
8710 CMN_CMP, CMP_CMN, CMN_CMN};
8712 comparison_dominates_p (GET_CODE (operands[5]),
8713 reverse_condition (GET_CODE (operands[4])));
8715 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8716 if (TARGET_THUMB2) {
8717 output_asm_insn (ite[swap], operands);
8719 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8722 [(set_attr "conds" "set")
8723 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
8724 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
8725 (set_attr_alternative "length"
8731 (if_then_else (eq_attr "is_thumb" "no")
8734 (if_then_else (eq_attr "is_thumb" "no")
8737 (if_then_else (eq_attr "is_thumb" "no")
8740 (if_then_else (eq_attr "is_thumb" "no")
8743 (set_attr "type" "multiple")]
;; AND of two comparisons into a dominance CC register: emit the
;; dominated compare unconditionally (cmp2), then (under Thumb-2, an IT
;; plus) the conditional compare (cmp1, %d-suffixed).  Ten alternatives
;; cover low-reg Thumb-2 and full-register ARM forms; cmp_idx maps each
;; alternative onto the cmp/cmn string pair.  Not predicable.
8746 (define_insn "*cmp_and"
8747 [(set (match_operand 6 "dominant_cc_register" "")
8750 (match_operator 4 "arm_comparison_operator"
8751 [(match_operand:SI 0 "s_register_operand"
8752 "l,l,l,r,r,r,r,r,r,r")
8753 (match_operand:SI 1 "arm_add_operand"
8754 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8755 (match_operator:SI 5 "arm_comparison_operator"
8756 [(match_operand:SI 2 "s_register_operand"
8757 "l,r,r,l,l,r,r,r,r,r")
8758 (match_operand:SI 3 "arm_add_operand"
8759 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8764 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8766 {\"cmp%d5\\t%0, %1\",
8767 \"cmp%d4\\t%2, %3\"},
8768 {\"cmn%d5\\t%0, #%n1\",
8769 \"cmp%d4\\t%2, %3\"},
8770 {\"cmp%d5\\t%0, %1\",
8771 \"cmn%d4\\t%2, #%n3\"},
8772 {\"cmn%d5\\t%0, #%n1\",
8773 \"cmn%d4\\t%2, #%n3\"}
8775 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8780 \"cmn\\t%0, #%n1\"},
8781 {\"cmn\\t%2, #%n3\",
8783 {\"cmn\\t%2, #%n3\",
8786 static const char *const ite[2] =
8791 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8792 CMP_CMP, CMN_CMP, CMP_CMP,
8793 CMP_CMP, CMN_CMP, CMP_CMN,
8796 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8798 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8799 if (TARGET_THUMB2) {
8800 output_asm_insn (ite[swap], operands);
8802 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8805 [(set_attr "conds" "set")
8806 (set_attr "predicable" "no")
8807 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8808 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8809 (set_attr_alternative "length"
8816 (if_then_else (eq_attr "is_thumb" "no")
8819 (if_then_else (eq_attr "is_thumb" "no")
8822 (if_then_else (eq_attr "is_thumb" "no")
8825 (if_then_else (eq_attr "is_thumb" "no")
8828 (set_attr "type" "multiple")]
;; IOR of two comparisons into a dominance CC register.  Mirror of
;; *cmp_and: the first compare is unconditional, the second is executed
;; under the INVERSE of the first (%D4/%D5 suffixes in cmp2), so the
;; flags end up reflecting "either comparison held".  Thumb-2 inserts
;; an IT instruction between the two.
8831 (define_insn "*cmp_ior"
8832 [(set (match_operand 6 "dominant_cc_register" "")
8835 (match_operator 4 "arm_comparison_operator"
8836 [(match_operand:SI 0 "s_register_operand"
8837 "l,l,l,r,r,r,r,r,r,r")
8838 (match_operand:SI 1 "arm_add_operand"
8839 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
8840 (match_operator:SI 5 "arm_comparison_operator"
8841 [(match_operand:SI 2 "s_register_operand"
8842 "l,r,r,l,l,r,r,r,r,r")
8843 (match_operand:SI 3 "arm_add_operand"
8844 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
8849 static const char *const cmp1[NUM_OF_COND_CMP][2] =
8853 {\"cmn\\t%0, #%n1\",
8856 \"cmn\\t%2, #%n3\"},
8857 {\"cmn\\t%0, #%n1\",
8860 static const char *const cmp2[NUM_OF_COND_CMP][2] =
8862 {\"cmp%D4\\t%2, %3\",
8863 \"cmp%D5\\t%0, %1\"},
8864 {\"cmp%D4\\t%2, %3\",
8865 \"cmn%D5\\t%0, #%n1\"},
8866 {\"cmn%D4\\t%2, #%n3\",
8867 \"cmp%D5\\t%0, %1\"},
8868 {\"cmn%D4\\t%2, #%n3\",
8869 \"cmn%D5\\t%0, #%n1\"}
8871 static const char *const ite[2] =
8876 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
8877 CMP_CMP, CMN_CMP, CMP_CMP,
8878 CMP_CMP, CMN_CMP, CMP_CMN,
8881 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
8883 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
8884 if (TARGET_THUMB2) {
8885 output_asm_insn (ite[swap], operands);
8887 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
8891 [(set_attr "conds" "set")
8892 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
8893 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
8894 (set_attr_alternative "length"
8901 (if_then_else (eq_attr "is_thumb" "no")
8904 (if_then_else (eq_attr "is_thumb" "no")
8907 (if_then_else (eq_attr "is_thumb" "no")
8910 (if_then_else (eq_attr "is_thumb" "no")
8913 (set_attr "type" "multiple")]
;; (scc | scc) -> register.  Valid only when arm_select_dominance_cc_mode
;; can find a dominance mode for DOM_CC_X_OR_Y; after reload, split into
;; the combined compare (matching *cmp_ior above) in the dominance CC
;; register operands[7], followed by an NE scc of that register.
8916 (define_insn_and_split "*ior_scc_scc"
8917 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8918 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8919 [(match_operand:SI 1 "s_register_operand" "l,r")
8920 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8921 (match_operator:SI 6 "arm_comparison_operator"
8922 [(match_operand:SI 4 "s_register_operand" "l,r")
8923 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8924 (clobber (reg:CC CC_REGNUM))]
8926 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
8929 "TARGET_32BIT && reload_completed"
8933 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8934 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8936 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
8938 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
8941 [(set_attr "conds" "clob")
8942 (set_attr "enabled_for_short_it" "yes,no")
8943 (set_attr "length" "16")
8944 (set_attr "type" "multiple")]
8947 ; If the above pattern is followed by a CMP insn, then the compare is
8948 ; redundant, since we can rework the conditional instruction that follows.
;; Variant of *ior_scc_scc whose result also feeds a dominance CC
;; register (operand 0): the split keeps the combined compare and
;; rematerialises the 0/1 value with an NE scc of operand 0, avoiding
;; the redundant trailing CMP.
8949 (define_insn_and_split "*ior_scc_scc_cmp"
8950 [(set (match_operand 0 "dominant_cc_register" "")
8951 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
8952 [(match_operand:SI 1 "s_register_operand" "l,r")
8953 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8954 (match_operator:SI 6 "arm_comparison_operator"
8955 [(match_operand:SI 4 "s_register_operand" "l,r")
8956 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
8958 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
8959 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8960 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
8963 "TARGET_32BIT && reload_completed"
8967 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8968 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
8970 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
8972 [(set_attr "conds" "set")
8973 (set_attr "enabled_for_short_it" "yes,no")
8974 (set_attr "length" "16")
8975 (set_attr "type" "multiple")]
;; (scc & scc) -> register; AND analogue of *ior_scc_scc, gated on a
;; DOM_CC_X_AND_Y dominance mode.  After reload, split into the combined
;; compare in dominance CC register operands[7] plus an NE scc of it.
8978 (define_insn_and_split "*and_scc_scc"
8979 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
8980 (and:SI (match_operator:SI 3 "arm_comparison_operator"
8981 [(match_operand:SI 1 "s_register_operand" "l,r")
8982 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
8983 (match_operator:SI 6 "arm_comparison_operator"
8984 [(match_operand:SI 4 "s_register_operand" "l,r")
8985 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
8986 (clobber (reg:CC CC_REGNUM))]
8988 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8991 "TARGET_32BIT && reload_completed
8992 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
8997 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
8998 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9000 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9002 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9005 [(set_attr "conds" "clob")
9006 (set_attr "enabled_for_short_it" "yes,no")
9007 (set_attr "length" "16")
9008 (set_attr "type" "multiple")]
9011 ; If the above pattern is followed by a CMP insn, then the compare is
9012 ; redundant, since we can rework the conditional instruction that follows.
;; AND analogue of *ior_scc_scc_cmp: the combined comparison already
;; sets the dominance CC register (operand 0), so the split reuses it
;; and derives the 0/1 value with an NE scc instead of re-comparing.
9013 (define_insn_and_split "*and_scc_scc_cmp"
9014 [(set (match_operand 0 "dominant_cc_register" "")
9015 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9016 [(match_operand:SI 1 "s_register_operand" "l,r")
9017 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9018 (match_operator:SI 6 "arm_comparison_operator"
9019 [(match_operand:SI 4 "s_register_operand" "l,r")
9020 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9022 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9023 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9024 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9027 "TARGET_32BIT && reload_completed"
9031 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9032 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9034 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9036 [(set_attr "conds" "set")
9037 (set_attr "enabled_for_short_it" "yes,no")
9038 (set_attr "length" "16")
9039 (set_attr "type" "multiple")]
9042 ;; If there is no dominance in the comparison, then we can still save an
9043 ;; instruction in the AND case, since we can know that the second compare
9044 ;; need only zero the value if false (if true, then the value is already
;; Split sequence: compute the first scc into operand 0 (clobbering
;; flags), do the second compare in whatever CC mode SELECT_CC_MODE
;; chooses (operands[7]/[8] built in the preparation code), then
;; conditionally keep or zero operand 0.  Earlyclobber destination.
9046 (define_insn_and_split "*and_scc_scc_nodom"
9047 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9048 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9049 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9050 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9051 (match_operator:SI 6 "arm_comparison_operator"
9052 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9053 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9054 (clobber (reg:CC CC_REGNUM))]
9056 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9059 "TARGET_32BIT && reload_completed"
9060 [(parallel [(set (match_dup 0)
9061 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9062 (clobber (reg:CC CC_REGNUM))])
9063 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9065 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9068 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9069 operands[4], operands[5]),
9071 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9073 [(set_attr "conds" "clob")
9074 (set_attr "length" "20")
9075 (set_attr "type" "multiple")]
9079 [(set (reg:CC_NOOV CC_REGNUM)
9080 (compare:CC_NOOV (ior:SI
9081 (and:SI (match_operand:SI 0 "s_register_operand" "")
9083 (match_operator:SI 1 "arm_comparison_operator"
9084 [(match_operand:SI 2 "s_register_operand" "")
9085 (match_operand:SI 3 "arm_add_operand" "")]))
9087 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9090 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9092 (set (reg:CC_NOOV CC_REGNUM)
9093 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9098 [(set (reg:CC_NOOV CC_REGNUM)
9099 (compare:CC_NOOV (ior:SI
9100 (match_operator:SI 1 "arm_comparison_operator"
9101 [(match_operand:SI 2 "s_register_operand" "")
9102 (match_operand:SI 3 "arm_add_operand" "")])
9103 (and:SI (match_operand:SI 0 "s_register_operand" "")
9106 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9109 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9111 (set (reg:CC_NOOV CC_REGNUM)
9112 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9115 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9117 (define_insn_and_split "*negscc"
9118 [(set (match_operand:SI 0 "s_register_operand" "=r")
9119 (neg:SI (match_operator 3 "arm_comparison_operator"
9120 [(match_operand:SI 1 "s_register_operand" "r")
9121 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9122 (clobber (reg:CC CC_REGNUM))]
9125 "&& reload_completed"
9128 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9130 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9132 /* Emit mov\\t%0, %1, asr #31 */
9133 emit_insn (gen_rtx_SET (operands[0],
9134 gen_rtx_ASHIFTRT (SImode,
9139 else if (GET_CODE (operands[3]) == NE)
9141 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9142 if (CONST_INT_P (operands[2]))
9143 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9144 gen_int_mode (-INTVAL (operands[2]),
9147 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9149 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9153 gen_rtx_SET (operands[0],
9159 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9160 emit_insn (gen_rtx_SET (cc_reg,
9161 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9162 enum rtx_code rc = GET_CODE (operands[3]);
9164 rc = reverse_condition (rc);
9165 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9170 gen_rtx_SET (operands[0], const0_rtx)));
9171 rc = GET_CODE (operands[3]);
9172 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9177 gen_rtx_SET (operands[0],
9183 [(set_attr "conds" "clob")
9184 (set_attr "length" "12")
9185 (set_attr "type" "multiple")]
9188 (define_insn_and_split "movcond_addsi"
9189 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9191 (match_operator 5 "comparison_operator"
9192 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9193 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9195 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9196 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9197 (clobber (reg:CC CC_REGNUM))]
9200 "&& reload_completed"
9201 [(set (reg:CC_NOOV CC_REGNUM)
9203 (plus:SI (match_dup 3)
9206 (set (match_dup 0) (match_dup 1))
9207 (cond_exec (match_dup 6)
9208 (set (match_dup 0) (match_dup 2)))]
9211 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9212 operands[3], operands[4]);
9213 enum rtx_code rc = GET_CODE (operands[5]);
9214 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9215 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9216 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9217 rc = reverse_condition (rc);
9219 std::swap (operands[1], operands[2]);
9221 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9224 [(set_attr "conds" "clob")
9225 (set_attr "enabled_for_short_it" "no,yes,yes")
9226 (set_attr "type" "multiple")]
9229 (define_insn "movcond"
9230 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9232 (match_operator 5 "arm_comparison_operator"
9233 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9234 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9235 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9236 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9237 (clobber (reg:CC CC_REGNUM))]
9240 if (GET_CODE (operands[5]) == LT
9241 && (operands[4] == const0_rtx))
9243 if (which_alternative != 1 && REG_P (operands[1]))
9245 if (operands[2] == const0_rtx)
9246 return \"and\\t%0, %1, %3, asr #31\";
9247 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9249 else if (which_alternative != 0 && REG_P (operands[2]))
9251 if (operands[1] == const0_rtx)
9252 return \"bic\\t%0, %2, %3, asr #31\";
9253 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9255 /* The only case that falls through to here is when both ops 1 & 2
9259 if (GET_CODE (operands[5]) == GE
9260 && (operands[4] == const0_rtx))
9262 if (which_alternative != 1 && REG_P (operands[1]))
9264 if (operands[2] == const0_rtx)
9265 return \"bic\\t%0, %1, %3, asr #31\";
9266 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9268 else if (which_alternative != 0 && REG_P (operands[2]))
9270 if (operands[1] == const0_rtx)
9271 return \"and\\t%0, %2, %3, asr #31\";
9272 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9274 /* The only case that falls through to here is when both ops 1 & 2
9277 if (CONST_INT_P (operands[4])
9278 && !const_ok_for_arm (INTVAL (operands[4])))
9279 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9281 output_asm_insn (\"cmp\\t%3, %4\", operands);
9282 if (which_alternative != 0)
9283 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9284 if (which_alternative != 1)
9285 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9288 [(set_attr "conds" "clob")
9289 (set_attr "length" "8,8,12")
9290 (set_attr "type" "multiple")]
9293 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9295 (define_insn "*ifcompare_plus_move"
9296 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9297 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9298 [(match_operand:SI 4 "s_register_operand" "r,r")
9299 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9301 (match_operand:SI 2 "s_register_operand" "r,r")
9302 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9303 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9304 (clobber (reg:CC CC_REGNUM))]
9307 [(set_attr "conds" "clob")
9308 (set_attr "length" "8,12")
9309 (set_attr "type" "multiple")]
9312 (define_insn "*if_plus_move"
9313 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9315 (match_operator 4 "arm_comparison_operator"
9316 [(match_operand 5 "cc_register" "") (const_int 0)])
9318 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9319 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9320 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9324 sub%d4\\t%0, %2, #%n3
9325 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9326 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9327 [(set_attr "conds" "use")
9328 (set_attr "length" "4,4,8,8")
9329 (set_attr_alternative "type"
9330 [(if_then_else (match_operand 3 "const_int_operand" "")
9331 (const_string "alu_imm" )
9332 (const_string "alu_sreg"))
9333 (const_string "alu_imm")
9334 (const_string "multiple")
9335 (const_string "multiple")])]
9338 (define_insn "*ifcompare_move_plus"
9339 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9340 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9341 [(match_operand:SI 4 "s_register_operand" "r,r")
9342 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9343 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9345 (match_operand:SI 2 "s_register_operand" "r,r")
9346 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9347 (clobber (reg:CC CC_REGNUM))]
9350 [(set_attr "conds" "clob")
9351 (set_attr "length" "8,12")
9352 (set_attr "type" "multiple")]
9355 (define_insn "*if_move_plus"
9356 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9358 (match_operator 4 "arm_comparison_operator"
9359 [(match_operand 5 "cc_register" "") (const_int 0)])
9360 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9362 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9363 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9367 sub%D4\\t%0, %2, #%n3
9368 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9369 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9370 [(set_attr "conds" "use")
9371 (set_attr "length" "4,4,8,8")
9372 (set_attr_alternative "type"
9373 [(if_then_else (match_operand 3 "const_int_operand" "")
9374 (const_string "alu_imm" )
9375 (const_string "alu_sreg"))
9376 (const_string "alu_imm")
9377 (const_string "multiple")
9378 (const_string "multiple")])]
9381 (define_insn "*ifcompare_arith_arith"
9382 [(set (match_operand:SI 0 "s_register_operand" "=r")
9383 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9384 [(match_operand:SI 5 "s_register_operand" "r")
9385 (match_operand:SI 6 "arm_add_operand" "rIL")])
9386 (match_operator:SI 8 "shiftable_operator"
9387 [(match_operand:SI 1 "s_register_operand" "r")
9388 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9389 (match_operator:SI 7 "shiftable_operator"
9390 [(match_operand:SI 3 "s_register_operand" "r")
9391 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9392 (clobber (reg:CC CC_REGNUM))]
9395 [(set_attr "conds" "clob")
9396 (set_attr "length" "12")
9397 (set_attr "type" "multiple")]
9400 (define_insn "*if_arith_arith"
9401 [(set (match_operand:SI 0 "s_register_operand" "=r")
9402 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9403 [(match_operand 8 "cc_register" "") (const_int 0)])
9404 (match_operator:SI 6 "shiftable_operator"
9405 [(match_operand:SI 1 "s_register_operand" "r")
9406 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9407 (match_operator:SI 7 "shiftable_operator"
9408 [(match_operand:SI 3 "s_register_operand" "r")
9409 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9411 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9412 [(set_attr "conds" "use")
9413 (set_attr "length" "8")
9414 (set_attr "type" "multiple")]
9417 (define_insn "*ifcompare_arith_move"
9418 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9419 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9420 [(match_operand:SI 2 "s_register_operand" "r,r")
9421 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9422 (match_operator:SI 7 "shiftable_operator"
9423 [(match_operand:SI 4 "s_register_operand" "r,r")
9424 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9425 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9426 (clobber (reg:CC CC_REGNUM))]
9429 /* If we have an operation where (op x 0) is the identity operation and
9430 the conditional operator is LT or GE and we are comparing against zero and
9431 everything is in registers then we can do this in two instructions. */
9432 if (operands[3] == const0_rtx
9433 && GET_CODE (operands[7]) != AND
9434 && REG_P (operands[5])
9435 && REG_P (operands[1])
9436 && REGNO (operands[1]) == REGNO (operands[4])
9437 && REGNO (operands[4]) != REGNO (operands[0]))
9439 if (GET_CODE (operands[6]) == LT)
9440 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9441 else if (GET_CODE (operands[6]) == GE)
9442 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9444 if (CONST_INT_P (operands[3])
9445 && !const_ok_for_arm (INTVAL (operands[3])))
9446 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9448 output_asm_insn (\"cmp\\t%2, %3\", operands);
9449 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9450 if (which_alternative != 0)
9451 return \"mov%D6\\t%0, %1\";
9454 [(set_attr "conds" "clob")
9455 (set_attr "length" "8,12")
9456 (set_attr "type" "multiple")]
9459 (define_insn "*if_arith_move"
9460 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9461 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9462 [(match_operand 6 "cc_register" "") (const_int 0)])
9463 (match_operator:SI 5 "shiftable_operator"
9464 [(match_operand:SI 2 "s_register_operand" "r,r")
9465 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9466 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9470 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9471 [(set_attr "conds" "use")
9472 (set_attr "length" "4,8")
9473 (set_attr_alternative "type"
9474 [(if_then_else (match_operand 3 "const_int_operand" "")
9475 (const_string "alu_shift_imm" )
9476 (const_string "alu_shift_reg"))
9477 (const_string "multiple")])]
9480 (define_insn "*ifcompare_move_arith"
9481 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9482 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9483 [(match_operand:SI 4 "s_register_operand" "r,r")
9484 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9485 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9486 (match_operator:SI 7 "shiftable_operator"
9487 [(match_operand:SI 2 "s_register_operand" "r,r")
9488 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9489 (clobber (reg:CC CC_REGNUM))]
9492 /* If we have an operation where (op x 0) is the identity operation and
9493 the conditional operator is LT or GE and we are comparing against zero and
9494 everything is in registers then we can do this in two instructions */
9495 if (operands[5] == const0_rtx
9496 && GET_CODE (operands[7]) != AND
9497 && REG_P (operands[3])
9498 && REG_P (operands[1])
9499 && REGNO (operands[1]) == REGNO (operands[2])
9500 && REGNO (operands[2]) != REGNO (operands[0]))
9502 if (GET_CODE (operands[6]) == GE)
9503 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9504 else if (GET_CODE (operands[6]) == LT)
9505 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9508 if (CONST_INT_P (operands[5])
9509 && !const_ok_for_arm (INTVAL (operands[5])))
9510 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9512 output_asm_insn (\"cmp\\t%4, %5\", operands);
9514 if (which_alternative != 0)
9515 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9516 return \"%I7%D6\\t%0, %2, %3\";
9518 [(set_attr "conds" "clob")
9519 (set_attr "length" "8,12")
9520 (set_attr "type" "multiple")]
9523 (define_insn "*if_move_arith"
9524 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9526 (match_operator 4 "arm_comparison_operator"
9527 [(match_operand 6 "cc_register" "") (const_int 0)])
9528 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9529 (match_operator:SI 5 "shiftable_operator"
9530 [(match_operand:SI 2 "s_register_operand" "r,r")
9531 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9535 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9536 [(set_attr "conds" "use")
9537 (set_attr "length" "4,8")
9538 (set_attr_alternative "type"
9539 [(if_then_else (match_operand 3 "const_int_operand" "")
9540 (const_string "alu_shift_imm" )
9541 (const_string "alu_shift_reg"))
9542 (const_string "multiple")])]
9545 (define_insn "*ifcompare_move_not"
9546 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9548 (match_operator 5 "arm_comparison_operator"
9549 [(match_operand:SI 3 "s_register_operand" "r,r")
9550 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9551 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9553 (match_operand:SI 2 "s_register_operand" "r,r"))))
9554 (clobber (reg:CC CC_REGNUM))]
9557 [(set_attr "conds" "clob")
9558 (set_attr "length" "8,12")
9559 (set_attr "type" "multiple")]
9562 (define_insn "*if_move_not"
9563 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9565 (match_operator 4 "arm_comparison_operator"
9566 [(match_operand 3 "cc_register" "") (const_int 0)])
9567 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9568 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9572 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9573 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9574 [(set_attr "conds" "use")
9575 (set_attr "type" "mvn_reg")
9576 (set_attr "length" "4,8,8")
9577 (set_attr "type" "mvn_reg,multiple,multiple")]
9580 (define_insn "*ifcompare_not_move"
9581 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9583 (match_operator 5 "arm_comparison_operator"
9584 [(match_operand:SI 3 "s_register_operand" "r,r")
9585 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9587 (match_operand:SI 2 "s_register_operand" "r,r"))
9588 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9589 (clobber (reg:CC CC_REGNUM))]
9592 [(set_attr "conds" "clob")
9593 (set_attr "length" "8,12")
9594 (set_attr "type" "multiple")]
9597 (define_insn "*if_not_move"
9598 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9600 (match_operator 4 "arm_comparison_operator"
9601 [(match_operand 3 "cc_register" "") (const_int 0)])
9602 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9603 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9607 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9608 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9609 [(set_attr "conds" "use")
9610 (set_attr "type" "mvn_reg,multiple,multiple")
9611 (set_attr "length" "4,8,8")]
9614 (define_insn "*ifcompare_shift_move"
9615 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9617 (match_operator 6 "arm_comparison_operator"
9618 [(match_operand:SI 4 "s_register_operand" "r,r")
9619 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9620 (match_operator:SI 7 "shift_operator"
9621 [(match_operand:SI 2 "s_register_operand" "r,r")
9622 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9623 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9624 (clobber (reg:CC CC_REGNUM))]
9627 [(set_attr "conds" "clob")
9628 (set_attr "length" "8,12")
9629 (set_attr "type" "multiple")]
9632 (define_insn "*if_shift_move"
9633 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9635 (match_operator 5 "arm_comparison_operator"
9636 [(match_operand 6 "cc_register" "") (const_int 0)])
9637 (match_operator:SI 4 "shift_operator"
9638 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9639 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9640 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9644 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9645 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9646 [(set_attr "conds" "use")
9647 (set_attr "shift" "2")
9648 (set_attr "length" "4,8,8")
9649 (set_attr_alternative "type"
9650 [(if_then_else (match_operand 3 "const_int_operand" "")
9651 (const_string "mov_shift" )
9652 (const_string "mov_shift_reg"))
9653 (const_string "multiple")
9654 (const_string "multiple")])]
9657 (define_insn "*ifcompare_move_shift"
9658 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9660 (match_operator 6 "arm_comparison_operator"
9661 [(match_operand:SI 4 "s_register_operand" "r,r")
9662 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9663 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9664 (match_operator:SI 7 "shift_operator"
9665 [(match_operand:SI 2 "s_register_operand" "r,r")
9666 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9667 (clobber (reg:CC CC_REGNUM))]
9670 [(set_attr "conds" "clob")
9671 (set_attr "length" "8,12")
9672 (set_attr "type" "multiple")]
9675 (define_insn "*if_move_shift"
9676 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9678 (match_operator 5 "arm_comparison_operator"
9679 [(match_operand 6 "cc_register" "") (const_int 0)])
9680 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9681 (match_operator:SI 4 "shift_operator"
9682 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9683 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9687 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9688 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9689 [(set_attr "conds" "use")
9690 (set_attr "shift" "2")
9691 (set_attr "length" "4,8,8")
9692 (set_attr_alternative "type"
9693 [(if_then_else (match_operand 3 "const_int_operand" "")
9694 (const_string "mov_shift" )
9695 (const_string "mov_shift_reg"))
9696 (const_string "multiple")
9697 (const_string "multiple")])]
9700 (define_insn "*ifcompare_shift_shift"
9701 [(set (match_operand:SI 0 "s_register_operand" "=r")
9703 (match_operator 7 "arm_comparison_operator"
9704 [(match_operand:SI 5 "s_register_operand" "r")
9705 (match_operand:SI 6 "arm_add_operand" "rIL")])
9706 (match_operator:SI 8 "shift_operator"
9707 [(match_operand:SI 1 "s_register_operand" "r")
9708 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9709 (match_operator:SI 9 "shift_operator"
9710 [(match_operand:SI 3 "s_register_operand" "r")
9711 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9712 (clobber (reg:CC CC_REGNUM))]
9715 [(set_attr "conds" "clob")
9716 (set_attr "length" "12")
9717 (set_attr "type" "multiple")]
9720 (define_insn "*if_shift_shift"
9721 [(set (match_operand:SI 0 "s_register_operand" "=r")
9723 (match_operator 5 "arm_comparison_operator"
9724 [(match_operand 8 "cc_register" "") (const_int 0)])
9725 (match_operator:SI 6 "shift_operator"
9726 [(match_operand:SI 1 "s_register_operand" "r")
9727 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9728 (match_operator:SI 7 "shift_operator"
9729 [(match_operand:SI 3 "s_register_operand" "r")
9730 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
9732 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
9733 [(set_attr "conds" "use")
9734 (set_attr "shift" "1")
9735 (set_attr "length" "8")
9736 (set (attr "type") (if_then_else
9737 (and (match_operand 2 "const_int_operand" "")
9738 (match_operand 4 "const_int_operand" ""))
9739 (const_string "mov_shift")
9740 (const_string "mov_shift_reg")))]
9743 (define_insn "*ifcompare_not_arith"
9744 [(set (match_operand:SI 0 "s_register_operand" "=r")
9746 (match_operator 6 "arm_comparison_operator"
9747 [(match_operand:SI 4 "s_register_operand" "r")
9748 (match_operand:SI 5 "arm_add_operand" "rIL")])
9749 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9750 (match_operator:SI 7 "shiftable_operator"
9751 [(match_operand:SI 2 "s_register_operand" "r")
9752 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
9753 (clobber (reg:CC CC_REGNUM))]
9756 [(set_attr "conds" "clob")
9757 (set_attr "length" "12")
9758 (set_attr "type" "multiple")]
9761 (define_insn "*if_not_arith"
9762 [(set (match_operand:SI 0 "s_register_operand" "=r")
9764 (match_operator 5 "arm_comparison_operator"
9765 [(match_operand 4 "cc_register" "") (const_int 0)])
9766 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
9767 (match_operator:SI 6 "shiftable_operator"
9768 [(match_operand:SI 2 "s_register_operand" "r")
9769 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
9771 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
9772 [(set_attr "conds" "use")
9773 (set_attr "type" "mvn_reg")
9774 (set_attr "length" "8")]
9777 (define_insn "*ifcompare_arith_not"
9778 [(set (match_operand:SI 0 "s_register_operand" "=r")
9780 (match_operator 6 "arm_comparison_operator"
9781 [(match_operand:SI 4 "s_register_operand" "r")
9782 (match_operand:SI 5 "arm_add_operand" "rIL")])
9783 (match_operator:SI 7 "shiftable_operator"
9784 [(match_operand:SI 2 "s_register_operand" "r")
9785 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9786 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
9787 (clobber (reg:CC CC_REGNUM))]
9790 [(set_attr "conds" "clob")
9791 (set_attr "length" "12")
9792 (set_attr "type" "multiple")]
9795 (define_insn "*if_arith_not"
9796 [(set (match_operand:SI 0 "s_register_operand" "=r")
9798 (match_operator 5 "arm_comparison_operator"
9799 [(match_operand 4 "cc_register" "") (const_int 0)])
9800 (match_operator:SI 6 "shiftable_operator"
9801 [(match_operand:SI 2 "s_register_operand" "r")
9802 (match_operand:SI 3 "arm_rhs_operand" "rI")])
9803 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
9805 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
9806 [(set_attr "conds" "use")
9807 (set_attr "type" "multiple")
9808 (set_attr "length" "8")]
9811 (define_insn "*ifcompare_neg_move"
9812 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9814 (match_operator 5 "arm_comparison_operator"
9815 [(match_operand:SI 3 "s_register_operand" "r,r")
9816 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9817 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
9818 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9819 (clobber (reg:CC CC_REGNUM))]
9822 [(set_attr "conds" "clob")
9823 (set_attr "length" "8,12")
9824 (set_attr "type" "multiple")]
9827 (define_insn_and_split "*if_neg_move"
9828 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9830 (match_operator 4 "arm_comparison_operator"
9831 [(match_operand 3 "cc_register" "") (const_int 0)])
9832 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
9833 (match_operand:SI 1 "s_register_operand" "0,0")))]
9836 "&& reload_completed"
9837 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
9838 (set (match_dup 0) (neg:SI (match_dup 2))))]
9840 [(set_attr "conds" "use")
9841 (set_attr "length" "4")
9842 (set_attr "arch" "t2,32")
9843 (set_attr "enabled_for_short_it" "yes,no")
9844 (set_attr "type" "logic_shift_imm")]
9847 (define_insn "*ifcompare_move_neg"
9848 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9850 (match_operator 5 "arm_comparison_operator"
9851 [(match_operand:SI 3 "s_register_operand" "r,r")
9852 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9853 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9854 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
9855 (clobber (reg:CC CC_REGNUM))]
9858 [(set_attr "conds" "clob")
9859 (set_attr "length" "8,12")
9860 (set_attr "type" "multiple")]
9863 (define_insn_and_split "*if_move_neg"
9864 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
9866 (match_operator 4 "arm_comparison_operator"
9867 [(match_operand 3 "cc_register" "") (const_int 0)])
9868 (match_operand:SI 1 "s_register_operand" "0,0")
9869 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
9872 "&& reload_completed"
9873 [(cond_exec (match_dup 5)
9874 (set (match_dup 0) (neg:SI (match_dup 2))))]
9876 machine_mode mode = GET_MODE (operands[3]);
9877 rtx_code rc = GET_CODE (operands[4]);
9879 if (mode == CCFPmode || mode == CCFPEmode)
9880 rc = reverse_condition_maybe_unordered (rc);
9882 rc = reverse_condition (rc);
9884 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
9886 [(set_attr "conds" "use")
9887 (set_attr "length" "4")
9888 (set_attr "arch" "t2,32")
9889 (set_attr "enabled_for_short_it" "yes,no")
9890 (set_attr "type" "logic_shift_imm")]
9893 (define_insn "*arith_adjacentmem"
9894 [(set (match_operand:SI 0 "s_register_operand" "=r")
9895 (match_operator:SI 1 "shiftable_operator"
9896 [(match_operand:SI 2 "memory_operand" "m")
9897 (match_operand:SI 3 "memory_operand" "m")]))
9898 (clobber (match_scratch:SI 4 "=r"))]
9899 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
9905 HOST_WIDE_INT val1 = 0, val2 = 0;
9907 if (REGNO (operands[0]) > REGNO (operands[4]))
9909 ldm[1] = operands[4];
9910 ldm[2] = operands[0];
9914 ldm[1] = operands[0];
9915 ldm[2] = operands[4];
9918 base_reg = XEXP (operands[2], 0);
9920 if (!REG_P (base_reg))
9922 val1 = INTVAL (XEXP (base_reg, 1));
9923 base_reg = XEXP (base_reg, 0);
9926 if (!REG_P (XEXP (operands[3], 0)))
9927 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
9929 arith[0] = operands[0];
9930 arith[3] = operands[1];
9944 if (val1 !=0 && val2 != 0)
9948 if (val1 == 4 || val2 == 4)
9949 /* Other val must be 8, since we know they are adjacent and neither
9951 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
9952 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
9954 ldm[0] = ops[0] = operands[4];
9956 ops[2] = GEN_INT (val1);
9957 output_add_immediate (ops);
9959 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9961 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9965 /* Offset is out of range for a single add, so use two ldr. */
9968 ops[2] = GEN_INT (val1);
9969 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9971 ops[2] = GEN_INT (val2);
9972 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
9978 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9980 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9985 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
9987 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
9989 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
9992 [(set_attr "length" "12")
9993 (set_attr "predicable" "yes")
9994 (set_attr "type" "load_4")]
9997 ; This pattern is never tried by combine, so do it as a peephole
10000 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10001 (match_operand:SI 1 "arm_general_register_operand" ""))
10002 (set (reg:CC CC_REGNUM)
10003 (compare:CC (match_dup 1) (const_int 0)))]
10005 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10006 (set (match_dup 0) (match_dup 1))])]
10011 [(set (match_operand:SI 0 "s_register_operand" "")
10012 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10014 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10015 [(match_operand:SI 3 "s_register_operand" "")
10016 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10017 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10019 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10020 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10025 ;; This split can be used because CC_Z mode implies that the following
10026 ;; branch will be an equality, or an unsigned inequality, so the sign
10027 ;; extension is not needed.
10030 [(set (reg:CC_Z CC_REGNUM)
10032 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10034 (match_operand 1 "const_int_operand" "")))
10035 (clobber (match_scratch:SI 2 ""))]
10037 && ((UINTVAL (operands[1]))
10038 == ((UINTVAL (operands[1])) >> 24) << 24)"
10039 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10040 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10042 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10045 ;; ??? Check the patterns above for Thumb-2 usefulness
10047 (define_expand "prologue"
10048 [(clobber (const_int 0))]
10051 arm_expand_prologue ();
10053 thumb1_expand_prologue ();
10058 (define_expand "epilogue"
10059 [(clobber (const_int 0))]
10062 if (crtl->calls_eh_return)
10063 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10066 thumb1_expand_epilogue ();
10067 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10068 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10070 else if (HAVE_return)
10072 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10073 no need for explicit testing again. */
10074 emit_jump_insn (gen_return ());
10076 else if (TARGET_32BIT)
10078 arm_expand_epilogue (true);
10084 ;; Note - although unspec_volatile's USE all hard registers,
10085 ;; USEs are ignored after reload has completed.  Thus we need
10086 ;; to add an unspec of the link register to ensure that flow
10087 ;; does not think that it is unused by the sibcall branch that
10088 ;; will replace the standard function epilogue.
10089 (define_expand "sibcall_epilogue"
10090 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10091 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10094 arm_expand_epilogue (false);
10099 (define_expand "eh_epilogue"
10100 [(use (match_operand:SI 0 "register_operand"))
10101 (use (match_operand:SI 1 "register_operand"))
10102 (use (match_operand:SI 2 "register_operand"))]
10106 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10107 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10109 rtx ra = gen_rtx_REG (Pmode, 2);
10111 emit_move_insn (ra, operands[2]);
10114 /* This is a hack -- we may have crystalized the function type too
10116 cfun->machine->func_type = 0;
10120 ;; This split is only used during output to reduce the number of patterns
10121 ;; that need assembler instructions adding to them. We allowed the setting
10122 ;; of the conditions to be implicit during rtl generation so that
10123 ;; the conditional compare patterns would work. However this conflicts to
10124 ;; some extent with the conditional data operations, so we have to split them
10127 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10128 ;; conditional execution sufficient?
10131 [(set (match_operand:SI 0 "s_register_operand" "")
10132 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10133 [(match_operand 2 "" "") (match_operand 3 "" "")])
10135 (match_operand 4 "" "")))
10136 (clobber (reg:CC CC_REGNUM))]
10137 "TARGET_ARM && reload_completed"
10138 [(set (match_dup 5) (match_dup 6))
10139 (cond_exec (match_dup 7)
10140 (set (match_dup 0) (match_dup 4)))]
10143 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10144 operands[2], operands[3]);
10145 enum rtx_code rc = GET_CODE (operands[1]);
10147 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10148 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10149 if (mode == CCFPmode || mode == CCFPEmode)
10150 rc = reverse_condition_maybe_unordered (rc);
10152 rc = reverse_condition (rc);
10154 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10159 [(set (match_operand:SI 0 "s_register_operand" "")
10160 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10161 [(match_operand 2 "" "") (match_operand 3 "" "")])
10162 (match_operand 4 "" "")
10164 (clobber (reg:CC CC_REGNUM))]
10165 "TARGET_ARM && reload_completed"
10166 [(set (match_dup 5) (match_dup 6))
10167 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10168 (set (match_dup 0) (match_dup 4)))]
10171 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10172 operands[2], operands[3]);
10174 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10175 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10180 [(set (match_operand:SI 0 "s_register_operand" "")
10181 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10182 [(match_operand 2 "" "") (match_operand 3 "" "")])
10183 (match_operand 4 "" "")
10184 (match_operand 5 "" "")))
10185 (clobber (reg:CC CC_REGNUM))]
10186 "TARGET_ARM && reload_completed"
10187 [(set (match_dup 6) (match_dup 7))
10188 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10189 (set (match_dup 0) (match_dup 4)))
10190 (cond_exec (match_dup 8)
10191 (set (match_dup 0) (match_dup 5)))]
10194 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10195 operands[2], operands[3]);
10196 enum rtx_code rc = GET_CODE (operands[1]);
10198 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10199 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10200 if (mode == CCFPmode || mode == CCFPEmode)
10201 rc = reverse_condition_maybe_unordered (rc);
10203 rc = reverse_condition (rc);
10205 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10210 [(set (match_operand:SI 0 "s_register_operand" "")
10211 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10212 [(match_operand:SI 2 "s_register_operand" "")
10213 (match_operand:SI 3 "arm_add_operand" "")])
10214 (match_operand:SI 4 "arm_rhs_operand" "")
10216 (match_operand:SI 5 "s_register_operand" ""))))
10217 (clobber (reg:CC CC_REGNUM))]
10218 "TARGET_ARM && reload_completed"
10219 [(set (match_dup 6) (match_dup 7))
10220 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10221 (set (match_dup 0) (match_dup 4)))
10222 (cond_exec (match_dup 8)
10223 (set (match_dup 0) (not:SI (match_dup 5))))]
10226 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10227 operands[2], operands[3]);
10228 enum rtx_code rc = GET_CODE (operands[1]);
10230 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10231 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10232 if (mode == CCFPmode || mode == CCFPEmode)
10233 rc = reverse_condition_maybe_unordered (rc);
10235 rc = reverse_condition (rc);
10237 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional select into operand 0: operand 1 if the comparison in
;; operand 4 (on the already-set CC register, operand 3) holds, otherwise
;; the bitwise NOT of operand 2.  Second alternative emits MOV + MVN.
10241 (define_insn "*cond_move_not"
10242 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10243 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10244 [(match_operand 3 "cc_register" "") (const_int 0)])
10245 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10247 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10251 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10252 [(set_attr "conds" "use")
10253 (set_attr "type" "mvn_reg,multiple")
10254 (set_attr "length" "4,8")]
10257 ;; The next two patterns occur when an AND operation is followed by a
10258 ;; scc insn sequence
;; Sign-extract a single bit: operand 2 is the bit position, converted
;; below into a mask (1 << pos).  Result is -1 if the bit is set, else 0
;; (ANDS sets the flags, MVNNE writes -1 on a non-zero result).
10260 (define_insn "*sign_extract_onebit"
10261 [(set (match_operand:SI 0 "s_register_operand" "=r")
10262 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10264 (match_operand:SI 2 "const_int_operand" "n")))
10265 (clobber (reg:CC CC_REGNUM))]
10268 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10269 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10270 return \"mvnne\\t%0, #0\";
10272 [(set_attr "conds" "clob")
10273 (set_attr "length" "8")
10274 (set_attr "type" "multiple")]
;; Inverted variant of the pattern above: result is -1 when the selected
;; bit is clear and 0 when it is set (TST / MVNEQ / MOVNE sequence).
10277 (define_insn "*not_signextract_onebit"
10278 [(set (match_operand:SI 0 "s_register_operand" "=r")
10280 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10282 (match_operand:SI 2 "const_int_operand" "n"))))
10283 (clobber (reg:CC CC_REGNUM))]
10286 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10287 output_asm_insn (\"tst\\t%1, %2\", operands);
10288 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10289 return \"movne\\t%0, #0\";
10291 [(set_attr "conds" "clob")
10292 (set_attr "length" "12")
10293 (set_attr "type" "multiple")]
10295 ;; ??? The above patterns need auditing for Thumb-2
10297 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10298 ;; expressions. For simplicity, the first register is also in the unspec
10300 ;; To avoid the usage of GNU extension, the length attribute is computed
10301 ;; in a C function arm_attr_length_push_multi.
;; Push multiple registers.  For a single register on ARM a pre-indexed
;; STR is cheaper; otherwise a "push {r1, r2, ...}" register list is
;; assembled textually in the C fragment below.
10302 (define_insn "*push_multi"
10303 [(match_parallel 2 "multi_register_push"
10304 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10305 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10306 UNSPEC_PUSH_MULT))])]
10310 int num_saves = XVECLEN (operands[2], 0);
10312 /* For the StrongARM at least it is faster to
10313 use STR to store only a single register.
10314 In Thumb mode always use push, and the assembler will pick
10315 something appropriate. */
10316 if (num_saves == 1 && TARGET_ARM)
10317 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10324 strcpy (pattern, \"push%?\\t{%1\");
10326 strcpy (pattern, \"push\\t{%1\");
10328 for (i = 1; i < num_saves; i++)
10330 strcat (pattern, \", %|\");
10332 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10335 strcat (pattern, \"}\");
10336 output_asm_insn (pattern, operands);
10341 [(set_attr "type" "store_16")
10342 (set (attr "length")
10343 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; Zero-length scheduling barrier: makes all of stack memory appear to
;; depend on the two registers (typically SP and the frame pointer) so
;; stack accesses are not moved across frame adjustments.
10346 (define_insn "stack_tie"
10347 [(set (mem:BLK (scratch))
10348 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10349 (match_operand:SI 1 "s_register_operand" "rk")]
10353 [(set_attr "length" "0")
10354 (set_attr "type" "block")]
10357 ;; Pop (as used in epilogue RTL)
;; Epilogue pop: load-multiple with base writeback (operand 2 is the
;; total stack adjustment).  Only valid during or after reload.
10359 (define_insn "*load_multiple_with_writeback"
10360 [(match_parallel 0 "load_multiple_operation"
10361 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10362 (plus:SI (match_dup 1)
10363 (match_operand:SI 2 "const_int_I_operand" "I")))
10364 (set (match_operand:SI 3 "s_register_operand" "=rk")
10365 (mem:SI (match_dup 1)))
10367 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10370 arm_output_multireg_pop (operands, /*return_pc=*/false,
10371 /*cond=*/const_true_rtx,
10377 [(set_attr "type" "load_16")
10378 (set_attr "predicable" "yes")
10379 (set (attr "length")
10380 (symbol_ref "arm_attr_length_pop_multi (operands,
10381 /*return_pc=*/false,
10382 /*write_back_p=*/true)"))]
10385 ;; Pop with return (as used in epilogue RTL)
10387 ;; This instruction is generated when the registers are popped at the end of
10388 ;; epilogue. Here, instead of popping the value into LR and then generating
10389 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
;; Pop-with-writeback where the final destination is the PC, returning
;; directly from the popped value (return_pc=true in the output call).
10391 (define_insn "*pop_multiple_with_writeback_and_return"
10392 [(match_parallel 0 "pop_multiple_return"
10394 (set (match_operand:SI 1 "s_register_operand" "+rk")
10395 (plus:SI (match_dup 1)
10396 (match_operand:SI 2 "const_int_I_operand" "I")))
10397 (set (match_operand:SI 3 "s_register_operand" "=rk")
10398 (mem:SI (match_dup 1)))
10400 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10403 arm_output_multireg_pop (operands, /*return_pc=*/true,
10404 /*cond=*/const_true_rtx,
10410 [(set_attr "type" "load_16")
10411 (set_attr "predicable" "yes")
10412 (set (attr "length")
10413 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10414 /*write_back_p=*/true)"))]
;; As above but without base-register writeback (write_back_p=false).
10417 (define_insn "*pop_multiple_with_return"
10418 [(match_parallel 0 "pop_multiple_return"
10420 (set (match_operand:SI 2 "s_register_operand" "=rk")
10421 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
10423 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10426 arm_output_multireg_pop (operands, /*return_pc=*/true,
10427 /*cond=*/const_true_rtx,
10433 [(set_attr "type" "load_16")
10434 (set_attr "predicable" "yes")
10435 (set (attr "length")
10436 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
10437 /*write_back_p=*/false)"))]
10440 ;; Load into PC and return
;; Single-register return: post-incrementing LDR straight into the PC.
10441 (define_insn "*ldr_with_return"
10443 (set (reg:SI PC_REGNUM)
10444 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
10445 "TARGET_32BIT && (reload_in_progress || reload_completed)"
10446 "ldr%?\t%|pc, [%0], #4"
10447 [(set_attr "type" "load_4")
10448 (set_attr "predicable" "yes")]
10450 ;; Pop for floating point registers (as used in epilogue RTL)
;; VFP register pop: builds a "vldm <base>!, {dN-dM}" string by hand from
;; the first and last registers of the parallel.
10451 (define_insn "*vfp_pop_multiple_with_writeback"
10452 [(match_parallel 0 "pop_multiple_fp"
10453 [(set (match_operand:SI 1 "s_register_operand" "+rk")
10454 (plus:SI (match_dup 1)
10455 (match_operand:SI 2 "const_int_I_operand" "I")))
10456 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
10457 (mem:DF (match_dup 1)))])]
10458 "TARGET_32BIT && TARGET_HARD_FLOAT"
10461 int num_regs = XVECLEN (operands[0], 0);
10464 strcpy (pattern, \"vldm\\t\");
10465 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
10466 strcat (pattern, \"!, {\");
10467 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
10468 strcat (pattern, \"%P0\");
10469 if ((num_regs - 1) > 1)
10471 strcat (pattern, \"-%P1\");
10472 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
10475 strcat (pattern, \"}\");
10476 output_asm_insn (pattern, op_list);
10480 [(set_attr "type" "load_16")
10481 (set_attr "conds" "unconditional")
10482 (set_attr "predicable" "no")]
10485 ;; Special patterns for dealing with the constant pool
;; Align the output to a 32-bit boundary (argument is in bits).
10487 (define_insn "align_4"
10488 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10491 assemble_align (32);
10494 [(set_attr "type" "no_insn")]
;; Align the output to a 64-bit boundary.
10497 (define_insn "align_8"
10498 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10501 assemble_align (64);
10504 [(set_attr "type" "no_insn")]
;; Marks the end of a minipool; clears the making_const_table flag.
10507 (define_insn "consttable_end"
10508 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10511 making_const_table = FALSE;
10514 [(set_attr "type" "no_insn")]
;; 1-byte constant-pool entry, padded with zeros to 4 bytes.
10517 (define_insn "consttable_1"
10518 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10521 making_const_table = TRUE;
10522 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10523 assemble_zeros (3);
10526 [(set_attr "length" "4")
10527 (set_attr "type" "no_insn")]
;; 2-byte constant-pool entry: FP16 constants go through
;; arm_emit_fp16_const; integers are emitted and padded to 4 bytes.
10530 (define_insn "consttable_2"
10531 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10535 rtx x = operands[0];
10536 making_const_table = TRUE;
10537 switch (GET_MODE_CLASS (GET_MODE (x)))
10540 arm_emit_fp16_const (x);
10543 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10544 assemble_zeros (2);
10549 [(set_attr "length" "4")
10550 (set_attr "type" "no_insn")]
;; 4-byte constant-pool entry; handles float modes, stripped HIGHs and
;; plain integers/symbols.
10553 (define_insn "consttable_4"
10554 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10558 rtx x = operands[0];
10559 making_const_table = TRUE;
10560 scalar_float_mode float_mode;
10561 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
10562 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
10565 /* XXX: Sometimes gcc does something really dumb and ends up with
10566 a HIGH in a constant pool entry, usually because it's trying to
10567 load into a VFP register. We know this will always be used in
10568 combination with a LO_SUM which ignores the high bits, so just
10569 strip off the HIGH. */
10570 if (GET_CODE (x) == HIGH)
10572 assemble_integer (x, 4, BITS_PER_WORD, 1);
10573 mark_symbol_refs_as_used (x);
10577 [(set_attr "length" "4")
10578 (set_attr "type" "no_insn")]
;; 8-byte constant-pool entry (double-precision float or 64-bit int).
10581 (define_insn "consttable_8"
10582 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10586 making_const_table = TRUE;
10587 scalar_float_mode float_mode;
10588 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10589 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10590 float_mode, BITS_PER_WORD);
10592 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10595 [(set_attr "length" "8")
10596 (set_attr "type" "no_insn")]
;; 16-byte constant-pool entry (e.g. vector constants).
10599 (define_insn "consttable_16"
10600 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10604 making_const_table = TRUE;
10605 scalar_float_mode float_mode;
10606 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
10607 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
10608 float_mode, BITS_PER_WORD);
10610 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10613 [(set_attr "length" "16")
10614 (set_attr "type" "no_insn")]
10617 ;; V5 Instructions,
;; Count leading zeros (CLZ instruction, ARMv5T and later).
10619 (define_insn "clzsi2"
10620 [(set (match_operand:SI 0 "s_register_operand" "=r")
10621 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10622 "TARGET_32BIT && arm_arch5t"
10624 [(set_attr "predicable" "yes")
10625 (set_attr "type" "clz")])
;; Reverse the bit order of a register (RBIT, Thumb-2 architectures).
;; Modelled as an UNSPEC since RTL has no bit-reverse code.
10627 (define_insn "rbitsi2"
10628 [(set (match_operand:SI 0 "s_register_operand" "=r")
10629 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
10630 "TARGET_32BIT && arm_arch_thumb2"
10632 [(set_attr "predicable" "yes")
10633 (set_attr "type" "clz")])
10635 ;; Keep this as a CTZ expression until after reload and then split
10636 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
10637 ;; to fold with any other expression.
;; Count trailing zeros: split after reload into RBIT then CLZ, reusing
;; operand 0 as the intermediate.
10639 (define_insn_and_split "ctzsi2"
10640 [(set (match_operand:SI 0 "s_register_operand" "=r")
10641 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10642 "TARGET_32BIT && arm_arch_thumb2"
10644 "&& reload_completed"
10647 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
10648 emit_insn (gen_clzsi2 (operands[0], operands[0]));
10652 ;; V5E instructions.
;; Data prefetch (PLD, ARMv5TE and later).  Operands 1 and 2 are the
;; standard rw/locality hints; the ARM insn takes no hint arguments.
10654 (define_insn "prefetch"
10655 [(prefetch (match_operand:SI 0 "address_operand" "p")
10656 (match_operand:SI 1 "" "")
10657 (match_operand:SI 2 "" ""))]
10658 "TARGET_32BIT && arm_arch5te"
10660 [(set_attr "type" "load_4")]
10663 ;; General predication pattern
10666 [(match_operator 0 "arm_comparison_operator"
10667 [(match_operand 1 "cc_register" "")
10670 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
10672 [(set_attr "predicated" "yes")]
;; Zero-length insn that keeps a register artificially live
;; (UNSPEC_REGISTER_USE); emits no code.
10675 (define_insn "force_register_use"
10676 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
10679 [(set_attr "length" "0")
10680 (set_attr "type" "no_insn")]
10684 ;; Patterns for exception handling
;; __builtin_eh_return support: dispatch to the ARM or Thumb variant.
10686 (define_expand "eh_return"
10687 [(use (match_operand 0 "general_operand"))]
10692 emit_insn (gen_arm_eh_return (operands[0]))
10694 emit_insn (gen_thumb_eh_return (operands[0]));
10699 ;; We can't expand this before we know where the link register is stored.
;; Deferred until after reload: stores the EH return address (operand 0)
;; into the saved-LR slot, using operand 1 as a scratch register.
10700 (define_insn_and_split "arm_eh_return"
10701 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10703 (clobber (match_scratch:SI 1 "=&r"))]
10706 "&& reload_completed"
10710 arm_set_return_address (operands[0], operands[1]);
;; Read the thread pointer from CP15 (TPIDRURO, c13/c0/3).
10718 (define_insn "load_tp_hard"
10719 [(set (match_operand:SI 0 "register_operand" "=r")
10720 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10722 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10723 [(set_attr "predicable" "yes")
10724 (set_attr "type" "mrs")]
10727 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software thread-pointer read via __aeabi_read_tp, FDPIC variant:
;; additionally clobbers the FDPIC register (r9).
10728 (define_insn "load_tp_soft_fdpic"
10729 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10730 (clobber (reg:SI FDPIC_REGNUM))
10731 (clobber (reg:SI LR_REGNUM))
10732 (clobber (reg:SI IP_REGNUM))
10733 (clobber (reg:CC CC_REGNUM))]
10734 "TARGET_SOFT_TP && TARGET_FDPIC"
10735 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10736 [(set_attr "conds" "clob")
10737 (set_attr "type" "branch")]
10740 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Software thread-pointer read via __aeabi_read_tp (non-FDPIC).
10741 (define_insn "load_tp_soft"
10742 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10743 (clobber (reg:SI LR_REGNUM))
10744 (clobber (reg:SI IP_REGNUM))
10745 (clobber (reg:CC CC_REGNUM))]
10746 "TARGET_SOFT_TP && !TARGET_FDPIC"
10747 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10748 [(set_attr "conds" "clob")
10749 (set_attr "type" "branch")]
10752 ;; tls descriptor call
;; TLS descriptor call: emits the .LPIC<n> label used by the linker
;; relaxation machinery, then calls the descriptor via r0.
10753 (define_insn "tlscall"
10754 [(set (reg:SI R0_REGNUM)
10755 (unspec:SI [(reg:SI R0_REGNUM)
10756 (match_operand:SI 0 "" "X")
10757 (match_operand 1 "" "")] UNSPEC_TLS))
10758 (clobber (reg:SI R1_REGNUM))
10759 (clobber (reg:SI LR_REGNUM))
10760 (clobber (reg:SI CC_REGNUM))]
10763 targetm.asm_out.internal_label (asm_out_file, "LPIC",
10764 INTVAL (operands[1]));
10765 return "bl\\t%c0(tlscall)";
10767 [(set_attr "conds" "clob")
10768 (set_attr "length" "4")
10769 (set_attr "type" "branch")]
10772 ;; For thread pointer builtin
;; Expander for __builtin_thread_pointer.
10773 (define_expand "get_thread_pointersi"
10774 [(match_operand:SI 0 "s_register_operand")]
10778 arm_load_tp (operands[0]);
10784 ;; We only care about the lower 16 bits of the constant
10785 ;; being inserted into the upper 16 bits of the register.
;; MOVT: insert a 16-bit immediate into the top half of a register,
;; leaving the low half untouched (zero_extract as the SET destination).
10786 (define_insn "*arm_movtas_ze"
10787 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
10790 (match_operand:SI 1 "const_int_operand" ""))]
10795 [(set_attr "arch" "32,v8mb")
10796 (set_attr "predicable" "yes")
10797 (set_attr "length" "4")
10798 (set_attr "type" "alu_sreg")]
;; Byte-reverse a word (REV); Thumb-1, Thumb-2 and ARM alternatives.
10801 (define_insn "*arm_rev"
10802 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10803 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
10809 [(set_attr "arch" "t1,t2,32")
10810 (set_attr "length" "2,2,4")
10811 (set_attr "predicable" "no,yes,yes")
10812 (set_attr "type" "rev")]
;; Byte-swap a word without the REV instruction (pre-ARMv6 ARM state):
;; the classic EOR/BIC/ROR shift-and-mask sequence, using operands 2 and
;; 3 as temporaries.
10815 (define_expand "arm_legacy_rev"
10816 [(set (match_operand:SI 2 "s_register_operand")
10817 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
10821 (lshiftrt:SI (match_dup 2)
10823 (set (match_operand:SI 3 "s_register_operand")
10824 (rotatert:SI (match_dup 1)
10827 (and:SI (match_dup 2)
10828 (const_int -65281)))
10829 (set (match_operand:SI 0 "s_register_operand")
10830 (xor:SI (match_dup 3)
10836 ;; Reuse temporaries to keep register pressure down.
;; Thumb-1 byte-swap without REV: shift/rotate/mask sequence using
;; operands 2-5 as scratch registers.
10837 (define_expand "thumb_legacy_rev"
10838 [(set (match_operand:SI 2 "s_register_operand")
10839 (ashift:SI (match_operand:SI 1 "s_register_operand")
10841 (set (match_operand:SI 3 "s_register_operand")
10842 (lshiftrt:SI (match_dup 1)
10845 (ior:SI (match_dup 3)
10847 (set (match_operand:SI 4 "s_register_operand")
10849 (set (match_operand:SI 5 "s_register_operand")
10850 (rotatert:SI (match_dup 1)
10853 (ashift:SI (match_dup 5)
10856 (lshiftrt:SI (match_dup 5)
10859 (ior:SI (match_dup 5)
10862 (rotatert:SI (match_dup 5)
10864 (set (match_operand:SI 0 "s_register_operand")
10865 (ior:SI (match_dup 5)
10871 ;; ARM-specific expansion of signed mod by power of 2
10872 ;; using conditional negate.
10873 ;; For r0 % n where n is a power of 2 produce:
10875 ;; and r0, r0, #(n - 1)
10876 ;; and r1, r1, #(n - 1)
10877 ;; rsbpl r0, r1, #0
;; Signed modulo by a power of two using a conditional negate:
;; fails (FAIL on non-power-of-2) so the generic div/mod path is used
;; instead.  General case: negate, AND both with (n-1), then select the
;; negated result when the dividend is negative.
10879 (define_expand "modsi3"
10880 [(match_operand:SI 0 "register_operand")
10881 (match_operand:SI 1 "register_operand")
10882 (match_operand:SI 2 "const_int_operand")]
10885 HOST_WIDE_INT val = INTVAL (operands[2]);
10888 || exact_log2 (val) <= 0)
10891 rtx mask = GEN_INT (val - 1);
10893 /* In the special case of x0 % 2 we can do the even shorter:
10896 rsblt r0, r0, #0. */
10900 rtx cc_reg = arm_gen_compare_reg (LT,
10901 operands[1], const0_rtx, NULL_RTX);
10902 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
10903 rtx masked = gen_reg_rtx (SImode);
10905 emit_insn (gen_andsi3 (masked, operands[1], mask));
10906 emit_move_insn (operands[0],
10907 gen_rtx_IF_THEN_ELSE (SImode, cond,
10908 gen_rtx_NEG (SImode,
10914 rtx neg_op = gen_reg_rtx (SImode);
10915 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
10918 /* Extract the condition register and mode. */
10919 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
10920 rtx cc_reg = SET_DEST (cmp);
10921 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
10923 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
10925 rtx masked_neg = gen_reg_rtx (SImode);
10926 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
10928 /* We want a conditional negate here, but emitting COND_EXEC rtxes
10929 during expand does not always work. Do an IF_THEN_ELSE instead. */
10930 emit_move_insn (operands[0],
10931 gen_rtx_IF_THEN_ELSE (SImode, cond,
10932 gen_rtx_NEG (SImode, masked_neg),
;; 32-bit byte swap.  With ARMv6 the REV insn pattern matches directly;
;; otherwise fall back to the legacy shift/mask expanders above
;; (Thumb vs. ARM variants take different numbers of temporaries).
10940 (define_expand "bswapsi2"
10941 [(set (match_operand:SI 0 "s_register_operand")
10942 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
10943 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
10947 rtx op2 = gen_reg_rtx (SImode);
10948 rtx op3 = gen_reg_rtx (SImode);
10952 rtx op4 = gen_reg_rtx (SImode);
10953 rtx op5 = gen_reg_rtx (SImode);
10955 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
10956 op2, op3, op4, op5));
10960 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
10969 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
10970 ;; and unsigned variants, respectively. For rev16, expose
10971 ;; byte-swapping in the lower 16 bits only.
;; REVSH: byte-swap the low halfword and sign-extend to 32 bits.
10972 (define_insn "*arm_revsh"
10973 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
10974 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
10980 [(set_attr "arch" "t1,t2,32")
10981 (set_attr "length" "2,2,4")
10982 (set_attr "type" "rev")]
;; REV16 on an HImode value: swap the two bytes of the low halfword.
10985 (define_insn "*arm_rev16"
10986 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
10987 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
10993 [(set_attr "arch" "t1,t2,32")
10994 (set_attr "length" "2,2,4")
10995 (set_attr "type" "rev")]
10998 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
10999 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11000 ;; each valid permutation.
;; REV16 on SImode expressed as shift/mask IOR; the aarch_rev16_* checks
;; verify the mask immediates describe a genuine rev16 operation.
11002 (define_insn "arm_rev16si2"
11003 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11004 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11006 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11007 (and:SI (lshiftrt:SI (match_dup 1)
11009 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11011 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11012 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11014 [(set_attr "arch" "t1,t2,32")
11015 (set_attr "length" "2,2,4")
11016 (set_attr "type" "rev")]
;; Same as above with the shift operands in the opposite IOR order.
11019 (define_insn "arm_rev16si2_alt"
11020 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11021 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11023 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11024 (and:SI (ashift:SI (match_dup 1)
11026 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11028 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11029 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11031 [(set_attr "arch" "t1,t2,32")
11032 (set_attr "length" "2,2,4")
11033 (set_attr "type" "rev")]
;; 16-bit byte swap; matched by the *arm_rev16 insn above.
11036 (define_expand "bswaphi2"
11037 [(set (match_operand:HI 0 "s_register_operand")
11038 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11043 ;; Patterns for LDRD/STRD in Thumb2 mode
;; LDRD with base + immediate offset; the two loaded words must be at
;; consecutive addresses (operand 4 == operand 2 + 4) and the register
;; pair must satisfy operands_ok_ldrd_strd.
11045 (define_insn "*thumb2_ldrd"
11046 [(set (match_operand:SI 0 "s_register_operand" "=r")
11047 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11048 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11049 (set (match_operand:SI 3 "s_register_operand" "=r")
11050 (mem:SI (plus:SI (match_dup 1)
11051 (match_operand:SI 4 "const_int_operand" ""))))]
11052 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11053 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11054 && (operands_ok_ldrd_strd (operands[0], operands[3],
11055 operands[1], INTVAL (operands[2]),
11057 "ldrd%?\t%0, %3, [%1, %2]"
11058 [(set_attr "type" "load_8")
11059 (set_attr "predicable" "yes")])
;; LDRD from the bare base register (offsets 0 and +4).
11061 (define_insn "*thumb2_ldrd_base"
11062 [(set (match_operand:SI 0 "s_register_operand" "=r")
11063 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11064 (set (match_operand:SI 2 "s_register_operand" "=r")
11065 (mem:SI (plus:SI (match_dup 1)
11067 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11068 && (operands_ok_ldrd_strd (operands[0], operands[2],
11069 operands[1], 0, false, true))"
11070 "ldrd%?\t%0, %2, [%1]"
11071 [(set_attr "type" "load_8")
11072 (set_attr "predicable" "yes")])
;; LDRD at offsets -4 and 0 from the base register.
11074 (define_insn "*thumb2_ldrd_base_neg"
11075 [(set (match_operand:SI 0 "s_register_operand" "=r")
11076 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11078 (set (match_operand:SI 2 "s_register_operand" "=r")
11079 (mem:SI (match_dup 1)))]
11080 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11081 && (operands_ok_ldrd_strd (operands[0], operands[2],
11082 operands[1], -4, false, true))"
11083 "ldrd%?\t%0, %2, [%1, #-4]"
11084 [(set_attr "type" "load_8")
11085 (set_attr "predicable" "yes")])
;; STRD with base + immediate offset; store-side mirror of *thumb2_ldrd.
11087 (define_insn "*thumb2_strd"
11088 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11089 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11090 (match_operand:SI 2 "s_register_operand" "r"))
11091 (set (mem:SI (plus:SI (match_dup 0)
11092 (match_operand:SI 3 "const_int_operand" "")))
11093 (match_operand:SI 4 "s_register_operand" "r"))]
11094 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11095 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11096 && (operands_ok_ldrd_strd (operands[2], operands[4],
11097 operands[0], INTVAL (operands[1]),
11099 "strd%?\t%2, %4, [%0, %1]"
11100 [(set_attr "type" "store_8")
11101 (set_attr "predicable" "yes")])
;; STRD to the bare base register (offsets 0 and +4).
11103 (define_insn "*thumb2_strd_base"
11104 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11105 (match_operand:SI 1 "s_register_operand" "r"))
11106 (set (mem:SI (plus:SI (match_dup 0)
11108 (match_operand:SI 2 "s_register_operand" "r"))]
11109 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11110 && (operands_ok_ldrd_strd (operands[1], operands[2],
11111 operands[0], 0, false, false))"
11112 "strd%?\t%1, %2, [%0]"
11113 [(set_attr "type" "store_8")
11114 (set_attr "predicable" "yes")])
;; STRD at offsets -4 and 0 from the base register.
11116 (define_insn "*thumb2_strd_base_neg"
11117 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11119 (match_operand:SI 1 "s_register_operand" "r"))
11120 (set (mem:SI (match_dup 0))
11121 (match_operand:SI 2 "s_register_operand" "r"))]
11122 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11123 && (operands_ok_ldrd_strd (operands[1], operands[2],
11124 operands[0], -4, false, false))"
11125 "strd%?\t%1, %2, [%0, #-4]"
11126 [(set_attr "type" "store_8")
11127 (set_attr "predicable" "yes")])
11129 ;; ARMv8 CRC32 instructions.
;; ARMv8 CRC32 instructions: <crc_variant> iterates over the
;; crc32b/h/w and crc32cb/h/w forms, <crc_mode> over the data width.
11130 (define_insn "arm_<crc_variant>"
11131 [(set (match_operand:SI 0 "s_register_operand" "=r")
11132 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11133 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11136 "<crc_variant>\\t%0, %1, %2"
11137 [(set_attr "type" "crc")
11138 (set_attr "conds" "unconditional")]
11141 ;; Load the load/store double peephole optimizations.
11142 (include "ldrdstrd.md")
11144 ;; Load the load/store multiple patterns
11145 (include "ldmstm.md")
11147 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11148 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11149 ;; The operands are validated through the load_multiple_operation
11150 ;; match_parallel predicate rather than through constraints so enable it only
;; Load-multiple without writeback for long register lists (the
;; ldmstm.md patterns only cover up to 4 registers); validated by the
;; load_multiple_operation predicate, hence post-reload only.
11152 (define_insn "*load_multiple"
11153 [(match_parallel 0 "load_multiple_operation"
11154 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11155 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11157 "TARGET_32BIT && reload_completed"
11160 arm_output_multireg_pop (operands, /*return_pc=*/false,
11161 /*cond=*/const_true_rtx,
11167 [(set_attr "predicable" "yes")]
;; Soft-float copysign for SFmode: copy the magnitude (operand 2), then
;; insert the sign bit of operand 1 via a 1-bit BFI at bit 31.
11170 (define_expand "copysignsf3"
11171 [(match_operand:SF 0 "register_operand")
11172 (match_operand:SF 1 "register_operand")
11173 (match_operand:SF 2 "register_operand")]
11174 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11176 emit_move_insn (operands[0], operands[2]);
11177 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11178 GEN_INT (31), GEN_INT (0),
11179 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; Soft-float copysign for DFmode: note the roles are inverted relative
;; to the SF case — the sign comes from operand 2's high word (shifted
;; down and BFI'd into a copy of operand 1's high word).
11184 (define_expand "copysigndf3"
11185 [(match_operand:DF 0 "register_operand")
11186 (match_operand:DF 1 "register_operand")
11187 (match_operand:DF 2 "register_operand")]
11188 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11190 rtx op0_low = gen_lowpart (SImode, operands[0]);
11191 rtx op0_high = gen_highpart (SImode, operands[0]);
11192 rtx op1_low = gen_lowpart (SImode, operands[1]);
11193 rtx op1_high = gen_highpart (SImode, operands[1]);
11194 rtx op2_high = gen_highpart (SImode, operands[2]);
11196 rtx scratch1 = gen_reg_rtx (SImode);
11197 rtx scratch2 = gen_reg_rtx (SImode);
11198 emit_move_insn (scratch1, op2_high);
11199 emit_move_insn (scratch2, op1_high);
11201 emit_insn(gen_rtx_SET(scratch1,
11202 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11203 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11204 emit_move_insn (op0_low, op1_low);
11205 emit_move_insn (op0_high, scratch2);
11211 ;; movmisalign patterns for HImode and SImode.
;; Misaligned HImode/SImode moves: loads go through the unaligned-load
;; insns (HImode via an SImode zero-extending load then a lowpart move);
;; stores use unaligned_store<mode>.  Must not FAIL, so mem := non-reg
;; forces operand 1 into a register first.
11212 (define_expand "movmisalign<mode>"
11213 [(match_operand:HSI 0 "general_operand")
11214 (match_operand:HSI 1 "general_operand")]
11217 /* This pattern is not permitted to fail during expansion: if both arguments
11218 are non-registers (e.g. memory := constant), force operand 1 into a
11220 rtx (* gen_unaligned_load)(rtx, rtx);
11221 rtx tmp_dest = operands[0];
11222 if (!s_register_operand (operands[0], <MODE>mode)
11223 && !s_register_operand (operands[1], <MODE>mode))
11224 operands[1] = force_reg (<MODE>mode, operands[1]);
11226 if (<MODE>mode == HImode)
11228 gen_unaligned_load = gen_unaligned_loadhiu;
11229 tmp_dest = gen_reg_rtx (SImode);
11232 gen_unaligned_load = gen_unaligned_loadsi;
11234 if (MEM_P (operands[1]))
11236 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11237 if (<MODE>mode == HImode)
11238 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11241 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11246 (define_insn "arm_<cdp>"
11247 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11248 (match_operand:SI 1 "immediate_operand" "n")
11249 (match_operand:SI 2 "immediate_operand" "n")
11250 (match_operand:SI 3 "immediate_operand" "n")
11251 (match_operand:SI 4 "immediate_operand" "n")
11252 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11253 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11255 arm_const_bounds (operands[0], 0, 16);
11256 arm_const_bounds (operands[1], 0, 16);
11257 arm_const_bounds (operands[2], 0, (1 << 5));
11258 arm_const_bounds (operands[3], 0, (1 << 5));
11259 arm_const_bounds (operands[4], 0, (1 << 5));
11260 arm_const_bounds (operands[5], 0, 8);
11261 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11263 [(set_attr "length" "4")
11264 (set_attr "type" "coproc")])
;; Coprocessor load (mnemonic from the LDCI iterator's <ldc> attribute).
;; Per the template: %0 is the coprocessor number (p%c0), %1 a coprocessor
;; register (CR%c1), and %2 the memory source, constrained to "Uz"
;; addresses.  Constant operands are range-checked before output.
11266 (define_insn "*ldc"
11267 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11268 (match_operand:SI 1 "immediate_operand" "n")
11269 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11270 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11272 arm_const_bounds (operands[0], 0, 16);
11273 arm_const_bounds (operands[1], 0, (1 << 5));
11274 return "<ldc>\\tp%c0, CR%c1, %2";
11276 [(set_attr "length" "4")
11277 (set_attr "type" "coproc")])
;; Coprocessor store (mnemonic from the STCI iterator's <stc> attribute).
;; Mirror image of *ldc above: %2 is the memory destination ("=Uz"),
;; %0 the coprocessor number and %1 the coprocessor register being stored.
11279 (define_insn "*stc"
11280 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11281 (match_operand:SI 1 "immediate_operand" "n")
11282 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11283 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11285 arm_const_bounds (operands[0], 0, 16);
11286 arm_const_bounds (operands[1], 0, (1 << 5));
11287 return "<stc>\\tp%c0, CR%c1, %2";
11289 [(set_attr "length" "4")
11290 (set_attr "type" "coproc")])
;; Named expander for the coprocessor-load builtins: takes the base
;; address in a core register (operand 2) and wraps it in a MEM so the
;; resulting pattern can be matched by the *ldc insn above.
11292 (define_expand "arm_<ldc>"
11293 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11294 (match_operand:SI 1 "immediate_operand")
11295 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11296 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
;; Named expander for the coprocessor-store builtins; same shape as
;; arm_<ldc> above, matched by the *stc insn.
11298 (define_expand "arm_<stc>"
11299 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11300 (match_operand:SI 1 "immediate_operand")
11301 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11302 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; Move from a core register to a coprocessor (mnemonic from the MCRI
;; iterator's <mcr> attribute).  %2 is the core-register source; the
;; extra (use (match_dup 2)) marks that register as consumed by the
;; insn.  Remaining operands are constant fields, range-checked below.
11304 (define_insn "arm_<mcr>"
11305 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11306 (match_operand:SI 1 "immediate_operand" "n")
11307 (match_operand:SI 2 "s_register_operand" "r")
11308 (match_operand:SI 3 "immediate_operand" "n")
11309 (match_operand:SI 4 "immediate_operand" "n")
11310 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11311 (use (match_dup 2))]
11312 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11314 arm_const_bounds (operands[0], 0, 16);
11315 arm_const_bounds (operands[1], 0, 8);
11316 arm_const_bounds (operands[3], 0, (1 << 5));
11317 arm_const_bounds (operands[4], 0, (1 << 5));
11318 arm_const_bounds (operands[5], 0, 8);
11319 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11321 [(set_attr "length" "4")
11322 (set_attr "type" "coproc")])
;; Move from a coprocessor to a core register (mnemonic from the MRCI
;; iterator's <mrc> attribute).  Unlike arm_<mcr>, this one produces a
;; value: operand 0 is set from the volatile unspec, so operand indices
;; for the constant fields are shifted up by one relative to arm_<mcr>.
11324 (define_insn "arm_<mrc>"
11325 [(set (match_operand:SI 0 "s_register_operand" "=r")
11326 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
11327 (match_operand:SI 2 "immediate_operand" "n")
11328 (match_operand:SI 3 "immediate_operand" "n")
11329 (match_operand:SI 4 "immediate_operand" "n")
11330 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
11331 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
11333 arm_const_bounds (operands[1], 0, 16);
11334 arm_const_bounds (operands[2], 0, 8);
11335 arm_const_bounds (operands[3], 0, (1 << 5));
11336 arm_const_bounds (operands[4], 0, (1 << 5));
11337 arm_const_bounds (operands[5], 0, 8);
11338 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
11340 [(set_attr "length" "4")
11341 (set_attr "type" "coproc")])
;; 64-bit move from a core register pair to a coprocessor (mnemonic from
;; the MCRRI iterator's <mcrr> attribute).  Operand 2 is a DImode core
;; register, printed as its two word halves via %Q2 and %R2; the
;; (use (match_dup 2)) keeps it marked as consumed.
11343 (define_insn "arm_<mcrr>"
11344 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11345 (match_operand:SI 1 "immediate_operand" "n")
11346 (match_operand:DI 2 "s_register_operand" "r")
11347 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
11348 (use (match_dup 2))]
11349 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
11351 arm_const_bounds (operands[0], 0, 16);
11352 arm_const_bounds (operands[1], 0, 8);
11353 arm_const_bounds (operands[3], 0, (1 << 5));
11354 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
11356 [(set_attr "length" "4")
11357 (set_attr "type" "coproc")])
;; 64-bit move from a coprocessor into a core register pair (mnemonic
;; from the MRRCI iterator's <mrrc> attribute).  Inverse of arm_<mcrr>:
;; the DImode destination (operand 0) is set from the volatile unspec
;; and printed as its two word halves via %Q0 and %R0.
11359 (define_insn "arm_<mrrc>"
11360 [(set (match_operand:DI 0 "s_register_operand" "=r")
11361 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
11362 (match_operand:SI 2 "immediate_operand" "n")
11363 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
11364 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
11366 arm_const_bounds (operands[1], 0, 16);
11367 arm_const_bounds (operands[2], 0, 8);
11368 arm_const_bounds (operands[3], 0, (1 << 5));
11369 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
11371 [(set_attr "length" "4")
11372 (set_attr "type" "coproc")])
;; Named expander for a speculation barrier.  On Armv7/Armv8
;; (arm_arch7 || arm_arch8) the VUNSPEC_SPECULATION_BARRIER pattern is
;; emitted directly (see *speculation_barrier_insn); on older
;; architectures it instead calls out to a libgcc helper.
11374 (define_expand "speculation_barrier"
11375 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11378 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
11379 have a usable barrier (and probably don't need one in practice).
11380 But to be safe if such code is run on later architectures, call a
11381 helper function in libgcc that will do the thing for the active
11383 if (!(arm_arch7 || arm_arch8))
11385 arm_emit_speculation_barrier_function ();
11391 ;; Generate a hard speculation barrier when we have not enabled speculation
;; Hard barrier insn, matched only when the architecture provides one
;; (arm_arch7 || arm_arch8).  Length 8 (two 4-byte instructions);
;; scheduled as an opaque "block".
11393 (define_insn "*speculation_barrier_insn"
11394 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
11395 "arm_arch7 || arm_arch8"
11397 [(set_attr "type" "block")
11398 (set_attr "length" "8")]
11401 ;; Vector bits common to IWMMXT and Neon
11402 (include "vec-common.md")
11403 ;; Load the Intel Wireless Multimedia Extension patterns
11404 (include "iwmmxt.md")
11405 ;; Load the VFP co-processor patterns
11407 ;; Thumb-1 patterns
11408 (include "thumb1.md")
11409 ;; Thumb-2 patterns
11410 (include "thumb2.md")
;; Advanced SIMD (Neon) patterns
11412 (include "neon.md")
;; Crypto extension patterns
11414 (include "crypto.md")
11415 ;; Synchronization Primitives
11416 (include "sync.md")
11417 ;; Fixed-point patterns
11418 (include "arm-fixed.md")