1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally exectuted instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
491 (define_expand "addvsi4"
492 [(match_operand:SI 0 "s_register_operand")
493 (match_operand:SI 1 "s_register_operand")
494 (match_operand:SI 2 "arm_add_operand")
495 (match_operand 3 "")]
498 if (CONST_INT_P (operands[2]))
499 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
501 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
507 (define_expand "addvdi4"
508 [(match_operand:DI 0 "s_register_operand")
509 (match_operand:DI 1 "s_register_operand")
510 (match_operand:DI 2 "reg_or_int_operand")
511 (match_operand 3 "")]
514 rtx lo_result, hi_result;
515 rtx lo_op1, hi_op1, lo_op2, hi_op2;
516 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
518 lo_result = gen_lowpart (SImode, operands[0]);
519 hi_result = gen_highpart (SImode, operands[0]);
521 if (lo_op2 == const0_rtx)
523 emit_move_insn (lo_result, lo_op1);
524 if (!arm_add_operand (hi_op2, SImode))
525 hi_op2 = force_reg (SImode, hi_op2);
527 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
531 if (!arm_add_operand (lo_op2, SImode))
532 lo_op2 = force_reg (SImode, lo_op2);
533 if (!arm_not_operand (hi_op2, SImode))
534 hi_op2 = force_reg (SImode, hi_op2);
536 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
538 if (hi_op2 == const0_rtx)
539 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
540 else if (CONST_INT_P (hi_op2))
541 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
543 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
545 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
551 (define_expand "addsi3_cin_vout_reg"
556 (plus:DI (match_dup 4)
557 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
558 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
559 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
561 (set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
566 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
567 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
568 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
569 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
573 (define_insn "*addsi3_cin_vout_reg_insn"
574 [(set (reg:CC_V CC_REGNUM)
578 (match_operand:DI 3 "arm_carry_operation" "")
579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
582 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
585 (set (match_operand:SI 0 "s_register_operand" "=l,r")
586 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
592 [(set_attr "type" "alus_sreg")
593 (set_attr "arch" "t2,*")
594 (set_attr "length" "2,4")]
597 (define_expand "addsi3_cin_vout_imm"
602 (plus:DI (match_dup 4)
603 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
605 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
607 (set (match_operand:SI 0 "s_register_operand")
608 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
609 (match_operand 2 "arm_adcimm_operand")))])]
612 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
613 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
614 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
615 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
619 (define_insn "*addsi3_cin_vout_imm_insn"
620 [(set (reg:CC_V CC_REGNUM)
624 (match_operand:DI 3 "arm_carry_operation" "")
625 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
626 (match_operand 2 "arm_adcimm_operand" "I,K"))
628 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
631 (set (match_operand:SI 0 "s_register_operand" "=r,r")
632 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
637 sbcs%?\\t%0, %1, #%B2"
638 [(set_attr "type" "alus_imm")]
641 (define_expand "addsi3_cin_vout_0"
645 (plus:DI (match_dup 3)
646 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
647 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
648 (set (match_operand:SI 0 "s_register_operand")
649 (plus:SI (match_dup 4) (match_dup 1)))])]
652 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
659 (define_insn "*addsi3_cin_vout_0_insn"
660 [(set (reg:CC_V CC_REGNUM)
663 (match_operand:DI 2 "arm_carry_operation" "")
664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
665 (sign_extend:DI (plus:SI
666 (match_operand:SI 3 "arm_carry_operation" "")
668 (set (match_operand:SI 0 "s_register_operand" "=r")
669 (plus:SI (match_dup 3) (match_dup 1)))]
671 "adcs%?\\t%0, %1, #0"
672 [(set_attr "type" "alus_imm")]
675 (define_expand "uaddvsi4"
676 [(match_operand:SI 0 "s_register_operand")
677 (match_operand:SI 1 "s_register_operand")
678 (match_operand:SI 2 "arm_add_operand")
679 (match_operand 3 "")]
682 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
683 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
688 (define_expand "uaddvdi4"
689 [(match_operand:DI 0 "s_register_operand")
690 (match_operand:DI 1 "s_register_operand")
691 (match_operand:DI 2 "reg_or_int_operand")
692 (match_operand 3 "")]
695 rtx lo_result, hi_result;
696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
697 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
699 lo_result = gen_lowpart (SImode, operands[0]);
700 hi_result = gen_highpart (SImode, operands[0]);
702 if (lo_op2 == const0_rtx)
704 emit_move_insn (lo_result, lo_op1);
705 if (!arm_add_operand (hi_op2, SImode))
706 hi_op2 = force_reg (SImode, hi_op2);
708 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
712 if (!arm_add_operand (lo_op2, SImode))
713 lo_op2 = force_reg (SImode, lo_op2);
714 if (!arm_not_operand (hi_op2, SImode))
715 hi_op2 = force_reg (SImode, hi_op2);
717 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
719 if (hi_op2 == const0_rtx)
720 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
721 else if (CONST_INT_P (hi_op2))
722 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
724 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
726 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
732 (define_expand "addsi3_cin_cout_reg"
737 (plus:DI (match_dup 4)
738 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
739 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
740 (const_int 4294967296)))
741 (set (match_operand:SI 0 "s_register_operand")
742 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
746 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
747 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
748 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
749 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
753 (define_insn "*addsi3_cin_cout_reg_insn"
754 [(set (reg:CC_ADC CC_REGNUM)
758 (match_operand:DI 3 "arm_carry_operation" "")
759 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
760 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
761 (const_int 4294967296)))
762 (set (match_operand:SI 0 "s_register_operand" "=l,r")
763 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
770 [(set_attr "type" "alus_sreg")
771 (set_attr "arch" "t2,*")
772 (set_attr "length" "2,4")]
775 (define_expand "addsi3_cin_cout_imm"
780 (plus:DI (match_dup 4)
781 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
783 (const_int 4294967296)))
784 (set (match_operand:SI 0 "s_register_operand")
785 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 (match_operand:SI 2 "arm_adcimm_operand")))])]
789 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
790 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
791 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
792 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
797 (define_insn "*addsi3_cin_cout_imm_insn"
798 [(set (reg:CC_ADC CC_REGNUM)
802 (match_operand:DI 3 "arm_carry_operation" "")
803 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
804 (match_operand:DI 5 "const_int_operand" "n,n"))
805 (const_int 4294967296)))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
809 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
811 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
814 sbcs%?\\t%0, %1, #%B2"
815 [(set_attr "type" "alus_imm")]
818 (define_expand "addsi3_cin_cout_0"
822 (plus:DI (match_dup 3)
823 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
824 (const_int 4294967296)))
825 (set (match_operand:SI 0 "s_register_operand")
826 (plus:SI (match_dup 4) (match_dup 1)))])]
829 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
836 (define_insn "*addsi3_cin_cout_0_insn"
837 [(set (reg:CC_ADC CC_REGNUM)
840 (match_operand:DI 2 "arm_carry_operation" "")
841 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
842 (const_int 4294967296)))
843 (set (match_operand:SI 0 "s_register_operand" "=r")
844 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
846 "adcs%?\\t%0, %1, #0"
847 [(set_attr "type" "alus_imm")]
850 (define_expand "addsi3"
851 [(set (match_operand:SI 0 "s_register_operand")
852 (plus:SI (match_operand:SI 1 "s_register_operand")
853 (match_operand:SI 2 "reg_or_int_operand")))]
856 if (TARGET_32BIT && CONST_INT_P (operands[2]))
858 arm_split_constant (PLUS, SImode, NULL_RTX,
859 INTVAL (operands[2]), operands[0], operands[1],
860 optimize && can_create_pseudo_p ());
866 ; If there is a scratch available, this will be faster than synthesizing the
869 [(match_scratch:SI 3 "r")
870 (set (match_operand:SI 0 "arm_general_register_operand" "")
871 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
872 (match_operand:SI 2 "const_int_operand" "")))]
874 !(const_ok_for_arm (INTVAL (operands[2]))
875 || const_ok_for_arm (-INTVAL (operands[2])))
876 && const_ok_for_arm (~INTVAL (operands[2]))"
877 [(set (match_dup 3) (match_dup 2))
878 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
882 ;; The r/r/k alternative is required when reloading the address
883 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
884 ;; put the duplicated register first, and not try the commutative version.
885 (define_insn_and_split "*arm_addsi3"
886 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
887 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
888 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
904 subw%?\\t%0, %1, #%n2
905 subw%?\\t%0, %1, #%n2
908 && CONST_INT_P (operands[2])
909 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
910 && (reload_completed || !arm_eliminable_register (operands[1]))"
911 [(clobber (const_int 0))]
913 arm_split_constant (PLUS, SImode, curr_insn,
914 INTVAL (operands[2]), operands[0],
918 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
919 (set_attr "predicable" "yes")
920 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
921 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
922 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
923 (const_string "alu_imm")
924 (const_string "alu_sreg")))
928 (define_insn "addsi3_compareV_reg"
929 [(set (reg:CC_V CC_REGNUM)
932 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
933 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
934 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
935 (set (match_operand:SI 0 "register_operand" "=l,r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
938 "adds%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "arch" "t2,t2,*")
941 (set_attr "length" "2,2,4")
942 (set_attr "type" "alus_sreg")]
945 (define_insn "*addsi3_compareV_reg_nosum"
946 [(set (reg:CC_V CC_REGNUM)
949 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
950 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
951 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
954 [(set_attr "conds" "set")
955 (set_attr "arch" "t2,*")
956 (set_attr "length" "2,4")
957 (set_attr "type" "alus_sreg")]
960 (define_insn "subvsi3_intmin"
961 [(set (reg:CC_V CC_REGNUM)
965 (match_operand:SI 1 "register_operand" "r"))
966 (const_int 2147483648))
967 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
968 (set (match_operand:SI 0 "register_operand" "=r")
969 (plus:SI (match_dup 1) (const_int -2147483648)))]
971 "subs%?\\t%0, %1, #-2147483648"
972 [(set_attr "conds" "set")
973 (set_attr "type" "alus_imm")]
976 (define_insn "addsi3_compareV_imm"
977 [(set (reg:CC_V CC_REGNUM)
981 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
982 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
983 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
984 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
985 (plus:SI (match_dup 1) (match_dup 2)))]
987 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
991 subs%?\\t%0, %1, #%n2
992 subs%?\\t%0, %0, #%n2
994 subs%?\\t%0, %1, #%n2"
995 [(set_attr "conds" "set")
996 (set_attr "arch" "t2,t2,t2,t2,*,*")
997 (set_attr "length" "2,2,2,2,4,4")
998 (set_attr "type" "alus_imm")]
1001 (define_insn "addsi3_compareV_imm_nosum"
1002 [(set (reg:CC_V CC_REGNUM)
1006 (match_operand:SI 0 "register_operand" "l,r,r"))
1007 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1008 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1010 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1015 [(set_attr "conds" "set")
1016 (set_attr "arch" "t2,*,*")
1017 (set_attr "length" "2,4,4")
1018 (set_attr "type" "alus_imm")]
1021 ;; We can handle more constants efficently if we can clobber either a scratch
1022 ;; or the other source operand. We deliberately leave this late as in
1023 ;; high register pressure situations it's not worth forcing any reloads.
1025 [(match_scratch:SI 2 "l")
1026 (set (reg:CC_V CC_REGNUM)
1030 (match_operand:SI 0 "low_register_operand"))
1031 (match_operand 1 "const_int_operand"))
1032 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1034 && satisfies_constraint_Pd (operands[1])"
1036 (set (reg:CC_V CC_REGNUM)
1038 (plus:DI (sign_extend:DI (match_dup 0))
1039 (sign_extend:DI (match_dup 1)))
1040 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1041 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1045 [(set (reg:CC_V CC_REGNUM)
1049 (match_operand:SI 0 "low_register_operand"))
1050 (match_operand 1 "const_int_operand"))
1051 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1053 && dead_or_set_p (peep2_next_insn (0), operands[0])
1054 && satisfies_constraint_Py (operands[1])"
1056 (set (reg:CC_V CC_REGNUM)
1058 (plus:DI (sign_extend:DI (match_dup 0))
1059 (sign_extend:DI (match_dup 1)))
1060 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1061 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1064 (define_insn "addsi3_compare0"
1065 [(set (reg:CC_NOOV CC_REGNUM)
1067 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1068 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1070 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1071 (plus:SI (match_dup 1) (match_dup 2)))]
1075 subs%?\\t%0, %1, #%n2
1076 adds%?\\t%0, %1, %2"
1077 [(set_attr "conds" "set")
1078 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1081 (define_insn "*addsi3_compare0_scratch"
1082 [(set (reg:CC_NOOV CC_REGNUM)
1084 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1085 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1092 [(set_attr "conds" "set")
1093 (set_attr "predicable" "yes")
1094 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1097 (define_insn "*compare_negsi_si"
1098 [(set (reg:CC_Z CC_REGNUM)
1100 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1101 (match_operand:SI 1 "s_register_operand" "l,r")))]
1104 [(set_attr "conds" "set")
1105 (set_attr "predicable" "yes")
1106 (set_attr "arch" "t2,*")
1107 (set_attr "length" "2,4")
1108 (set_attr "predicable_short_it" "yes,no")
1109 (set_attr "type" "alus_sreg")]
1112 ;; This is the canonicalization of subsi3_compare when the
1113 ;; addend is a constant.
1114 (define_insn "cmpsi2_addneg"
1115 [(set (reg:CC CC_REGNUM)
1117 (match_operand:SI 1 "s_register_operand" "r,r")
1118 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1119 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1120 (plus:SI (match_dup 1)
1121 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1123 && (INTVAL (operands[2])
1124 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1126 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1127 in different condition codes (like cmn rather than like cmp), so that
1128 alternative comes first. Both alternatives can match for any 0x??000000
1129 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1130 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1131 as it is shorter. */
1132 if (which_alternative == 0 && operands[3] != const1_rtx)
1133 return "subs%?\\t%0, %1, #%n3";
1135 return "adds%?\\t%0, %1, %3";
1137 [(set_attr "conds" "set")
1138 (set_attr "type" "alus_sreg")]
1141 ;; Convert the sequence
1143 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1147 ;; bcs dest ((unsigned)rn >= 1)
1148 ;; similarly for the beq variant using bcc.
1149 ;; This is a common looping idiom (while (n--))
1151 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1152 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1154 (set (match_operand 2 "cc_register" "")
1155 (compare (match_dup 0) (const_int -1)))
1157 (if_then_else (match_operator 3 "equality_operator"
1158 [(match_dup 2) (const_int 0)])
1159 (match_operand 4 "" "")
1160 (match_operand 5 "" "")))]
1161 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1165 (match_dup 1) (const_int 1)))
1166 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1168 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1171 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1172 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1175 operands[2], const0_rtx);"
1178 ;; The next four insns work because they compare the result with one of
1179 ;; the operands, and we know that the use of the condition code is
1180 ;; either GEU or LTU, so we can use the carry flag from the addition
1181 ;; instead of doing the compare a second time.
1182 (define_insn "addsi3_compare_op1"
1183 [(set (reg:CC_C CC_REGNUM)
1185 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1186 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1188 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1189 (plus:SI (match_dup 1) (match_dup 2)))]
1194 subs%?\\t%0, %1, #%n2
1195 subs%?\\t%0, %0, #%n2
1197 subs%?\\t%0, %1, #%n2"
1198 [(set_attr "conds" "set")
1199 (set_attr "arch" "t2,t2,t2,t2,*,*")
1200 (set_attr "length" "2,2,2,2,4,4")
1202 (if_then_else (match_operand 2 "const_int_operand")
1203 (const_string "alu_imm")
1204 (const_string "alu_sreg")))]
1207 (define_insn "*addsi3_compare_op2"
1208 [(set (reg:CC_C CC_REGNUM)
1210 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1211 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1213 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1214 (plus:SI (match_dup 1) (match_dup 2)))]
1219 subs%?\\t%0, %1, #%n2
1220 subs%?\\t%0, %0, #%n2
1222 subs%?\\t%0, %1, #%n2"
1223 [(set_attr "conds" "set")
1224 (set_attr "arch" "t2,t2,t2,t2,*,*")
1225 (set_attr "length" "2,2,2,2,4,4")
1227 (if_then_else (match_operand 2 "const_int_operand")
1228 (const_string "alu_imm")
1229 (const_string "alu_sreg")))]
1232 (define_insn "*compare_addsi2_op0"
1233 [(set (reg:CC_C CC_REGNUM)
1235 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1236 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1244 [(set_attr "conds" "set")
1245 (set_attr "predicable" "yes")
1246 (set_attr "arch" "t2,t2,*,*")
1247 (set_attr "predicable_short_it" "yes,yes,no,no")
1248 (set_attr "length" "2,2,4,4")
1250 (if_then_else (match_operand 1 "const_int_operand")
1251 (const_string "alu_imm")
1252 (const_string "alu_sreg")))]
1255 (define_insn "*compare_addsi2_op1"
1256 [(set (reg:CC_C CC_REGNUM)
1258 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1259 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1267 [(set_attr "conds" "set")
1268 (set_attr "predicable" "yes")
1269 (set_attr "arch" "t2,t2,*,*")
1270 (set_attr "predicable_short_it" "yes,yes,no,no")
1271 (set_attr "length" "2,2,4,4")
1273 (if_then_else (match_operand 1 "const_int_operand")
1274 (const_string "alu_imm")
1275 (const_string "alu_sreg")))]
1278 (define_insn "addsi3_carryin"
1279 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1280 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1281 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1282 (match_operand:SI 3 "arm_carry_operation" "")))]
1287 sbc%?\\t%0, %1, #%B2"
1288 [(set_attr "conds" "use")
1289 (set_attr "predicable" "yes")
1290 (set_attr "arch" "t2,*,*")
1291 (set_attr "length" "4")
1292 (set_attr "predicable_short_it" "yes,no,no")
1293 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1296 ;; Canonicalization of the above when the immediate is zero.
1297 (define_insn "add0si3_carryin"
1298 [(set (match_operand:SI 0 "s_register_operand" "=r")
1299 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1300 (match_operand:SI 1 "arm_not_operand" "r")))]
1302 "adc%?\\t%0, %1, #0"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set_attr "length" "4")
1306 (set_attr "type" "adc_imm")]
1309 (define_insn "*addsi3_carryin_alt2"
1310 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1311 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1312 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1313 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1318 sbc%?\\t%0, %1, #%B2"
1319 [(set_attr "conds" "use")
1320 (set_attr "predicable" "yes")
1321 (set_attr "arch" "t2,*,*")
1322 (set_attr "length" "4")
1323 (set_attr "predicable_short_it" "yes,no,no")
1324 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1327 (define_insn "*addsi3_carryin_shift"
1328 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1330 (match_operator:SI 2 "shift_operator"
1331 [(match_operand:SI 3 "s_register_operand" "r,r")
1332 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1333 (match_operand:SI 5 "arm_carry_operation" ""))
1334 (match_operand:SI 1 "s_register_operand" "r,r")))]
1336 "adc%?\\t%0, %1, %3%S2"
1337 [(set_attr "conds" "use")
1338 (set_attr "arch" "32,a")
1339 (set_attr "shift" "3")
1340 (set_attr "predicable" "yes")
1341 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1342 (const_string "alu_shift_imm")
1343 (const_string "alu_shift_reg")))]
1346 (define_insn "*addsi3_carryin_clobercc"
1347 [(set (match_operand:SI 0 "s_register_operand" "=r")
1348 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1349 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1350 (match_operand:SI 3 "arm_carry_operation" "")))
1351 (clobber (reg:CC CC_REGNUM))]
1353 "adcs%?\\t%0, %1, %2"
1354 [(set_attr "conds" "set")
1355 (set_attr "type" "adcs_reg")]
1358 (define_expand "subvsi4"
1359 [(match_operand:SI 0 "s_register_operand")
1360 (match_operand:SI 1 "arm_rhs_operand")
1361 (match_operand:SI 2 "arm_add_operand")
1362 (match_operand 3 "")]
1365 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1367 /* If both operands are constants we can decide the result statically. */
1368 wi::overflow_type overflow;
1369 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1370 rtx_mode_t (operands[2], SImode),
1372 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1373 if (overflow != wi::OVF_NONE)
1374 emit_jump_insn (gen_jump (operands[3]));
1377 else if (CONST_INT_P (operands[2]))
1379 operands[2] = GEN_INT (-INTVAL (operands[2]));
1380 /* Special case for INT_MIN. */
1381 if (INTVAL (operands[2]) == 0x80000000)
1382 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1384 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1387 else if (CONST_INT_P (operands[1]))
1388 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1390 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1392 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1396 (define_expand "subvdi4"
1397 [(match_operand:DI 0 "s_register_operand")
1398 (match_operand:DI 1 "s_register_operand")
1399 (match_operand:DI 2 "s_register_operand")
1400 (match_operand 3 "")]
1403 emit_insn (gen_subdi3_compare1 (operands[0], operands[1], operands[2]));
1404 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1409 (define_expand "usubvsi4"
1410 [(match_operand:SI 0 "s_register_operand")
1411 (match_operand:SI 1 "arm_rhs_operand")
1412 (match_operand:SI 2 "arm_add_operand")
1413 (match_operand 3 "")]
1416 machine_mode mode = CCmode;
1417 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1419 /* If both operands are constants we can decide the result statically. */
1420 wi::overflow_type overflow;
1421 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1422 rtx_mode_t (operands[2], SImode),
1423 UNSIGNED, &overflow);
1424 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1425 if (overflow != wi::OVF_NONE)
1426 emit_jump_insn (gen_jump (operands[3]));
1429 else if (CONST_INT_P (operands[2]))
1430 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1431 GEN_INT (-INTVAL (operands[2]))));
1432 else if (CONST_INT_P (operands[1]))
1435 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1436 GEN_INT (~UINTVAL (operands[1]))));
1439 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1440 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1445 (define_expand "usubvdi4"
1446 [(match_operand:DI 0 "s_register_operand")
1447 (match_operand:DI 1 "reg_or_int_operand")
1448 (match_operand:DI 2 "reg_or_int_operand")
1449 (match_operand 3 "")]
1452 rtx lo_result, hi_result;
1453 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1454 lo_result = gen_lowpart (SImode, operands[0]);
1455 hi_result = gen_highpart (SImode, operands[0]);
1456 machine_mode mode = CCmode;
1458 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1460 /* If both operands are constants we can decide the result statically. */
1461 wi::overflow_type overflow;
1462 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1463 rtx_mode_t (operands[2], DImode),
1464 UNSIGNED, &overflow);
1465 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1466 if (overflow != wi::OVF_NONE)
1467 emit_jump_insn (gen_jump (operands[3]));
1470 else if (CONST_INT_P (operands[1]))
1472 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1474 if (const_ok_for_arm (INTVAL (lo_op1)))
1476 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1477 GEN_INT (~UINTVAL (lo_op1))));
1478 /* We could potentially use RSC here in Arm state, but not
1479 in Thumb, so it's probably not worth the effort of handling
1481 hi_op1 = force_reg (SImode, hi_op1);
1485 operands[1] = force_reg (DImode, operands[1]);
1488 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1490 if (lo_op2 == const0_rtx)
1492 emit_move_insn (lo_result, lo_op1);
1493 if (!arm_add_operand (hi_op2, SImode))
1494 hi_op2 = force_reg (SImode, hi_op2);
1495 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1499 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1500 lo_op2 = force_reg (SImode, lo_op2);
1501 if (CONST_INT_P (lo_op2))
1502 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1503 GEN_INT (-INTVAL (lo_op2))));
1505 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1508 if (!arm_not_operand (hi_op2, SImode))
1509 hi_op2 = force_reg (SImode, hi_op2);
1510 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1511 if (CONST_INT_P (hi_op2))
1512 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1513 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1514 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1515 gen_rtx_LTU (DImode, ccreg,
1518 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1519 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1520 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1521 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1526 (define_insn "subdi3_compare1"
1527 [(set (reg:CC CC_REGNUM)
1529 (match_operand:DI 1 "s_register_operand" "r")
1530 (match_operand:DI 2 "s_register_operand" "r")))
1531 (set (match_operand:DI 0 "s_register_operand" "=&r")
1532 (minus:DI (match_dup 1) (match_dup 2)))]
1534 "subs\\t%Q0, %Q1, %Q2;sbcs\\t%R0, %R1, %R2"
1535 [(set_attr "conds" "set")
1536 (set_attr "length" "8")
1537 (set_attr "type" "multiple")]
1540 (define_insn "subsi3_compare1"
1541 [(set (reg:CC CC_REGNUM)
1543 (match_operand:SI 1 "register_operand" "r")
1544 (match_operand:SI 2 "register_operand" "r")))
1545 (set (match_operand:SI 0 "register_operand" "=r")
1546 (minus:SI (match_dup 1) (match_dup 2)))]
1548 "subs%?\\t%0, %1, %2"
1549 [(set_attr "conds" "set")
1550 (set_attr "type" "alus_sreg")]
1553 (define_insn "subvsi3"
1554 [(set (reg:CC_V CC_REGNUM)
1557 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1558 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1559 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1560 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1561 (minus:SI (match_dup 1) (match_dup 2)))]
1563 "subs%?\\t%0, %1, %2"
1564 [(set_attr "conds" "set")
1565 (set_attr "arch" "t2,*")
1566 (set_attr "length" "2,4")
1567 (set_attr "type" "alus_sreg")]
1570 (define_insn "subvsi3_imm1"
1571 [(set (reg:CC_V CC_REGNUM)
1574 (match_operand 1 "arm_immediate_operand" "I")
1575 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1576 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1577 (set (match_operand:SI 0 "s_register_operand" "=r")
1578 (minus:SI (match_dup 1) (match_dup 2)))]
1580 "rsbs%?\\t%0, %2, %1"
1581 [(set_attr "conds" "set")
1582 (set_attr "type" "alus_imm")]
1585 (define_insn "subsi3_carryin"
1586 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1587 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1588 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1589 (match_operand:SI 3 "arm_borrow_operation" "")))]
1594 sbc%?\\t%0, %2, %2, lsl #1"
1595 [(set_attr "conds" "use")
1596 (set_attr "arch" "*,a,t2")
1597 (set_attr "predicable" "yes")
1598 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1601 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1602 [(set (reg:<CC_EXTEND> CC_REGNUM)
1603 (compare:<CC_EXTEND>
1604 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1605 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1606 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1607 (clobber (match_scratch:SI 0 "=l,r"))]
1610 [(set_attr "conds" "set")
1611 (set_attr "arch" "t2,*")
1612 (set_attr "length" "2,4")
1613 (set_attr "type" "adc_reg")]
1616 ;; Similar to the above, but handling a constant which has a different
1617 ;; canonicalization.
1618 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1619 [(set (reg:<CC_EXTEND> CC_REGNUM)
1620 (compare:<CC_EXTEND>
1621 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1622 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1623 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1624 (clobber (match_scratch:SI 0 "=l,r"))]
1628 adcs\\t%0, %1, #%B2"
1629 [(set_attr "conds" "set")
1630 (set_attr "type" "adc_imm")]
1633 ;; Further canonicalization when the constant is zero.
1634 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1635 [(set (reg:<CC_EXTEND> CC_REGNUM)
1636 (compare:<CC_EXTEND>
1637 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1638 (match_operand:DI 2 "arm_borrow_operation" "")))
1639 (clobber (match_scratch:SI 0 "=l,r"))]
1642 [(set_attr "conds" "set")
1643 (set_attr "type" "adc_imm")]
1646 (define_insn "*subsi3_carryin_const"
1647 [(set (match_operand:SI 0 "s_register_operand" "=r")
1649 (match_operand:SI 1 "s_register_operand" "r")
1650 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1651 (match_operand:SI 3 "arm_borrow_operation" "")))]
1653 "sbc\\t%0, %1, #%n2"
1654 [(set_attr "conds" "use")
1655 (set_attr "type" "adc_imm")]
1658 (define_insn "*subsi3_carryin_const0"
1659 [(set (match_operand:SI 0 "s_register_operand" "=r")
1660 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1661 (match_operand:SI 2 "arm_borrow_operation" "")))]
1664 [(set_attr "conds" "use")
1665 (set_attr "type" "adc_imm")]
1668 (define_insn "*subsi3_carryin_shift"
1669 [(set (match_operand:SI 0 "s_register_operand" "=r")
1671 (match_operand:SI 1 "s_register_operand" "r")
1672 (match_operator:SI 2 "shift_operator"
1673 [(match_operand:SI 3 "s_register_operand" "r")
1674 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1675 (match_operand:SI 5 "arm_borrow_operation" "")))]
1677 "sbc%?\\t%0, %1, %3%S2"
1678 [(set_attr "conds" "use")
1679 (set_attr "predicable" "yes")
1680 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1681 (const_string "alu_shift_imm")
1682 (const_string "alu_shift_reg")))]
1685 (define_insn "*subsi3_carryin_shift_alt"
1686 [(set (match_operand:SI 0 "s_register_operand" "=r")
1688 (match_operand:SI 1 "s_register_operand" "r")
1689 (match_operand:SI 5 "arm_borrow_operation" ""))
1690 (match_operator:SI 2 "shift_operator"
1691 [(match_operand:SI 3 "s_register_operand" "r")
1692 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1694 "sbc%?\\t%0, %1, %3%S2"
1695 [(set_attr "conds" "use")
1696 (set_attr "predicable" "yes")
1697 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1698 (const_string "alu_shift_imm")
1699 (const_string "alu_shift_reg")))]
1702 (define_insn "*rsbsi3_carryin_shift"
1703 [(set (match_operand:SI 0 "s_register_operand" "=r")
1705 (match_operator:SI 2 "shift_operator"
1706 [(match_operand:SI 3 "s_register_operand" "r")
1707 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1708 (match_operand:SI 1 "s_register_operand" "r"))
1709 (match_operand:SI 5 "arm_borrow_operation" "")))]
1711 "rsc%?\\t%0, %1, %3%S2"
1712 [(set_attr "conds" "use")
1713 (set_attr "predicable" "yes")
1714 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1715 (const_string "alu_shift_imm")
1716 (const_string "alu_shift_reg")))]
1719 (define_insn "*rsbsi3_carryin_shift_alt"
1720 [(set (match_operand:SI 0 "s_register_operand" "=r")
1722 (match_operator:SI 2 "shift_operator"
1723 [(match_operand:SI 3 "s_register_operand" "r")
1724 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1725 (match_operand:SI 5 "arm_borrow_operation" ""))
1726 (match_operand:SI 1 "s_register_operand" "r")))]
1728 "rsc%?\\t%0, %1, %3%S2"
1729 [(set_attr "conds" "use")
1730 (set_attr "predicable" "yes")
1731 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1732 (const_string "alu_shift_imm")
1733 (const_string "alu_shift_reg")))]
1736 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1738 [(set (match_operand:SI 0 "s_register_operand" "")
1739 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1740 (match_operand:SI 2 "s_register_operand" ""))
1742 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1744 [(set (match_dup 3) (match_dup 1))
1745 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1747 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1750 (define_expand "addsf3"
1751 [(set (match_operand:SF 0 "s_register_operand")
1752 (plus:SF (match_operand:SF 1 "s_register_operand")
1753 (match_operand:SF 2 "s_register_operand")))]
1754 "TARGET_32BIT && TARGET_HARD_FLOAT"
1758 (define_expand "adddf3"
1759 [(set (match_operand:DF 0 "s_register_operand")
1760 (plus:DF (match_operand:DF 1 "s_register_operand")
1761 (match_operand:DF 2 "s_register_operand")))]
1762 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1766 (define_expand "subdi3"
1768 [(set (match_operand:DI 0 "s_register_operand")
1769 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1770 (match_operand:DI 2 "s_register_operand")))
1771 (clobber (reg:CC CC_REGNUM))])]
1776 if (!REG_P (operands[1]))
1777 operands[1] = force_reg (DImode, operands[1]);
1781 rtx lo_result, hi_result, lo_dest, hi_dest;
1782 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1785 /* Since operands[1] may be an integer, pass it second, so that
1786 any necessary simplifications will be done on the decomposed
1788 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1790 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1791 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1793 if (!arm_rhs_operand (lo_op1, SImode))
1794 lo_op1 = force_reg (SImode, lo_op1);
1796 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1797 || !arm_rhs_operand (hi_op1, SImode))
1798 hi_op1 = force_reg (SImode, hi_op1);
1801 if (lo_op1 == const0_rtx)
1803 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1804 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1806 else if (CONST_INT_P (lo_op1))
1808 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1809 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1810 GEN_INT (~UINTVAL (lo_op1))));
1814 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1815 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1818 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1820 if (hi_op1 == const0_rtx)
1821 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1823 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1825 if (lo_result != lo_dest)
1826 emit_move_insn (lo_result, lo_dest);
1828 if (hi_result != hi_dest)
1829 emit_move_insn (hi_result, hi_dest);
1836 (define_expand "subsi3"
1837 [(set (match_operand:SI 0 "s_register_operand")
1838 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1839 (match_operand:SI 2 "s_register_operand")))]
1842 if (CONST_INT_P (operands[1]))
1846 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1847 operands[1] = force_reg (SImode, operands[1]);
1850 arm_split_constant (MINUS, SImode, NULL_RTX,
1851 INTVAL (operands[1]), operands[0],
1853 optimize && can_create_pseudo_p ());
1857 else /* TARGET_THUMB1 */
1858 operands[1] = force_reg (SImode, operands[1]);
1863 ; ??? Check Thumb-2 split length
1864 (define_insn_and_split "*arm_subsi3_insn"
1865 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1866 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1867 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1879 "&& (CONST_INT_P (operands[1])
1880 && !const_ok_for_arm (INTVAL (operands[1])))"
1881 [(clobber (const_int 0))]
1883 arm_split_constant (MINUS, SImode, curr_insn,
1884 INTVAL (operands[1]), operands[0], operands[2], 0);
1887 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1888 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1889 (set_attr "predicable" "yes")
1890 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1891 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1895 [(match_scratch:SI 3 "r")
1896 (set (match_operand:SI 0 "arm_general_register_operand" "")
1897 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1898 (match_operand:SI 2 "arm_general_register_operand" "")))]
1900 && !const_ok_for_arm (INTVAL (operands[1]))
1901 && const_ok_for_arm (~INTVAL (operands[1]))"
1902 [(set (match_dup 3) (match_dup 1))
1903 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1907 (define_insn "subsi3_compare0"
1908 [(set (reg:CC_NOOV CC_REGNUM)
1910 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1911 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1913 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1914 (minus:SI (match_dup 1) (match_dup 2)))]
1919 rsbs%?\\t%0, %2, %1"
1920 [(set_attr "conds" "set")
1921 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1924 (define_insn "subsi3_compare"
1925 [(set (reg:CC CC_REGNUM)
1926 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1927 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1928 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1929 (minus:SI (match_dup 1) (match_dup 2)))]
1934 rsbs%?\\t%0, %2, %1"
1935 [(set_attr "conds" "set")
1936 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1939 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1940 ;; rather than (0 cmp reg). This gives the same results for unsigned
1941 ;; and equality compares which is what we mostly need here.
1942 (define_insn "rsb_imm_compare"
1943 [(set (reg:CC_RSB CC_REGNUM)
1944 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1945 (match_operand 3 "const_int_operand" "")))
1946 (set (match_operand:SI 0 "s_register_operand" "=r")
1947 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1949 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1951 [(set_attr "conds" "set")
1952 (set_attr "type" "alus_imm")]
1955 ;; Similarly, but the result is unused.
1956 (define_insn "rsb_imm_compare_scratch"
1957 [(set (reg:CC_RSB CC_REGNUM)
1958 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1959 (match_operand 1 "arm_not_immediate_operand" "K")))
1960 (clobber (match_scratch:SI 0 "=r"))]
1962 "rsbs\\t%0, %2, #%B1"
1963 [(set_attr "conds" "set")
1964 (set_attr "type" "alus_imm")]
1967 ;; Compare the sum of a value plus a carry against a constant. Uses
1968 ;; RSC, so the result is swapped. Only available on Arm
1969 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
1970 [(set (reg:CC_SWP CC_REGNUM)
1972 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
1973 (match_operand:DI 3 "arm_borrow_operation" ""))
1974 (match_operand 1 "arm_immediate_operand" "I")))
1975 (clobber (match_scratch:SI 0 "=r"))]
1978 [(set_attr "conds" "set")
1979 (set_attr "type" "alus_imm")]
1982 (define_insn "usubvsi3_borrow"
1983 [(set (reg:CC_B CC_REGNUM)
1985 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1986 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
1988 (match_operand:SI 2 "s_register_operand" "l,r")))))
1989 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1990 (minus:SI (match_dup 1)
1991 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
1994 "sbcs%?\\t%0, %1, %2"
1995 [(set_attr "conds" "set")
1996 (set_attr "arch" "t2,*")
1997 (set_attr "length" "2,4")]
2000 (define_insn "usubvsi3_borrow_imm"
2001 [(set (reg:CC_B CC_REGNUM)
2003 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2004 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2005 (match_operand:DI 3 "const_int_operand" "n,n"))))
2006 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2007 (minus:SI (match_dup 1)
2008 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2009 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2011 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2014 adcs%?\\t%0, %1, #%B2"
2015 [(set_attr "conds" "set")
2016 (set_attr "type" "alus_imm")]
2019 (define_expand "subsf3"
2020 [(set (match_operand:SF 0 "s_register_operand")
2021 (minus:SF (match_operand:SF 1 "s_register_operand")
2022 (match_operand:SF 2 "s_register_operand")))]
2023 "TARGET_32BIT && TARGET_HARD_FLOAT"
2027 (define_expand "subdf3"
2028 [(set (match_operand:DF 0 "s_register_operand")
2029 (minus:DF (match_operand:DF 1 "s_register_operand")
2030 (match_operand:DF 2 "s_register_operand")))]
2031 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2036 ;; Multiplication insns
2038 (define_expand "mulhi3"
2039 [(set (match_operand:HI 0 "s_register_operand")
2040 (mult:HI (match_operand:HI 1 "s_register_operand")
2041 (match_operand:HI 2 "s_register_operand")))]
2042 "TARGET_DSP_MULTIPLY"
2045 rtx result = gen_reg_rtx (SImode);
2046 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2047 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2052 (define_expand "mulsi3"
2053 [(set (match_operand:SI 0 "s_register_operand")
2054 (mult:SI (match_operand:SI 2 "s_register_operand")
2055 (match_operand:SI 1 "s_register_operand")))]
2060 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
2062 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2063 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2064 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2066 "mul%?\\t%0, %2, %1"
2067 [(set_attr "type" "mul")
2068 (set_attr "predicable" "yes")
2069 (set_attr "arch" "t2,v6,nov6,nov6")
2070 (set_attr "length" "4")
2071 (set_attr "predicable_short_it" "yes,no,*,*")]
2074 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2075 ;; reusing the same register.
2078 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2080 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2081 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2082 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2084 "mla%?\\t%0, %3, %2, %1"
2085 [(set_attr "type" "mla")
2086 (set_attr "predicable" "yes")
2087 (set_attr "arch" "v6,nov6,nov6,nov6")]
2091 [(set (match_operand:SI 0 "s_register_operand" "=r")
2093 (match_operand:SI 1 "s_register_operand" "r")
2094 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2095 (match_operand:SI 2 "s_register_operand" "r"))))]
2096 "TARGET_32BIT && arm_arch_thumb2"
2097 "mls%?\\t%0, %3, %2, %1"
2098 [(set_attr "type" "mla")
2099 (set_attr "predicable" "yes")]
2102 (define_insn "*mulsi3_compare0"
2103 [(set (reg:CC_NOOV CC_REGNUM)
2104 (compare:CC_NOOV (mult:SI
2105 (match_operand:SI 2 "s_register_operand" "r,r")
2106 (match_operand:SI 1 "s_register_operand" "%0,r"))
2108 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2109 (mult:SI (match_dup 2) (match_dup 1)))]
2110 "TARGET_ARM && !arm_arch6"
2111 "muls%?\\t%0, %2, %1"
2112 [(set_attr "conds" "set")
2113 (set_attr "type" "muls")]
2116 (define_insn "*mulsi3_compare0_v6"
2117 [(set (reg:CC_NOOV CC_REGNUM)
2118 (compare:CC_NOOV (mult:SI
2119 (match_operand:SI 2 "s_register_operand" "r")
2120 (match_operand:SI 1 "s_register_operand" "r"))
2122 (set (match_operand:SI 0 "s_register_operand" "=r")
2123 (mult:SI (match_dup 2) (match_dup 1)))]
2124 "TARGET_ARM && arm_arch6 && optimize_size"
2125 "muls%?\\t%0, %2, %1"
2126 [(set_attr "conds" "set")
2127 (set_attr "type" "muls")]
2130 (define_insn "*mulsi_compare0_scratch"
2131 [(set (reg:CC_NOOV CC_REGNUM)
2132 (compare:CC_NOOV (mult:SI
2133 (match_operand:SI 2 "s_register_operand" "r,r")
2134 (match_operand:SI 1 "s_register_operand" "%0,r"))
2136 (clobber (match_scratch:SI 0 "=&r,&r"))]
2137 "TARGET_ARM && !arm_arch6"
2138 "muls%?\\t%0, %2, %1"
2139 [(set_attr "conds" "set")
2140 (set_attr "type" "muls")]
2143 (define_insn "*mulsi_compare0_scratch_v6"
2144 [(set (reg:CC_NOOV CC_REGNUM)
2145 (compare:CC_NOOV (mult:SI
2146 (match_operand:SI 2 "s_register_operand" "r")
2147 (match_operand:SI 1 "s_register_operand" "r"))
2149 (clobber (match_scratch:SI 0 "=r"))]
2150 "TARGET_ARM && arm_arch6 && optimize_size"
2151 "muls%?\\t%0, %2, %1"
2152 [(set_attr "conds" "set")
2153 (set_attr "type" "muls")]
2156 (define_insn "*mulsi3addsi_compare0"
2157 [(set (reg:CC_NOOV CC_REGNUM)
2160 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2161 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2162 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2164 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2165 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2167 "TARGET_ARM && arm_arch6"
2168 "mlas%?\\t%0, %2, %1, %3"
2169 [(set_attr "conds" "set")
2170 (set_attr "type" "mlas")]
2173 (define_insn "*mulsi3addsi_compare0_v6"
2174 [(set (reg:CC_NOOV CC_REGNUM)
2177 (match_operand:SI 2 "s_register_operand" "r")
2178 (match_operand:SI 1 "s_register_operand" "r"))
2179 (match_operand:SI 3 "s_register_operand" "r"))
2181 (set (match_operand:SI 0 "s_register_operand" "=r")
2182 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2184 "TARGET_ARM && arm_arch6 && optimize_size"
2185 "mlas%?\\t%0, %2, %1, %3"
2186 [(set_attr "conds" "set")
2187 (set_attr "type" "mlas")]
2190 (define_insn "*mulsi3addsi_compare0_scratch"
2191 [(set (reg:CC_NOOV CC_REGNUM)
2194 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2195 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2196 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2198 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2199 "TARGET_ARM && !arm_arch6"
2200 "mlas%?\\t%0, %2, %1, %3"
2201 [(set_attr "conds" "set")
2202 (set_attr "type" "mlas")]
2205 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2206 [(set (reg:CC_NOOV CC_REGNUM)
2209 (match_operand:SI 2 "s_register_operand" "r")
2210 (match_operand:SI 1 "s_register_operand" "r"))
2211 (match_operand:SI 3 "s_register_operand" "r"))
2213 (clobber (match_scratch:SI 0 "=r"))]
2214 "TARGET_ARM && arm_arch6 && optimize_size"
2215 "mlas%?\\t%0, %2, %1, %3"
2216 [(set_attr "conds" "set")
2217 (set_attr "type" "mlas")]
2220 ;; 32x32->64 widening multiply.
2221 ;; The only difference between the v3-5 and v6+ versions is the requirement
2222 ;; that the output does not overlap with either input.
2224 (define_expand "<Us>mulsidi3"
2225 [(set (match_operand:DI 0 "s_register_operand")
2227 (SE:DI (match_operand:SI 1 "s_register_operand"))
2228 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2231 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2232 gen_highpart (SImode, operands[0]),
2233 operands[1], operands[2]));
2238 (define_insn "<US>mull"
2239 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2241 (match_operand:SI 2 "s_register_operand" "%r,r")
2242 (match_operand:SI 3 "s_register_operand" "r,r")))
2243 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2246 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2249 "<US>mull%?\\t%0, %1, %2, %3"
2250 [(set_attr "type" "umull")
2251 (set_attr "predicable" "yes")
2252 (set_attr "arch" "v6,nov6")]
2255 (define_expand "<Us>maddsidi4"
2256 [(set (match_operand:DI 0 "s_register_operand")
2259 (SE:DI (match_operand:SI 1 "s_register_operand"))
2260 (SE:DI (match_operand:SI 2 "s_register_operand")))
2261 (match_operand:DI 3 "s_register_operand")))]
2264 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2265 gen_lowpart (SImode, operands[3]),
2266 gen_highpart (SImode, operands[0]),
2267 gen_highpart (SImode, operands[3]),
2268 operands[1], operands[2]));
2273 (define_insn "<US>mlal"
2274 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2277 (match_operand:SI 4 "s_register_operand" "%r,r")
2278 (match_operand:SI 5 "s_register_operand" "r,r"))
2279 (match_operand:SI 1 "s_register_operand" "0,0")))
2280 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2285 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2286 (zero_extend:DI (match_dup 1)))
2288 (match_operand:SI 3 "s_register_operand" "2,2")))]
2290 "<US>mlal%?\\t%0, %2, %4, %5"
2291 [(set_attr "type" "umlal")
2292 (set_attr "predicable" "yes")
2293 (set_attr "arch" "v6,nov6")]
2296 (define_expand "<US>mulsi3_highpart"
2298 [(set (match_operand:SI 0 "s_register_operand")
2302 (SE:DI (match_operand:SI 1 "s_register_operand"))
2303 (SE:DI (match_operand:SI 2 "s_register_operand")))
2305 (clobber (match_scratch:SI 3 ""))])]
2310 (define_insn "*<US>mull_high"
2311 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2315 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2316 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2318 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2320 "<US>mull%?\\t%3, %0, %2, %1"
2321 [(set_attr "type" "umull")
2322 (set_attr "predicable" "yes")
2323 (set_attr "arch" "v6,nov6,nov6")]
2326 (define_insn "mulhisi3"
2327 [(set (match_operand:SI 0 "s_register_operand" "=r")
2328 (mult:SI (sign_extend:SI
2329 (match_operand:HI 1 "s_register_operand" "%r"))
2331 (match_operand:HI 2 "s_register_operand" "r"))))]
2332 "TARGET_DSP_MULTIPLY"
2333 "smulbb%?\\t%0, %1, %2"
2334 [(set_attr "type" "smulxy")
2335 (set_attr "predicable" "yes")]
2338 (define_insn "*mulhisi3tb"
2339 [(set (match_operand:SI 0 "s_register_operand" "=r")
2340 (mult:SI (ashiftrt:SI
2341 (match_operand:SI 1 "s_register_operand" "r")
2344 (match_operand:HI 2 "s_register_operand" "r"))))]
2345 "TARGET_DSP_MULTIPLY"
2346 "smultb%?\\t%0, %1, %2"
2347 [(set_attr "type" "smulxy")
2348 (set_attr "predicable" "yes")]
2351 (define_insn "*mulhisi3bt"
2352 [(set (match_operand:SI 0 "s_register_operand" "=r")
2353 (mult:SI (sign_extend:SI
2354 (match_operand:HI 1 "s_register_operand" "r"))
2356 (match_operand:SI 2 "s_register_operand" "r")
2358 "TARGET_DSP_MULTIPLY"
2359 "smulbt%?\\t%0, %1, %2"
2360 [(set_attr "type" "smulxy")
2361 (set_attr "predicable" "yes")]
2364 (define_insn "*mulhisi3tt"
2365 [(set (match_operand:SI 0 "s_register_operand" "=r")
2366 (mult:SI (ashiftrt:SI
2367 (match_operand:SI 1 "s_register_operand" "r")
2370 (match_operand:SI 2 "s_register_operand" "r")
2372 "TARGET_DSP_MULTIPLY"
2373 "smultt%?\\t%0, %1, %2"
2374 [(set_attr "type" "smulxy")
2375 (set_attr "predicable" "yes")]
2378 (define_insn "maddhisi4"
2379 [(set (match_operand:SI 0 "s_register_operand" "=r")
2380 (plus:SI (mult:SI (sign_extend:SI
2381 (match_operand:HI 1 "s_register_operand" "r"))
2383 (match_operand:HI 2 "s_register_operand" "r")))
2384 (match_operand:SI 3 "s_register_operand" "r")))]
2385 "TARGET_DSP_MULTIPLY"
2386 "smlabb%?\\t%0, %1, %2, %3"
2387 [(set_attr "type" "smlaxy")
2388 (set_attr "predicable" "yes")]
2391 ;; Note: there is no maddhisi4ibt because this one is canonical form
2392 (define_insn "*maddhisi4tb"
2393 [(set (match_operand:SI 0 "s_register_operand" "=r")
2394 (plus:SI (mult:SI (ashiftrt:SI
2395 (match_operand:SI 1 "s_register_operand" "r")
2398 (match_operand:HI 2 "s_register_operand" "r")))
2399 (match_operand:SI 3 "s_register_operand" "r")))]
2400 "TARGET_DSP_MULTIPLY"
2401 "smlatb%?\\t%0, %1, %2, %3"
2402 [(set_attr "type" "smlaxy")
2403 (set_attr "predicable" "yes")]
2406 (define_insn "*maddhisi4tt"
2407 [(set (match_operand:SI 0 "s_register_operand" "=r")
2408 (plus:SI (mult:SI (ashiftrt:SI
2409 (match_operand:SI 1 "s_register_operand" "r")
2412 (match_operand:SI 2 "s_register_operand" "r")
2414 (match_operand:SI 3 "s_register_operand" "r")))]
2415 "TARGET_DSP_MULTIPLY"
2416 "smlatt%?\\t%0, %1, %2, %3"
2417 [(set_attr "type" "smlaxy")
2418 (set_attr "predicable" "yes")]
2421 (define_insn "maddhidi4"
2422 [(set (match_operand:DI 0 "s_register_operand" "=r")
2424 (mult:DI (sign_extend:DI
2425 (match_operand:HI 1 "s_register_operand" "r"))
2427 (match_operand:HI 2 "s_register_operand" "r")))
2428 (match_operand:DI 3 "s_register_operand" "0")))]
2429 "TARGET_DSP_MULTIPLY"
2430 "smlalbb%?\\t%Q0, %R0, %1, %2"
2431 [(set_attr "type" "smlalxy")
2432 (set_attr "predicable" "yes")])
2434 ;; Note: there is no maddhidi4ibt because this one is canonical form
2435 (define_insn "*maddhidi4tb"
2436 [(set (match_operand:DI 0 "s_register_operand" "=r")
2438 (mult:DI (sign_extend:DI
2440 (match_operand:SI 1 "s_register_operand" "r")
2443 (match_operand:HI 2 "s_register_operand" "r")))
2444 (match_operand:DI 3 "s_register_operand" "0")))]
2445 "TARGET_DSP_MULTIPLY"
2446 "smlaltb%?\\t%Q0, %R0, %1, %2"
2447 [(set_attr "type" "smlalxy")
2448 (set_attr "predicable" "yes")])
2450 (define_insn "*maddhidi4tt"
2451 [(set (match_operand:DI 0 "s_register_operand" "=r")
2453 (mult:DI (sign_extend:DI
2455 (match_operand:SI 1 "s_register_operand" "r")
2459 (match_operand:SI 2 "s_register_operand" "r")
2461 (match_operand:DI 3 "s_register_operand" "0")))]
2462 "TARGET_DSP_MULTIPLY"
2463 "smlaltt%?\\t%Q0, %R0, %1, %2"
2464 [(set_attr "type" "smlalxy")
2465 (set_attr "predicable" "yes")])
2467 (define_expand "mulsf3"
2468 [(set (match_operand:SF 0 "s_register_operand")
2469 (mult:SF (match_operand:SF 1 "s_register_operand")
2470 (match_operand:SF 2 "s_register_operand")))]
2471 "TARGET_32BIT && TARGET_HARD_FLOAT"
2475 (define_expand "muldf3"
2476 [(set (match_operand:DF 0 "s_register_operand")
2477 (mult:DF (match_operand:DF 1 "s_register_operand")
2478 (match_operand:DF 2 "s_register_operand")))]
2479 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2485 (define_expand "divsf3"
2486 [(set (match_operand:SF 0 "s_register_operand")
2487 (div:SF (match_operand:SF 1 "s_register_operand")
2488 (match_operand:SF 2 "s_register_operand")))]
2489 "TARGET_32BIT && TARGET_HARD_FLOAT"
2492 (define_expand "divdf3"
2493 [(set (match_operand:DF 0 "s_register_operand")
2494 (div:DF (match_operand:DF 1 "s_register_operand")
2495 (match_operand:DF 2 "s_register_operand")))]
2496 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2500 ; Expand logical operations. The mid-end expander does not split off memory
2501 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2502 ; So an explicit expander is needed to generate better code.
2504 (define_expand "<LOGICAL:optab>di3"
2505 [(set (match_operand:DI 0 "s_register_operand")
2506 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2507 (match_operand:DI 2 "arm_<optab>di_operand")))]
2510 rtx low = simplify_gen_binary (<CODE>, SImode,
2511 gen_lowpart (SImode, operands[1]),
2512 gen_lowpart (SImode, operands[2]));
2513 rtx high = simplify_gen_binary (<CODE>, SImode,
2514 gen_highpart (SImode, operands[1]),
2515 gen_highpart_mode (SImode, DImode,
2518 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2519 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2524 (define_expand "one_cmpldi2"
2525 [(set (match_operand:DI 0 "s_register_operand")
2526 (not:DI (match_operand:DI 1 "s_register_operand")))]
2529 rtx low = simplify_gen_unary (NOT, SImode,
2530 gen_lowpart (SImode, operands[1]),
2532 rtx high = simplify_gen_unary (NOT, SImode,
2533 gen_highpart_mode (SImode, DImode,
2537 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2538 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2543 ;; Split DImode and, ior, xor operations. Simply perform the logical
2544 ;; operation on the upper and lower halves of the registers.
2545 ;; This is needed for atomic operations in arm_split_atomic_op.
2546 ;; Avoid splitting IWMMXT instructions.
2548 [(set (match_operand:DI 0 "s_register_operand" "")
2549 (match_operator:DI 6 "logical_binary_operator"
2550 [(match_operand:DI 1 "s_register_operand" "")
2551 (match_operand:DI 2 "s_register_operand" "")]))]
2552 "TARGET_32BIT && reload_completed
2553 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2554 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2555 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2558 operands[3] = gen_highpart (SImode, operands[0]);
2559 operands[0] = gen_lowpart (SImode, operands[0]);
2560 operands[4] = gen_highpart (SImode, operands[1]);
2561 operands[1] = gen_lowpart (SImode, operands[1]);
2562 operands[5] = gen_highpart (SImode, operands[2]);
2563 operands[2] = gen_lowpart (SImode, operands[2]);
2567 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2568 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2570 [(set (match_operand:DI 0 "s_register_operand")
2571 (not:DI (match_operand:DI 1 "s_register_operand")))]
2573 [(set (match_dup 0) (not:SI (match_dup 1)))
2574 (set (match_dup 2) (not:SI (match_dup 3)))]
2577 operands[2] = gen_highpart (SImode, operands[0]);
2578 operands[0] = gen_lowpart (SImode, operands[0]);
2579 operands[3] = gen_highpart (SImode, operands[1]);
2580 operands[1] = gen_lowpart (SImode, operands[1]);
2584 (define_expand "andsi3"
2585 [(set (match_operand:SI 0 "s_register_operand")
2586 (and:SI (match_operand:SI 1 "s_register_operand")
2587 (match_operand:SI 2 "reg_or_int_operand")))]
2592 if (CONST_INT_P (operands[2]))
2594 if (INTVAL (operands[2]) == 255 && arm_arch6)
2596 operands[1] = convert_to_mode (QImode, operands[1], 1);
2597 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2601 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2602 operands[2] = force_reg (SImode, operands[2]);
2605 arm_split_constant (AND, SImode, NULL_RTX,
2606 INTVAL (operands[2]), operands[0],
2608 optimize && can_create_pseudo_p ());
2614 else /* TARGET_THUMB1 */
2616 if (!CONST_INT_P (operands[2]))
2618 rtx tmp = force_reg (SImode, operands[2]);
2619 if (rtx_equal_p (operands[0], operands[1]))
2623 operands[2] = operands[1];
2631 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2633 operands[2] = force_reg (SImode,
2634 GEN_INT (~INTVAL (operands[2])));
2636 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2641 for (i = 9; i <= 31; i++)
2643 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2645 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2649 else if ((HOST_WIDE_INT_1 << i) - 1
2650 == ~INTVAL (operands[2]))
2652 rtx shift = GEN_INT (i);
2653 rtx reg = gen_reg_rtx (SImode);
2655 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2656 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2662 operands[2] = force_reg (SImode, operands[2]);
2668 ; ??? Check split length for Thumb-2
2669 (define_insn_and_split "*arm_andsi3_insn"
2670 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2671 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2672 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2677 bic%?\\t%0, %1, #%B2
2681 && CONST_INT_P (operands[2])
2682 && !(const_ok_for_arm (INTVAL (operands[2]))
2683 || const_ok_for_arm (~INTVAL (operands[2])))"
2684 [(clobber (const_int 0))]
2686 arm_split_constant (AND, SImode, curr_insn,
2687 INTVAL (operands[2]), operands[0], operands[1], 0);
2690 [(set_attr "length" "4,4,4,4,16")
2691 (set_attr "predicable" "yes")
2692 (set_attr "predicable_short_it" "no,yes,no,no,no")
2693 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2696 (define_insn "*andsi3_compare0"
2697 [(set (reg:CC_NOOV CC_REGNUM)
2699 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2700 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2702 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2703 (and:SI (match_dup 1) (match_dup 2)))]
2707 bics%?\\t%0, %1, #%B2
2708 ands%?\\t%0, %1, %2"
2709 [(set_attr "conds" "set")
2710 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2713 (define_insn "*andsi3_compare0_scratch"
2714 [(set (reg:CC_NOOV CC_REGNUM)
2716 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2717 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2719 (clobber (match_scratch:SI 2 "=X,r,X"))]
2723 bics%?\\t%2, %0, #%B1
2725 [(set_attr "conds" "set")
2726 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2729 (define_insn "*zeroextractsi_compare0_scratch"
2730 [(set (reg:CC_NOOV CC_REGNUM)
2731 (compare:CC_NOOV (zero_extract:SI
2732 (match_operand:SI 0 "s_register_operand" "r")
2733 (match_operand 1 "const_int_operand" "n")
2734 (match_operand 2 "const_int_operand" "n"))
2737 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2738 && INTVAL (operands[1]) > 0
2739 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2740 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2742 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2743 << INTVAL (operands[2]));
2744 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2747 [(set_attr "conds" "set")
2748 (set_attr "predicable" "yes")
2749 (set_attr "type" "logics_imm")]
2752 (define_insn_and_split "*ne_zeroextractsi"
2753 [(set (match_operand:SI 0 "s_register_operand" "=r")
2754 (ne:SI (zero_extract:SI
2755 (match_operand:SI 1 "s_register_operand" "r")
2756 (match_operand:SI 2 "const_int_operand" "n")
2757 (match_operand:SI 3 "const_int_operand" "n"))
2759 (clobber (reg:CC CC_REGNUM))]
2761 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2762 && INTVAL (operands[2]) > 0
2763 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2764 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2767 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2768 && INTVAL (operands[2]) > 0
2769 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2770 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2771 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2772 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2774 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2776 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2777 (match_dup 0) (const_int 1)))]
2779 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2780 << INTVAL (operands[3]));
2782 [(set_attr "conds" "clob")
2783 (set (attr "length")
2784 (if_then_else (eq_attr "is_thumb" "yes")
2787 (set_attr "type" "multiple")]
2790 (define_insn_and_split "*ne_zeroextractsi_shifted"
2791 [(set (match_operand:SI 0 "s_register_operand" "=r")
2792 (ne:SI (zero_extract:SI
2793 (match_operand:SI 1 "s_register_operand" "r")
2794 (match_operand:SI 2 "const_int_operand" "n")
2797 (clobber (reg:CC CC_REGNUM))]
2801 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2802 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2804 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2806 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2807 (match_dup 0) (const_int 1)))]
2809 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2811 [(set_attr "conds" "clob")
2812 (set_attr "length" "8")
2813 (set_attr "type" "multiple")]
2816 (define_insn_and_split "*ite_ne_zeroextractsi"
2817 [(set (match_operand:SI 0 "s_register_operand" "=r")
2818 (if_then_else:SI (ne (zero_extract:SI
2819 (match_operand:SI 1 "s_register_operand" "r")
2820 (match_operand:SI 2 "const_int_operand" "n")
2821 (match_operand:SI 3 "const_int_operand" "n"))
2823 (match_operand:SI 4 "arm_not_operand" "rIK")
2825 (clobber (reg:CC CC_REGNUM))]
2827 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2828 && INTVAL (operands[2]) > 0
2829 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2830 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2831 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2834 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2835 && INTVAL (operands[2]) > 0
2836 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2837 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2838 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2839 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2840 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2842 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2844 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2845 (match_dup 0) (match_dup 4)))]
2847 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2848 << INTVAL (operands[3]));
2850 [(set_attr "conds" "clob")
2851 (set_attr "length" "8")
2852 (set_attr "type" "multiple")]
2855 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2856 [(set (match_operand:SI 0 "s_register_operand" "=r")
2857 (if_then_else:SI (ne (zero_extract:SI
2858 (match_operand:SI 1 "s_register_operand" "r")
2859 (match_operand:SI 2 "const_int_operand" "n")
2862 (match_operand:SI 3 "arm_not_operand" "rIK")
2864 (clobber (reg:CC CC_REGNUM))]
2865 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2867 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2868 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2869 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2871 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2873 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2874 (match_dup 0) (match_dup 3)))]
2876 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2878 [(set_attr "conds" "clob")
2879 (set_attr "length" "8")
2880 (set_attr "type" "multiple")]
2883 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2885 [(set (match_operand:SI 0 "s_register_operand" "")
2886 (match_operator:SI 1 "shiftable_operator"
2887 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2888 (match_operand:SI 3 "const_int_operand" "")
2889 (match_operand:SI 4 "const_int_operand" ""))
2890 (match_operand:SI 5 "s_register_operand" "")]))
2891 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2893 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2896 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2899 HOST_WIDE_INT temp = INTVAL (operands[3]);
2901 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2902 operands[4] = GEN_INT (32 - temp);
2907 [(set (match_operand:SI 0 "s_register_operand" "")
2908 (match_operator:SI 1 "shiftable_operator"
2909 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2910 (match_operand:SI 3 "const_int_operand" "")
2911 (match_operand:SI 4 "const_int_operand" ""))
2912 (match_operand:SI 5 "s_register_operand" "")]))
2913 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2915 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2918 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2921 HOST_WIDE_INT temp = INTVAL (operands[3]);
2923 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2924 operands[4] = GEN_INT (32 - temp);
2928 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2929 ;;; represented by the bitfield, then this will produce incorrect results.
2930 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2931 ;;; which have a real bit-field insert instruction, the truncation happens
2932 ;;; in the bit-field insert instruction itself. Since arm does not have a
2933 ;;; bit-field insert instruction, we would have to emit code here to truncate
2934 ;;; the value before we insert. This loses some of the advantage of having
2935 ;;; this insv pattern, so this pattern needs to be reevalutated.
2937 (define_expand "insv"
2938 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
2939 (match_operand 1 "general_operand")
2940 (match_operand 2 "general_operand"))
2941 (match_operand 3 "reg_or_int_operand"))]
2942 "TARGET_ARM || arm_arch_thumb2"
2945 int start_bit = INTVAL (operands[2]);
2946 int width = INTVAL (operands[1]);
2947 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
2948 rtx target, subtarget;
2950 if (arm_arch_thumb2)
2952 if (unaligned_access && MEM_P (operands[0])
2953 && s_register_operand (operands[3], GET_MODE (operands[3]))
2954 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2958 if (BYTES_BIG_ENDIAN)
2959 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2964 base_addr = adjust_address (operands[0], SImode,
2965 start_bit / BITS_PER_UNIT);
2966 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2970 rtx tmp = gen_reg_rtx (HImode);
2972 base_addr = adjust_address (operands[0], HImode,
2973 start_bit / BITS_PER_UNIT);
2974 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2975 emit_insn (gen_unaligned_storehi (base_addr, tmp));
2979 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2981 bool use_bfi = TRUE;
2983 if (CONST_INT_P (operands[3]))
2985 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2989 emit_insn (gen_insv_zero (operands[0], operands[1],
2994 /* See if the set can be done with a single orr instruction. */
2995 if (val == mask && const_ok_for_arm (val << start_bit))
3001 if (!REG_P (operands[3]))
3002 operands[3] = force_reg (SImode, operands[3]);
3004 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3013 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3016 target = copy_rtx (operands[0]);
3017 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3018 subreg as the final target. */
3019 if (GET_CODE (target) == SUBREG)
3021 subtarget = gen_reg_rtx (SImode);
3022 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3023 < GET_MODE_SIZE (SImode))
3024 target = SUBREG_REG (target);
3029 if (CONST_INT_P (operands[3]))
3031 /* Since we are inserting a known constant, we may be able to
3032 reduce the number of bits that we have to clear so that
3033 the mask becomes simple. */
3034 /* ??? This code does not check to see if the new mask is actually
3035 simpler. It may not be. */
3036 rtx op1 = gen_reg_rtx (SImode);
3037 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3038 start of this pattern. */
3039 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3040 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3042 emit_insn (gen_andsi3 (op1, operands[0],
3043 gen_int_mode (~mask2, SImode)));
3044 emit_insn (gen_iorsi3 (subtarget, op1,
3045 gen_int_mode (op3_value << start_bit, SImode)));
3047 else if (start_bit == 0
3048 && !(const_ok_for_arm (mask)
3049 || const_ok_for_arm (~mask)))
3051 /* A Trick, since we are setting the bottom bits in the word,
3052 we can shift operand[3] up, operand[0] down, OR them together
3053 and rotate the result back again. This takes 3 insns, and
3054 the third might be mergeable into another op. */
3055 /* The shift up copes with the possibility that operand[3] is
3056 wider than the bitfield. */
3057 rtx op0 = gen_reg_rtx (SImode);
3058 rtx op1 = gen_reg_rtx (SImode);
3060 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3061 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3062 emit_insn (gen_iorsi3 (op1, op1, op0));
3063 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3065 else if ((width + start_bit == 32)
3066 && !(const_ok_for_arm (mask)
3067 || const_ok_for_arm (~mask)))
3069 /* Similar trick, but slightly less efficient. */
3071 rtx op0 = gen_reg_rtx (SImode);
3072 rtx op1 = gen_reg_rtx (SImode);
3074 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3075 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3076 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3077 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3081 rtx op0 = gen_int_mode (mask, SImode);
3082 rtx op1 = gen_reg_rtx (SImode);
3083 rtx op2 = gen_reg_rtx (SImode);
3085 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3087 rtx tmp = gen_reg_rtx (SImode);
3089 emit_insn (gen_movsi (tmp, op0));
3093 /* Mask out any bits in operand[3] that are not needed. */
3094 emit_insn (gen_andsi3 (op1, operands[3], op0));
3096 if (CONST_INT_P (op0)
3097 && (const_ok_for_arm (mask << start_bit)
3098 || const_ok_for_arm (~(mask << start_bit))))
3100 op0 = gen_int_mode (~(mask << start_bit), SImode);
3101 emit_insn (gen_andsi3 (op2, operands[0], op0));
3105 if (CONST_INT_P (op0))
3107 rtx tmp = gen_reg_rtx (SImode);
3109 emit_insn (gen_movsi (tmp, op0));
3114 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3116 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3120 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3122 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3125 if (subtarget != target)
3127 /* If TARGET is still a SUBREG, then it must be wider than a word,
3128 so we must be careful only to set the subword we were asked to. */
3129 if (GET_CODE (target) == SUBREG)
3130 emit_move_insn (target, subtarget);
3132 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3139 (define_insn "insv_zero"
3140 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3141 (match_operand:SI 1 "const_int_M_operand" "M")
3142 (match_operand:SI 2 "const_int_M_operand" "M"))
3146 [(set_attr "length" "4")
3147 (set_attr "predicable" "yes")
3148 (set_attr "type" "bfm")]
3151 (define_insn "insv_t2"
3152 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3153 (match_operand:SI 1 "const_int_M_operand" "M")
3154 (match_operand:SI 2 "const_int_M_operand" "M"))
3155 (match_operand:SI 3 "s_register_operand" "r"))]
3157 "bfi%?\t%0, %3, %2, %1"
3158 [(set_attr "length" "4")
3159 (set_attr "predicable" "yes")
3160 (set_attr "type" "bfm")]
3163 (define_insn "andsi_notsi_si"
3164 [(set (match_operand:SI 0 "s_register_operand" "=r")
3165 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3166 (match_operand:SI 1 "s_register_operand" "r")))]
3168 "bic%?\\t%0, %1, %2"
3169 [(set_attr "predicable" "yes")
3170 (set_attr "type" "logic_reg")]
3173 (define_insn "andsi_not_shiftsi_si"
3174 [(set (match_operand:SI 0 "s_register_operand" "=r")
3175 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3176 [(match_operand:SI 2 "s_register_operand" "r")
3177 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3178 (match_operand:SI 1 "s_register_operand" "r")))]
3180 "bic%?\\t%0, %1, %2%S4"
3181 [(set_attr "predicable" "yes")
3182 (set_attr "shift" "2")
3183 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3184 (const_string "logic_shift_imm")
3185 (const_string "logic_shift_reg")))]
3188 ;; Shifted bics pattern used to set up CC status register and not reusing
3189 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3190 ;; does not support shift by register.
3191 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3192 [(set (reg:CC_NOOV CC_REGNUM)
3194 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3195 [(match_operand:SI 1 "s_register_operand" "r")
3196 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3197 (match_operand:SI 3 "s_register_operand" "r"))
3199 (clobber (match_scratch:SI 4 "=r"))]
3200 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3201 "bics%?\\t%4, %3, %1%S0"
3202 [(set_attr "predicable" "yes")
3203 (set_attr "conds" "set")
3204 (set_attr "shift" "1")
3205 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3206 (const_string "logic_shift_imm")
3207 (const_string "logic_shift_reg")))]
3210 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3211 ;; getting reused later.
3212 (define_insn "andsi_not_shiftsi_si_scc"
3213 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3215 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3216 [(match_operand:SI 1 "s_register_operand" "r")
3217 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3218 (match_operand:SI 3 "s_register_operand" "r"))
3220 (set (match_operand:SI 4 "s_register_operand" "=r")
3221 (and:SI (not:SI (match_op_dup 0
3225 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3226 "bics%?\\t%4, %3, %1%S0"
3227 [(set_attr "predicable" "yes")
3228 (set_attr "conds" "set")
3229 (set_attr "shift" "1")
3230 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3231 (const_string "logic_shift_imm")
3232 (const_string "logic_shift_reg")))]
3235 (define_insn "*andsi_notsi_si_compare0"
3236 [(set (reg:CC_NOOV CC_REGNUM)
3238 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3239 (match_operand:SI 1 "s_register_operand" "r"))
3241 (set (match_operand:SI 0 "s_register_operand" "=r")
3242 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3245 [(set_attr "conds" "set")
3246 (set_attr "type" "logics_shift_reg")]
3249 (define_insn "*andsi_notsi_si_compare0_scratch"
3250 [(set (reg:CC_NOOV CC_REGNUM)
3252 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3253 (match_operand:SI 1 "s_register_operand" "r"))
3255 (clobber (match_scratch:SI 0 "=r"))]
3258 [(set_attr "conds" "set")
3259 (set_attr "type" "logics_shift_reg")]
3262 (define_expand "iorsi3"
3263 [(set (match_operand:SI 0 "s_register_operand")
3264 (ior:SI (match_operand:SI 1 "s_register_operand")
3265 (match_operand:SI 2 "reg_or_int_operand")))]
3268 if (CONST_INT_P (operands[2]))
3272 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3273 operands[2] = force_reg (SImode, operands[2]);
3276 arm_split_constant (IOR, SImode, NULL_RTX,
3277 INTVAL (operands[2]), operands[0],
3279 optimize && can_create_pseudo_p ());
3283 else /* TARGET_THUMB1 */
3285 rtx tmp = force_reg (SImode, operands[2]);
3286 if (rtx_equal_p (operands[0], operands[1]))
3290 operands[2] = operands[1];
3298 (define_insn_and_split "*iorsi3_insn"
3299 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3300 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3301 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3306 orn%?\\t%0, %1, #%B2
3310 && CONST_INT_P (operands[2])
3311 && !(const_ok_for_arm (INTVAL (operands[2]))
3312 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3313 [(clobber (const_int 0))]
3315 arm_split_constant (IOR, SImode, curr_insn,
3316 INTVAL (operands[2]), operands[0], operands[1], 0);
3319 [(set_attr "length" "4,4,4,4,16")
3320 (set_attr "arch" "32,t2,t2,32,32")
3321 (set_attr "predicable" "yes")
3322 (set_attr "predicable_short_it" "no,yes,no,no,no")
3323 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3327 [(match_scratch:SI 3 "r")
3328 (set (match_operand:SI 0 "arm_general_register_operand" "")
3329 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3330 (match_operand:SI 2 "const_int_operand" "")))]
3332 && !const_ok_for_arm (INTVAL (operands[2]))
3333 && const_ok_for_arm (~INTVAL (operands[2]))"
3334 [(set (match_dup 3) (match_dup 2))
3335 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3339 (define_insn "*iorsi3_compare0"
3340 [(set (reg:CC_NOOV CC_REGNUM)
3342 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3343 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3345 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3346 (ior:SI (match_dup 1) (match_dup 2)))]
3348 "orrs%?\\t%0, %1, %2"
3349 [(set_attr "conds" "set")
3350 (set_attr "arch" "*,t2,*")
3351 (set_attr "length" "4,2,4")
3352 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3355 (define_insn "*iorsi3_compare0_scratch"
3356 [(set (reg:CC_NOOV CC_REGNUM)
3358 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3359 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3361 (clobber (match_scratch:SI 0 "=r,l,r"))]
3363 "orrs%?\\t%0, %1, %2"
3364 [(set_attr "conds" "set")
3365 (set_attr "arch" "*,t2,*")
3366 (set_attr "length" "4,2,4")
3367 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3370 (define_expand "xorsi3"
3371 [(set (match_operand:SI 0 "s_register_operand")
3372 (xor:SI (match_operand:SI 1 "s_register_operand")
3373 (match_operand:SI 2 "reg_or_int_operand")))]
3375 "if (CONST_INT_P (operands[2]))
3379 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3380 operands[2] = force_reg (SImode, operands[2]);
3383 arm_split_constant (XOR, SImode, NULL_RTX,
3384 INTVAL (operands[2]), operands[0],
3386 optimize && can_create_pseudo_p ());
3390 else /* TARGET_THUMB1 */
3392 rtx tmp = force_reg (SImode, operands[2]);
3393 if (rtx_equal_p (operands[0], operands[1]))
3397 operands[2] = operands[1];
3404 (define_insn_and_split "*arm_xorsi3"
3405 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3406 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3407 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3415 && CONST_INT_P (operands[2])
3416 && !const_ok_for_arm (INTVAL (operands[2]))"
3417 [(clobber (const_int 0))]
3419 arm_split_constant (XOR, SImode, curr_insn,
3420 INTVAL (operands[2]), operands[0], operands[1], 0);
3423 [(set_attr "length" "4,4,4,16")
3424 (set_attr "predicable" "yes")
3425 (set_attr "predicable_short_it" "no,yes,no,no")
3426 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3429 (define_insn "*xorsi3_compare0"
3430 [(set (reg:CC_NOOV CC_REGNUM)
3431 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3432 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3434 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3435 (xor:SI (match_dup 1) (match_dup 2)))]
3437 "eors%?\\t%0, %1, %2"
3438 [(set_attr "conds" "set")
3439 (set_attr "type" "logics_imm,logics_reg")]
3442 (define_insn "*xorsi3_compare0_scratch"
3443 [(set (reg:CC_NOOV CC_REGNUM)
3444 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3445 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3449 [(set_attr "conds" "set")
3450 (set_attr "type" "logics_imm,logics_reg")]
3453 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3454 ; (NOT D) we can sometimes merge the final NOT into one of the following
3458 [(set (match_operand:SI 0 "s_register_operand" "")
3459 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3460 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3461 (match_operand:SI 3 "arm_rhs_operand" "")))
3462 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3464 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3465 (not:SI (match_dup 3))))
3466 (set (match_dup 0) (not:SI (match_dup 4)))]
3470 (define_insn_and_split "*andsi_iorsi3_notsi"
3471 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3472 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3473 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3474 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3476 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3477 "&& reload_completed"
3478 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3479 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3481 /* If operands[3] is a constant make sure to fold the NOT into it
3482 to avoid creating a NOT of a CONST_INT. */
3483 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3484 if (CONST_INT_P (not_rtx))
3486 operands[4] = operands[0];
3487 operands[5] = not_rtx;
3491 operands[5] = operands[0];
3492 operands[4] = not_rtx;
3495 [(set_attr "length" "8")
3496 (set_attr "ce_count" "2")
3497 (set_attr "predicable" "yes")
3498 (set_attr "type" "multiple")]
3501 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3502 ; insns are available?
3504 [(set (match_operand:SI 0 "s_register_operand" "")
3505 (match_operator:SI 1 "logical_binary_operator"
3506 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3507 (match_operand:SI 3 "const_int_operand" "")
3508 (match_operand:SI 4 "const_int_operand" ""))
3509 (match_operator:SI 9 "logical_binary_operator"
3510 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3511 (match_operand:SI 6 "const_int_operand" ""))
3512 (match_operand:SI 7 "s_register_operand" "")])]))
3513 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3515 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3516 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3519 [(ashift:SI (match_dup 2) (match_dup 4))
3523 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3526 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3530 [(set (match_operand:SI 0 "s_register_operand" "")
3531 (match_operator:SI 1 "logical_binary_operator"
3532 [(match_operator:SI 9 "logical_binary_operator"
3533 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3534 (match_operand:SI 6 "const_int_operand" ""))
3535 (match_operand:SI 7 "s_register_operand" "")])
3536 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3537 (match_operand:SI 3 "const_int_operand" "")
3538 (match_operand:SI 4 "const_int_operand" ""))]))
3539 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3541 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3542 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3545 [(ashift:SI (match_dup 2) (match_dup 4))
3549 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3552 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3556 [(set (match_operand:SI 0 "s_register_operand" "")
3557 (match_operator:SI 1 "logical_binary_operator"
3558 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3559 (match_operand:SI 3 "const_int_operand" "")
3560 (match_operand:SI 4 "const_int_operand" ""))
3561 (match_operator:SI 9 "logical_binary_operator"
3562 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3563 (match_operand:SI 6 "const_int_operand" ""))
3564 (match_operand:SI 7 "s_register_operand" "")])]))
3565 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3567 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3568 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3571 [(ashift:SI (match_dup 2) (match_dup 4))
3575 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3578 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3582 [(set (match_operand:SI 0 "s_register_operand" "")
3583 (match_operator:SI 1 "logical_binary_operator"
3584 [(match_operator:SI 9 "logical_binary_operator"
3585 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3586 (match_operand:SI 6 "const_int_operand" ""))
3587 (match_operand:SI 7 "s_register_operand" "")])
3588 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3589 (match_operand:SI 3 "const_int_operand" "")
3590 (match_operand:SI 4 "const_int_operand" ""))]))
3591 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3593 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3594 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3597 [(ashift:SI (match_dup 2) (match_dup 4))
3601 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3604 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3608 ;; Minimum and maximum insns
3610 (define_expand "smaxsi3"
3612 (set (match_operand:SI 0 "s_register_operand")
3613 (smax:SI (match_operand:SI 1 "s_register_operand")
3614 (match_operand:SI 2 "arm_rhs_operand")))
3615 (clobber (reg:CC CC_REGNUM))])]
3618 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3620 /* No need for a clobber of the condition code register here. */
3621 emit_insn (gen_rtx_SET (operands[0],
3622 gen_rtx_SMAX (SImode, operands[1],
3628 (define_insn "*smax_0"
3629 [(set (match_operand:SI 0 "s_register_operand" "=r")
3630 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3633 "bic%?\\t%0, %1, %1, asr #31"
3634 [(set_attr "predicable" "yes")
3635 (set_attr "type" "logic_shift_reg")]
3638 (define_insn "*smax_m1"
3639 [(set (match_operand:SI 0 "s_register_operand" "=r")
3640 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3643 "orr%?\\t%0, %1, %1, asr #31"
3644 [(set_attr "predicable" "yes")
3645 (set_attr "type" "logic_shift_reg")]
3648 (define_insn_and_split "*arm_smax_insn"
3649 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3650 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3651 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3652 (clobber (reg:CC CC_REGNUM))]
3655 ; cmp\\t%1, %2\;movlt\\t%0, %2
3656 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3658 [(set (reg:CC CC_REGNUM)
3659 (compare:CC (match_dup 1) (match_dup 2)))
3661 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3665 [(set_attr "conds" "clob")
3666 (set_attr "length" "8,12")
3667 (set_attr "type" "multiple")]
3670 (define_expand "sminsi3"
3672 (set (match_operand:SI 0 "s_register_operand")
3673 (smin:SI (match_operand:SI 1 "s_register_operand")
3674 (match_operand:SI 2 "arm_rhs_operand")))
3675 (clobber (reg:CC CC_REGNUM))])]
3678 if (operands[2] == const0_rtx)
3680 /* No need for a clobber of the condition code register here. */
3681 emit_insn (gen_rtx_SET (operands[0],
3682 gen_rtx_SMIN (SImode, operands[1],
3688 (define_insn "*smin_0"
3689 [(set (match_operand:SI 0 "s_register_operand" "=r")
3690 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3693 "and%?\\t%0, %1, %1, asr #31"
3694 [(set_attr "predicable" "yes")
3695 (set_attr "type" "logic_shift_reg")]
3698 (define_insn_and_split "*arm_smin_insn"
3699 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3700 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3701 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3702 (clobber (reg:CC CC_REGNUM))]
3705 ; cmp\\t%1, %2\;movge\\t%0, %2
3706 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3708 [(set (reg:CC CC_REGNUM)
3709 (compare:CC (match_dup 1) (match_dup 2)))
3711 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3715 [(set_attr "conds" "clob")
3716 (set_attr "length" "8,12")
3717 (set_attr "type" "multiple,multiple")]
3720 (define_expand "umaxsi3"
3722 (set (match_operand:SI 0 "s_register_operand")
3723 (umax:SI (match_operand:SI 1 "s_register_operand")
3724 (match_operand:SI 2 "arm_rhs_operand")))
3725 (clobber (reg:CC CC_REGNUM))])]
3730 (define_insn_and_split "*arm_umaxsi3"
3731 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3732 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3733 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3734 (clobber (reg:CC CC_REGNUM))]
3737 ; cmp\\t%1, %2\;movcc\\t%0, %2
3738 ; cmp\\t%1, %2\;movcs\\t%0, %1
3739 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3741 [(set (reg:CC CC_REGNUM)
3742 (compare:CC (match_dup 1) (match_dup 2)))
3744 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3748 [(set_attr "conds" "clob")
3749 (set_attr "length" "8,8,12")
3750 (set_attr "type" "store_4")]
3753 (define_expand "uminsi3"
3755 (set (match_operand:SI 0 "s_register_operand")
3756 (umin:SI (match_operand:SI 1 "s_register_operand")
3757 (match_operand:SI 2 "arm_rhs_operand")))
3758 (clobber (reg:CC CC_REGNUM))])]
3763 (define_insn_and_split "*arm_uminsi3"
3764 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3765 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3766 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3767 (clobber (reg:CC CC_REGNUM))]
3770 ; cmp\\t%1, %2\;movcs\\t%0, %2
3771 ; cmp\\t%1, %2\;movcc\\t%0, %1
3772 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3774 [(set (reg:CC CC_REGNUM)
3775 (compare:CC (match_dup 1) (match_dup 2)))
3777 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3781 [(set_attr "conds" "clob")
3782 (set_attr "length" "8,8,12")
3783 (set_attr "type" "store_4")]
3786 (define_insn "*store_minmaxsi"
3787 [(set (match_operand:SI 0 "memory_operand" "=m")
3788 (match_operator:SI 3 "minmax_operator"
3789 [(match_operand:SI 1 "s_register_operand" "r")
3790 (match_operand:SI 2 "s_register_operand" "r")]))
3791 (clobber (reg:CC CC_REGNUM))]
3792 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3794 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3795 operands[1], operands[2]);
3796 output_asm_insn (\"cmp\\t%1, %2\", operands);
3798 output_asm_insn (\"ite\t%d3\", operands);
3799 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3800 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3803 [(set_attr "conds" "clob")
3804 (set (attr "length")
3805 (if_then_else (eq_attr "is_thumb" "yes")
3808 (set_attr "type" "store_4")]
3811 ; Reject the frame pointer in operand[1], since reloading this after
3812 ; it has been eliminated can cause carnage.
3813 (define_insn "*minmax_arithsi"
3814 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3815 (match_operator:SI 4 "shiftable_operator"
3816 [(match_operator:SI 5 "minmax_operator"
3817 [(match_operand:SI 2 "s_register_operand" "r,r")
3818 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3819 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3820 (clobber (reg:CC CC_REGNUM))]
3821 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3824 enum rtx_code code = GET_CODE (operands[4]);
3827 if (which_alternative != 0 || operands[3] != const0_rtx
3828 || (code != PLUS && code != IOR && code != XOR))
3833 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3834 operands[2], operands[3]);
3835 output_asm_insn (\"cmp\\t%2, %3\", operands);
3839 output_asm_insn (\"ite\\t%d5\", operands);
3841 output_asm_insn (\"it\\t%d5\", operands);
3843 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3845 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3848 [(set_attr "conds" "clob")
3849 (set (attr "length")
3850 (if_then_else (eq_attr "is_thumb" "yes")
3853 (set_attr "type" "multiple")]
3856 ; Reject the frame pointer in operand[1], since reloading this after
3857 ; it has been eliminated can cause carnage.
3858 (define_insn_and_split "*minmax_arithsi_non_canon"
3859 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3861 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3862 (match_operator:SI 4 "minmax_operator"
3863 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3864 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3865 (clobber (reg:CC CC_REGNUM))]
3866 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3867 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3869 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3870 [(set (reg:CC CC_REGNUM)
3871 (compare:CC (match_dup 2) (match_dup 3)))
3873 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3875 (minus:SI (match_dup 1)
3877 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3881 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3882 operands[2], operands[3]);
3883 enum rtx_code rc = minmax_code (operands[4]);
3884 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3885 operands[2], operands[3]);
3887 if (mode == CCFPmode || mode == CCFPEmode)
3888 rc = reverse_condition_maybe_unordered (rc);
3890 rc = reverse_condition (rc);
3891 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3892 if (CONST_INT_P (operands[3]))
3893 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3895 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3897 [(set_attr "conds" "clob")
3898 (set (attr "length")
3899 (if_then_else (eq_attr "is_thumb" "yes")
3902 (set_attr "type" "multiple")]
3905 (define_code_iterator SAT [smin smax])
3906 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3907 (define_code_attr SATlo [(smin "1") (smax "2")])
3908 (define_code_attr SAThi [(smin "2") (smax "1")])
3910 (define_insn "*satsi_<SAT:code>"
3911 [(set (match_operand:SI 0 "s_register_operand" "=r")
3912 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
3913 (match_operand:SI 1 "const_int_operand" "i"))
3914 (match_operand:SI 2 "const_int_operand" "i")))]
3915 "TARGET_32BIT && arm_arch6
3916 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3920 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3921 &mask, &signed_sat))
3924 operands[1] = GEN_INT (mask);
3926 return "ssat%?\t%0, %1, %3";
3928 return "usat%?\t%0, %1, %3";
3930 [(set_attr "predicable" "yes")
3931 (set_attr "type" "alus_imm")]
3934 (define_insn "*satsi_<SAT:code>_shift"
3935 [(set (match_operand:SI 0 "s_register_operand" "=r")
3936 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
3937 [(match_operand:SI 4 "s_register_operand" "r")
3938 (match_operand:SI 5 "const_int_operand" "i")])
3939 (match_operand:SI 1 "const_int_operand" "i"))
3940 (match_operand:SI 2 "const_int_operand" "i")))]
3941 "TARGET_32BIT && arm_arch6
3942 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3946 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3947 &mask, &signed_sat))
3950 operands[1] = GEN_INT (mask);
3952 return "ssat%?\t%0, %1, %4%S3";
3954 return "usat%?\t%0, %1, %4%S3";
3956 [(set_attr "predicable" "yes")
3957 (set_attr "shift" "3")
3958 (set_attr "type" "logic_shift_reg")])
3960 ;; Shift and rotation insns
3962 (define_expand "ashldi3"
3963 [(set (match_operand:DI 0 "s_register_operand")
3964 (ashift:DI (match_operand:DI 1 "s_register_operand")
3965 (match_operand:SI 2 "reg_or_int_operand")))]
3968 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3969 operands[2], gen_reg_rtx (SImode),
3970 gen_reg_rtx (SImode));
3974 (define_expand "ashlsi3"
3975 [(set (match_operand:SI 0 "s_register_operand")
3976 (ashift:SI (match_operand:SI 1 "s_register_operand")
3977 (match_operand:SI 2 "arm_rhs_operand")))]
3980 if (CONST_INT_P (operands[2])
3981 && (UINTVAL (operands[2])) > 31)
3983 emit_insn (gen_movsi (operands[0], const0_rtx));
3989 (define_expand "ashrdi3"
3990 [(set (match_operand:DI 0 "s_register_operand")
3991 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
3992 (match_operand:SI 2 "reg_or_int_operand")))]
3995 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3996 operands[2], gen_reg_rtx (SImode),
3997 gen_reg_rtx (SImode));
4001 (define_expand "ashrsi3"
4002 [(set (match_operand:SI 0 "s_register_operand")
4003 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4004 (match_operand:SI 2 "arm_rhs_operand")))]
4007 if (CONST_INT_P (operands[2])
4008 && UINTVAL (operands[2]) > 31)
4009 operands[2] = GEN_INT (31);
4013 (define_expand "lshrdi3"
4014 [(set (match_operand:DI 0 "s_register_operand")
4015 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4016 (match_operand:SI 2 "reg_or_int_operand")))]
4019 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4020 operands[2], gen_reg_rtx (SImode),
4021 gen_reg_rtx (SImode));
4025 (define_expand "lshrsi3"
4026 [(set (match_operand:SI 0 "s_register_operand")
4027 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4028 (match_operand:SI 2 "arm_rhs_operand")))]
4031 if (CONST_INT_P (operands[2])
4032 && (UINTVAL (operands[2])) > 31)
4034 emit_insn (gen_movsi (operands[0], const0_rtx));
4040 (define_expand "rotlsi3"
4041 [(set (match_operand:SI 0 "s_register_operand")
4042 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4043 (match_operand:SI 2 "reg_or_int_operand")))]
4046 if (CONST_INT_P (operands[2]))
4047 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4050 rtx reg = gen_reg_rtx (SImode);
4051 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4057 (define_expand "rotrsi3"
4058 [(set (match_operand:SI 0 "s_register_operand")
4059 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4060 (match_operand:SI 2 "arm_rhs_operand")))]
4065 if (CONST_INT_P (operands[2])
4066 && UINTVAL (operands[2]) > 31)
4067 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4069 else /* TARGET_THUMB1 */
4071 if (CONST_INT_P (operands [2]))
4072 operands [2] = force_reg (SImode, operands[2]);
4077 (define_insn "*arm_shiftsi3"
4078 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4079 (match_operator:SI 3 "shift_operator"
4080 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4081 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4083 "* return arm_output_shift(operands, 0);"
4084 [(set_attr "predicable" "yes")
4085 (set_attr "arch" "t2,t2,*,*")
4086 (set_attr "predicable_short_it" "yes,yes,no,no")
4087 (set_attr "length" "4")
4088 (set_attr "shift" "1")
4089 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4092 (define_insn "*shiftsi3_compare0"
4093 [(set (reg:CC_NOOV CC_REGNUM)
4094 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4095 [(match_operand:SI 1 "s_register_operand" "r,r")
4096 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4098 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4099 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4101 "* return arm_output_shift(operands, 1);"
4102 [(set_attr "conds" "set")
4103 (set_attr "shift" "1")
4104 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4107 (define_insn "*shiftsi3_compare0_scratch"
4108 [(set (reg:CC_NOOV CC_REGNUM)
4109 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4110 [(match_operand:SI 1 "s_register_operand" "r,r")
4111 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4113 (clobber (match_scratch:SI 0 "=r,r"))]
4115 "* return arm_output_shift(operands, 1);"
4116 [(set_attr "conds" "set")
4117 (set_attr "shift" "1")
4118 (set_attr "type" "shift_imm,shift_reg")]
4121 (define_insn "*not_shiftsi"
4122 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4123 (not:SI (match_operator:SI 3 "shift_operator"
4124 [(match_operand:SI 1 "s_register_operand" "r,r")
4125 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4128 [(set_attr "predicable" "yes")
4129 (set_attr "shift" "1")
4130 (set_attr "arch" "32,a")
4131 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4133 (define_insn "*not_shiftsi_compare0"
4134 [(set (reg:CC_NOOV CC_REGNUM)
4136 (not:SI (match_operator:SI 3 "shift_operator"
4137 [(match_operand:SI 1 "s_register_operand" "r,r")
4138 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4140 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4141 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4143 "mvns%?\\t%0, %1%S3"
4144 [(set_attr "conds" "set")
4145 (set_attr "shift" "1")
4146 (set_attr "arch" "32,a")
4147 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4149 (define_insn "*not_shiftsi_compare0_scratch"
4150 [(set (reg:CC_NOOV CC_REGNUM)
4152 (not:SI (match_operator:SI 3 "shift_operator"
4153 [(match_operand:SI 1 "s_register_operand" "r,r")
4154 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4156 (clobber (match_scratch:SI 0 "=r,r"))]
4158 "mvns%?\\t%0, %1%S3"
4159 [(set_attr "conds" "set")
4160 (set_attr "shift" "1")
4161 (set_attr "arch" "32,a")
4162 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4164 ;; We don't really have extzv, but defining this using shifts helps
4165 ;; to reduce register pressure later on.
4167 (define_expand "extzv"
4168 [(set (match_operand 0 "s_register_operand")
4169 (zero_extract (match_operand 1 "nonimmediate_operand")
4170 (match_operand 2 "const_int_operand")
4171 (match_operand 3 "const_int_operand")))]
4172 "TARGET_THUMB1 || arm_arch_thumb2"
4175 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4176 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4178 if (arm_arch_thumb2)
4180 HOST_WIDE_INT width = INTVAL (operands[2]);
4181 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4183 if (unaligned_access && MEM_P (operands[1])
4184 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4188 if (BYTES_BIG_ENDIAN)
4189 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4194 base_addr = adjust_address (operands[1], SImode,
4195 bitpos / BITS_PER_UNIT);
4196 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4200 rtx dest = operands[0];
4201 rtx tmp = gen_reg_rtx (SImode);
4203 /* We may get a paradoxical subreg here. Strip it off. */
4204 if (GET_CODE (dest) == SUBREG
4205 && GET_MODE (dest) == SImode
4206 && GET_MODE (SUBREG_REG (dest)) == HImode)
4207 dest = SUBREG_REG (dest);
4209 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4212 base_addr = adjust_address (operands[1], HImode,
4213 bitpos / BITS_PER_UNIT);
4214 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4215 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4219 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4221 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4229 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4232 operands[3] = GEN_INT (rshift);
4236 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4240 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4241 operands[3], gen_reg_rtx (SImode)));
4246 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4248 (define_expand "extzv_t1"
4249 [(set (match_operand:SI 4 "s_register_operand")
4250 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4251 (match_operand:SI 2 "const_int_operand")))
4252 (set (match_operand:SI 0 "s_register_operand")
4253 (lshiftrt:SI (match_dup 4)
4254 (match_operand:SI 3 "const_int_operand")))]
4258 (define_expand "extv"
4259 [(set (match_operand 0 "s_register_operand")
4260 (sign_extract (match_operand 1 "nonimmediate_operand")
4261 (match_operand 2 "const_int_operand")
4262 (match_operand 3 "const_int_operand")))]
4265 HOST_WIDE_INT width = INTVAL (operands[2]);
4266 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4268 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4269 && (bitpos % BITS_PER_UNIT) == 0)
4273 if (BYTES_BIG_ENDIAN)
4274 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4278 base_addr = adjust_address (operands[1], SImode,
4279 bitpos / BITS_PER_UNIT);
4280 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4284 rtx dest = operands[0];
4285 rtx tmp = gen_reg_rtx (SImode);
4287 /* We may get a paradoxical subreg here. Strip it off. */
4288 if (GET_CODE (dest) == SUBREG
4289 && GET_MODE (dest) == SImode
4290 && GET_MODE (SUBREG_REG (dest)) == HImode)
4291 dest = SUBREG_REG (dest);
4293 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4296 base_addr = adjust_address (operands[1], HImode,
4297 bitpos / BITS_PER_UNIT);
4298 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4299 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4304 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4306 else if (GET_MODE (operands[0]) == SImode
4307 && GET_MODE (operands[1]) == SImode)
4309 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4317 ; Helper to expand register forms of extv with the proper modes.
4319 (define_expand "extv_regsi"
4320 [(set (match_operand:SI 0 "s_register_operand")
4321 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4322 (match_operand 2 "const_int_operand")
4323 (match_operand 3 "const_int_operand")))]
4328 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4330 (define_insn "unaligned_loaddi"
4331 [(set (match_operand:DI 0 "s_register_operand" "=r")
4332 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4333 UNSPEC_UNALIGNED_LOAD))]
4334 "TARGET_32BIT && TARGET_LDRD"
4336 return output_move_double (operands, true, NULL);
4338 [(set_attr "length" "8")
4339 (set_attr "type" "load_8")])
4341 (define_insn "unaligned_loadsi"
4342 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4343 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4344 UNSPEC_UNALIGNED_LOAD))]
4347 ldr\t%0, %1\t@ unaligned
4348 ldr%?\t%0, %1\t@ unaligned
4349 ldr%?\t%0, %1\t@ unaligned"
4350 [(set_attr "arch" "t1,t2,32")
4351 (set_attr "length" "2,2,4")
4352 (set_attr "predicable" "no,yes,yes")
4353 (set_attr "predicable_short_it" "no,yes,no")
4354 (set_attr "type" "load_4")])
4356 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4357 ;; address (there's no immediate format). That's tricky to support
4358 ;; here and we don't really need this pattern for that case, so only
4359 ;; enable for 32-bit ISAs.
4360 (define_insn "unaligned_loadhis"
4361 [(set (match_operand:SI 0 "s_register_operand" "=r")
4363 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4364 UNSPEC_UNALIGNED_LOAD)))]
4365 "unaligned_access && TARGET_32BIT"
4366 "ldrsh%?\t%0, %1\t@ unaligned"
4367 [(set_attr "predicable" "yes")
4368 (set_attr "type" "load_byte")])
4370 (define_insn "unaligned_loadhiu"
4371 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4373 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4374 UNSPEC_UNALIGNED_LOAD)))]
4377 ldrh\t%0, %1\t@ unaligned
4378 ldrh%?\t%0, %1\t@ unaligned
4379 ldrh%?\t%0, %1\t@ unaligned"
4380 [(set_attr "arch" "t1,t2,32")
4381 (set_attr "length" "2,2,4")
4382 (set_attr "predicable" "no,yes,yes")
4383 (set_attr "predicable_short_it" "no,yes,no")
4384 (set_attr "type" "load_byte")])
4386 (define_insn "unaligned_storedi"
4387 [(set (match_operand:DI 0 "memory_operand" "=m")
4388 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4389 UNSPEC_UNALIGNED_STORE))]
4390 "TARGET_32BIT && TARGET_LDRD"
4392 return output_move_double (operands, true, NULL);
4394 [(set_attr "length" "8")
4395 (set_attr "type" "store_8")])
4397 (define_insn "unaligned_storesi"
4398 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4399 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4400 UNSPEC_UNALIGNED_STORE))]
4403 str\t%1, %0\t@ unaligned
4404 str%?\t%1, %0\t@ unaligned
4405 str%?\t%1, %0\t@ unaligned"
4406 [(set_attr "arch" "t1,t2,32")
4407 (set_attr "length" "2,2,4")
4408 (set_attr "predicable" "no,yes,yes")
4409 (set_attr "predicable_short_it" "no,yes,no")
4410 (set_attr "type" "store_4")])
4412 (define_insn "unaligned_storehi"
4413 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4414 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4415 UNSPEC_UNALIGNED_STORE))]
4418 strh\t%1, %0\t@ unaligned
4419 strh%?\t%1, %0\t@ unaligned
4420 strh%?\t%1, %0\t@ unaligned"
4421 [(set_attr "arch" "t1,t2,32")
4422 (set_attr "length" "2,2,4")
4423 (set_attr "predicable" "no,yes,yes")
4424 (set_attr "predicable_short_it" "no,yes,no")
4425 (set_attr "type" "store_4")])
4428 (define_insn "*extv_reg"
4429 [(set (match_operand:SI 0 "s_register_operand" "=r")
4430 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4431 (match_operand:SI 2 "const_int_operand" "n")
4432 (match_operand:SI 3 "const_int_operand" "n")))]
4434 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4435 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4436 "sbfx%?\t%0, %1, %3, %2"
4437 [(set_attr "length" "4")
4438 (set_attr "predicable" "yes")
4439 (set_attr "type" "bfm")]
4442 (define_insn "extzv_t2"
4443 [(set (match_operand:SI 0 "s_register_operand" "=r")
4444 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4445 (match_operand:SI 2 "const_int_operand" "n")
4446 (match_operand:SI 3 "const_int_operand" "n")))]
4448 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4449 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4450 "ubfx%?\t%0, %1, %3, %2"
4451 [(set_attr "length" "4")
4452 (set_attr "predicable" "yes")
4453 (set_attr "type" "bfm")]
4457 ;; Division instructions
4458 (define_insn "divsi3"
4459 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4460 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4461 (match_operand:SI 2 "s_register_operand" "r,r")))]
4466 [(set_attr "arch" "32,v8mb")
4467 (set_attr "predicable" "yes")
4468 (set_attr "type" "sdiv")]
4471 (define_insn "udivsi3"
4472 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4473 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4474 (match_operand:SI 2 "s_register_operand" "r,r")))]
4479 [(set_attr "arch" "32,v8mb")
4480 (set_attr "predicable" "yes")
4481 (set_attr "type" "udiv")]
4485 ;; Unary arithmetic insns
4487 (define_expand "negvsi3"
4488 [(match_operand:SI 0 "register_operand")
4489 (match_operand:SI 1 "register_operand")
4490 (match_operand 2 "")]
4493 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
4494 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4499 (define_expand "negvdi3"
4500 [(match_operand:DI 0 "s_register_operand")
4501 (match_operand:DI 1 "s_register_operand")
4502 (match_operand 2 "")]
4505 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
4506 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4512 (define_insn "negdi2_compare"
4513 [(set (reg:CC CC_REGNUM)
4516 (match_operand:DI 1 "register_operand" "r,r")))
4517 (set (match_operand:DI 0 "register_operand" "=&r,&r")
4518 (minus:DI (const_int 0) (match_dup 1)))]
4521 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
4522 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
4523 [(set_attr "conds" "set")
4524 (set_attr "arch" "a,t2")
4525 (set_attr "length" "8")
4526 (set_attr "type" "multiple")]
4529 (define_expand "negsi2"
4530 [(set (match_operand:SI 0 "s_register_operand")
4531 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4536 (define_insn "*arm_negsi2"
4537 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4538 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4540 "rsb%?\\t%0, %1, #0"
4541 [(set_attr "predicable" "yes")
4542 (set_attr "predicable_short_it" "yes,no")
4543 (set_attr "arch" "t2,*")
4544 (set_attr "length" "4")
4545 (set_attr "type" "alu_imm")]
4548 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4549 ;; rather than (0 cmp reg). This gives the same results for unsigned
4550 ;; and equality compares which is what we mostly need here.
4551 (define_insn "negsi2_0compare"
4552 [(set (reg:CC_RSB CC_REGNUM)
4553 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4555 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4556 (neg:SI (match_dup 1)))]
4561 [(set_attr "conds" "set")
4562 (set_attr "arch" "t2,*")
4563 (set_attr "length" "2,*")
4564 (set_attr "type" "alus_imm")]
4567 (define_insn "negsi2_carryin"
4568 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4569 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4570 (match_operand:SI 2 "arm_borrow_operation" "")))]
4574 sbc\\t%0, %1, %1, lsl #1"
4575 [(set_attr "conds" "use")
4576 (set_attr "arch" "a,t2")
4577 (set_attr "type" "adc_imm,adc_reg")]
4580 (define_expand "negsf2"
4581 [(set (match_operand:SF 0 "s_register_operand")
4582 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4583 "TARGET_32BIT && TARGET_HARD_FLOAT"
4587 (define_expand "negdf2"
4588 [(set (match_operand:DF 0 "s_register_operand")
4589 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4590 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4593 ;; abssi2 doesn't really clobber the condition codes if a different register
4594 ;; is being set. To keep things simple, assume during rtl manipulations that
4595 ;; it does, but tell the final scan operator the truth. Similarly for
4598 (define_expand "abssi2"
4600 [(set (match_operand:SI 0 "s_register_operand")
4601 (abs:SI (match_operand:SI 1 "s_register_operand")))
4602 (clobber (match_dup 2))])]
4606 operands[2] = gen_rtx_SCRATCH (SImode);
4608 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4611 (define_insn_and_split "*arm_abssi2"
4612 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4613 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4614 (clobber (reg:CC CC_REGNUM))]
4617 "&& reload_completed"
4620 /* if (which_alternative == 0) */
4621 if (REGNO(operands[0]) == REGNO(operands[1]))
4623 /* Emit the pattern:
4624 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4625 [(set (reg:CC CC_REGNUM)
4626 (compare:CC (match_dup 0) (const_int 0)))
4627 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4628 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4630 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4631 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4632 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4633 (gen_rtx_LT (SImode,
4634 gen_rtx_REG (CCmode, CC_REGNUM),
4636 (gen_rtx_SET (operands[0],
4637 (gen_rtx_MINUS (SImode,
4644 /* Emit the pattern:
4645 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4647 (xor:SI (match_dup 1)
4648 (ashiftrt:SI (match_dup 1) (const_int 31))))
4650 (minus:SI (match_dup 0)
4651 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4653 emit_insn (gen_rtx_SET (operands[0],
4654 gen_rtx_XOR (SImode,
4655 gen_rtx_ASHIFTRT (SImode,
4659 emit_insn (gen_rtx_SET (operands[0],
4660 gen_rtx_MINUS (SImode,
4662 gen_rtx_ASHIFTRT (SImode,
4668 [(set_attr "conds" "clob,*")
4669 (set_attr "shift" "1")
4670 (set_attr "predicable" "no, yes")
4671 (set_attr "length" "8")
4672 (set_attr "type" "multiple")]
4675 (define_insn_and_split "*arm_neg_abssi2"
4676 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4677 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4678 (clobber (reg:CC CC_REGNUM))]
4681 "&& reload_completed"
4684 /* if (which_alternative == 0) */
4685 if (REGNO (operands[0]) == REGNO (operands[1]))
4687 /* Emit the pattern:
4688 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4690 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4691 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4692 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4694 gen_rtx_REG (CCmode, CC_REGNUM),
4696 gen_rtx_SET (operands[0],
4697 (gen_rtx_MINUS (SImode,
4703 /* Emit the pattern:
4704 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4706 emit_insn (gen_rtx_SET (operands[0],
4707 gen_rtx_XOR (SImode,
4708 gen_rtx_ASHIFTRT (SImode,
4712 emit_insn (gen_rtx_SET (operands[0],
4713 gen_rtx_MINUS (SImode,
4714 gen_rtx_ASHIFTRT (SImode,
4721 [(set_attr "conds" "clob,*")
4722 (set_attr "shift" "1")
4723 (set_attr "predicable" "no, yes")
4724 (set_attr "length" "8")
4725 (set_attr "type" "multiple")]
4728 (define_expand "abssf2"
4729 [(set (match_operand:SF 0 "s_register_operand")
4730 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4731 "TARGET_32BIT && TARGET_HARD_FLOAT"
4734 (define_expand "absdf2"
4735 [(set (match_operand:DF 0 "s_register_operand")
4736 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4737 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4740 (define_expand "sqrtsf2"
4741 [(set (match_operand:SF 0 "s_register_operand")
4742 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4743 "TARGET_32BIT && TARGET_HARD_FLOAT"
4746 (define_expand "sqrtdf2"
4747 [(set (match_operand:DF 0 "s_register_operand")
4748 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4749 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4752 (define_expand "one_cmplsi2"
4753 [(set (match_operand:SI 0 "s_register_operand")
4754 (not:SI (match_operand:SI 1 "s_register_operand")))]
4759 (define_insn "*arm_one_cmplsi2"
4760 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4761 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4764 [(set_attr "predicable" "yes")
4765 (set_attr "predicable_short_it" "yes,no")
4766 (set_attr "arch" "t2,*")
4767 (set_attr "length" "4")
4768 (set_attr "type" "mvn_reg")]
4771 (define_insn "*notsi_compare0"
4772 [(set (reg:CC_NOOV CC_REGNUM)
4773 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4775 (set (match_operand:SI 0 "s_register_operand" "=r")
4776 (not:SI (match_dup 1)))]
4779 [(set_attr "conds" "set")
4780 (set_attr "type" "mvn_reg")]
4783 (define_insn "*notsi_compare0_scratch"
4784 [(set (reg:CC_NOOV CC_REGNUM)
4785 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4787 (clobber (match_scratch:SI 0 "=r"))]
4790 [(set_attr "conds" "set")
4791 (set_attr "type" "mvn_reg")]
4794 ;; Fixed <--> Floating conversion insns
4796 (define_expand "floatsihf2"
4797 [(set (match_operand:HF 0 "general_operand")
4798 (float:HF (match_operand:SI 1 "general_operand")))]
4802 rtx op1 = gen_reg_rtx (SFmode);
4803 expand_float (op1, operands[1], 0);
4804 op1 = convert_to_mode (HFmode, op1, 0);
4805 emit_move_insn (operands[0], op1);
4810 (define_expand "floatdihf2"
4811 [(set (match_operand:HF 0 "general_operand")
4812 (float:HF (match_operand:DI 1 "general_operand")))]
4816 rtx op1 = gen_reg_rtx (SFmode);
4817 expand_float (op1, operands[1], 0);
4818 op1 = convert_to_mode (HFmode, op1, 0);
4819 emit_move_insn (operands[0], op1);
4824 (define_expand "floatsisf2"
4825 [(set (match_operand:SF 0 "s_register_operand")
4826 (float:SF (match_operand:SI 1 "s_register_operand")))]
4827 "TARGET_32BIT && TARGET_HARD_FLOAT"
4831 (define_expand "floatsidf2"
4832 [(set (match_operand:DF 0 "s_register_operand")
4833 (float:DF (match_operand:SI 1 "s_register_operand")))]
4834 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4838 (define_expand "fix_trunchfsi2"
4839 [(set (match_operand:SI 0 "general_operand")
4840 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4844 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4845 expand_fix (operands[0], op1, 0);
4850 (define_expand "fix_trunchfdi2"
4851 [(set (match_operand:DI 0 "general_operand")
4852 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4856 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4857 expand_fix (operands[0], op1, 0);
4862 (define_expand "fix_truncsfsi2"
4863 [(set (match_operand:SI 0 "s_register_operand")
4864 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4865 "TARGET_32BIT && TARGET_HARD_FLOAT"
4869 (define_expand "fix_truncdfsi2"
4870 [(set (match_operand:SI 0 "s_register_operand")
4871 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4872 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4878 (define_expand "truncdfsf2"
4879 [(set (match_operand:SF 0 "s_register_operand")
4881 (match_operand:DF 1 "s_register_operand")))]
4882 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4886 ;; DFmode to HFmode conversions on targets without a single-step hardware
4887 ;; instruction for it would have to go through SFmode. This is dangerous
4888 ;; as it introduces double rounding.
4890 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4891 ;; a single-step instruction.
4893 (define_expand "truncdfhf2"
4894 [(set (match_operand:HF 0 "s_register_operand")
4896 (match_operand:DF 1 "s_register_operand")))]
4897 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4898 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4900 /* We don't have a direct instruction for this, so we must be in
4901 an unsafe math mode, and going via SFmode. */
4903 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4906 op1 = convert_to_mode (SFmode, operands[1], 0);
4907 op1 = convert_to_mode (HFmode, op1, 0);
4908 emit_move_insn (operands[0], op1);
4911 /* Otherwise, we will pick this up as a single instruction with
4912 no intermediary rounding. */
4916 ;; Zero and sign extension instructions.
4918 (define_expand "zero_extend<mode>di2"
4919 [(set (match_operand:DI 0 "s_register_operand" "")
4920 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4921 "TARGET_32BIT <qhs_zextenddi_cond>"
4923 rtx res_lo, res_hi, op0_lo, op0_hi;
4924 res_lo = gen_lowpart (SImode, operands[0]);
4925 res_hi = gen_highpart (SImode, operands[0]);
4926 if (can_create_pseudo_p ())
4928 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4929 op0_hi = gen_reg_rtx (SImode);
4933 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4936 if (<MODE>mode != SImode)
4937 emit_insn (gen_rtx_SET (op0_lo,
4938 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4939 emit_insn (gen_movsi (op0_hi, const0_rtx));
4940 if (res_lo != op0_lo)
4941 emit_move_insn (res_lo, op0_lo);
4942 if (res_hi != op0_hi)
4943 emit_move_insn (res_hi, op0_hi);
4948 (define_expand "extend<mode>di2"
4949 [(set (match_operand:DI 0 "s_register_operand" "")
4950 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
4951 "TARGET_32BIT <qhs_sextenddi_cond>"
4953 rtx res_lo, res_hi, op0_lo, op0_hi;
4954 res_lo = gen_lowpart (SImode, operands[0]);
4955 res_hi = gen_highpart (SImode, operands[0]);
4956 if (can_create_pseudo_p ())
4958 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4959 op0_hi = gen_reg_rtx (SImode);
4963 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4966 if (<MODE>mode != SImode)
4967 emit_insn (gen_rtx_SET (op0_lo,
4968 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4969 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
4970 if (res_lo != op0_lo)
4971 emit_move_insn (res_lo, op0_lo);
4972 if (res_hi != op0_hi)
4973 emit_move_insn (res_hi, op0_hi);
4978 ;; Splits for all extensions to DImode
4980 [(set (match_operand:DI 0 "s_register_operand" "")
4981 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4983 [(set (match_dup 0) (match_dup 1))]
4985 rtx lo_part = gen_lowpart (SImode, operands[0]);
4986 machine_mode src_mode = GET_MODE (operands[1]);
4988 if (src_mode == SImode)
4989 emit_move_insn (lo_part, operands[1]);
4991 emit_insn (gen_rtx_SET (lo_part,
4992 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4993 operands[0] = gen_highpart (SImode, operands[0]);
4994 operands[1] = const0_rtx;
4998 [(set (match_operand:DI 0 "s_register_operand" "")
4999 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5001 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5003 rtx lo_part = gen_lowpart (SImode, operands[0]);
5004 machine_mode src_mode = GET_MODE (operands[1]);
5006 if (src_mode == SImode)
5007 emit_move_insn (lo_part, operands[1]);
5009 emit_insn (gen_rtx_SET (lo_part,
5010 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5011 operands[1] = lo_part;
5012 operands[0] = gen_highpart (SImode, operands[0]);
5015 (define_expand "zero_extendhisi2"
5016 [(set (match_operand:SI 0 "s_register_operand")
5017 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5020 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5022 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5025 if (!arm_arch6 && !MEM_P (operands[1]))
5027 rtx t = gen_lowpart (SImode, operands[1]);
5028 rtx tmp = gen_reg_rtx (SImode);
5029 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5030 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5036 [(set (match_operand:SI 0 "s_register_operand" "")
5037 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5038 "!TARGET_THUMB2 && !arm_arch6"
5039 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5040 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5042 operands[2] = gen_lowpart (SImode, operands[1]);
5045 (define_insn "*arm_zero_extendhisi2"
5046 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5047 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5048 "TARGET_ARM && arm_arch4 && !arm_arch6"
5052 [(set_attr "type" "alu_shift_reg,load_byte")
5053 (set_attr "predicable" "yes")]
5056 (define_insn "*arm_zero_extendhisi2_v6"
5057 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5058 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5059 "TARGET_ARM && arm_arch6"
5063 [(set_attr "predicable" "yes")
5064 (set_attr "type" "extend,load_byte")]
5067 (define_insn "*arm_zero_extendhisi2addsi"
5068 [(set (match_operand:SI 0 "s_register_operand" "=r")
5069 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5070 (match_operand:SI 2 "s_register_operand" "r")))]
5072 "uxtah%?\\t%0, %2, %1"
5073 [(set_attr "type" "alu_shift_reg")
5074 (set_attr "predicable" "yes")]
5077 (define_expand "zero_extendqisi2"
5078 [(set (match_operand:SI 0 "s_register_operand")
5079 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5082 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5084 emit_insn (gen_andsi3 (operands[0],
5085 gen_lowpart (SImode, operands[1]),
5089 if (!arm_arch6 && !MEM_P (operands[1]))
5091 rtx t = gen_lowpart (SImode, operands[1]);
5092 rtx tmp = gen_reg_rtx (SImode);
5093 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5094 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5100 [(set (match_operand:SI 0 "s_register_operand" "")
5101 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5103 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5104 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5106 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5109 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5114 (define_insn "*arm_zero_extendqisi2"
5115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5116 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5117 "TARGET_ARM && !arm_arch6"
5120 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5121 [(set_attr "length" "8,4")
5122 (set_attr "type" "alu_shift_reg,load_byte")
5123 (set_attr "predicable" "yes")]
5126 (define_insn "*arm_zero_extendqisi2_v6"
5127 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5128 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5129 "TARGET_ARM && arm_arch6"
5132 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5133 [(set_attr "type" "extend,load_byte")
5134 (set_attr "predicable" "yes")]
5137 (define_insn "*arm_zero_extendqisi2addsi"
5138 [(set (match_operand:SI 0 "s_register_operand" "=r")
5139 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5140 (match_operand:SI 2 "s_register_operand" "r")))]
5142 "uxtab%?\\t%0, %2, %1"
5143 [(set_attr "predicable" "yes")
5144 (set_attr "type" "alu_shift_reg")]
5148 [(set (match_operand:SI 0 "s_register_operand" "")
5149 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5150 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5151 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5152 [(set (match_dup 2) (match_dup 1))
5153 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5158 [(set (match_operand:SI 0 "s_register_operand" "")
5159 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5160 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5161 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5162 [(set (match_dup 2) (match_dup 1))
5163 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5169 [(set (match_operand:SI 0 "s_register_operand" "")
5170 (IOR_XOR:SI (and:SI (ashift:SI
5171 (match_operand:SI 1 "s_register_operand" "")
5172 (match_operand:SI 2 "const_int_operand" ""))
5173 (match_operand:SI 3 "const_int_operand" ""))
5175 (match_operator 5 "subreg_lowpart_operator"
5176 [(match_operand:SI 4 "s_register_operand" "")]))))]
5178 && (UINTVAL (operands[3])
5179 == (GET_MODE_MASK (GET_MODE (operands[5]))
5180 & (GET_MODE_MASK (GET_MODE (operands[5]))
5181 << (INTVAL (operands[2])))))"
5182 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5184 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5185 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5188 (define_insn "*compareqi_eq0"
5189 [(set (reg:CC_Z CC_REGNUM)
5190 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5194 [(set_attr "conds" "set")
5195 (set_attr "predicable" "yes")
5196 (set_attr "type" "logic_imm")]
5199 (define_expand "extendhisi2"
5200 [(set (match_operand:SI 0 "s_register_operand")
5201 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5206 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5209 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5211 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5215 if (!arm_arch6 && !MEM_P (operands[1]))
5217 rtx t = gen_lowpart (SImode, operands[1]);
5218 rtx tmp = gen_reg_rtx (SImode);
5219 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5220 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5227 [(set (match_operand:SI 0 "register_operand" "")
5228 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5229 (clobber (match_scratch:SI 2 ""))])]
5231 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5232 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5234 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5237 ;; This pattern will only be used when ldsh is not available
5238 (define_expand "extendhisi2_mem"
5239 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5241 (zero_extend:SI (match_dup 7)))
5242 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5243 (set (match_operand:SI 0 "" "")
5244 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5249 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5251 mem1 = change_address (operands[1], QImode, addr);
5252 mem2 = change_address (operands[1], QImode,
5253 plus_constant (Pmode, addr, 1));
5254 operands[0] = gen_lowpart (SImode, operands[0]);
5256 operands[2] = gen_reg_rtx (SImode);
5257 operands[3] = gen_reg_rtx (SImode);
5258 operands[6] = gen_reg_rtx (SImode);
5261 if (BYTES_BIG_ENDIAN)
5263 operands[4] = operands[2];
5264 operands[5] = operands[3];
5268 operands[4] = operands[3];
5269 operands[5] = operands[2];
5275 [(set (match_operand:SI 0 "register_operand" "")
5276 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5278 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5279 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5281 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5284 (define_insn "*arm_extendhisi2"
5285 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5286 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5287 "TARGET_ARM && arm_arch4 && !arm_arch6"
5291 [(set_attr "length" "8,4")
5292 (set_attr "type" "alu_shift_reg,load_byte")
5293 (set_attr "predicable" "yes")]
5296 ;; ??? Check Thumb-2 pool range
5297 (define_insn "*arm_extendhisi2_v6"
5298 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5299 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5300 "TARGET_32BIT && arm_arch6"
5304 [(set_attr "type" "extend,load_byte")
5305 (set_attr "predicable" "yes")]
5308 (define_insn "*arm_extendhisi2addsi"
5309 [(set (match_operand:SI 0 "s_register_operand" "=r")
5310 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5311 (match_operand:SI 2 "s_register_operand" "r")))]
5313 "sxtah%?\\t%0, %2, %1"
5314 [(set_attr "type" "alu_shift_reg")]
5317 (define_expand "extendqihi2"
5319 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5321 (set (match_operand:HI 0 "s_register_operand")
5322 (ashiftrt:SI (match_dup 2)
5327 if (arm_arch4 && MEM_P (operands[1]))
5329 emit_insn (gen_rtx_SET (operands[0],
5330 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5333 if (!s_register_operand (operands[1], QImode))
5334 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5335 operands[0] = gen_lowpart (SImode, operands[0]);
5336 operands[1] = gen_lowpart (SImode, operands[1]);
5337 operands[2] = gen_reg_rtx (SImode);
5341 (define_insn "*arm_extendqihi_insn"
5342 [(set (match_operand:HI 0 "s_register_operand" "=r")
5343 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5344 "TARGET_ARM && arm_arch4"
5346 [(set_attr "type" "load_byte")
5347 (set_attr "predicable" "yes")]
5350 (define_expand "extendqisi2"
5351 [(set (match_operand:SI 0 "s_register_operand")
5352 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5355 if (!arm_arch4 && MEM_P (operands[1]))
5356 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5358 if (!arm_arch6 && !MEM_P (operands[1]))
5360 rtx t = gen_lowpart (SImode, operands[1]);
5361 rtx tmp = gen_reg_rtx (SImode);
5362 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5363 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5369 [(set (match_operand:SI 0 "register_operand" "")
5370 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5372 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5373 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5375 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5378 (define_insn "*arm_extendqisi"
5379 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5380 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5381 "TARGET_ARM && arm_arch4 && !arm_arch6"
5385 [(set_attr "length" "8,4")
5386 (set_attr "type" "alu_shift_reg,load_byte")
5387 (set_attr "predicable" "yes")]
5390 (define_insn "*arm_extendqisi_v6"
5391 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5393 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5394 "TARGET_ARM && arm_arch6"
5398 [(set_attr "type" "extend,load_byte")
5399 (set_attr "predicable" "yes")]
5402 (define_insn "*arm_extendqisi2addsi"
5403 [(set (match_operand:SI 0 "s_register_operand" "=r")
5404 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5405 (match_operand:SI 2 "s_register_operand" "r")))]
5407 "sxtab%?\\t%0, %2, %1"
5408 [(set_attr "type" "alu_shift_reg")
5409 (set_attr "predicable" "yes")]
5412 (define_insn "arm_<sup>xtb16"
5413 [(set (match_operand:SI 0 "s_register_operand" "=r")
5415 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5417 "<sup>xtb16%?\\t%0, %1"
5418 [(set_attr "predicable" "yes")
5419 (set_attr "type" "alu_dsp_reg")])
5421 (define_insn "arm_<simd32_op>"
5422 [(set (match_operand:SI 0 "s_register_operand" "=r")
5424 [(match_operand:SI 1 "s_register_operand" "r")
5425 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5427 "<simd32_op>%?\\t%0, %1, %2"
5428 [(set_attr "predicable" "yes")
5429 (set_attr "type" "alu_dsp_reg")])
5431 (define_insn "arm_usada8"
5432 [(set (match_operand:SI 0 "s_register_operand" "=r")
5434 [(match_operand:SI 1 "s_register_operand" "r")
5435 (match_operand:SI 2 "s_register_operand" "r")
5436 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5438 "usada8%?\\t%0, %1, %2, %3"
5439 [(set_attr "predicable" "yes")
5440 (set_attr "type" "alu_dsp_reg")])
5442 (define_insn "arm_<simd32_op>"
5443 [(set (match_operand:DI 0 "s_register_operand" "=r")
5445 [(match_operand:SI 1 "s_register_operand" "r")
5446 (match_operand:SI 2 "s_register_operand" "r")
5447 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5449 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5450 [(set_attr "predicable" "yes")
5451 (set_attr "type" "smlald")])
5453 (define_expand "extendsfdf2"
5454 [(set (match_operand:DF 0 "s_register_operand")
5455 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5456 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5460 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5461 ;; must go through SFmode.
5463 ;; This is always safe for an extend.
5465 (define_expand "extendhfdf2"
5466 [(set (match_operand:DF 0 "s_register_operand")
5467 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5470 /* We don't have a direct instruction for this, so go via SFmode. */
5471 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5474 op1 = convert_to_mode (SFmode, operands[1], 0);
5475 op1 = convert_to_mode (DFmode, op1, 0);
5476 emit_insn (gen_movdf (operands[0], op1));
5479 /* Otherwise, we're done producing RTL and will pick up the correct
5480 pattern to do this with one rounding-step in a single instruction. */
5484 ;; Move insns (including loads and stores)
5486 ;; XXX Just some ideas about movti.
5487 ;; I don't think these are a good idea on the arm, there just aren't enough
5489 ;;(define_expand "loadti"
5490 ;; [(set (match_operand:TI 0 "s_register_operand")
5491 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5494 ;;(define_expand "storeti"
5495 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5496 ;; (match_operand:TI 1 "s_register_operand"))]
5499 ;;(define_expand "movti"
5500 ;; [(set (match_operand:TI 0 "general_operand")
5501 ;; (match_operand:TI 1 "general_operand"))]
5507 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5508 ;; operands[1] = copy_to_reg (operands[1]);
5509 ;; if (MEM_P (operands[0]))
5510 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5511 ;; else if (MEM_P (operands[1]))
5512 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5516 ;; emit_insn (insn);
5520 ;; Recognize garbage generated above.
5523 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5524 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5528 ;; register mem = (which_alternative < 3);
5529 ;; register const char *template;
5531 ;; operands[mem] = XEXP (operands[mem], 0);
5532 ;; switch (which_alternative)
5534 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5535 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5536 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5537 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5538 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5539 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5541 ;; output_asm_insn (template, operands);
5545 (define_expand "movdi"
5546 [(set (match_operand:DI 0 "general_operand")
5547 (match_operand:DI 1 "general_operand"))]
5550 gcc_checking_assert (aligned_operand (operands[0], DImode));
5551 gcc_checking_assert (aligned_operand (operands[1], DImode));
5552 if (can_create_pseudo_p ())
5554 if (!REG_P (operands[0]))
5555 operands[1] = force_reg (DImode, operands[1]);
5557 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5558 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5560 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5561 when expanding function calls. */
5562 gcc_assert (can_create_pseudo_p ());
5563 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5565 /* Perform load into legal reg pair first, then move. */
5566 rtx reg = gen_reg_rtx (DImode);
5567 emit_insn (gen_movdi (reg, operands[1]));
5570 emit_move_insn (gen_lowpart (SImode, operands[0]),
5571 gen_lowpart (SImode, operands[1]));
5572 emit_move_insn (gen_highpart (SImode, operands[0]),
5573 gen_highpart (SImode, operands[1]));
5576 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5577 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5579 /* Avoid STRD's from an odd-numbered register pair in ARM state
5580 when expanding function prologue. */
5581 gcc_assert (can_create_pseudo_p ());
5582 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5583 ? gen_reg_rtx (DImode)
5585 emit_move_insn (gen_lowpart (SImode, split_dest),
5586 gen_lowpart (SImode, operands[1]));
5587 emit_move_insn (gen_highpart (SImode, split_dest),
5588 gen_highpart (SImode, operands[1]));
5589 if (split_dest != operands[0])
5590 emit_insn (gen_movdi (operands[0], split_dest));
5596 (define_insn "*arm_movdi"
5597 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5598 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5600 && !(TARGET_HARD_FLOAT)
5602 && ( register_operand (operands[0], DImode)
5603 || register_operand (operands[1], DImode))"
5605 switch (which_alternative)
5612 /* Cannot load it directly, split to load it via MOV / MOVT. */
5613 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5617 return output_move_double (operands, true, NULL);
5620 [(set_attr "length" "8,12,16,8,8")
5621 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5622 (set_attr "arm_pool_range" "*,*,*,1020,*")
5623 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5624 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5625 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5629 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5630 (match_operand:ANY64 1 "immediate_operand" ""))]
5633 && (arm_disable_literal_pool
5634 || (arm_const_double_inline_cost (operands[1])
5635 <= arm_max_const_double_inline_cost ()))"
5638 arm_split_constant (SET, SImode, curr_insn,
5639 INTVAL (gen_lowpart (SImode, operands[1])),
5640 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5641 arm_split_constant (SET, SImode, curr_insn,
5642 INTVAL (gen_highpart_mode (SImode,
5643 GET_MODE (operands[0]),
5645 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5650 ; If optimizing for size, or if we have load delay slots, then
5651 ; we want to split the constant into two separate operations.
5652 ; In both cases this may split a trivial part into a single data op
5653 ; leaving a single complex constant to load. We can also get longer
5654 ; offsets in a LDR which means we get better chances of sharing the pool
5655 ; entries. Finally, we can normally do a better job of scheduling
5656 ; LDR instructions than we can with LDM.
5657 ; This pattern will only match if the one above did not.
5659 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5660 (match_operand:ANY64 1 "const_double_operand" ""))]
5661 "TARGET_ARM && reload_completed
5662 && arm_const_double_by_parts (operands[1])"
5663 [(set (match_dup 0) (match_dup 1))
5664 (set (match_dup 2) (match_dup 3))]
5666 operands[2] = gen_highpart (SImode, operands[0]);
5667 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5669 operands[0] = gen_lowpart (SImode, operands[0]);
5670 operands[1] = gen_lowpart (SImode, operands[1]);
5675 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5676 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5677 "TARGET_EITHER && reload_completed"
5678 [(set (match_dup 0) (match_dup 1))
5679 (set (match_dup 2) (match_dup 3))]
5681 operands[2] = gen_highpart (SImode, operands[0]);
5682 operands[3] = gen_highpart (SImode, operands[1]);
5683 operands[0] = gen_lowpart (SImode, operands[0]);
5684 operands[1] = gen_lowpart (SImode, operands[1]);
5686 /* Handle a partial overlap. */
5687 if (rtx_equal_p (operands[0], operands[3]))
5689 rtx tmp0 = operands[0];
5690 rtx tmp1 = operands[1];
5692 operands[0] = operands[2];
5693 operands[1] = operands[3];
5700 ;; We can't actually do base+index doubleword loads if the index and
5701 ;; destination overlap. Split here so that we at least have chance to
5704 [(set (match_operand:DI 0 "s_register_operand" "")
5705 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5706 (match_operand:SI 2 "s_register_operand" ""))))]
5708 && reg_overlap_mentioned_p (operands[0], operands[1])
5709 && reg_overlap_mentioned_p (operands[0], operands[2])"
5711 (plus:SI (match_dup 1)
5714 (mem:DI (match_dup 4)))]
5716 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5720 (define_expand "movsi"
5721 [(set (match_operand:SI 0 "general_operand")
5722 (match_operand:SI 1 "general_operand"))]
5726 rtx base, offset, tmp;
5728 gcc_checking_assert (aligned_operand (operands[0], SImode));
5729 gcc_checking_assert (aligned_operand (operands[1], SImode));
5730 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5732 /* Everything except mem = const or mem = mem can be done easily. */
5733 if (MEM_P (operands[0]))
5734 operands[1] = force_reg (SImode, operands[1]);
5735 if (arm_general_register_operand (operands[0], SImode)
5736 && CONST_INT_P (operands[1])
5737 && !(const_ok_for_arm (INTVAL (operands[1]))
5738 || const_ok_for_arm (~INTVAL (operands[1]))))
5740 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5742 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5747 arm_split_constant (SET, SImode, NULL_RTX,
5748 INTVAL (operands[1]), operands[0], NULL_RTX,
5749 optimize && can_create_pseudo_p ());
5754 else /* Target doesn't have MOVT... */
5756 if (can_create_pseudo_p ())
5758 if (!REG_P (operands[0]))
5759 operands[1] = force_reg (SImode, operands[1]);
5763 split_const (operands[1], &base, &offset);
5764 if (INTVAL (offset) != 0
5765 && targetm.cannot_force_const_mem (SImode, operands[1]))
5767 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5768 emit_move_insn (tmp, base);
5769 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5773 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5775 /* Recognize the case where operand[1] is a reference to thread-local
5776 data and load its address to a register. Offsets have been split off
5778 if (arm_tls_referenced_p (operands[1]))
5779 operands[1] = legitimize_tls_address (operands[1], tmp);
5781 && (CONSTANT_P (operands[1])
5782 || symbol_mentioned_p (operands[1])
5783 || label_mentioned_p (operands[1])))
5785 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5790 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5791 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5792 ;; so this does not matter.
5793 (define_insn "*arm_movt"
5794 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5795 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5796 (match_operand:SI 2 "general_operand" "i,i")))]
5797 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5799 movt%?\t%0, #:upper16:%c2
5800 movt\t%0, #:upper16:%c2"
5801 [(set_attr "arch" "32,v8mb")
5802 (set_attr "predicable" "yes")
5803 (set_attr "length" "4")
5804 (set_attr "type" "alu_sreg")]
5807 (define_insn "*arm_movsi_insn"
5808 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5809 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5810 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5811 && ( register_operand (operands[0], SImode)
5812 || register_operand (operands[1], SImode))"
5820 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5821 (set_attr "predicable" "yes")
5822 (set_attr "arch" "*,*,*,v6t2,*,*")
5823 (set_attr "pool_range" "*,*,*,*,4096,*")
5824 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5828 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5829 (match_operand:SI 1 "const_int_operand" ""))]
5830 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5831 && (!(const_ok_for_arm (INTVAL (operands[1]))
5832 || const_ok_for_arm (~INTVAL (operands[1]))))"
5833 [(clobber (const_int 0))]
5835 arm_split_constant (SET, SImode, NULL_RTX,
5836 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5841 ;; A normal way to do (symbol + offset) requires three instructions at least
5842 ;; (depends on how big the offset is) as below:
5843 ;; movw r0, #:lower16:g
5844 ;; movw r0, #:upper16:g
5847 ;; A better way would be:
5848 ;; movw r0, #:lower16:g+4
5849 ;; movw r0, #:upper16:g+4
5851 ;; The limitation of this way is that the length of offset should be a 16-bit
5852 ;; signed value, because current assembler only supports REL type relocation for
5853 ;; such case. If the more powerful RELA type is supported in future, we should
5854 ;; update this pattern to go with better way.
5856 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5857 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5858 (match_operand:SI 2 "const_int_operand" ""))))]
5861 && arm_disable_literal_pool
5863 && GET_CODE (operands[1]) == SYMBOL_REF"
5864 [(clobber (const_int 0))]
5866 int offset = INTVAL (operands[2]);
5868 if (offset < -0x8000 || offset > 0x7fff)
5870 arm_emit_movpair (operands[0], operands[1]);
5871 emit_insn (gen_rtx_SET (operands[0],
5872 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5876 rtx op = gen_rtx_CONST (SImode,
5877 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5878 arm_emit_movpair (operands[0], op);
5883 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5884 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5885 ;; and lo_sum would be merged back into memory load at cprop. However,
5886 ;; if the default is to prefer movt/movw rather than a load from the constant
5887 ;; pool, the performance is better.
5889 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5890 (match_operand:SI 1 "general_operand" ""))]
5891 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5892 && !target_word_relocations
5893 && !arm_tls_referenced_p (operands[1])"
5894 [(clobber (const_int 0))]
5896 arm_emit_movpair (operands[0], operands[1]);
5900 ;; When generating pic, we need to load the symbol offset into a register.
5901 ;; So that the optimizer does not confuse this with a normal symbol load
5902 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5903 ;; since that is the only type of relocation we can use.
5905 ;; Wrap calculation of the whole PIC address in a single pattern for the
5906 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5907 ;; a PIC address involves two loads from memory, so we want to CSE it
5908 ;; as often as possible.
5909 ;; This pattern will be split into one of the pic_load_addr_* patterns
5910 ;; and a move after GCSE optimizations.
5912 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5913 (define_expand "calculate_pic_address"
5914 [(set (match_operand:SI 0 "register_operand")
5915 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5916 (unspec:SI [(match_operand:SI 2 "" "")]
5921 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5923 [(set (match_operand:SI 0 "register_operand" "")
5924 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5925 (unspec:SI [(match_operand:SI 2 "" "")]
5928 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5929 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5930 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5933 ;; operand1 is the memory address to go into
5934 ;; pic_load_addr_32bit.
5935 ;; operand2 is the PIC label to be emitted
5936 ;; from pic_add_dot_plus_eight.
5937 ;; We do this to allow hoisting of the entire insn.
5938 (define_insn_and_split "pic_load_addr_unified"
5939 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5940 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5941 (match_operand:SI 2 "" "")]
5942 UNSPEC_PIC_UNIFIED))]
5945 "&& reload_completed"
5946 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5947 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5948 (match_dup 2)] UNSPEC_PIC_BASE))]
5949 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5950 [(set_attr "type" "load_4,load_4,load_4")
5951 (set_attr "pool_range" "4096,4094,1022")
5952 (set_attr "neg_pool_range" "4084,0,0")
5953 (set_attr "arch" "a,t2,t1")
5954 (set_attr "length" "8,6,4")]
5957 ;; The rather odd constraints on the following are to force reload to leave
5958 ;; the insn alone, and to force the minipool generation pass to then move
5959 ;; the GOT symbol to memory.
5961 (define_insn "pic_load_addr_32bit"
5962 [(set (match_operand:SI 0 "s_register_operand" "=r")
5963 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5964 "TARGET_32BIT && flag_pic"
5966 [(set_attr "type" "load_4")
5967 (set (attr "pool_range")
5968 (if_then_else (eq_attr "is_thumb" "no")
5971 (set (attr "neg_pool_range")
5972 (if_then_else (eq_attr "is_thumb" "no")
5977 (define_insn "pic_load_addr_thumb1"
5978 [(set (match_operand:SI 0 "s_register_operand" "=l")
5979 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5980 "TARGET_THUMB1 && flag_pic"
5982 [(set_attr "type" "load_4")
5983 (set (attr "pool_range") (const_int 1018))]
5986 (define_insn "pic_add_dot_plus_four"
5987 [(set (match_operand:SI 0 "register_operand" "=r")
5988 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
5990 (match_operand 2 "" "")]
5994 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5995 INTVAL (operands[2]));
5996 return \"add\\t%0, %|pc\";
5998 [(set_attr "length" "2")
5999 (set_attr "type" "alu_sreg")]
6002 (define_insn "pic_add_dot_plus_eight"
6003 [(set (match_operand:SI 0 "register_operand" "=r")
6004 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6006 (match_operand 2 "" "")]
6010 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6011 INTVAL (operands[2]));
6012 return \"add%?\\t%0, %|pc, %1\";
6014 [(set_attr "predicable" "yes")
6015 (set_attr "type" "alu_sreg")]
6018 (define_insn "tls_load_dot_plus_eight"
6019 [(set (match_operand:SI 0 "register_operand" "=r")
6020 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6022 (match_operand 2 "" "")]
6026 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6027 INTVAL (operands[2]));
6028 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6030 [(set_attr "predicable" "yes")
6031 (set_attr "type" "load_4")]
6034 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6035 ;; followed by a load. These sequences can be crunched down to
6036 ;; tls_load_dot_plus_eight by a peephole.
6039 [(set (match_operand:SI 0 "register_operand" "")
6040 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6042 (match_operand 1 "" "")]
6044 (set (match_operand:SI 2 "arm_general_register_operand" "")
6045 (mem:SI (match_dup 0)))]
6046 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6048 (mem:SI (unspec:SI [(match_dup 3)
6055 (define_insn "pic_offset_arm"
6056 [(set (match_operand:SI 0 "register_operand" "=r")
6057 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6058 (unspec:SI [(match_operand:SI 2 "" "X")]
6059 UNSPEC_PIC_OFFSET))))]
6060 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6061 "ldr%?\\t%0, [%1,%2]"
6062 [(set_attr "type" "load_4")]
6065 (define_expand "builtin_setjmp_receiver"
6066 [(label_ref (match_operand 0 "" ""))]
6070 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6072 if (arm_pic_register != INVALID_REGNUM)
6073 arm_load_pic_register (1UL << 3, NULL_RTX);
6077 ;; If copying one reg to another we can set the condition codes according to
6078 ;; its value. Such a move is common after a return from subroutine and the
6079 ;; result is being tested against zero.
6081 (define_insn "*movsi_compare0"
6082 [(set (reg:CC CC_REGNUM)
6083 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6085 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6090 subs%?\\t%0, %1, #0"
6091 [(set_attr "conds" "set")
6092 (set_attr "type" "alus_imm,alus_imm")]
6095 ;; Subroutine to store a half word from a register into memory.
6096 ;; Operand 0 is the source register (HImode)
6097 ;; Operand 1 is the destination address in a register (SImode)
6099 ;; In both this routine and the next, we must be careful not to spill
6100 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6101 ;; can generate unrecognizable rtl.
6103 (define_expand "storehi"
6104 [;; store the low byte
6105 (set (match_operand 1 "" "") (match_dup 3))
6106 ;; extract the high byte
6108 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6109 ;; store the high byte
6110 (set (match_dup 4) (match_dup 5))]
6114 rtx op1 = operands[1];
6115 rtx addr = XEXP (op1, 0);
6116 enum rtx_code code = GET_CODE (addr);
6118 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6120 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6122 operands[4] = adjust_address (op1, QImode, 1);
6123 operands[1] = adjust_address (operands[1], QImode, 0);
6124 operands[3] = gen_lowpart (QImode, operands[0]);
6125 operands[0] = gen_lowpart (SImode, operands[0]);
6126 operands[2] = gen_reg_rtx (SImode);
6127 operands[5] = gen_lowpart (QImode, operands[2]);
6131 (define_expand "storehi_bigend"
6132 [(set (match_dup 4) (match_dup 3))
6134 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6135 (set (match_operand 1 "" "") (match_dup 5))]
6139 rtx op1 = operands[1];
6140 rtx addr = XEXP (op1, 0);
6141 enum rtx_code code = GET_CODE (addr);
6143 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6145 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6147 operands[4] = adjust_address (op1, QImode, 1);
6148 operands[1] = adjust_address (operands[1], QImode, 0);
6149 operands[3] = gen_lowpart (QImode, operands[0]);
6150 operands[0] = gen_lowpart (SImode, operands[0]);
6151 operands[2] = gen_reg_rtx (SImode);
6152 operands[5] = gen_lowpart (QImode, operands[2]);
6156 ;; Subroutine to store a half word integer constant into memory.
6157 (define_expand "storeinthi"
6158 [(set (match_operand 0 "" "")
6159 (match_operand 1 "" ""))
6160 (set (match_dup 3) (match_dup 2))]
6164 HOST_WIDE_INT value = INTVAL (operands[1]);
6165 rtx addr = XEXP (operands[0], 0);
6166 rtx op0 = operands[0];
6167 enum rtx_code code = GET_CODE (addr);
6169 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6171 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6173 operands[1] = gen_reg_rtx (SImode);
6174 if (BYTES_BIG_ENDIAN)
6176 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6177 if ((value & 255) == ((value >> 8) & 255))
6178 operands[2] = operands[1];
6181 operands[2] = gen_reg_rtx (SImode);
6182 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6187 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6188 if ((value & 255) == ((value >> 8) & 255))
6189 operands[2] = operands[1];
6192 operands[2] = gen_reg_rtx (SImode);
6193 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6197 operands[3] = adjust_address (op0, QImode, 1);
6198 operands[0] = adjust_address (operands[0], QImode, 0);
6199 operands[2] = gen_lowpart (QImode, operands[2]);
6200 operands[1] = gen_lowpart (QImode, operands[1]);
6204 (define_expand "storehi_single_op"
6205 [(set (match_operand:HI 0 "memory_operand")
6206 (match_operand:HI 1 "general_operand"))]
6207 "TARGET_32BIT && arm_arch4"
6209 if (!s_register_operand (operands[1], HImode))
6210 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6214 (define_expand "movhi"
6215 [(set (match_operand:HI 0 "general_operand")
6216 (match_operand:HI 1 "general_operand"))]
6219 gcc_checking_assert (aligned_operand (operands[0], HImode));
6220 gcc_checking_assert (aligned_operand (operands[1], HImode));
6223 if (can_create_pseudo_p ())
6225 if (MEM_P (operands[0]))
6229 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6232 if (CONST_INT_P (operands[1]))
6233 emit_insn (gen_storeinthi (operands[0], operands[1]));
6236 if (MEM_P (operands[1]))
6237 operands[1] = force_reg (HImode, operands[1]);
6238 if (BYTES_BIG_ENDIAN)
6239 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6241 emit_insn (gen_storehi (operands[1], operands[0]));
6245 /* Sign extend a constant, and keep it in an SImode reg. */
6246 else if (CONST_INT_P (operands[1]))
6248 rtx reg = gen_reg_rtx (SImode);
6249 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6251 /* If the constant is already valid, leave it alone. */
6252 if (!const_ok_for_arm (val))
6254 /* If setting all the top bits will make the constant
6255 loadable in a single instruction, then set them.
6256 Otherwise, sign extend the number. */
6258 if (const_ok_for_arm (~(val | ~0xffff)))
6260 else if (val & 0x8000)
6264 emit_insn (gen_movsi (reg, GEN_INT (val)));
6265 operands[1] = gen_lowpart (HImode, reg);
6267 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6268 && MEM_P (operands[1]))
6270 rtx reg = gen_reg_rtx (SImode);
6272 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6273 operands[1] = gen_lowpart (HImode, reg);
6275 else if (!arm_arch4)
6277 if (MEM_P (operands[1]))
6280 rtx offset = const0_rtx;
6281 rtx reg = gen_reg_rtx (SImode);
6283 if ((REG_P (base = XEXP (operands[1], 0))
6284 || (GET_CODE (base) == PLUS
6285 && (CONST_INT_P (offset = XEXP (base, 1)))
6286 && ((INTVAL(offset) & 1) != 1)
6287 && REG_P (base = XEXP (base, 0))))
6288 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6292 new_rtx = widen_memory_access (operands[1], SImode,
6293 ((INTVAL (offset) & ~3)
6294 - INTVAL (offset)));
6295 emit_insn (gen_movsi (reg, new_rtx));
6296 if (((INTVAL (offset) & 2) != 0)
6297 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6299 rtx reg2 = gen_reg_rtx (SImode);
6301 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6306 emit_insn (gen_movhi_bytes (reg, operands[1]));
6308 operands[1] = gen_lowpart (HImode, reg);
6312 /* Handle loading a large integer during reload. */
6313 else if (CONST_INT_P (operands[1])
6314 && !const_ok_for_arm (INTVAL (operands[1]))
6315 && !const_ok_for_arm (~INTVAL (operands[1])))
6317 /* Writing a constant to memory needs a scratch, which should
6318 be handled with SECONDARY_RELOADs. */
6319 gcc_assert (REG_P (operands[0]));
6321 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6322 emit_insn (gen_movsi (operands[0], operands[1]));
6326 else if (TARGET_THUMB2)
6328 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6329 if (can_create_pseudo_p ())
6331 if (!REG_P (operands[0]))
6332 operands[1] = force_reg (HImode, operands[1]);
6333 /* Zero extend a constant, and keep it in an SImode reg. */
6334 else if (CONST_INT_P (operands[1]))
6336 rtx reg = gen_reg_rtx (SImode);
6337 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6339 emit_insn (gen_movsi (reg, GEN_INT (val)));
6340 operands[1] = gen_lowpart (HImode, reg);
6344 else /* TARGET_THUMB1 */
6346 if (can_create_pseudo_p ())
6348 if (CONST_INT_P (operands[1]))
6350 rtx reg = gen_reg_rtx (SImode);
6352 emit_insn (gen_movsi (reg, operands[1]));
6353 operands[1] = gen_lowpart (HImode, reg);
6356 /* ??? We shouldn't really get invalid addresses here, but this can
6357 happen if we are passed a SP (never OK for HImode/QImode) or
6358 virtual register (also rejected as illegitimate for HImode/QImode)
6359 relative address. */
6360 /* ??? This should perhaps be fixed elsewhere, for instance, in
6361 fixup_stack_1, by checking for other kinds of invalid addresses,
6362 e.g. a bare reference to a virtual register. This may confuse the
6363 alpha though, which must handle this case differently. */
6364 if (MEM_P (operands[0])
6365 && !memory_address_p (GET_MODE (operands[0]),
6366 XEXP (operands[0], 0)))
6368 = replace_equiv_address (operands[0],
6369 copy_to_reg (XEXP (operands[0], 0)));
6371 if (MEM_P (operands[1])
6372 && !memory_address_p (GET_MODE (operands[1]),
6373 XEXP (operands[1], 0)))
6375 = replace_equiv_address (operands[1],
6376 copy_to_reg (XEXP (operands[1], 0)));
6378 if (MEM_P (operands[1]) && optimize > 0)
6380 rtx reg = gen_reg_rtx (SImode);
6382 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6383 operands[1] = gen_lowpart (HImode, reg);
6386 if (MEM_P (operands[0]))
6387 operands[1] = force_reg (HImode, operands[1]);
6389 else if (CONST_INT_P (operands[1])
6390 && !satisfies_constraint_I (operands[1]))
6392 /* Handle loading a large integer during reload. */
6394 /* Writing a constant to memory needs a scratch, which should
6395 be handled with SECONDARY_RELOADs. */
6396 gcc_assert (REG_P (operands[0]));
6398 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6399 emit_insn (gen_movsi (operands[0], operands[1]));
6406 (define_expand "movhi_bytes"
6407 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6409 (zero_extend:SI (match_dup 6)))
6410 (set (match_operand:SI 0 "" "")
6411 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6416 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6418 mem1 = change_address (operands[1], QImode, addr);
6419 mem2 = change_address (operands[1], QImode,
6420 plus_constant (Pmode, addr, 1));
6421 operands[0] = gen_lowpart (SImode, operands[0]);
6423 operands[2] = gen_reg_rtx (SImode);
6424 operands[3] = gen_reg_rtx (SImode);
6427 if (BYTES_BIG_ENDIAN)
6429 operands[4] = operands[2];
6430 operands[5] = operands[3];
6434 operands[4] = operands[3];
6435 operands[5] = operands[2];
6440 (define_expand "movhi_bigend"
6442 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6445 (ashiftrt:SI (match_dup 2) (const_int 16)))
6446 (set (match_operand:HI 0 "s_register_operand")
6450 operands[2] = gen_reg_rtx (SImode);
6451 operands[3] = gen_reg_rtx (SImode);
6452 operands[4] = gen_lowpart (HImode, operands[3]);
6456 ;; Pattern to recognize insn generated default case above
6457 (define_insn "*movhi_insn_arch4"
6458 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6459 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6461 && arm_arch4 && !TARGET_HARD_FLOAT
6462 && (register_operand (operands[0], HImode)
6463 || register_operand (operands[1], HImode))"
6465 mov%?\\t%0, %1\\t%@ movhi
6466 mvn%?\\t%0, #%B1\\t%@ movhi
6467 movw%?\\t%0, %L1\\t%@ movhi
6468 strh%?\\t%1, %0\\t%@ movhi
6469 ldrh%?\\t%0, %1\\t%@ movhi"
6470 [(set_attr "predicable" "yes")
6471 (set_attr "pool_range" "*,*,*,*,256")
6472 (set_attr "neg_pool_range" "*,*,*,*,244")
6473 (set_attr "arch" "*,*,v6t2,*,*")
6474 (set_attr_alternative "type"
6475 [(if_then_else (match_operand 1 "const_int_operand" "")
6476 (const_string "mov_imm" )
6477 (const_string "mov_reg"))
6478 (const_string "mvn_imm")
6479 (const_string "mov_imm")
6480 (const_string "store_4")
6481 (const_string "load_4")])]
6484 (define_insn "*movhi_bytes"
6485 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6486 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6487 "TARGET_ARM && !TARGET_HARD_FLOAT"
6489 mov%?\\t%0, %1\\t%@ movhi
6490 mov%?\\t%0, %1\\t%@ movhi
6491 mvn%?\\t%0, #%B1\\t%@ movhi"
6492 [(set_attr "predicable" "yes")
6493 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6496 ;; We use a DImode scratch because we may occasionally need an additional
6497 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6498 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6499 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6500 ;; to be correctly handled in default_secondary_reload function.
6501 (define_expand "reload_outhi"
6502 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6503 (match_operand:HI 1 "s_register_operand" "r")
6504 (match_operand:DI 2 "s_register_operand" "=&l")])]
6507 arm_reload_out_hi (operands);
6509 thumb_reload_out_hi (operands);
6514 (define_expand "reload_inhi"
6515 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6516 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6517 (match_operand:DI 2 "s_register_operand" "=&r")])]
6521 arm_reload_in_hi (operands);
6523 thumb_reload_out_hi (operands);
6527 (define_expand "movqi"
6528 [(set (match_operand:QI 0 "general_operand")
6529 (match_operand:QI 1 "general_operand"))]
6532 /* Everything except mem = const or mem = mem can be done easily */
6534 if (can_create_pseudo_p ())
6536 if (CONST_INT_P (operands[1]))
6538 rtx reg = gen_reg_rtx (SImode);
6540 /* For thumb we want an unsigned immediate, then we are more likely
6541 to be able to use a movs insn. */
6543 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6545 emit_insn (gen_movsi (reg, operands[1]));
6546 operands[1] = gen_lowpart (QImode, reg);
6551 /* ??? We shouldn't really get invalid addresses here, but this can
6552 happen if we are passed a SP (never OK for HImode/QImode) or
6553 virtual register (also rejected as illegitimate for HImode/QImode)
6554 relative address. */
6555 /* ??? This should perhaps be fixed elsewhere, for instance, in
6556 fixup_stack_1, by checking for other kinds of invalid addresses,
6557 e.g. a bare reference to a virtual register. This may confuse the
6558 alpha though, which must handle this case differently. */
6559 if (MEM_P (operands[0])
6560 && !memory_address_p (GET_MODE (operands[0]),
6561 XEXP (operands[0], 0)))
6563 = replace_equiv_address (operands[0],
6564 copy_to_reg (XEXP (operands[0], 0)));
6565 if (MEM_P (operands[1])
6566 && !memory_address_p (GET_MODE (operands[1]),
6567 XEXP (operands[1], 0)))
6569 = replace_equiv_address (operands[1],
6570 copy_to_reg (XEXP (operands[1], 0)));
6573 if (MEM_P (operands[1]) && optimize > 0)
6575 rtx reg = gen_reg_rtx (SImode);
6577 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6578 operands[1] = gen_lowpart (QImode, reg);
6581 if (MEM_P (operands[0]))
6582 operands[1] = force_reg (QImode, operands[1]);
6584 else if (TARGET_THUMB
6585 && CONST_INT_P (operands[1])
6586 && !satisfies_constraint_I (operands[1]))
6588 /* Handle loading a large integer during reload. */
6590 /* Writing a constant to memory needs a scratch, which should
6591 be handled with SECONDARY_RELOADs. */
6592 gcc_assert (REG_P (operands[0]));
6594 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6595 emit_insn (gen_movsi (operands[0], operands[1]));
6601 (define_insn "*arm_movqi_insn"
6602 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6603 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6605 && ( register_operand (operands[0], QImode)
6606 || register_operand (operands[1], QImode))"
6617 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6618 (set_attr "predicable" "yes")
6619 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6620 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6621 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6625 (define_expand "movhf"
6626 [(set (match_operand:HF 0 "general_operand")
6627 (match_operand:HF 1 "general_operand"))]
6630 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6631 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6634 if (MEM_P (operands[0]))
6635 operands[1] = force_reg (HFmode, operands[1]);
6637 else /* TARGET_THUMB1 */
6639 if (can_create_pseudo_p ())
6641 if (!REG_P (operands[0]))
6642 operands[1] = force_reg (HFmode, operands[1]);
6648 (define_insn "*arm32_movhf"
6649 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6650 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6651 "TARGET_32BIT && !TARGET_HARD_FLOAT
6652 && ( s_register_operand (operands[0], HFmode)
6653 || s_register_operand (operands[1], HFmode))"
6655 switch (which_alternative)
6657 case 0: /* ARM register from memory */
6658 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6659 case 1: /* memory from ARM register */
6660 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6661 case 2: /* ARM register from ARM register */
6662 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6663 case 3: /* ARM register from constant */
6668 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6670 ops[0] = operands[0];
6671 ops[1] = GEN_INT (bits);
6672 ops[2] = GEN_INT (bits & 0xff00);
6673 ops[3] = GEN_INT (bits & 0x00ff);
6675 if (arm_arch_thumb2)
6676 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6678 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6685 [(set_attr "conds" "unconditional")
6686 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6687 (set_attr "length" "4,4,4,8")
6688 (set_attr "predicable" "yes")]
6691 (define_expand "movsf"
6692 [(set (match_operand:SF 0 "general_operand")
6693 (match_operand:SF 1 "general_operand"))]
6696 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6697 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6700 if (MEM_P (operands[0]))
6701 operands[1] = force_reg (SFmode, operands[1]);
6703 else /* TARGET_THUMB1 */
6705 if (can_create_pseudo_p ())
6707 if (!REG_P (operands[0]))
6708 operands[1] = force_reg (SFmode, operands[1]);
6712 /* Cannot load it directly, generate a load with clobber so that it can be
6713 loaded via GPR with MOV / MOVT. */
6714 if (arm_disable_literal_pool
6715 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6716 && CONST_DOUBLE_P (operands[1])
6717 && TARGET_HARD_FLOAT
6718 && !vfp3_const_double_rtx (operands[1]))
6720 rtx clobreg = gen_reg_rtx (SFmode);
6721 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6728 ;; Transform a floating-point move of a constant into a core register into
6729 ;; an SImode operation.
6731 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6732 (match_operand:SF 1 "immediate_operand" ""))]
6735 && CONST_DOUBLE_P (operands[1])"
6736 [(set (match_dup 2) (match_dup 3))]
6738 operands[2] = gen_lowpart (SImode, operands[0]);
6739 operands[3] = gen_lowpart (SImode, operands[1]);
6740 if (operands[2] == 0 || operands[3] == 0)
6745 (define_insn "*arm_movsf_soft_insn"
6746 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6747 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6749 && TARGET_SOFT_FLOAT
6750 && (!MEM_P (operands[0])
6751 || register_operand (operands[1], SFmode))"
6753 switch (which_alternative)
6755 case 0: return \"mov%?\\t%0, %1\";
6757 /* Cannot load it directly, split to load it via MOV / MOVT. */
6758 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6760 return \"ldr%?\\t%0, %1\\t%@ float\";
6761 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6762 default: gcc_unreachable ();
6765 [(set_attr "predicable" "yes")
6766 (set_attr "type" "mov_reg,load_4,store_4")
6767 (set_attr "arm_pool_range" "*,4096,*")
6768 (set_attr "thumb2_pool_range" "*,4094,*")
6769 (set_attr "arm_neg_pool_range" "*,4084,*")
6770 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6773 ;; Splitter for the above.
6775 [(set (match_operand:SF 0 "s_register_operand")
6776 (match_operand:SF 1 "const_double_operand"))]
6777 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6781 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6782 rtx cst = gen_int_mode (buf, SImode);
6783 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6788 (define_expand "movdf"
6789 [(set (match_operand:DF 0 "general_operand")
6790 (match_operand:DF 1 "general_operand"))]
6793 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6794 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6797 if (MEM_P (operands[0]))
6798 operands[1] = force_reg (DFmode, operands[1]);
6800 else /* TARGET_THUMB */
6802 if (can_create_pseudo_p ())
6804 if (!REG_P (operands[0]))
6805 operands[1] = force_reg (DFmode, operands[1]);
6809 /* Cannot load it directly, generate a load with clobber so that it can be
6810 loaded via GPR with MOV / MOVT. */
6811 if (arm_disable_literal_pool
6812 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6813 && CONSTANT_P (operands[1])
6814 && TARGET_HARD_FLOAT
6815 && !arm_const_double_rtx (operands[1])
6816 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6818 rtx clobreg = gen_reg_rtx (DFmode);
6819 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6826 ;; Reloading a df mode value stored in integer regs to memory can require a
6828 ;; Another reload_out<m> pattern that requires special constraints.
6829 (define_expand "reload_outdf"
6830 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6831 (match_operand:DF 1 "s_register_operand" "r")
6832 (match_operand:SI 2 "s_register_operand" "=&r")]
6836 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6839 operands[2] = XEXP (operands[0], 0);
6840 else if (code == POST_INC || code == PRE_DEC)
6842 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6843 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6844 emit_insn (gen_movdi (operands[0], operands[1]));
6847 else if (code == PRE_INC)
6849 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6851 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6854 else if (code == POST_DEC)
6855 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6857 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6858 XEXP (XEXP (operands[0], 0), 1)));
6860 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6863 if (code == POST_DEC)
6864 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6870 (define_insn "*movdf_soft_insn"
6871 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6872 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6873 "TARGET_32BIT && TARGET_SOFT_FLOAT
6874 && ( register_operand (operands[0], DFmode)
6875 || register_operand (operands[1], DFmode))"
6877 switch (which_alternative)
6884 /* Cannot load it directly, split to load it via MOV / MOVT. */
6885 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6889 return output_move_double (operands, true, NULL);
6892 [(set_attr "length" "8,12,16,8,8")
6893 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6894 (set_attr "arm_pool_range" "*,*,*,1020,*")
6895 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6896 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6897 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6900 ;; Splitter for the above.
6902 [(set (match_operand:DF 0 "s_register_operand")
6903 (match_operand:DF 1 "const_double_operand"))]
6904 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6908 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6909 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6910 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6911 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6912 rtx cst = gen_int_mode (ival, DImode);
6913 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6919 ;; load- and store-multiple insns
6920 ;; The arm can load/store any set of registers, provided that they are in
6921 ;; ascending order, but these expanders assume a contiguous set.
6923 (define_expand "load_multiple"
6924 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6925 (match_operand:SI 1 "" ""))
6926 (use (match_operand:SI 2 "" ""))])]
6929 HOST_WIDE_INT offset = 0;
6931 /* Support only fixed point registers. */
6932 if (!CONST_INT_P (operands[2])
6933 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6934 || INTVAL (operands[2]) < 2
6935 || !MEM_P (operands[1])
6936 || !REG_P (operands[0])
6937 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6938 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6942 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6943 INTVAL (operands[2]),
6944 force_reg (SImode, XEXP (operands[1], 0)),
6945 FALSE, operands[1], &offset);
6948 (define_expand "store_multiple"
6949 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6950 (match_operand:SI 1 "" ""))
6951 (use (match_operand:SI 2 "" ""))])]
6954 HOST_WIDE_INT offset = 0;
6956 /* Support only fixed point registers. */
6957 if (!CONST_INT_P (operands[2])
6958 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6959 || INTVAL (operands[2]) < 2
6960 || !REG_P (operands[1])
6961 || !MEM_P (operands[0])
6962 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6963 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6967 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6968 INTVAL (operands[2]),
6969 force_reg (SImode, XEXP (operands[0], 0)),
6970 FALSE, operands[0], &offset);
6974 (define_expand "setmemsi"
6975 [(match_operand:BLK 0 "general_operand")
6976 (match_operand:SI 1 "const_int_operand")
6977 (match_operand:SI 2 "const_int_operand")
6978 (match_operand:SI 3 "const_int_operand")]
6981 if (arm_gen_setmem (operands))
6988 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6989 ;; We could let this apply for blocks of less than this, but it clobbers so
6990 ;; many registers that there is then probably a better way.
6992 (define_expand "cpymemqi"
6993 [(match_operand:BLK 0 "general_operand")
6994 (match_operand:BLK 1 "general_operand")
6995 (match_operand:SI 2 "const_int_operand")
6996 (match_operand:SI 3 "const_int_operand")]
7001 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7002 && !optimize_function_for_size_p (cfun))
7004 if (gen_cpymem_ldrd_strd (operands))
7009 if (arm_gen_cpymemqi (operands))
7013 else /* TARGET_THUMB1 */
7015 if ( INTVAL (operands[3]) != 4
7016 || INTVAL (operands[2]) > 48)
7019 thumb_expand_cpymemqi (operands);
7026 ;; Compare & branch insns
7027 ;; The range calculations are based as follows:
7028 ;; For forward branches, the address calculation returns the address of
7029 ;; the next instruction. This is 2 beyond the branch instruction.
7030 ;; For backward branches, the address calculation returns the address of
7031 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7032 ;; instruction for the shortest sequence, and 4 before the branch instruction
7033 ;; if we have to jump around an unconditional branch.
7034 ;; To the basic branch range the PC offset must be added (this is +4).
7035 ;; So for forward branches we have
7036 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7037 ;; And for backward branches we have
7038 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7040 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7041 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7043 (define_expand "cbranchsi4"
7044 [(set (pc) (if_then_else
7045 (match_operator 0 "expandable_comparison_operator"
7046 [(match_operand:SI 1 "s_register_operand")
7047 (match_operand:SI 2 "nonmemory_operand")])
7048 (label_ref (match_operand 3 "" ""))
7054 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7056 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7060 if (thumb1_cmpneg_operand (operands[2], SImode))
7062 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7063 operands[3], operands[0]));
7066 if (!thumb1_cmp_operand (operands[2], SImode))
7067 operands[2] = force_reg (SImode, operands[2]);
7070 (define_expand "cbranchsf4"
7071 [(set (pc) (if_then_else
7072 (match_operator 0 "expandable_comparison_operator"
7073 [(match_operand:SF 1 "s_register_operand")
7074 (match_operand:SF 2 "vfp_compare_operand")])
7075 (label_ref (match_operand 3 "" ""))
7077 "TARGET_32BIT && TARGET_HARD_FLOAT"
7078 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7079 operands[3])); DONE;"
7082 (define_expand "cbranchdf4"
7083 [(set (pc) (if_then_else
7084 (match_operator 0 "expandable_comparison_operator"
7085 [(match_operand:DF 1 "s_register_operand")
7086 (match_operand:DF 2 "vfp_compare_operand")])
7087 (label_ref (match_operand 3 "" ""))
7089 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7090 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7091 operands[3])); DONE;"
7094 (define_expand "cbranchdi4"
7095 [(set (pc) (if_then_else
7096 (match_operator 0 "expandable_comparison_operator"
7097 [(match_operand:DI 1 "s_register_operand")
7098 (match_operand:DI 2 "reg_or_int_operand")])
7099 (label_ref (match_operand 3 "" ""))
7103 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7105 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7111 ;; Comparison and test insns
7113 (define_insn "*arm_cmpsi_insn"
7114 [(set (reg:CC CC_REGNUM)
7115 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7116 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7124 [(set_attr "conds" "set")
7125 (set_attr "arch" "t2,t2,any,any,any")
7126 (set_attr "length" "2,2,4,4,4")
7127 (set_attr "predicable" "yes")
7128 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7129 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7132 (define_insn "*cmpsi_shiftsi"
7133 [(set (reg:CC CC_REGNUM)
7134 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
7135 (match_operator:SI 3 "shift_operator"
7136 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7137 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
7140 [(set_attr "conds" "set")
7141 (set_attr "shift" "1")
7142 (set_attr "arch" "32,a,a")
7143 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7145 (define_insn "*cmpsi_shiftsi_swp"
7146 [(set (reg:CC_SWP CC_REGNUM)
7147 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7148 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7149 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
7150 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
7153 [(set_attr "conds" "set")
7154 (set_attr "shift" "1")
7155 (set_attr "arch" "32,a,a")
7156 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
7158 (define_insn "*arm_cmpsi_negshiftsi_si"
7159 [(set (reg:CC_Z CC_REGNUM)
7161 (neg:SI (match_operator:SI 1 "shift_operator"
7162 [(match_operand:SI 2 "s_register_operand" "r")
7163 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7164 (match_operand:SI 0 "s_register_operand" "r")))]
7167 [(set_attr "conds" "set")
7168 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7169 (const_string "alus_shift_imm")
7170 (const_string "alus_shift_reg")))
7171 (set_attr "predicable" "yes")]
7174 ; This insn allows redundant compares to be removed by cse, nothing should
7175 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7176 ; is deleted later on. The match_dup will match the mode here, so that
7177 ; mode changes of the condition codes aren't lost by this even though we don't
7178 ; specify what they are.
7180 (define_insn "*deleted_compare"
7181 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7183 "\\t%@ deleted compare"
7184 [(set_attr "conds" "set")
7185 (set_attr "length" "0")
7186 (set_attr "type" "no_insn")]
7190 ;; Conditional branch insns
7192 (define_expand "cbranch_cc"
7194 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7195 (match_operand 2 "" "")])
7196 (label_ref (match_operand 3 "" ""))
7199 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7200 operands[1], operands[2], NULL_RTX);
7201 operands[2] = const0_rtx;"
7205 ;; Patterns to match conditional branch insns.
7208 (define_insn "arm_cond_branch"
7210 (if_then_else (match_operator 1 "arm_comparison_operator"
7211 [(match_operand 2 "cc_register" "") (const_int 0)])
7212 (label_ref (match_operand 0 "" ""))
7216 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7218 arm_ccfsm_state += 2;
7221 return \"b%d1\\t%l0\";
7223 [(set_attr "conds" "use")
7224 (set_attr "type" "branch")
7225 (set (attr "length")
7227 (and (match_test "TARGET_THUMB2")
7228 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7229 (le (minus (match_dup 0) (pc)) (const_int 256))))
7234 (define_insn "*arm_cond_branch_reversed"
7236 (if_then_else (match_operator 1 "arm_comparison_operator"
7237 [(match_operand 2 "cc_register" "") (const_int 0)])
7239 (label_ref (match_operand 0 "" ""))))]
7242 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7244 arm_ccfsm_state += 2;
7247 return \"b%D1\\t%l0\";
7249 [(set_attr "conds" "use")
7250 (set_attr "type" "branch")
7251 (set (attr "length")
7253 (and (match_test "TARGET_THUMB2")
7254 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7255 (le (minus (match_dup 0) (pc)) (const_int 256))))
7264 (define_expand "cstore_cc"
7265 [(set (match_operand:SI 0 "s_register_operand")
7266 (match_operator:SI 1 "" [(match_operand 2 "" "")
7267 (match_operand 3 "" "")]))]
7269 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7270 operands[2], operands[3], NULL_RTX);
7271 operands[3] = const0_rtx;"
7274 (define_insn_and_split "*mov_scc"
7275 [(set (match_operand:SI 0 "s_register_operand" "=r")
7276 (match_operator:SI 1 "arm_comparison_operator_mode"
7277 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7279 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7282 (if_then_else:SI (match_dup 1)
7286 [(set_attr "conds" "use")
7287 (set_attr "length" "8")
7288 (set_attr "type" "multiple")]
7291 (define_insn "*negscc_borrow"
7292 [(set (match_operand:SI 0 "s_register_operand" "=r")
7293 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7296 [(set_attr "conds" "use")
7297 (set_attr "length" "4")
7298 (set_attr "type" "adc_reg")]
7301 (define_insn_and_split "*mov_negscc"
7302 [(set (match_operand:SI 0 "s_register_operand" "=r")
7303 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7304 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7305 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7306 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7309 (if_then_else:SI (match_dup 1)
7313 operands[3] = GEN_INT (~0);
7315 [(set_attr "conds" "use")
7316 (set_attr "length" "8")
7317 (set_attr "type" "multiple")]
7320 (define_insn_and_split "*mov_notscc"
7321 [(set (match_operand:SI 0 "s_register_operand" "=r")
7322 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7323 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7325 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7328 (if_then_else:SI (match_dup 1)
7332 operands[3] = GEN_INT (~1);
7333 operands[4] = GEN_INT (~0);
7335 [(set_attr "conds" "use")
7336 (set_attr "length" "8")
7337 (set_attr "type" "multiple")]
7340 (define_expand "cstoresi4"
7341 [(set (match_operand:SI 0 "s_register_operand")
7342 (match_operator:SI 1 "expandable_comparison_operator"
7343 [(match_operand:SI 2 "s_register_operand")
7344 (match_operand:SI 3 "reg_or_int_operand")]))]
7345 "TARGET_32BIT || TARGET_THUMB1"
7347 rtx op3, scratch, scratch2;
7351 if (!arm_add_operand (operands[3], SImode))
7352 operands[3] = force_reg (SImode, operands[3]);
7353 emit_insn (gen_cstore_cc (operands[0], operands[1],
7354 operands[2], operands[3]));
7358 if (operands[3] == const0_rtx)
7360 switch (GET_CODE (operands[1]))
7363 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7367 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7371 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7372 NULL_RTX, 0, OPTAB_WIDEN);
7373 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7374 NULL_RTX, 0, OPTAB_WIDEN);
7375 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7376 operands[0], 1, OPTAB_WIDEN);
7380 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7382 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7383 NULL_RTX, 1, OPTAB_WIDEN);
7387 scratch = expand_binop (SImode, ashr_optab, operands[2],
7388 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7389 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7390 NULL_RTX, 0, OPTAB_WIDEN);
7391 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7395 /* LT is handled by generic code. No need for unsigned with 0. */
7402 switch (GET_CODE (operands[1]))
7405 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7406 NULL_RTX, 0, OPTAB_WIDEN);
7407 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7411 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7412 NULL_RTX, 0, OPTAB_WIDEN);
7413 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7417 op3 = force_reg (SImode, operands[3]);
7419 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7420 NULL_RTX, 1, OPTAB_WIDEN);
7421 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7422 NULL_RTX, 0, OPTAB_WIDEN);
7423 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7429 if (!thumb1_cmp_operand (op3, SImode))
7430 op3 = force_reg (SImode, op3);
7431 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7432 NULL_RTX, 0, OPTAB_WIDEN);
7433 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7434 NULL_RTX, 1, OPTAB_WIDEN);
7435 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7440 op3 = force_reg (SImode, operands[3]);
7441 scratch = force_reg (SImode, const0_rtx);
7442 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7448 if (!thumb1_cmp_operand (op3, SImode))
7449 op3 = force_reg (SImode, op3);
7450 scratch = force_reg (SImode, const0_rtx);
7451 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7457 if (!thumb1_cmp_operand (op3, SImode))
7458 op3 = force_reg (SImode, op3);
7459 scratch = gen_reg_rtx (SImode);
7460 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7464 op3 = force_reg (SImode, operands[3]);
7465 scratch = gen_reg_rtx (SImode);
7466 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7469 /* No good sequences for GT, LT. */
7476 (define_expand "cstorehf4"
7477 [(set (match_operand:SI 0 "s_register_operand")
7478 (match_operator:SI 1 "expandable_comparison_operator"
7479 [(match_operand:HF 2 "s_register_operand")
7480 (match_operand:HF 3 "vfp_compare_operand")]))]
7481 "TARGET_VFP_FP16INST"
7483 if (!arm_validize_comparison (&operands[1],
7488 emit_insn (gen_cstore_cc (operands[0], operands[1],
7489 operands[2], operands[3]));
7494 (define_expand "cstoresf4"
7495 [(set (match_operand:SI 0 "s_register_operand")
7496 (match_operator:SI 1 "expandable_comparison_operator"
7497 [(match_operand:SF 2 "s_register_operand")
7498 (match_operand:SF 3 "vfp_compare_operand")]))]
7499 "TARGET_32BIT && TARGET_HARD_FLOAT"
7500 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7501 operands[2], operands[3])); DONE;"
7504 (define_expand "cstoredf4"
7505 [(set (match_operand:SI 0 "s_register_operand")
7506 (match_operator:SI 1 "expandable_comparison_operator"
7507 [(match_operand:DF 2 "s_register_operand")
7508 (match_operand:DF 3 "vfp_compare_operand")]))]
7509 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7510 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7511 operands[2], operands[3])); DONE;"
7514 (define_expand "cstoredi4"
7515 [(set (match_operand:SI 0 "s_register_operand")
7516 (match_operator:SI 1 "expandable_comparison_operator"
7517 [(match_operand:DI 2 "s_register_operand")
7518 (match_operand:DI 3 "reg_or_int_operand")]))]
7521 if (!arm_validize_comparison (&operands[1],
7525 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7532 ;; Conditional move insns
7534 (define_expand "movsicc"
7535 [(set (match_operand:SI 0 "s_register_operand")
7536 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7537 (match_operand:SI 2 "arm_not_operand")
7538 (match_operand:SI 3 "arm_not_operand")))]
7545 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7546 &XEXP (operands[1], 1)))
7549 code = GET_CODE (operands[1]);
7550 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7551 XEXP (operands[1], 1), NULL_RTX);
7552 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7556 (define_expand "movhfcc"
7557 [(set (match_operand:HF 0 "s_register_operand")
7558 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7559 (match_operand:HF 2 "s_register_operand")
7560 (match_operand:HF 3 "s_register_operand")))]
7561 "TARGET_VFP_FP16INST"
7564 enum rtx_code code = GET_CODE (operands[1]);
7567 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7568 &XEXP (operands[1], 1)))
7571 code = GET_CODE (operands[1]);
7572 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7573 XEXP (operands[1], 1), NULL_RTX);
7574 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7578 (define_expand "movsfcc"
7579 [(set (match_operand:SF 0 "s_register_operand")
7580 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7581 (match_operand:SF 2 "s_register_operand")
7582 (match_operand:SF 3 "s_register_operand")))]
7583 "TARGET_32BIT && TARGET_HARD_FLOAT"
7586 enum rtx_code code = GET_CODE (operands[1]);
7589 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7590 &XEXP (operands[1], 1)))
7593 code = GET_CODE (operands[1]);
7594 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7595 XEXP (operands[1], 1), NULL_RTX);
7596 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7600 (define_expand "movdfcc"
7601 [(set (match_operand:DF 0 "s_register_operand")
7602 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7603 (match_operand:DF 2 "s_register_operand")
7604 (match_operand:DF 3 "s_register_operand")))]
7605 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7608 enum rtx_code code = GET_CODE (operands[1]);
7611 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7612 &XEXP (operands[1], 1)))
7614 code = GET_CODE (operands[1]);
7615 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7616 XEXP (operands[1], 1), NULL_RTX);
7617 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7621 (define_insn "*cmov<mode>"
7622 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7623 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7624 [(match_operand 2 "cc_register" "") (const_int 0)])
7625 (match_operand:SDF 3 "s_register_operand"
7627 (match_operand:SDF 4 "s_register_operand"
7628 "<F_constraint>")))]
7629 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7632 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7639 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7644 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7650 [(set_attr "conds" "use")
7651 (set_attr "type" "fcsel")]
7654 (define_insn "*cmovhf"
7655 [(set (match_operand:HF 0 "s_register_operand" "=t")
7656 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7657 [(match_operand 2 "cc_register" "") (const_int 0)])
7658 (match_operand:HF 3 "s_register_operand" "t")
7659 (match_operand:HF 4 "s_register_operand" "t")))]
7660 "TARGET_VFP_FP16INST"
7663 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7670 return \"vsel%d1.f16\\t%0, %3, %4\";
7675 return \"vsel%D1.f16\\t%0, %4, %3\";
7681 [(set_attr "conds" "use")
7682 (set_attr "type" "fcsel")]
7685 (define_insn_and_split "*movsicc_insn"
7686 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7688 (match_operator 3 "arm_comparison_operator"
7689 [(match_operand 4 "cc_register" "") (const_int 0)])
7690 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7691 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7702 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7703 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7704 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7705 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7706 "&& reload_completed"
7709 enum rtx_code rev_code;
7713 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7715 gen_rtx_SET (operands[0], operands[1])));
7717 rev_code = GET_CODE (operands[3]);
7718 mode = GET_MODE (operands[4]);
7719 if (mode == CCFPmode || mode == CCFPEmode)
7720 rev_code = reverse_condition_maybe_unordered (rev_code);
7722 rev_code = reverse_condition (rev_code);
7724 rev_cond = gen_rtx_fmt_ee (rev_code,
7728 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7730 gen_rtx_SET (operands[0], operands[2])));
7733 [(set_attr "length" "4,4,4,4,8,8,8,8")
7734 (set_attr "conds" "use")
7735 (set_attr_alternative "type"
7736 [(if_then_else (match_operand 2 "const_int_operand" "")
7737 (const_string "mov_imm")
7738 (const_string "mov_reg"))
7739 (const_string "mvn_imm")
7740 (if_then_else (match_operand 1 "const_int_operand" "")
7741 (const_string "mov_imm")
7742 (const_string "mov_reg"))
7743 (const_string "mvn_imm")
7744 (const_string "multiple")
7745 (const_string "multiple")
7746 (const_string "multiple")
7747 (const_string "multiple")])]
7750 (define_insn "*movsfcc_soft_insn"
7751 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7752 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7753 [(match_operand 4 "cc_register" "") (const_int 0)])
7754 (match_operand:SF 1 "s_register_operand" "0,r")
7755 (match_operand:SF 2 "s_register_operand" "r,0")))]
7756 "TARGET_ARM && TARGET_SOFT_FLOAT"
7760 [(set_attr "conds" "use")
7761 (set_attr "type" "mov_reg")]
7765 ;; Jump and linkage insns
7767 (define_expand "jump"
7769 (label_ref (match_operand 0 "" "")))]
7774 (define_insn "*arm_jump"
7776 (label_ref (match_operand 0 "" "")))]
7780 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7782 arm_ccfsm_state += 2;
7785 return \"b%?\\t%l0\";
7788 [(set_attr "predicable" "yes")
7789 (set (attr "length")
7791 (and (match_test "TARGET_THUMB2")
7792 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7793 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7796 (set_attr "type" "branch")]
7799 (define_expand "call"
7800 [(parallel [(call (match_operand 0 "memory_operand")
7801 (match_operand 1 "general_operand"))
7802 (use (match_operand 2 "" ""))
7803 (clobber (reg:SI LR_REGNUM))])]
7808 tree addr = MEM_EXPR (operands[0]);
7810 /* In an untyped call, we can get NULL for operand 2. */
7811 if (operands[2] == NULL_RTX)
7812 operands[2] = const0_rtx;
7814 /* Decide if we should generate indirect calls by loading the
7815 32-bit address of the callee into a register before performing the
7817 callee = XEXP (operands[0], 0);
7818 if (GET_CODE (callee) == SYMBOL_REF
7819 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7821 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7823 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7824 /* Indirect call: set r9 with FDPIC value of callee. */
7825 XEXP (operands[0], 0)
7826 = arm_load_function_descriptor (XEXP (operands[0], 0));
7828 if (detect_cmse_nonsecure_call (addr))
7830 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7832 emit_call_insn (pat);
7836 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7837 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7840 /* Restore FDPIC register (r9) after call. */
7843 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7844 rtx initial_fdpic_reg
7845 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7847 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7848 initial_fdpic_reg));
7855 (define_insn "restore_pic_register_after_call"
7856 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7857 (unspec:SI [(match_dup 0)
7858 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7859 UNSPEC_PIC_RESTORE))]
7866 (define_expand "call_internal"
7867 [(parallel [(call (match_operand 0 "memory_operand")
7868 (match_operand 1 "general_operand"))
7869 (use (match_operand 2 "" ""))
7870 (clobber (reg:SI LR_REGNUM))])])
7872 (define_expand "nonsecure_call_internal"
7873 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7874 UNSPEC_NONSECURE_MEM)
7875 (match_operand 1 "general_operand"))
7876 (use (match_operand 2 "" ""))
7877 (clobber (reg:SI LR_REGNUM))])]
7882 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7883 gen_rtx_REG (SImode, R4_REGNUM),
7886 operands[0] = replace_equiv_address (operands[0], tmp);
7889 (define_insn "*call_reg_armv5"
7890 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7891 (match_operand 1 "" ""))
7892 (use (match_operand 2 "" ""))
7893 (clobber (reg:SI LR_REGNUM))]
7894 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7896 [(set_attr "type" "call")]
7899 (define_insn "*call_reg_arm"
7900 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7901 (match_operand 1 "" ""))
7902 (use (match_operand 2 "" ""))
7903 (clobber (reg:SI LR_REGNUM))]
7904 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7906 return output_call (operands);
7908 ;; length is worst case, normally it is only two
7909 [(set_attr "length" "12")
7910 (set_attr "type" "call")]
7914 (define_expand "call_value"
7915 [(parallel [(set (match_operand 0 "" "")
7916 (call (match_operand 1 "memory_operand")
7917 (match_operand 2 "general_operand")))
7918 (use (match_operand 3 "" ""))
7919 (clobber (reg:SI LR_REGNUM))])]
7924 tree addr = MEM_EXPR (operands[1]);
7926 /* In an untyped call, we can get NULL for operand 2. */
7927 if (operands[3] == 0)
7928 operands[3] = const0_rtx;
7930 /* Decide if we should generate indirect calls by loading the
7931 32-bit address of the callee into a register before performing the
7933 callee = XEXP (operands[1], 0);
7934 if (GET_CODE (callee) == SYMBOL_REF
7935 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7937 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7939 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7940 /* Indirect call: set r9 with FDPIC value of callee. */
7941 XEXP (operands[1], 0)
7942 = arm_load_function_descriptor (XEXP (operands[1], 0));
7944 if (detect_cmse_nonsecure_call (addr))
7946 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
7947 operands[2], operands[3]);
7948 emit_call_insn (pat);
7952 pat = gen_call_value_internal (operands[0], operands[1],
7953 operands[2], operands[3]);
7954 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
7957 /* Restore FDPIC register (r9) after call. */
7960 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7961 rtx initial_fdpic_reg
7962 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7964 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7965 initial_fdpic_reg));
7972 (define_expand "call_value_internal"
7973 [(parallel [(set (match_operand 0 "" "")
7974 (call (match_operand 1 "memory_operand")
7975 (match_operand 2 "general_operand")))
7976 (use (match_operand 3 "" ""))
7977 (clobber (reg:SI LR_REGNUM))])])
7979 (define_expand "nonsecure_call_value_internal"
7980 [(parallel [(set (match_operand 0 "" "")
7981 (call (unspec:SI [(match_operand 1 "memory_operand")]
7982 UNSPEC_NONSECURE_MEM)
7983 (match_operand 2 "general_operand")))
7984 (use (match_operand 3 "" ""))
7985 (clobber (reg:SI LR_REGNUM))])]
7990 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
7991 gen_rtx_REG (SImode, R4_REGNUM),
7994 operands[1] = replace_equiv_address (operands[1], tmp);
7997 (define_insn "*call_value_reg_armv5"
7998 [(set (match_operand 0 "" "")
7999 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8000 (match_operand 2 "" "")))
8001 (use (match_operand 3 "" ""))
8002 (clobber (reg:SI LR_REGNUM))]
8003 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8005 [(set_attr "type" "call")]
8008 (define_insn "*call_value_reg_arm"
8009 [(set (match_operand 0 "" "")
8010 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8011 (match_operand 2 "" "")))
8012 (use (match_operand 3 "" ""))
8013 (clobber (reg:SI LR_REGNUM))]
8014 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8016 return output_call (&operands[1]);
8018 [(set_attr "length" "12")
8019 (set_attr "type" "call")]
8022 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8023 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8025 (define_insn "*call_symbol"
8026 [(call (mem:SI (match_operand:SI 0 "" ""))
8027 (match_operand 1 "" ""))
8028 (use (match_operand 2 "" ""))
8029 (clobber (reg:SI LR_REGNUM))]
8031 && !SIBLING_CALL_P (insn)
8032 && (GET_CODE (operands[0]) == SYMBOL_REF)
8033 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8036 rtx op = operands[0];
8038 /* Switch mode now when possible. */
8039 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8040 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8041 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8043 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8045 [(set_attr "type" "call")]
8048 (define_insn "*call_value_symbol"
8049 [(set (match_operand 0 "" "")
8050 (call (mem:SI (match_operand:SI 1 "" ""))
8051 (match_operand:SI 2 "" "")))
8052 (use (match_operand 3 "" ""))
8053 (clobber (reg:SI LR_REGNUM))]
8055 && !SIBLING_CALL_P (insn)
8056 && (GET_CODE (operands[1]) == SYMBOL_REF)
8057 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8060 rtx op = operands[1];
8062 /* Switch mode now when possible. */
8063 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8064 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8065 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8067 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8069 [(set_attr "type" "call")]
8072 (define_expand "sibcall_internal"
8073 [(parallel [(call (match_operand 0 "memory_operand")
8074 (match_operand 1 "general_operand"))
8076 (use (match_operand 2 "" ""))])])
8078 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8079 (define_expand "sibcall"
8080 [(parallel [(call (match_operand 0 "memory_operand")
8081 (match_operand 1 "general_operand"))
8083 (use (match_operand 2 "" ""))])]
8089 if ((!REG_P (XEXP (operands[0], 0))
8090 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8091 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8092 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8093 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8095 if (operands[2] == NULL_RTX)
8096 operands[2] = const0_rtx;
8098 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8099 arm_emit_call_insn (pat, operands[0], true);
8104 (define_expand "sibcall_value_internal"
8105 [(parallel [(set (match_operand 0 "" "")
8106 (call (match_operand 1 "memory_operand")
8107 (match_operand 2 "general_operand")))
8109 (use (match_operand 3 "" ""))])])
8111 (define_expand "sibcall_value"
8112 [(parallel [(set (match_operand 0 "" "")
8113 (call (match_operand 1 "memory_operand")
8114 (match_operand 2 "general_operand")))
8116 (use (match_operand 3 "" ""))])]
8122 if ((!REG_P (XEXP (operands[1], 0))
8123 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8124 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8125 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8126 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8128 if (operands[3] == NULL_RTX)
8129 operands[3] = const0_rtx;
8131 pat = gen_sibcall_value_internal (operands[0], operands[1],
8132 operands[2], operands[3]);
8133 arm_emit_call_insn (pat, operands[1], true);
8138 (define_insn "*sibcall_insn"
8139 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8140 (match_operand 1 "" ""))
8142 (use (match_operand 2 "" ""))]
8143 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8145 if (which_alternative == 1)
8146 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8149 if (arm_arch5t || arm_arch4t)
8150 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8152 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8155 [(set_attr "type" "call")]
8158 (define_insn "*sibcall_value_insn"
8159 [(set (match_operand 0 "" "")
8160 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8161 (match_operand 2 "" "")))
8163 (use (match_operand 3 "" ""))]
8164 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8166 if (which_alternative == 1)
8167 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8170 if (arm_arch5t || arm_arch4t)
8171 return \"bx%?\\t%1\";
8173 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8176 [(set_attr "type" "call")]
8179 (define_expand "<return_str>return"
8181 "(TARGET_ARM || (TARGET_THUMB2
8182 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8183 && !IS_STACKALIGN (arm_current_func_type ())))
8184 <return_cond_false>"
8189 thumb2_expand_return (<return_simple_p>);
8196 ;; Often the return insn will be the same as loading from memory, so set attr
8197 (define_insn "*arm_return"
8199 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8202 if (arm_ccfsm_state == 2)
8204 arm_ccfsm_state += 2;
8207 return output_return_instruction (const_true_rtx, true, false, false);
8209 [(set_attr "type" "load_4")
8210 (set_attr "length" "12")
8211 (set_attr "predicable" "yes")]
8214 (define_insn "*cond_<return_str>return"
8216 (if_then_else (match_operator 0 "arm_comparison_operator"
8217 [(match_operand 1 "cc_register" "") (const_int 0)])
8220 "TARGET_ARM <return_cond_true>"
8223 if (arm_ccfsm_state == 2)
8225 arm_ccfsm_state += 2;
8228 return output_return_instruction (operands[0], true, false,
8231 [(set_attr "conds" "use")
8232 (set_attr "length" "12")
8233 (set_attr "type" "load_4")]
8236 (define_insn "*cond_<return_str>return_inverted"
8238 (if_then_else (match_operator 0 "arm_comparison_operator"
8239 [(match_operand 1 "cc_register" "") (const_int 0)])
8242 "TARGET_ARM <return_cond_true>"
8245 if (arm_ccfsm_state == 2)
8247 arm_ccfsm_state += 2;
8250 return output_return_instruction (operands[0], true, true,
8253 [(set_attr "conds" "use")
8254 (set_attr "length" "12")
8255 (set_attr "type" "load_4")]
8258 (define_insn "*arm_simple_return"
8263 if (arm_ccfsm_state == 2)
8265 arm_ccfsm_state += 2;
8268 return output_return_instruction (const_true_rtx, true, false, true);
8270 [(set_attr "type" "branch")
8271 (set_attr "length" "4")
8272 (set_attr "predicable" "yes")]
8275 ;; Generate a sequence of instructions to determine if the processor is
8276 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8279 (define_expand "return_addr_mask"
8281 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8283 (set (match_operand:SI 0 "s_register_operand")
8284 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8286 (const_int 67108860)))] ; 0x03fffffc
8289 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8292 (define_insn "*check_arch2"
8293 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8294 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8297 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8298 [(set_attr "length" "8")
8299 (set_attr "conds" "set")
8300 (set_attr "type" "multiple")]
8303 ;; Call subroutine returning any type.
8305 (define_expand "untyped_call"
8306 [(parallel [(call (match_operand 0 "" "")
8308 (match_operand 1 "" "")
8309 (match_operand 2 "" "")])]
8310 "TARGET_EITHER && !TARGET_FDPIC"
8314 rtx par = gen_rtx_PARALLEL (VOIDmode,
8315 rtvec_alloc (XVECLEN (operands[2], 0)));
8316 rtx addr = gen_reg_rtx (Pmode);
8320 emit_move_insn (addr, XEXP (operands[1], 0));
8321 mem = change_address (operands[1], BLKmode, addr);
8323 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8325 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8327 /* Default code only uses r0 as a return value, but we could
8328 be using anything up to 4 registers. */
8329 if (REGNO (src) == R0_REGNUM)
8330 src = gen_rtx_REG (TImode, R0_REGNUM);
8332 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8334 size += GET_MODE_SIZE (GET_MODE (src));
8337 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8341 for (i = 0; i < XVECLEN (par, 0); i++)
8343 HOST_WIDE_INT offset = 0;
8344 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8347 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8349 mem = change_address (mem, GET_MODE (reg), NULL);
8350 if (REGNO (reg) == R0_REGNUM)
8352 /* On thumb we have to use a write-back instruction. */
8353 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8354 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8355 size = TARGET_ARM ? 16 : 0;
8359 emit_move_insn (mem, reg);
8360 size = GET_MODE_SIZE (GET_MODE (reg));
8364 /* The optimizer does not know that the call sets the function value
8365 registers we stored in the result block. We avoid problems by
8366 claiming that all hard registers are used and clobbered at this
8368 emit_insn (gen_blockage ());
8374 (define_expand "untyped_return"
8375 [(match_operand:BLK 0 "memory_operand")
8376 (match_operand 1 "" "")]
8377 "TARGET_EITHER && !TARGET_FDPIC"
8381 rtx addr = gen_reg_rtx (Pmode);
8385 emit_move_insn (addr, XEXP (operands[0], 0));
8386 mem = change_address (operands[0], BLKmode, addr);
8388 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8390 HOST_WIDE_INT offset = 0;
8391 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8394 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8396 mem = change_address (mem, GET_MODE (reg), NULL);
8397 if (REGNO (reg) == R0_REGNUM)
8399 /* On thumb we have to use a write-back instruction. */
8400 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8401 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8402 size = TARGET_ARM ? 16 : 0;
8406 emit_move_insn (reg, mem);
8407 size = GET_MODE_SIZE (GET_MODE (reg));
8411 /* Emit USE insns before the return. */
8412 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8413 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8415 /* Construct the return. */
8416 expand_naked_return ();
8422 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8423 ;; all of memory. This blocks insns from being moved across this point.
8425 (define_insn "blockage"
8426 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8429 [(set_attr "length" "0")
8430 (set_attr "type" "block")]
8433 ;; Since we hard code r0 here use the 'o' constraint to prevent
8434 ;; provoking undefined behaviour in the hardware with putting out
8435 ;; auto-increment operations with potentially r0 as the base register.
8436 (define_insn "probe_stack"
8437 [(set (match_operand:SI 0 "memory_operand" "=o")
8438 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8441 [(set_attr "type" "store_4")
8442 (set_attr "predicable" "yes")]
8445 (define_insn "probe_stack_range"
8446 [(set (match_operand:SI 0 "register_operand" "=r")
8447 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8448 (match_operand:SI 2 "register_operand" "r")]
8449 VUNSPEC_PROBE_STACK_RANGE))]
8452 return output_probe_stack_range (operands[0], operands[2]);
8454 [(set_attr "type" "multiple")
8455 (set_attr "conds" "clob")]
8458 ;; Named patterns for stack smashing protection.
8459 (define_expand "stack_protect_combined_set"
8461 [(set (match_operand:SI 0 "memory_operand")
8462 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8464 (clobber (match_scratch:SI 2 ""))
8465 (clobber (match_scratch:SI 3 ""))])]
8470 ;; Use a separate insn from the above expand to be able to have the mem outside
8471 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8472 ;; try to reload the guard since we need to control how PIC access is done in
8473 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8474 ;; legitimize_pic_address ()).
8475 (define_insn_and_split "*stack_protect_combined_set_insn"
8476 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8477 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8479 (clobber (match_scratch:SI 2 "=&l,&r"))
8480 (clobber (match_scratch:SI 3 "=&l,&r"))]
8484 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8486 (clobber (match_dup 2))])]
8494 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8496 pic_reg = operands[3];
8498 /* Forces recomputing of GOT base now. */
8499 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8500 true /*compute_now*/);
8504 if (address_operand (operands[1], SImode))
8505 operands[2] = operands[1];
8508 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8509 emit_move_insn (operands[2], mem);
8513 [(set_attr "arch" "t1,32")]
8516 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8517 ;; canary value does not live beyond the life of this sequence.
8518 (define_insn "*stack_protect_set_insn"
8519 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8520 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8522 (clobber (match_dup 1))]
8525 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8526 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8527 [(set_attr "length" "8,12")
8528 (set_attr "conds" "clob,nocond")
8529 (set_attr "type" "multiple")
8530 (set_attr "arch" "t1,32")]
8533 (define_expand "stack_protect_combined_test"
8537 (eq (match_operand:SI 0 "memory_operand")
8538 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8540 (label_ref (match_operand 2))
8542 (clobber (match_scratch:SI 3 ""))
8543 (clobber (match_scratch:SI 4 ""))
8544 (clobber (reg:CC CC_REGNUM))])]
8549 ;; Use a separate insn from the above expand to be able to have the mem outside
8550 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8551 ;; try to reload the guard since we need to control how PIC access is done in
8552 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8553 ;; legitimize_pic_address ()).
8554 (define_insn_and_split "*stack_protect_combined_test_insn"
8557 (eq (match_operand:SI 0 "memory_operand" "m,m")
8558 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8560 (label_ref (match_operand 2))
8562 (clobber (match_scratch:SI 3 "=&l,&r"))
8563 (clobber (match_scratch:SI 4 "=&l,&r"))
8564 (clobber (reg:CC CC_REGNUM))]
8577 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8579 pic_reg = operands[4];
8581 /* Forces recomputing of GOT base now. */
8582 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8583 true /*compute_now*/);
8587 if (address_operand (operands[1], SImode))
8588 operands[3] = operands[1];
8591 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8592 emit_move_insn (operands[3], mem);
8597 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8599 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8600 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8601 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8605 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8607 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8608 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8613 [(set_attr "arch" "t1,32")]
8616 (define_insn "arm_stack_protect_test_insn"
8617 [(set (reg:CC_Z CC_REGNUM)
8618 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8619 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8622 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8623 (clobber (match_dup 2))]
8625 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8626 [(set_attr "length" "8,12")
8627 (set_attr "conds" "set")
8628 (set_attr "type" "multiple")
8629 (set_attr "arch" "t,32")]
8632 (define_expand "casesi"
8633 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8634 (match_operand:SI 1 "const_int_operand") ; lower bound
8635 (match_operand:SI 2 "const_int_operand") ; total range
8636 (match_operand:SI 3 "" "") ; table label
8637 (match_operand:SI 4 "" "")] ; Out of range label
8638 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8641 enum insn_code code;
8642 if (operands[1] != const0_rtx)
8644 rtx reg = gen_reg_rtx (SImode);
8646 emit_insn (gen_addsi3 (reg, operands[0],
8647 gen_int_mode (-INTVAL (operands[1]),
8653 code = CODE_FOR_arm_casesi_internal;
8654 else if (TARGET_THUMB1)
8655 code = CODE_FOR_thumb1_casesi_internal_pic;
8657 code = CODE_FOR_thumb2_casesi_internal_pic;
8659 code = CODE_FOR_thumb2_casesi_internal;
8661 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8662 operands[2] = force_reg (SImode, operands[2]);
8664 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8665 operands[3], operands[4]));
8670 ;; The USE in this pattern is needed to tell flow analysis that this is
8671 ;; a CASESI insn. It has no other purpose.
8672 (define_expand "arm_casesi_internal"
8673 [(parallel [(set (pc)
8675 (leu (match_operand:SI 0 "s_register_operand")
8676 (match_operand:SI 1 "arm_rhs_operand"))
8678 (label_ref:SI (match_operand 3 ""))))
8679 (clobber (reg:CC CC_REGNUM))
8680 (use (label_ref:SI (match_operand 2 "")))])]
8683 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8684 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8685 gen_rtx_LABEL_REF (SImode, operands[2]));
8686 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8687 MEM_READONLY_P (operands[4]) = 1;
8688 MEM_NOTRAP_P (operands[4]) = 1;
8691 (define_insn "*arm_casesi_internal"
8692 [(parallel [(set (pc)
8694 (leu (match_operand:SI 0 "s_register_operand" "r")
8695 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8696 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8697 (label_ref:SI (match_operand 2 "" ""))))
8698 (label_ref:SI (match_operand 3 "" ""))))
8699 (clobber (reg:CC CC_REGNUM))
8700 (use (label_ref:SI (match_dup 2)))])]
8704 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8705 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8707 [(set_attr "conds" "clob")
8708 (set_attr "length" "12")
8709 (set_attr "type" "multiple")]
8712 (define_expand "indirect_jump"
8714 (match_operand:SI 0 "s_register_operand"))]
8717 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8718 address and use bx. */
8722 tmp = gen_reg_rtx (SImode);
8723 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8729 ;; NB Never uses BX.
8730 (define_insn "*arm_indirect_jump"
8732 (match_operand:SI 0 "s_register_operand" "r"))]
8734 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8735 [(set_attr "predicable" "yes")
8736 (set_attr "type" "branch")]
8739 (define_insn "*load_indirect_jump"
8741 (match_operand:SI 0 "memory_operand" "m"))]
8743 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8744 [(set_attr "type" "load_4")
8745 (set_attr "pool_range" "4096")
8746 (set_attr "neg_pool_range" "4084")
8747 (set_attr "predicable" "yes")]
8757 [(set (attr "length")
8758 (if_then_else (eq_attr "is_thumb" "yes")
8761 (set_attr "type" "mov_reg")]
8765 [(trap_if (const_int 1) (const_int 0))]
8769 return \".inst\\t0xe7f000f0\";
8771 return \".inst\\t0xdeff\";
8773 [(set (attr "length")
8774 (if_then_else (eq_attr "is_thumb" "yes")
8777 (set_attr "type" "trap")
8778 (set_attr "conds" "unconditional")]
8782 ;; Patterns to allow combination of arithmetic, cond code and shifts
8784 (define_insn "*<arith_shift_insn>_multsi"
8785 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8787 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8788 (match_operand:SI 3 "power_of_two_operand" ""))
8789 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8791 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8792 [(set_attr "predicable" "yes")
8793 (set_attr "shift" "2")
8794 (set_attr "arch" "a,t2")
8795 (set_attr "type" "alu_shift_imm")])
8797 (define_insn "*<arith_shift_insn>_shiftsi"
8798 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8800 (match_operator:SI 2 "shift_nomul_operator"
8801 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8802 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8803 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8804 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8805 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8806 [(set_attr "predicable" "yes")
8807 (set_attr "shift" "3")
8808 (set_attr "arch" "a,t2,a")
8809 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8812 [(set (match_operand:SI 0 "s_register_operand" "")
8813 (match_operator:SI 1 "shiftable_operator"
8814 [(match_operator:SI 2 "shiftable_operator"
8815 [(match_operator:SI 3 "shift_operator"
8816 [(match_operand:SI 4 "s_register_operand" "")
8817 (match_operand:SI 5 "reg_or_int_operand" "")])
8818 (match_operand:SI 6 "s_register_operand" "")])
8819 (match_operand:SI 7 "arm_rhs_operand" "")]))
8820 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8823 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8826 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8829 (define_insn "*arith_shiftsi_compare0"
8830 [(set (reg:CC_NOOV CC_REGNUM)
8832 (match_operator:SI 1 "shiftable_operator"
8833 [(match_operator:SI 3 "shift_operator"
8834 [(match_operand:SI 4 "s_register_operand" "r,r")
8835 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8836 (match_operand:SI 2 "s_register_operand" "r,r")])
8838 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8839 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8842 "%i1s%?\\t%0, %2, %4%S3"
8843 [(set_attr "conds" "set")
8844 (set_attr "shift" "4")
8845 (set_attr "arch" "32,a")
8846 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8848 (define_insn "*arith_shiftsi_compare0_scratch"
8849 [(set (reg:CC_NOOV CC_REGNUM)
8851 (match_operator:SI 1 "shiftable_operator"
8852 [(match_operator:SI 3 "shift_operator"
8853 [(match_operand:SI 4 "s_register_operand" "r,r")
8854 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8855 (match_operand:SI 2 "s_register_operand" "r,r")])
8857 (clobber (match_scratch:SI 0 "=r,r"))]
8859 "%i1s%?\\t%0, %2, %4%S3"
8860 [(set_attr "conds" "set")
8861 (set_attr "shift" "4")
8862 (set_attr "arch" "32,a")
8863 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8865 (define_insn "*sub_shiftsi"
8866 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8867 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8868 (match_operator:SI 2 "shift_operator"
8869 [(match_operand:SI 3 "s_register_operand" "r,r")
8870 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8872 "sub%?\\t%0, %1, %3%S2"
8873 [(set_attr "predicable" "yes")
8874 (set_attr "predicable_short_it" "no")
8875 (set_attr "shift" "3")
8876 (set_attr "arch" "32,a")
8877 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8879 (define_insn "*sub_shiftsi_compare0"
8880 [(set (reg:CC_NOOV CC_REGNUM)
8882 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8883 (match_operator:SI 2 "shift_operator"
8884 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8885 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8887 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8888 (minus:SI (match_dup 1)
8889 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8891 "subs%?\\t%0, %1, %3%S2"
8892 [(set_attr "conds" "set")
8893 (set_attr "shift" "3")
8894 (set_attr "arch" "32,a,a")
8895 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8897 (define_insn "*sub_shiftsi_compare0_scratch"
8898 [(set (reg:CC_NOOV CC_REGNUM)
8900 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8901 (match_operator:SI 2 "shift_operator"
8902 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8903 (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8905 (clobber (match_scratch:SI 0 "=r,r,r"))]
8907 "subs%?\\t%0, %1, %3%S2"
8908 [(set_attr "conds" "set")
8909 (set_attr "shift" "3")
8910 (set_attr "arch" "32,a,a")
8911 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
8914 (define_insn_and_split "*and_scc"
8915 [(set (match_operand:SI 0 "s_register_operand" "=r")
8916 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8917 [(match_operand 2 "cc_register" "") (const_int 0)])
8918 (match_operand:SI 3 "s_register_operand" "r")))]
8920 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8921 "&& reload_completed"
8922 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8923 (cond_exec (match_dup 4) (set (match_dup 0)
8924 (and:SI (match_dup 3) (const_int 1))))]
8926 machine_mode mode = GET_MODE (operands[2]);
8927 enum rtx_code rc = GET_CODE (operands[1]);
8929 /* Note that operands[4] is the same as operands[1],
8930 but with VOIDmode as the result. */
8931 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8932 if (mode == CCFPmode || mode == CCFPEmode)
8933 rc = reverse_condition_maybe_unordered (rc);
8935 rc = reverse_condition (rc);
8936 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8938 [(set_attr "conds" "use")
8939 (set_attr "type" "multiple")
8940 (set_attr "length" "8")]
8943 (define_insn_and_split "*ior_scc"
8944 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8945 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
8946 [(match_operand 2 "cc_register" "") (const_int 0)])
8947 (match_operand:SI 3 "s_register_operand" "0,?r")))]
8952 "&& reload_completed
8953 && REGNO (operands [0]) != REGNO (operands[3])"
8954 ;; && which_alternative == 1
8955 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
8956 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
8957 (cond_exec (match_dup 4) (set (match_dup 0)
8958 (ior:SI (match_dup 3) (const_int 1))))]
8960 machine_mode mode = GET_MODE (operands[2]);
8961 enum rtx_code rc = GET_CODE (operands[1]);
8963 /* Note that operands[4] is the same as operands[1],
8964 but with VOIDmode as the result. */
8965 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8966 if (mode == CCFPmode || mode == CCFPEmode)
8967 rc = reverse_condition_maybe_unordered (rc);
8969 rc = reverse_condition (rc);
8970 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8972 [(set_attr "conds" "use")
8973 (set_attr "length" "4,8")
8974 (set_attr "type" "logic_imm,multiple")]
8977 ; A series of splitters for the compare_scc pattern below. Note that
8978 ; order is important.
8980 [(set (match_operand:SI 0 "s_register_operand" "")
8981 (lt:SI (match_operand:SI 1 "s_register_operand" "")
8983 (clobber (reg:CC CC_REGNUM))]
8984 "TARGET_32BIT && reload_completed"
8985 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
8988 [(set (match_operand:SI 0 "s_register_operand" "")
8989 (ge:SI (match_operand:SI 1 "s_register_operand" "")
8991 (clobber (reg:CC CC_REGNUM))]
8992 "TARGET_32BIT && reload_completed"
8993 [(set (match_dup 0) (not:SI (match_dup 1)))
8994 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
8997 [(set (match_operand:SI 0 "s_register_operand" "")
8998 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9000 (clobber (reg:CC CC_REGNUM))]
9001 "arm_arch5t && TARGET_32BIT"
9002 [(set (match_dup 0) (clz:SI (match_dup 1)))
9003 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9007 [(set (match_operand:SI 0 "s_register_operand" "")
9008 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9010 (clobber (reg:CC CC_REGNUM))]
9011 "TARGET_32BIT && reload_completed"
9013 [(set (reg:CC CC_REGNUM)
9014 (compare:CC (const_int 1) (match_dup 1)))
9016 (minus:SI (const_int 1) (match_dup 1)))])
9017 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9018 (set (match_dup 0) (const_int 0)))])
9021 [(set (match_operand:SI 0 "s_register_operand" "")
9022 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9023 (match_operand:SI 2 "const_int_operand" "")))
9024 (clobber (reg:CC CC_REGNUM))]
9025 "TARGET_32BIT && reload_completed"
9027 [(set (reg:CC CC_REGNUM)
9028 (compare:CC (match_dup 1) (match_dup 2)))
9029 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9030 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9031 (set (match_dup 0) (const_int 1)))]
9033 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9037 [(set (match_operand:SI 0 "s_register_operand" "")
9038 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9039 (match_operand:SI 2 "arm_add_operand" "")))
9040 (clobber (reg:CC CC_REGNUM))]
9041 "TARGET_32BIT && reload_completed"
9043 [(set (reg:CC_NOOV CC_REGNUM)
9044 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9046 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9047 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9048 (set (match_dup 0) (const_int 1)))])
9050 (define_insn_and_split "*compare_scc"
9051 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9052 (match_operator:SI 1 "arm_comparison_operator"
9053 [(match_operand:SI 2 "s_register_operand" "r,r")
9054 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9055 (clobber (reg:CC CC_REGNUM))]
9058 "&& reload_completed"
9059 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9060 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9061 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9064 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9065 operands[2], operands[3]);
9066 enum rtx_code rc = GET_CODE (operands[1]);
9068 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9070 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9071 if (mode == CCFPmode || mode == CCFPEmode)
9072 rc = reverse_condition_maybe_unordered (rc);
9074 rc = reverse_condition (rc);
9075 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9077 [(set_attr "type" "multiple")]
9080 ;; Attempt to improve the sequence generated by the compare_scc splitters
9081 ;; not to use conditional execution.
9083 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
9087 [(set (reg:CC CC_REGNUM)
9088 (compare:CC (match_operand:SI 1 "register_operand" "")
9090 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9091 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9092 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9093 (set (match_dup 0) (const_int 1)))]
9094 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9095 [(set (match_dup 0) (clz:SI (match_dup 1)))
9096 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9099 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
9103 [(set (reg:CC CC_REGNUM)
9104 (compare:CC (match_operand:SI 1 "register_operand" "")
9106 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9107 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9108 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9109 (set (match_dup 0) (const_int 1)))
9110 (match_scratch:SI 2 "r")]
9111 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9113 [(set (reg:CC CC_REGNUM)
9114 (compare:CC (const_int 0) (match_dup 1)))
9115 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9117 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9118 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9121 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9122 ;; sub Rd, Reg1, reg2
9126 [(set (reg:CC CC_REGNUM)
9127 (compare:CC (match_operand:SI 1 "register_operand" "")
9128 (match_operand:SI 2 "arm_rhs_operand" "")))
9129 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9130 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9131 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9132 (set (match_dup 0) (const_int 1)))]
9133 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9134 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9135 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9136 (set (match_dup 0) (clz:SI (match_dup 0)))
9137 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9141 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9142 ;; sub T1, Reg1, reg2
9146 [(set (reg:CC CC_REGNUM)
9147 (compare:CC (match_operand:SI 1 "register_operand" "")
9148 (match_operand:SI 2 "arm_rhs_operand" "")))
9149 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9150 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9151 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9152 (set (match_dup 0) (const_int 1)))
9153 (match_scratch:SI 3 "r")]
9154 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9155 [(set (match_dup 3) (match_dup 4))
9157 [(set (reg:CC CC_REGNUM)
9158 (compare:CC (const_int 0) (match_dup 3)))
9159 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9161 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9162 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9164 if (CONST_INT_P (operands[2]))
9165 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9167 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9170 (define_insn "*cond_move"
9171 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9172 (if_then_else:SI (match_operator 3 "equality_operator"
9173 [(match_operator 4 "arm_comparison_operator"
9174 [(match_operand 5 "cc_register" "") (const_int 0)])
9176 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9177 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9180 if (GET_CODE (operands[3]) == NE)
9182 if (which_alternative != 1)
9183 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9184 if (which_alternative != 0)
9185 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9188 if (which_alternative != 0)
9189 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9190 if (which_alternative != 1)
9191 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9194 [(set_attr "conds" "use")
9195 (set_attr_alternative "type"
9196 [(if_then_else (match_operand 2 "const_int_operand" "")
9197 (const_string "mov_imm")
9198 (const_string "mov_reg"))
9199 (if_then_else (match_operand 1 "const_int_operand" "")
9200 (const_string "mov_imm")
9201 (const_string "mov_reg"))
9202 (const_string "multiple")])
9203 (set_attr "length" "4,4,8")]
9206 (define_insn "*cond_arith"
9207 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9208 (match_operator:SI 5 "shiftable_operator"
9209 [(match_operator:SI 4 "arm_comparison_operator"
9210 [(match_operand:SI 2 "s_register_operand" "r,r")
9211 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9212 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9213 (clobber (reg:CC CC_REGNUM))]
9216 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9217 return \"%i5\\t%0, %1, %2, lsr #31\";
9219 output_asm_insn (\"cmp\\t%2, %3\", operands);
9220 if (GET_CODE (operands[5]) == AND)
9221 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9222 else if (GET_CODE (operands[5]) == MINUS)
9223 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9224 else if (which_alternative != 0)
9225 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9226 return \"%i5%d4\\t%0, %1, #1\";
9228 [(set_attr "conds" "clob")
9229 (set_attr "length" "12")
9230 (set_attr "type" "multiple")]
9233 (define_insn "*cond_sub"
9234 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9235 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9236 (match_operator:SI 4 "arm_comparison_operator"
9237 [(match_operand:SI 2 "s_register_operand" "r,r")
9238 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9239 (clobber (reg:CC CC_REGNUM))]
9242 output_asm_insn (\"cmp\\t%2, %3\", operands);
9243 if (which_alternative != 0)
9244 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9245 return \"sub%d4\\t%0, %1, #1\";
9247 [(set_attr "conds" "clob")
9248 (set_attr "length" "8,12")
9249 (set_attr "type" "multiple")]
9252 (define_insn "*cmp_ite0"
9253 [(set (match_operand 6 "dominant_cc_register" "")
9256 (match_operator 4 "arm_comparison_operator"
9257 [(match_operand:SI 0 "s_register_operand"
9258 "l,l,l,r,r,r,r,r,r")
9259 (match_operand:SI 1 "arm_add_operand"
9260 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9261 (match_operator:SI 5 "arm_comparison_operator"
9262 [(match_operand:SI 2 "s_register_operand"
9263 "l,r,r,l,l,r,r,r,r")
9264 (match_operand:SI 3 "arm_add_operand"
9265 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9271 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9273 {\"cmp%d5\\t%0, %1\",
9274 \"cmp%d4\\t%2, %3\"},
9275 {\"cmn%d5\\t%0, #%n1\",
9276 \"cmp%d4\\t%2, %3\"},
9277 {\"cmp%d5\\t%0, %1\",
9278 \"cmn%d4\\t%2, #%n3\"},
9279 {\"cmn%d5\\t%0, #%n1\",
9280 \"cmn%d4\\t%2, #%n3\"}
9282 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9287 \"cmn\\t%0, #%n1\"},
9288 {\"cmn\\t%2, #%n3\",
9290 {\"cmn\\t%2, #%n3\",
9293 static const char * const ite[2] =
9298 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9299 CMP_CMP, CMN_CMP, CMP_CMP,
9300 CMN_CMP, CMP_CMN, CMN_CMN};
9302 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9304 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9305 if (TARGET_THUMB2) {
9306 output_asm_insn (ite[swap], operands);
9308 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9311 [(set_attr "conds" "set")
9312 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9313 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9314 (set_attr "type" "multiple")
9315 (set_attr_alternative "length"
9321 (if_then_else (eq_attr "is_thumb" "no")
9324 (if_then_else (eq_attr "is_thumb" "no")
9327 (if_then_else (eq_attr "is_thumb" "no")
9330 (if_then_else (eq_attr "is_thumb" "no")
9335 (define_insn "*cmp_ite1"
9336 [(set (match_operand 6 "dominant_cc_register" "")
9339 (match_operator 4 "arm_comparison_operator"
9340 [(match_operand:SI 0 "s_register_operand"
9341 "l,l,l,r,r,r,r,r,r")
9342 (match_operand:SI 1 "arm_add_operand"
9343 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9344 (match_operator:SI 5 "arm_comparison_operator"
9345 [(match_operand:SI 2 "s_register_operand"
9346 "l,r,r,l,l,r,r,r,r")
9347 (match_operand:SI 3 "arm_add_operand"
9348 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9354 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9358 {\"cmn\\t%0, #%n1\",
9361 \"cmn\\t%2, #%n3\"},
9362 {\"cmn\\t%0, #%n1\",
9365 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9367 {\"cmp%d4\\t%2, %3\",
9368 \"cmp%D5\\t%0, %1\"},
9369 {\"cmp%d4\\t%2, %3\",
9370 \"cmn%D5\\t%0, #%n1\"},
9371 {\"cmn%d4\\t%2, #%n3\",
9372 \"cmp%D5\\t%0, %1\"},
9373 {\"cmn%d4\\t%2, #%n3\",
9374 \"cmn%D5\\t%0, #%n1\"}
9376 static const char * const ite[2] =
9381 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9382 CMP_CMP, CMN_CMP, CMP_CMP,
9383 CMN_CMP, CMP_CMN, CMN_CMN};
9385 comparison_dominates_p (GET_CODE (operands[5]),
9386 reverse_condition (GET_CODE (operands[4])));
9388 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9389 if (TARGET_THUMB2) {
9390 output_asm_insn (ite[swap], operands);
9392 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9395 [(set_attr "conds" "set")
9396 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9397 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9398 (set_attr_alternative "length"
9404 (if_then_else (eq_attr "is_thumb" "no")
9407 (if_then_else (eq_attr "is_thumb" "no")
9410 (if_then_else (eq_attr "is_thumb" "no")
9413 (if_then_else (eq_attr "is_thumb" "no")
9416 (set_attr "type" "multiple")]
9419 (define_insn "*cmp_and"
9420 [(set (match_operand 6 "dominant_cc_register" "")
9423 (match_operator 4 "arm_comparison_operator"
9424 [(match_operand:SI 0 "s_register_operand"
9425 "l,l,l,r,r,r,r,r,r,r")
9426 (match_operand:SI 1 "arm_add_operand"
9427 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9428 (match_operator:SI 5 "arm_comparison_operator"
9429 [(match_operand:SI 2 "s_register_operand"
9430 "l,r,r,l,l,r,r,r,r,r")
9431 (match_operand:SI 3 "arm_add_operand"
9432 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9437 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9439 {\"cmp%d5\\t%0, %1\",
9440 \"cmp%d4\\t%2, %3\"},
9441 {\"cmn%d5\\t%0, #%n1\",
9442 \"cmp%d4\\t%2, %3\"},
9443 {\"cmp%d5\\t%0, %1\",
9444 \"cmn%d4\\t%2, #%n3\"},
9445 {\"cmn%d5\\t%0, #%n1\",
9446 \"cmn%d4\\t%2, #%n3\"}
9448 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9453 \"cmn\\t%0, #%n1\"},
9454 {\"cmn\\t%2, #%n3\",
9456 {\"cmn\\t%2, #%n3\",
9459 static const char *const ite[2] =
9464 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9465 CMP_CMP, CMN_CMP, CMP_CMP,
9466 CMP_CMP, CMN_CMP, CMP_CMN,
9469 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9471 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9472 if (TARGET_THUMB2) {
9473 output_asm_insn (ite[swap], operands);
9475 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9478 [(set_attr "conds" "set")
9479 (set_attr "predicable" "no")
9480 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9481 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9482 (set_attr_alternative "length"
9489 (if_then_else (eq_attr "is_thumb" "no")
9492 (if_then_else (eq_attr "is_thumb" "no")
9495 (if_then_else (eq_attr "is_thumb" "no")
9498 (if_then_else (eq_attr "is_thumb" "no")
9501 (set_attr "type" "multiple")]
9504 (define_insn "*cmp_ior"
9505 [(set (match_operand 6 "dominant_cc_register" "")
9508 (match_operator 4 "arm_comparison_operator"
9509 [(match_operand:SI 0 "s_register_operand"
9510 "l,l,l,r,r,r,r,r,r,r")
9511 (match_operand:SI 1 "arm_add_operand"
9512 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9513 (match_operator:SI 5 "arm_comparison_operator"
9514 [(match_operand:SI 2 "s_register_operand"
9515 "l,r,r,l,l,r,r,r,r,r")
9516 (match_operand:SI 3 "arm_add_operand"
9517 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9522 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9526 {\"cmn\\t%0, #%n1\",
9529 \"cmn\\t%2, #%n3\"},
9530 {\"cmn\\t%0, #%n1\",
9533 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9535 {\"cmp%D4\\t%2, %3\",
9536 \"cmp%D5\\t%0, %1\"},
9537 {\"cmp%D4\\t%2, %3\",
9538 \"cmn%D5\\t%0, #%n1\"},
9539 {\"cmn%D4\\t%2, #%n3\",
9540 \"cmp%D5\\t%0, %1\"},
9541 {\"cmn%D4\\t%2, #%n3\",
9542 \"cmn%D5\\t%0, #%n1\"}
9544 static const char *const ite[2] =
9549 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9550 CMP_CMP, CMN_CMP, CMP_CMP,
9551 CMP_CMP, CMN_CMP, CMP_CMN,
9554 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9556 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9557 if (TARGET_THUMB2) {
9558 output_asm_insn (ite[swap], operands);
9560 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9564 [(set_attr "conds" "set")
9565 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9566 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9567 (set_attr_alternative "length"
9574 (if_then_else (eq_attr "is_thumb" "no")
9577 (if_then_else (eq_attr "is_thumb" "no")
9580 (if_then_else (eq_attr "is_thumb" "no")
9583 (if_then_else (eq_attr "is_thumb" "no")
9586 (set_attr "type" "multiple")]
9589 (define_insn_and_split "*ior_scc_scc"
9590 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9591 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9592 [(match_operand:SI 1 "s_register_operand" "l,r")
9593 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9594 (match_operator:SI 6 "arm_comparison_operator"
9595 [(match_operand:SI 4 "s_register_operand" "l,r")
9596 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9597 (clobber (reg:CC CC_REGNUM))]
9599 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9602 "TARGET_32BIT && reload_completed"
9606 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9607 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9609 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9611 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9614 [(set_attr "conds" "clob")
9615 (set_attr "enabled_for_short_it" "yes,no")
9616 (set_attr "length" "16")
9617 (set_attr "type" "multiple")]
9620 ; If the above pattern is followed by a CMP insn, then the compare is
9621 ; redundant, since we can rework the conditional instruction that follows.
9622 (define_insn_and_split "*ior_scc_scc_cmp"
9623 [(set (match_operand 0 "dominant_cc_register" "")
9624 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9625 [(match_operand:SI 1 "s_register_operand" "l,r")
9626 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9627 (match_operator:SI 6 "arm_comparison_operator"
9628 [(match_operand:SI 4 "s_register_operand" "l,r")
9629 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9631 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9632 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9633 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9636 "TARGET_32BIT && reload_completed"
9640 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9641 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9643 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9645 [(set_attr "conds" "set")
9646 (set_attr "enabled_for_short_it" "yes,no")
9647 (set_attr "length" "16")
9648 (set_attr "type" "multiple")]
9651 (define_insn_and_split "*and_scc_scc"
9652 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9653 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9654 [(match_operand:SI 1 "s_register_operand" "l,r")
9655 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9656 (match_operator:SI 6 "arm_comparison_operator"
9657 [(match_operand:SI 4 "s_register_operand" "l,r")
9658 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9659 (clobber (reg:CC CC_REGNUM))]
9661 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9664 "TARGET_32BIT && reload_completed
9665 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9670 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9671 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9673 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9675 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9678 [(set_attr "conds" "clob")
9679 (set_attr "enabled_for_short_it" "yes,no")
9680 (set_attr "length" "16")
9681 (set_attr "type" "multiple")]
9684 ; If the above pattern is followed by a CMP insn, then the compare is
9685 ; redundant, since we can rework the conditional instruction that follows.
9686 (define_insn_and_split "*and_scc_scc_cmp"
9687 [(set (match_operand 0 "dominant_cc_register" "")
9688 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9689 [(match_operand:SI 1 "s_register_operand" "l,r")
9690 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9691 (match_operator:SI 6 "arm_comparison_operator"
9692 [(match_operand:SI 4 "s_register_operand" "l,r")
9693 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9695 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9696 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9697 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9700 "TARGET_32BIT && reload_completed"
9704 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9705 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9707 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9709 [(set_attr "conds" "set")
9710 (set_attr "enabled_for_short_it" "yes,no")
9711 (set_attr "length" "16")
9712 (set_attr "type" "multiple")]
9715 ;; If there is no dominance in the comparison, then we can still save an
9716 ;; instruction in the AND case, since we can know that the second compare
9717 ;; need only zero the value if false (if true, then the value is already
9719 (define_insn_and_split "*and_scc_scc_nodom"
9720 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9721 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9722 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9723 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9724 (match_operator:SI 6 "arm_comparison_operator"
9725 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9726 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9727 (clobber (reg:CC CC_REGNUM))]
9729 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9732 "TARGET_32BIT && reload_completed"
9733 [(parallel [(set (match_dup 0)
9734 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9735 (clobber (reg:CC CC_REGNUM))])
9736 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9738 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9741 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9742 operands[4], operands[5]),
9744 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9746 [(set_attr "conds" "clob")
9747 (set_attr "length" "20")
9748 (set_attr "type" "multiple")]
9752 [(set (reg:CC_NOOV CC_REGNUM)
9753 (compare:CC_NOOV (ior:SI
9754 (and:SI (match_operand:SI 0 "s_register_operand" "")
9756 (match_operator:SI 1 "arm_comparison_operator"
9757 [(match_operand:SI 2 "s_register_operand" "")
9758 (match_operand:SI 3 "arm_add_operand" "")]))
9760 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9763 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9765 (set (reg:CC_NOOV CC_REGNUM)
9766 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9771 [(set (reg:CC_NOOV CC_REGNUM)
9772 (compare:CC_NOOV (ior:SI
9773 (match_operator:SI 1 "arm_comparison_operator"
9774 [(match_operand:SI 2 "s_register_operand" "")
9775 (match_operand:SI 3 "arm_add_operand" "")])
9776 (and:SI (match_operand:SI 0 "s_register_operand" "")
9779 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9782 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9784 (set (reg:CC_NOOV CC_REGNUM)
9785 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9788 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9790 (define_insn_and_split "*negscc"
9791 [(set (match_operand:SI 0 "s_register_operand" "=r")
9792 (neg:SI (match_operator 3 "arm_comparison_operator"
9793 [(match_operand:SI 1 "s_register_operand" "r")
9794 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9795 (clobber (reg:CC CC_REGNUM))]
9798 "&& reload_completed"
9801 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9803 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9805 /* Emit mov\\t%0, %1, asr #31 */
9806 emit_insn (gen_rtx_SET (operands[0],
9807 gen_rtx_ASHIFTRT (SImode,
9812 else if (GET_CODE (operands[3]) == NE)
9814 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9815 if (CONST_INT_P (operands[2]))
9816 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9817 gen_int_mode (-INTVAL (operands[2]),
9820 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9822 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9826 gen_rtx_SET (operands[0],
9832 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9833 emit_insn (gen_rtx_SET (cc_reg,
9834 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9835 enum rtx_code rc = GET_CODE (operands[3]);
9837 rc = reverse_condition (rc);
9838 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9843 gen_rtx_SET (operands[0], const0_rtx)));
9844 rc = GET_CODE (operands[3]);
9845 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9850 gen_rtx_SET (operands[0],
9856 [(set_attr "conds" "clob")
9857 (set_attr "length" "12")
9858 (set_attr "type" "multiple")]
9861 (define_insn_and_split "movcond_addsi"
9862 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9864 (match_operator 5 "comparison_operator"
9865 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9866 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9868 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9869 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9870 (clobber (reg:CC CC_REGNUM))]
9873 "&& reload_completed"
9874 [(set (reg:CC_NOOV CC_REGNUM)
9876 (plus:SI (match_dup 3)
9879 (set (match_dup 0) (match_dup 1))
9880 (cond_exec (match_dup 6)
9881 (set (match_dup 0) (match_dup 2)))]
9884 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9885 operands[3], operands[4]);
9886 enum rtx_code rc = GET_CODE (operands[5]);
9887 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9888 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9889 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9890 rc = reverse_condition (rc);
9892 std::swap (operands[1], operands[2]);
9894 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9897 [(set_attr "conds" "clob")
9898 (set_attr "enabled_for_short_it" "no,yes,yes")
9899 (set_attr "type" "multiple")]
9902 (define_insn "movcond"
9903 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9905 (match_operator 5 "arm_comparison_operator"
9906 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9907 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9908 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9909 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9910 (clobber (reg:CC CC_REGNUM))]
9913 if (GET_CODE (operands[5]) == LT
9914 && (operands[4] == const0_rtx))
9916 if (which_alternative != 1 && REG_P (operands[1]))
9918 if (operands[2] == const0_rtx)
9919 return \"and\\t%0, %1, %3, asr #31\";
9920 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9922 else if (which_alternative != 0 && REG_P (operands[2]))
9924 if (operands[1] == const0_rtx)
9925 return \"bic\\t%0, %2, %3, asr #31\";
9926 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9928 /* The only case that falls through to here is when both ops 1 & 2
9932 if (GET_CODE (operands[5]) == GE
9933 && (operands[4] == const0_rtx))
9935 if (which_alternative != 1 && REG_P (operands[1]))
9937 if (operands[2] == const0_rtx)
9938 return \"bic\\t%0, %1, %3, asr #31\";
9939 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9941 else if (which_alternative != 0 && REG_P (operands[2]))
9943 if (operands[1] == const0_rtx)
9944 return \"and\\t%0, %2, %3, asr #31\";
9945 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9947 /* The only case that falls through to here is when both ops 1 & 2
9950 if (CONST_INT_P (operands[4])
9951 && !const_ok_for_arm (INTVAL (operands[4])))
9952 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9954 output_asm_insn (\"cmp\\t%3, %4\", operands);
9955 if (which_alternative != 0)
9956 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9957 if (which_alternative != 1)
9958 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9961 [(set_attr "conds" "clob")
9962 (set_attr "length" "8,8,12")
9963 (set_attr "type" "multiple")]
9966 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9968 (define_insn "*ifcompare_plus_move"
9969 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9970 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9971 [(match_operand:SI 4 "s_register_operand" "r,r")
9972 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9974 (match_operand:SI 2 "s_register_operand" "r,r")
9975 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9976 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9977 (clobber (reg:CC CC_REGNUM))]
9980 [(set_attr "conds" "clob")
9981 (set_attr "length" "8,12")
9982 (set_attr "type" "multiple")]
9985 (define_insn "*if_plus_move"
9986 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9988 (match_operator 4 "arm_comparison_operator"
9989 [(match_operand 5 "cc_register" "") (const_int 0)])
9991 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9992 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9993 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9997 sub%d4\\t%0, %2, #%n3
9998 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9999 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10000 [(set_attr "conds" "use")
10001 (set_attr "length" "4,4,8,8")
10002 (set_attr_alternative "type"
10003 [(if_then_else (match_operand 3 "const_int_operand" "")
10004 (const_string "alu_imm" )
10005 (const_string "alu_sreg"))
10006 (const_string "alu_imm")
10007 (const_string "multiple")
10008 (const_string "multiple")])]
10011 (define_insn "*ifcompare_move_plus"
10012 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10013 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10014 [(match_operand:SI 4 "s_register_operand" "r,r")
10015 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10016 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10018 (match_operand:SI 2 "s_register_operand" "r,r")
10019 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10020 (clobber (reg:CC CC_REGNUM))]
10023 [(set_attr "conds" "clob")
10024 (set_attr "length" "8,12")
10025 (set_attr "type" "multiple")]
10028 (define_insn "*if_move_plus"
10029 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10031 (match_operator 4 "arm_comparison_operator"
10032 [(match_operand 5 "cc_register" "") (const_int 0)])
10033 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10035 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10036 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10039 add%D4\\t%0, %2, %3
10040 sub%D4\\t%0, %2, #%n3
10041 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10042 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10043 [(set_attr "conds" "use")
10044 (set_attr "length" "4,4,8,8")
10045 (set_attr_alternative "type"
10046 [(if_then_else (match_operand 3 "const_int_operand" "")
10047 (const_string "alu_imm" )
10048 (const_string "alu_sreg"))
10049 (const_string "alu_imm")
10050 (const_string "multiple")
10051 (const_string "multiple")])]
10054 (define_insn "*ifcompare_arith_arith"
10055 [(set (match_operand:SI 0 "s_register_operand" "=r")
10056 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10057 [(match_operand:SI 5 "s_register_operand" "r")
10058 (match_operand:SI 6 "arm_add_operand" "rIL")])
10059 (match_operator:SI 8 "shiftable_operator"
10060 [(match_operand:SI 1 "s_register_operand" "r")
10061 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10062 (match_operator:SI 7 "shiftable_operator"
10063 [(match_operand:SI 3 "s_register_operand" "r")
10064 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10065 (clobber (reg:CC CC_REGNUM))]
10068 [(set_attr "conds" "clob")
10069 (set_attr "length" "12")
10070 (set_attr "type" "multiple")]
10073 (define_insn "*if_arith_arith"
10074 [(set (match_operand:SI 0 "s_register_operand" "=r")
10075 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10076 [(match_operand 8 "cc_register" "") (const_int 0)])
10077 (match_operator:SI 6 "shiftable_operator"
10078 [(match_operand:SI 1 "s_register_operand" "r")
10079 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10080 (match_operator:SI 7 "shiftable_operator"
10081 [(match_operand:SI 3 "s_register_operand" "r")
10082 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10084 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10085 [(set_attr "conds" "use")
10086 (set_attr "length" "8")
10087 (set_attr "type" "multiple")]
10090 (define_insn "*ifcompare_arith_move"
10091 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10092 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10093 [(match_operand:SI 2 "s_register_operand" "r,r")
10094 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10095 (match_operator:SI 7 "shiftable_operator"
10096 [(match_operand:SI 4 "s_register_operand" "r,r")
10097 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10098 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10099 (clobber (reg:CC CC_REGNUM))]
10102 /* If we have an operation where (op x 0) is the identity operation and
10103 the conditional operator is LT or GE and we are comparing against zero and
10104 everything is in registers then we can do this in two instructions. */
10105 if (operands[3] == const0_rtx
10106 && GET_CODE (operands[7]) != AND
10107 && REG_P (operands[5])
10108 && REG_P (operands[1])
10109 && REGNO (operands[1]) == REGNO (operands[4])
10110 && REGNO (operands[4]) != REGNO (operands[0]))
10112 if (GET_CODE (operands[6]) == LT)
10113 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10114 else if (GET_CODE (operands[6]) == GE)
10115 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10117 if (CONST_INT_P (operands[3])
10118 && !const_ok_for_arm (INTVAL (operands[3])))
10119 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10121 output_asm_insn (\"cmp\\t%2, %3\", operands);
10122 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10123 if (which_alternative != 0)
10124 return \"mov%D6\\t%0, %1\";
10127 [(set_attr "conds" "clob")
10128 (set_attr "length" "8,12")
10129 (set_attr "type" "multiple")]
10132 (define_insn "*if_arith_move"
10133 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10134 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10135 [(match_operand 6 "cc_register" "") (const_int 0)])
10136 (match_operator:SI 5 "shiftable_operator"
10137 [(match_operand:SI 2 "s_register_operand" "r,r")
10138 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10139 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10142 %I5%d4\\t%0, %2, %3
10143 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10144 [(set_attr "conds" "use")
10145 (set_attr "length" "4,8")
10146 (set_attr_alternative "type"
10147 [(if_then_else (match_operand 3 "const_int_operand" "")
10148 (const_string "alu_shift_imm" )
10149 (const_string "alu_shift_reg"))
10150 (const_string "multiple")])]
10153 (define_insn "*ifcompare_move_arith"
10154 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10155 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10156 [(match_operand:SI 4 "s_register_operand" "r,r")
10157 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10158 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10159 (match_operator:SI 7 "shiftable_operator"
10160 [(match_operand:SI 2 "s_register_operand" "r,r")
10161 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10162 (clobber (reg:CC CC_REGNUM))]
10165 /* If we have an operation where (op x 0) is the identity operation and
10166 the conditional operator is LT or GE and we are comparing against zero and
10167 everything is in registers then we can do this in two instructions */
10168 if (operands[5] == const0_rtx
10169 && GET_CODE (operands[7]) != AND
10170 && REG_P (operands[3])
10171 && REG_P (operands[1])
10172 && REGNO (operands[1]) == REGNO (operands[2])
10173 && REGNO (operands[2]) != REGNO (operands[0]))
10175 if (GET_CODE (operands[6]) == GE)
10176 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10177 else if (GET_CODE (operands[6]) == LT)
10178 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10181 if (CONST_INT_P (operands[5])
10182 && !const_ok_for_arm (INTVAL (operands[5])))
10183 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10185 output_asm_insn (\"cmp\\t%4, %5\", operands);
10187 if (which_alternative != 0)
10188 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10189 return \"%I7%D6\\t%0, %2, %3\";
10191 [(set_attr "conds" "clob")
10192 (set_attr "length" "8,12")
10193 (set_attr "type" "multiple")]
10196 (define_insn "*if_move_arith"
10197 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10199 (match_operator 4 "arm_comparison_operator"
10200 [(match_operand 6 "cc_register" "") (const_int 0)])
10201 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10202 (match_operator:SI 5 "shiftable_operator"
10203 [(match_operand:SI 2 "s_register_operand" "r,r")
10204 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10207 %I5%D4\\t%0, %2, %3
10208 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10209 [(set_attr "conds" "use")
10210 (set_attr "length" "4,8")
10211 (set_attr_alternative "type"
10212 [(if_then_else (match_operand 3 "const_int_operand" "")
10213 (const_string "alu_shift_imm" )
10214 (const_string "alu_shift_reg"))
10215 (const_string "multiple")])]
10218 (define_insn "*ifcompare_move_not"
10219 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10221 (match_operator 5 "arm_comparison_operator"
10222 [(match_operand:SI 3 "s_register_operand" "r,r")
10223 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10224 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10226 (match_operand:SI 2 "s_register_operand" "r,r"))))
10227 (clobber (reg:CC CC_REGNUM))]
10230 [(set_attr "conds" "clob")
10231 (set_attr "length" "8,12")
10232 (set_attr "type" "multiple")]
10235 (define_insn "*if_move_not"
10236 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10238 (match_operator 4 "arm_comparison_operator"
10239 [(match_operand 3 "cc_register" "") (const_int 0)])
10240 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10241 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10245 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10246 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10247 [(set_attr "conds" "use")
10248 (set_attr "type" "mvn_reg")
10249 (set_attr "length" "4,8,8")
10250 (set_attr "type" "mvn_reg,multiple,multiple")]
10253 (define_insn "*ifcompare_not_move"
10254 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10256 (match_operator 5 "arm_comparison_operator"
10257 [(match_operand:SI 3 "s_register_operand" "r,r")
10258 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10260 (match_operand:SI 2 "s_register_operand" "r,r"))
10261 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10262 (clobber (reg:CC CC_REGNUM))]
10265 [(set_attr "conds" "clob")
10266 (set_attr "length" "8,12")
10267 (set_attr "type" "multiple")]
10270 (define_insn "*if_not_move"
10271 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10273 (match_operator 4 "arm_comparison_operator"
10274 [(match_operand 3 "cc_register" "") (const_int 0)])
10275 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10276 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10280 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10281 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10282 [(set_attr "conds" "use")
10283 (set_attr "type" "mvn_reg,multiple,multiple")
10284 (set_attr "length" "4,8,8")]
10287 (define_insn "*ifcompare_shift_move"
10288 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10290 (match_operator 6 "arm_comparison_operator"
10291 [(match_operand:SI 4 "s_register_operand" "r,r")
10292 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10293 (match_operator:SI 7 "shift_operator"
10294 [(match_operand:SI 2 "s_register_operand" "r,r")
10295 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10296 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10297 (clobber (reg:CC CC_REGNUM))]
10300 [(set_attr "conds" "clob")
10301 (set_attr "length" "8,12")
10302 (set_attr "type" "multiple")]
10305 (define_insn "*if_shift_move"
10306 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10308 (match_operator 5 "arm_comparison_operator"
10309 [(match_operand 6 "cc_register" "") (const_int 0)])
10310 (match_operator:SI 4 "shift_operator"
10311 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10312 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10313 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10317 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10318 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10319 [(set_attr "conds" "use")
10320 (set_attr "shift" "2")
10321 (set_attr "length" "4,8,8")
10322 (set_attr_alternative "type"
10323 [(if_then_else (match_operand 3 "const_int_operand" "")
10324 (const_string "mov_shift" )
10325 (const_string "mov_shift_reg"))
10326 (const_string "multiple")
10327 (const_string "multiple")])]
10330 (define_insn "*ifcompare_move_shift"
10331 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10333 (match_operator 6 "arm_comparison_operator"
10334 [(match_operand:SI 4 "s_register_operand" "r,r")
10335 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10336 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10337 (match_operator:SI 7 "shift_operator"
10338 [(match_operand:SI 2 "s_register_operand" "r,r")
10339 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10340 (clobber (reg:CC CC_REGNUM))]
10343 [(set_attr "conds" "clob")
10344 (set_attr "length" "8,12")
10345 (set_attr "type" "multiple")]
10348 (define_insn "*if_move_shift"
10349 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10351 (match_operator 5 "arm_comparison_operator"
10352 [(match_operand 6 "cc_register" "") (const_int 0)])
10353 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10354 (match_operator:SI 4 "shift_operator"
10355 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10356 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10360 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10361 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10362 [(set_attr "conds" "use")
10363 (set_attr "shift" "2")
10364 (set_attr "length" "4,8,8")
10365 (set_attr_alternative "type"
10366 [(if_then_else (match_operand 3 "const_int_operand" "")
10367 (const_string "mov_shift" )
10368 (const_string "mov_shift_reg"))
10369 (const_string "multiple")
10370 (const_string "multiple")])]
10373 (define_insn "*ifcompare_shift_shift"
10374 [(set (match_operand:SI 0 "s_register_operand" "=r")
10376 (match_operator 7 "arm_comparison_operator"
10377 [(match_operand:SI 5 "s_register_operand" "r")
10378 (match_operand:SI 6 "arm_add_operand" "rIL")])
10379 (match_operator:SI 8 "shift_operator"
10380 [(match_operand:SI 1 "s_register_operand" "r")
10381 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10382 (match_operator:SI 9 "shift_operator"
10383 [(match_operand:SI 3 "s_register_operand" "r")
10384 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10385 (clobber (reg:CC CC_REGNUM))]
10388 [(set_attr "conds" "clob")
10389 (set_attr "length" "12")
10390 (set_attr "type" "multiple")]
10393 (define_insn "*if_shift_shift"
10394 [(set (match_operand:SI 0 "s_register_operand" "=r")
10396 (match_operator 5 "arm_comparison_operator"
10397 [(match_operand 8 "cc_register" "") (const_int 0)])
10398 (match_operator:SI 6 "shift_operator"
10399 [(match_operand:SI 1 "s_register_operand" "r")
10400 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10401 (match_operator:SI 7 "shift_operator"
10402 [(match_operand:SI 3 "s_register_operand" "r")
10403 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10405 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10406 [(set_attr "conds" "use")
10407 (set_attr "shift" "1")
10408 (set_attr "length" "8")
10409 (set (attr "type") (if_then_else
10410 (and (match_operand 2 "const_int_operand" "")
10411 (match_operand 4 "const_int_operand" ""))
10412 (const_string "mov_shift")
10413 (const_string "mov_shift_reg")))]
10416 (define_insn "*ifcompare_not_arith"
10417 [(set (match_operand:SI 0 "s_register_operand" "=r")
10419 (match_operator 6 "arm_comparison_operator"
10420 [(match_operand:SI 4 "s_register_operand" "r")
10421 (match_operand:SI 5 "arm_add_operand" "rIL")])
10422 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10423 (match_operator:SI 7 "shiftable_operator"
10424 [(match_operand:SI 2 "s_register_operand" "r")
10425 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10426 (clobber (reg:CC CC_REGNUM))]
10429 [(set_attr "conds" "clob")
10430 (set_attr "length" "12")
10431 (set_attr "type" "multiple")]
10434 (define_insn "*if_not_arith"
10435 [(set (match_operand:SI 0 "s_register_operand" "=r")
10437 (match_operator 5 "arm_comparison_operator"
10438 [(match_operand 4 "cc_register" "") (const_int 0)])
10439 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10440 (match_operator:SI 6 "shiftable_operator"
10441 [(match_operand:SI 2 "s_register_operand" "r")
10442 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10444 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10445 [(set_attr "conds" "use")
10446 (set_attr "type" "mvn_reg")
10447 (set_attr "length" "8")]
10450 (define_insn "*ifcompare_arith_not"
10451 [(set (match_operand:SI 0 "s_register_operand" "=r")
10453 (match_operator 6 "arm_comparison_operator"
10454 [(match_operand:SI 4 "s_register_operand" "r")
10455 (match_operand:SI 5 "arm_add_operand" "rIL")])
10456 (match_operator:SI 7 "shiftable_operator"
10457 [(match_operand:SI 2 "s_register_operand" "r")
10458 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10459 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10460 (clobber (reg:CC CC_REGNUM))]
10463 [(set_attr "conds" "clob")
10464 (set_attr "length" "12")
10465 (set_attr "type" "multiple")]
10468 (define_insn "*if_arith_not"
10469 [(set (match_operand:SI 0 "s_register_operand" "=r")
10471 (match_operator 5 "arm_comparison_operator"
10472 [(match_operand 4 "cc_register" "") (const_int 0)])
10473 (match_operator:SI 6 "shiftable_operator"
10474 [(match_operand:SI 2 "s_register_operand" "r")
10475 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10476 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10478 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10479 [(set_attr "conds" "use")
10480 (set_attr "type" "multiple")
10481 (set_attr "length" "8")]
10484 (define_insn "*ifcompare_neg_move"
10485 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10487 (match_operator 5 "arm_comparison_operator"
10488 [(match_operand:SI 3 "s_register_operand" "r,r")
10489 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10490 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10491 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10492 (clobber (reg:CC CC_REGNUM))]
10495 [(set_attr "conds" "clob")
10496 (set_attr "length" "8,12")
10497 (set_attr "type" "multiple")]
10500 (define_insn_and_split "*if_neg_move"
10501 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10503 (match_operator 4 "arm_comparison_operator"
10504 [(match_operand 3 "cc_register" "") (const_int 0)])
10505 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10506 (match_operand:SI 1 "s_register_operand" "0,0")))]
10509 "&& reload_completed"
10510 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10511 (set (match_dup 0) (neg:SI (match_dup 2))))]
10513 [(set_attr "conds" "use")
10514 (set_attr "length" "4")
10515 (set_attr "arch" "t2,32")
10516 (set_attr "enabled_for_short_it" "yes,no")
10517 (set_attr "type" "logic_shift_imm")]
10520 (define_insn "*ifcompare_move_neg"
10521 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10523 (match_operator 5 "arm_comparison_operator"
10524 [(match_operand:SI 3 "s_register_operand" "r,r")
10525 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10526 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10527 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10528 (clobber (reg:CC CC_REGNUM))]
10531 [(set_attr "conds" "clob")
10532 (set_attr "length" "8,12")
10533 (set_attr "type" "multiple")]
10536 (define_insn_and_split "*if_move_neg"
10537 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10539 (match_operator 4 "arm_comparison_operator"
10540 [(match_operand 3 "cc_register" "") (const_int 0)])
10541 (match_operand:SI 1 "s_register_operand" "0,0")
10542 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10545 "&& reload_completed"
10546 [(cond_exec (match_dup 5)
10547 (set (match_dup 0) (neg:SI (match_dup 2))))]
10549 machine_mode mode = GET_MODE (operands[3]);
10550 rtx_code rc = GET_CODE (operands[4]);
10552 if (mode == CCFPmode || mode == CCFPEmode)
10553 rc = reverse_condition_maybe_unordered (rc);
10555 rc = reverse_condition (rc);
10557 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10559 [(set_attr "conds" "use")
10560 (set_attr "length" "4")
10561 (set_attr "arch" "t2,32")
10562 (set_attr "enabled_for_short_it" "yes,no")
10563 (set_attr "type" "logic_shift_imm")]
10566 (define_insn "*arith_adjacentmem"
10567 [(set (match_operand:SI 0 "s_register_operand" "=r")
10568 (match_operator:SI 1 "shiftable_operator"
10569 [(match_operand:SI 2 "memory_operand" "m")
10570 (match_operand:SI 3 "memory_operand" "m")]))
10571 (clobber (match_scratch:SI 4 "=r"))]
10572 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10578 HOST_WIDE_INT val1 = 0, val2 = 0;
10580 if (REGNO (operands[0]) > REGNO (operands[4]))
10582 ldm[1] = operands[4];
10583 ldm[2] = operands[0];
10587 ldm[1] = operands[0];
10588 ldm[2] = operands[4];
10591 base_reg = XEXP (operands[2], 0);
10593 if (!REG_P (base_reg))
10595 val1 = INTVAL (XEXP (base_reg, 1));
10596 base_reg = XEXP (base_reg, 0);
10599 if (!REG_P (XEXP (operands[3], 0)))
10600 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10602 arith[0] = operands[0];
10603 arith[3] = operands[1];
10617 if (val1 !=0 && val2 != 0)
10621 if (val1 == 4 || val2 == 4)
10622 /* Other val must be 8, since we know they are adjacent and neither
10624 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10625 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10627 ldm[0] = ops[0] = operands[4];
10629 ops[2] = GEN_INT (val1);
10630 output_add_immediate (ops);
10632 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10634 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10638 /* Offset is out of range for a single add, so use two ldr. */
10641 ops[2] = GEN_INT (val1);
10642 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10644 ops[2] = GEN_INT (val2);
10645 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10648 else if (val1 != 0)
10651 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10653 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10658 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10660 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10662 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10665 [(set_attr "length" "12")
10666 (set_attr "predicable" "yes")
10667 (set_attr "type" "load_4")]
10670 ; This pattern is never tried by combine, so do it as a peephole
10673 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10674 (match_operand:SI 1 "arm_general_register_operand" ""))
10675 (set (reg:CC CC_REGNUM)
10676 (compare:CC (match_dup 1) (const_int 0)))]
10678 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10679 (set (match_dup 0) (match_dup 1))])]
10684 [(set (match_operand:SI 0 "s_register_operand" "")
10685 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10687 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10688 [(match_operand:SI 3 "s_register_operand" "")
10689 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10690 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10692 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10693 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10698 ;; This split can be used because CC_Z mode implies that the following
10699 ;; branch will be an equality, or an unsigned inequality, so the sign
10700 ;; extension is not needed.
10703 [(set (reg:CC_Z CC_REGNUM)
10705 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10707 (match_operand 1 "const_int_operand" "")))
10708 (clobber (match_scratch:SI 2 ""))]
10710 && ((UINTVAL (operands[1]))
10711 == ((UINTVAL (operands[1])) >> 24) << 24)"
10712 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10713 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10715 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10718 ;; ??? Check the patterns above for Thumb-2 usefulness
10720 (define_expand "prologue"
10721 [(clobber (const_int 0))]
10724 arm_expand_prologue ();
10726 thumb1_expand_prologue ();
10731 (define_expand "epilogue"
10732 [(clobber (const_int 0))]
10735 if (crtl->calls_eh_return)
10736 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10739 thumb1_expand_epilogue ();
10740 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10741 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10743 else if (HAVE_return)
10745 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10746 no need for explicit testing again. */
10747 emit_jump_insn (gen_return ());
10749 else if (TARGET_32BIT)
10751 arm_expand_epilogue (true);
10757 ;; Note - although unspec_volatile's USE all hard registers,
10758 ;; USEs are ignored after relaod has completed. Thus we need
10759 ;; to add an unspec of the link register to ensure that flow
10760 ;; does not think that it is unused by the sibcall branch that
10761 ;; will replace the standard function epilogue.
10762 (define_expand "sibcall_epilogue"
10763 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10764 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10767 arm_expand_epilogue (false);
10772 (define_expand "eh_epilogue"
10773 [(use (match_operand:SI 0 "register_operand"))
10774 (use (match_operand:SI 1 "register_operand"))
10775 (use (match_operand:SI 2 "register_operand"))]
10779 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10780 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10782 rtx ra = gen_rtx_REG (Pmode, 2);
10784 emit_move_insn (ra, operands[2]);
10787 /* This is a hack -- we may have crystalized the function type too
10789 cfun->machine->func_type = 0;
10793 ;; This split is only used during output to reduce the number of patterns
10794 ;; that need assembler instructions adding to them. We allowed the setting
10795 ;; of the conditions to be implicit during rtl generation so that
10796 ;; the conditional compare patterns would work. However this conflicts to
10797 ;; some extent with the conditional data operations, so we have to split them
10800 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10801 ;; conditional execution sufficient?
10804 [(set (match_operand:SI 0 "s_register_operand" "")
10805 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10806 [(match_operand 2 "" "") (match_operand 3 "" "")])
10808 (match_operand 4 "" "")))
10809 (clobber (reg:CC CC_REGNUM))]
10810 "TARGET_ARM && reload_completed"
10811 [(set (match_dup 5) (match_dup 6))
10812 (cond_exec (match_dup 7)
10813 (set (match_dup 0) (match_dup 4)))]
10816 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10817 operands[2], operands[3]);
10818 enum rtx_code rc = GET_CODE (operands[1]);
10820 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10821 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10822 if (mode == CCFPmode || mode == CCFPEmode)
10823 rc = reverse_condition_maybe_unordered (rc);
10825 rc = reverse_condition (rc);
10827 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10832 [(set (match_operand:SI 0 "s_register_operand" "")
10833 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10834 [(match_operand 2 "" "") (match_operand 3 "" "")])
10835 (match_operand 4 "" "")
10837 (clobber (reg:CC CC_REGNUM))]
10838 "TARGET_ARM && reload_completed"
10839 [(set (match_dup 5) (match_dup 6))
10840 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10841 (set (match_dup 0) (match_dup 4)))]
10844 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10845 operands[2], operands[3]);
10847 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10848 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10853 [(set (match_operand:SI 0 "s_register_operand" "")
10854 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10855 [(match_operand 2 "" "") (match_operand 3 "" "")])
10856 (match_operand 4 "" "")
10857 (match_operand 5 "" "")))
10858 (clobber (reg:CC CC_REGNUM))]
10859 "TARGET_ARM && reload_completed"
10860 [(set (match_dup 6) (match_dup 7))
10861 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10862 (set (match_dup 0) (match_dup 4)))
10863 (cond_exec (match_dup 8)
10864 (set (match_dup 0) (match_dup 5)))]
10867 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10868 operands[2], operands[3]);
10869 enum rtx_code rc = GET_CODE (operands[1]);
10871 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10872 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10873 if (mode == CCFPmode || mode == CCFPEmode)
10874 rc = reverse_condition_maybe_unordered (rc);
10876 rc = reverse_condition (rc);
10878 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10883 [(set (match_operand:SI 0 "s_register_operand" "")
10884 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10885 [(match_operand:SI 2 "s_register_operand" "")
10886 (match_operand:SI 3 "arm_add_operand" "")])
10887 (match_operand:SI 4 "arm_rhs_operand" "")
10889 (match_operand:SI 5 "s_register_operand" ""))))
10890 (clobber (reg:CC CC_REGNUM))]
10891 "TARGET_ARM && reload_completed"
10892 [(set (match_dup 6) (match_dup 7))
10893 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10894 (set (match_dup 0) (match_dup 4)))
10895 (cond_exec (match_dup 8)
10896 (set (match_dup 0) (not:SI (match_dup 5))))]
10899 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10900 operands[2], operands[3]);
10901 enum rtx_code rc = GET_CODE (operands[1]);
10903 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10904 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10905 if (mode == CCFPmode || mode == CCFPEmode)
10906 rc = reverse_condition_maybe_unordered (rc);
10908 rc = reverse_condition (rc);
10910 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10914 (define_insn "*cond_move_not"
10915 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10916 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10917 [(match_operand 3 "cc_register" "") (const_int 0)])
10918 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10920 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10924 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10925 [(set_attr "conds" "use")
10926 (set_attr "type" "mvn_reg,multiple")
10927 (set_attr "length" "4,8")]
10930 ;; The next two patterns occur when an AND operation is followed by a
10931 ;; scc insn sequence
10933 (define_insn "*sign_extract_onebit"
10934 [(set (match_operand:SI 0 "s_register_operand" "=r")
10935 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10937 (match_operand:SI 2 "const_int_operand" "n")))
10938 (clobber (reg:CC CC_REGNUM))]
10941 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10942 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10943 return \"mvnne\\t%0, #0\";
10945 [(set_attr "conds" "clob")
10946 (set_attr "length" "8")
10947 (set_attr "type" "multiple")]
10950 (define_insn "*not_signextract_onebit"
10951 [(set (match_operand:SI 0 "s_register_operand" "=r")
10953 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10955 (match_operand:SI 2 "const_int_operand" "n"))))
10956 (clobber (reg:CC CC_REGNUM))]
10959 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10960 output_asm_insn (\"tst\\t%1, %2\", operands);
10961 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10962 return \"movne\\t%0, #0\";
10964 [(set_attr "conds" "clob")
10965 (set_attr "length" "12")
10966 (set_attr "type" "multiple")]
10968 ;; ??? The above patterns need auditing for Thumb-2
10970 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10971 ;; expressions. For simplicity, the first register is also in the unspec
10973 ;; To avoid the usage of GNU extension, the length attribute is computed
10974 ;; in a C function arm_attr_length_push_multi.
10975 (define_insn "*push_multi"
10976 [(match_parallel 2 "multi_register_push"
10977 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10978 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10979 UNSPEC_PUSH_MULT))])]
10983 int num_saves = XVECLEN (operands[2], 0);
10985 /* For the StrongARM at least it is faster to
10986 use STR to store only a single register.
10987 In Thumb mode always use push, and the assembler will pick
10988 something appropriate. */
10989 if (num_saves == 1 && TARGET_ARM)
10990 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10997 strcpy (pattern, \"push%?\\t{%1\");
10999 strcpy (pattern, \"push\\t{%1\");
11001 for (i = 1; i < num_saves; i++)
11003 strcat (pattern, \", %|\");
11005 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11008 strcat (pattern, \"}\");
11009 output_asm_insn (pattern, operands);
11014 [(set_attr "type" "store_16")
11015 (set (attr "length")
11016 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11019 (define_insn "stack_tie"
11020 [(set (mem:BLK (scratch))
11021 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11022 (match_operand:SI 1 "s_register_operand" "rk")]
11026 [(set_attr "length" "0")
11027 (set_attr "type" "block")]
11030 ;; Pop (as used in epilogue RTL)
11032 (define_insn "*load_multiple_with_writeback"
11033 [(match_parallel 0 "load_multiple_operation"
11034 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11035 (plus:SI (match_dup 1)
11036 (match_operand:SI 2 "const_int_I_operand" "I")))
11037 (set (match_operand:SI 3 "s_register_operand" "=rk")
11038 (mem:SI (match_dup 1)))
11040 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11043 arm_output_multireg_pop (operands, /*return_pc=*/false,
11044 /*cond=*/const_true_rtx,
11050 [(set_attr "type" "load_16")
11051 (set_attr "predicable" "yes")
11052 (set (attr "length")
11053 (symbol_ref "arm_attr_length_pop_multi (operands,
11054 /*return_pc=*/false,
11055 /*write_back_p=*/true)"))]
11058 ;; Pop with return (as used in epilogue RTL)
11060 ;; This instruction is generated when the registers are popped at the end of
11061 ;; epilogue. Here, instead of popping the value into LR and then generating
11062 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
11064 (define_insn "*pop_multiple_with_writeback_and_return"
11065 [(match_parallel 0 "pop_multiple_return"
11067 (set (match_operand:SI 1 "s_register_operand" "+rk")
11068 (plus:SI (match_dup 1)
11069 (match_operand:SI 2 "const_int_I_operand" "I")))
11070 (set (match_operand:SI 3 "s_register_operand" "=rk")
11071 (mem:SI (match_dup 1)))
11073 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11076 arm_output_multireg_pop (operands, /*return_pc=*/true,
11077 /*cond=*/const_true_rtx,
11083 [(set_attr "type" "load_16")
11084 (set_attr "predicable" "yes")
11085 (set (attr "length")
11086 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11087 /*write_back_p=*/true)"))]
11090 (define_insn "*pop_multiple_with_return"
11091 [(match_parallel 0 "pop_multiple_return"
11093 (set (match_operand:SI 2 "s_register_operand" "=rk")
11094 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11096 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11099 arm_output_multireg_pop (operands, /*return_pc=*/true,
11100 /*cond=*/const_true_rtx,
11106 [(set_attr "type" "load_16")
11107 (set_attr "predicable" "yes")
11108 (set (attr "length")
11109 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11110 /*write_back_p=*/false)"))]
11113 ;; Load into PC and return
11114 (define_insn "*ldr_with_return"
11116 (set (reg:SI PC_REGNUM)
11117 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11118 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11119 "ldr%?\t%|pc, [%0], #4"
11120 [(set_attr "type" "load_4")
11121 (set_attr "predicable" "yes")]
11123 ;; Pop for floating point registers (as used in epilogue RTL)
11124 (define_insn "*vfp_pop_multiple_with_writeback"
11125 [(match_parallel 0 "pop_multiple_fp"
11126 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11127 (plus:SI (match_dup 1)
11128 (match_operand:SI 2 "const_int_I_operand" "I")))
11129 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11130 (mem:DF (match_dup 1)))])]
11131 "TARGET_32BIT && TARGET_HARD_FLOAT"
11134 int num_regs = XVECLEN (operands[0], 0);
11137 strcpy (pattern, \"vldm\\t\");
11138 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11139 strcat (pattern, \"!, {\");
11140 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11141 strcat (pattern, \"%P0\");
11142 if ((num_regs - 1) > 1)
11144 strcat (pattern, \"-%P1\");
11145 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11148 strcat (pattern, \"}\");
11149 output_asm_insn (pattern, op_list);
11153 [(set_attr "type" "load_16")
11154 (set_attr "conds" "unconditional")
11155 (set_attr "predicable" "no")]
11158 ;; Special patterns for dealing with the constant pool
11160 (define_insn "align_4"
11161 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11164 assemble_align (32);
11167 [(set_attr "type" "no_insn")]
11170 (define_insn "align_8"
11171 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11174 assemble_align (64);
11177 [(set_attr "type" "no_insn")]
11180 (define_insn "consttable_end"
11181 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11184 making_const_table = FALSE;
11187 [(set_attr "type" "no_insn")]
11190 (define_insn "consttable_1"
11191 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11194 making_const_table = TRUE;
11195 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11196 assemble_zeros (3);
11199 [(set_attr "length" "4")
11200 (set_attr "type" "no_insn")]
11203 (define_insn "consttable_2"
11204 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11208 rtx x = operands[0];
11209 making_const_table = TRUE;
11210 switch (GET_MODE_CLASS (GET_MODE (x)))
11213 arm_emit_fp16_const (x);
11216 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11217 assemble_zeros (2);
11222 [(set_attr "length" "4")
11223 (set_attr "type" "no_insn")]
11226 (define_insn "consttable_4"
11227 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11231 rtx x = operands[0];
11232 making_const_table = TRUE;
11233 scalar_float_mode float_mode;
11234 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11235 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11238 /* XXX: Sometimes gcc does something really dumb and ends up with
11239 a HIGH in a constant pool entry, usually because it's trying to
11240 load into a VFP register. We know this will always be used in
11241 combination with a LO_SUM which ignores the high bits, so just
11242 strip off the HIGH. */
11243 if (GET_CODE (x) == HIGH)
11245 assemble_integer (x, 4, BITS_PER_WORD, 1);
11246 mark_symbol_refs_as_used (x);
11250 [(set_attr "length" "4")
11251 (set_attr "type" "no_insn")]
11254 (define_insn "consttable_8"
11255 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11259 making_const_table = TRUE;
11260 scalar_float_mode float_mode;
11261 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11262 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11263 float_mode, BITS_PER_WORD);
11265 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11268 [(set_attr "length" "8")
11269 (set_attr "type" "no_insn")]
11272 (define_insn "consttable_16"
11273 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11277 making_const_table = TRUE;
11278 scalar_float_mode float_mode;
11279 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11280 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11281 float_mode, BITS_PER_WORD);
11283 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11286 [(set_attr "length" "16")
11287 (set_attr "type" "no_insn")]
11290 ;; V5 Instructions,
11292 (define_insn "clzsi2"
11293 [(set (match_operand:SI 0 "s_register_operand" "=r")
11294 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11295 "TARGET_32BIT && arm_arch5t"
11297 [(set_attr "predicable" "yes")
11298 (set_attr "type" "clz")])
11300 (define_insn "rbitsi2"
11301 [(set (match_operand:SI 0 "s_register_operand" "=r")
11302 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11303 "TARGET_32BIT && arm_arch_thumb2"
11305 [(set_attr "predicable" "yes")
11306 (set_attr "type" "clz")])
11308 ;; Keep this as a CTZ expression until after reload and then split
11309 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11310 ;; to fold with any other expression.
11312 (define_insn_and_split "ctzsi2"
11313 [(set (match_operand:SI 0 "s_register_operand" "=r")
11314 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11315 "TARGET_32BIT && arm_arch_thumb2"
11317 "&& reload_completed"
11320 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11321 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11325 ;; V5E instructions.
11327 (define_insn "prefetch"
11328 [(prefetch (match_operand:SI 0 "address_operand" "p")
11329 (match_operand:SI 1 "" "")
11330 (match_operand:SI 2 "" ""))]
11331 "TARGET_32BIT && arm_arch5te"
11333 [(set_attr "type" "load_4")]
11336 ;; General predication pattern
11339 [(match_operator 0 "arm_comparison_operator"
11340 [(match_operand 1 "cc_register" "")
11343 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11345 [(set_attr "predicated" "yes")]
11348 (define_insn "force_register_use"
11349 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11352 [(set_attr "length" "0")
11353 (set_attr "type" "no_insn")]
11357 ;; Patterns for exception handling
11359 (define_expand "eh_return"
11360 [(use (match_operand 0 "general_operand"))]
11365 emit_insn (gen_arm_eh_return (operands[0]));
11367 emit_insn (gen_thumb_eh_return (operands[0]));
11372 ;; We can't expand this before we know where the link register is stored.
11373 (define_insn_and_split "arm_eh_return"
11374 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11376 (clobber (match_scratch:SI 1 "=&r"))]
11379 "&& reload_completed"
11383 arm_set_return_address (operands[0], operands[1]);
11391 (define_insn "load_tp_hard"
11392 [(set (match_operand:SI 0 "register_operand" "=r")
11393 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11395 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11396 [(set_attr "predicable" "yes")
11397 (set_attr "type" "mrs")]
11400 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11401 (define_insn "load_tp_soft_fdpic"
11402 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11403 (clobber (reg:SI FDPIC_REGNUM))
11404 (clobber (reg:SI LR_REGNUM))
11405 (clobber (reg:SI IP_REGNUM))
11406 (clobber (reg:CC CC_REGNUM))]
11407 "TARGET_SOFT_TP && TARGET_FDPIC"
11408 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11409 [(set_attr "conds" "clob")
11410 (set_attr "type" "branch")]
11413 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11414 (define_insn "load_tp_soft"
11415 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11416 (clobber (reg:SI LR_REGNUM))
11417 (clobber (reg:SI IP_REGNUM))
11418 (clobber (reg:CC CC_REGNUM))]
11419 "TARGET_SOFT_TP && !TARGET_FDPIC"
11420 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11421 [(set_attr "conds" "clob")
11422 (set_attr "type" "branch")]
11425 ;; tls descriptor call
11426 (define_insn "tlscall"
11427 [(set (reg:SI R0_REGNUM)
11428 (unspec:SI [(reg:SI R0_REGNUM)
11429 (match_operand:SI 0 "" "X")
11430 (match_operand 1 "" "")] UNSPEC_TLS))
11431 (clobber (reg:SI R1_REGNUM))
11432 (clobber (reg:SI LR_REGNUM))
11433 (clobber (reg:SI CC_REGNUM))]
11436 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11437 INTVAL (operands[1]));
11438 return "bl\\t%c0(tlscall)";
11440 [(set_attr "conds" "clob")
11441 (set_attr "length" "4")
11442 (set_attr "type" "branch")]
11445 ;; For thread pointer builtin
11446 (define_expand "get_thread_pointersi"
11447 [(match_operand:SI 0 "s_register_operand")]
11451 arm_load_tp (operands[0]);
11457 ;; We only care about the lower 16 bits of the constant
11458 ;; being inserted into the upper 16 bits of the register.
11459 (define_insn "*arm_movtas_ze"
11460 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11463 (match_operand:SI 1 "const_int_operand" ""))]
11468 [(set_attr "arch" "32,v8mb")
11469 (set_attr "predicable" "yes")
11470 (set_attr "length" "4")
11471 (set_attr "type" "alu_sreg")]
11474 (define_insn "*arm_rev"
11475 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11476 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11482 [(set_attr "arch" "t1,t2,32")
11483 (set_attr "length" "2,2,4")
11484 (set_attr "predicable" "no,yes,yes")
11485 (set_attr "type" "rev")]
11488 (define_expand "arm_legacy_rev"
11489 [(set (match_operand:SI 2 "s_register_operand")
11490 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11494 (lshiftrt:SI (match_dup 2)
11496 (set (match_operand:SI 3 "s_register_operand")
11497 (rotatert:SI (match_dup 1)
11500 (and:SI (match_dup 2)
11501 (const_int -65281)))
11502 (set (match_operand:SI 0 "s_register_operand")
11503 (xor:SI (match_dup 3)
11509 ;; Reuse temporaries to keep register pressure down.
11510 (define_expand "thumb_legacy_rev"
11511 [(set (match_operand:SI 2 "s_register_operand")
11512 (ashift:SI (match_operand:SI 1 "s_register_operand")
11514 (set (match_operand:SI 3 "s_register_operand")
11515 (lshiftrt:SI (match_dup 1)
11518 (ior:SI (match_dup 3)
11520 (set (match_operand:SI 4 "s_register_operand")
11522 (set (match_operand:SI 5 "s_register_operand")
11523 (rotatert:SI (match_dup 1)
11526 (ashift:SI (match_dup 5)
11529 (lshiftrt:SI (match_dup 5)
11532 (ior:SI (match_dup 5)
11535 (rotatert:SI (match_dup 5)
11537 (set (match_operand:SI 0 "s_register_operand")
11538 (ior:SI (match_dup 5)
11544 ;; ARM-specific expansion of signed mod by power of 2
11545 ;; using conditional negate.
11546 ;; For r0 % n where n is a power of 2 produce:
11548 ;; and r0, r0, #(n - 1)
11549 ;; and r1, r1, #(n - 1)
11550 ;; rsbpl r0, r1, #0
11552 (define_expand "modsi3"
11553 [(match_operand:SI 0 "register_operand")
11554 (match_operand:SI 1 "register_operand")
11555 (match_operand:SI 2 "const_int_operand")]
11558 HOST_WIDE_INT val = INTVAL (operands[2]);
11561 || exact_log2 (val) <= 0)
11564 rtx mask = GEN_INT (val - 1);
11566 /* In the special case of x0 % 2 we can do the even shorter:
11569 rsblt r0, r0, #0. */
11573 rtx cc_reg = arm_gen_compare_reg (LT,
11574 operands[1], const0_rtx, NULL_RTX);
11575 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11576 rtx masked = gen_reg_rtx (SImode);
11578 emit_insn (gen_andsi3 (masked, operands[1], mask));
11579 emit_move_insn (operands[0],
11580 gen_rtx_IF_THEN_ELSE (SImode, cond,
11581 gen_rtx_NEG (SImode,
11587 rtx neg_op = gen_reg_rtx (SImode);
11588 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11591 /* Extract the condition register and mode. */
11592 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11593 rtx cc_reg = SET_DEST (cmp);
11594 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11596 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11598 rtx masked_neg = gen_reg_rtx (SImode);
11599 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11601 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11602 during expand does not always work. Do an IF_THEN_ELSE instead. */
11603 emit_move_insn (operands[0],
11604 gen_rtx_IF_THEN_ELSE (SImode, cond,
11605 gen_rtx_NEG (SImode, masked_neg),
11613 (define_expand "bswapsi2"
11614 [(set (match_operand:SI 0 "s_register_operand")
11615 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11616 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11620 rtx op2 = gen_reg_rtx (SImode);
11621 rtx op3 = gen_reg_rtx (SImode);
11625 rtx op4 = gen_reg_rtx (SImode);
11626 rtx op5 = gen_reg_rtx (SImode);
11628 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11629 op2, op3, op4, op5));
11633 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11642 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11643 ;; and unsigned variants, respectively. For rev16, expose
11644 ;; byte-swapping in the lower 16 bits only.
11645 (define_insn "*arm_revsh"
11646 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11647 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11653 [(set_attr "arch" "t1,t2,32")
11654 (set_attr "length" "2,2,4")
11655 (set_attr "type" "rev")]
11658 (define_insn "*arm_rev16"
11659 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11660 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11666 [(set_attr "arch" "t1,t2,32")
11667 (set_attr "length" "2,2,4")
11668 (set_attr "type" "rev")]
11671 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11672 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11673 ;; each valid permutation.
11675 (define_insn "arm_rev16si2"
11676 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11677 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11679 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11680 (and:SI (lshiftrt:SI (match_dup 1)
11682 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11684 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11685 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11687 [(set_attr "arch" "t1,t2,32")
11688 (set_attr "length" "2,2,4")
11689 (set_attr "type" "rev")]
11692 (define_insn "arm_rev16si2_alt"
11693 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11694 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11696 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11697 (and:SI (ashift:SI (match_dup 1)
11699 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11701 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11702 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11704 [(set_attr "arch" "t1,t2,32")
11705 (set_attr "length" "2,2,4")
11706 (set_attr "type" "rev")]
11709 (define_expand "bswaphi2"
11710 [(set (match_operand:HI 0 "s_register_operand")
11711 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11716 ;; Patterns for LDRD/STRD in Thumb2 mode
11718 (define_insn "*thumb2_ldrd"
11719 [(set (match_operand:SI 0 "s_register_operand" "=r")
11720 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11721 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11722 (set (match_operand:SI 3 "s_register_operand" "=r")
11723 (mem:SI (plus:SI (match_dup 1)
11724 (match_operand:SI 4 "const_int_operand" ""))))]
11725 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11726 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11727 && (operands_ok_ldrd_strd (operands[0], operands[3],
11728 operands[1], INTVAL (operands[2]),
11730 "ldrd%?\t%0, %3, [%1, %2]"
11731 [(set_attr "type" "load_8")
11732 (set_attr "predicable" "yes")])
11734 (define_insn "*thumb2_ldrd_base"
11735 [(set (match_operand:SI 0 "s_register_operand" "=r")
11736 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11737 (set (match_operand:SI 2 "s_register_operand" "=r")
11738 (mem:SI (plus:SI (match_dup 1)
11740 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11741 && (operands_ok_ldrd_strd (operands[0], operands[2],
11742 operands[1], 0, false, true))"
11743 "ldrd%?\t%0, %2, [%1]"
11744 [(set_attr "type" "load_8")
11745 (set_attr "predicable" "yes")])
11747 (define_insn "*thumb2_ldrd_base_neg"
11748 [(set (match_operand:SI 0 "s_register_operand" "=r")
11749 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11751 (set (match_operand:SI 2 "s_register_operand" "=r")
11752 (mem:SI (match_dup 1)))]
11753 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11754 && (operands_ok_ldrd_strd (operands[0], operands[2],
11755 operands[1], -4, false, true))"
11756 "ldrd%?\t%0, %2, [%1, #-4]"
11757 [(set_attr "type" "load_8")
11758 (set_attr "predicable" "yes")])
11760 (define_insn "*thumb2_strd"
11761 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11762 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11763 (match_operand:SI 2 "s_register_operand" "r"))
11764 (set (mem:SI (plus:SI (match_dup 0)
11765 (match_operand:SI 3 "const_int_operand" "")))
11766 (match_operand:SI 4 "s_register_operand" "r"))]
11767 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11768 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11769 && (operands_ok_ldrd_strd (operands[2], operands[4],
11770 operands[0], INTVAL (operands[1]),
11772 "strd%?\t%2, %4, [%0, %1]"
11773 [(set_attr "type" "store_8")
11774 (set_attr "predicable" "yes")])
11776 (define_insn "*thumb2_strd_base"
11777 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11778 (match_operand:SI 1 "s_register_operand" "r"))
11779 (set (mem:SI (plus:SI (match_dup 0)
11781 (match_operand:SI 2 "s_register_operand" "r"))]
11782 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11783 && (operands_ok_ldrd_strd (operands[1], operands[2],
11784 operands[0], 0, false, false))"
11785 "strd%?\t%1, %2, [%0]"
11786 [(set_attr "type" "store_8")
11787 (set_attr "predicable" "yes")])
11789 (define_insn "*thumb2_strd_base_neg"
11790 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11792 (match_operand:SI 1 "s_register_operand" "r"))
11793 (set (mem:SI (match_dup 0))
11794 (match_operand:SI 2 "s_register_operand" "r"))]
11795 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11796 && (operands_ok_ldrd_strd (operands[1], operands[2],
11797 operands[0], -4, false, false))"
11798 "strd%?\t%1, %2, [%0, #-4]"
11799 [(set_attr "type" "store_8")
11800 (set_attr "predicable" "yes")])
11802 ;; ARMv8 CRC32 instructions.
11803 (define_insn "arm_<crc_variant>"
11804 [(set (match_operand:SI 0 "s_register_operand" "=r")
11805 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11806 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11809 "<crc_variant>\\t%0, %1, %2"
11810 [(set_attr "type" "crc")
11811 (set_attr "conds" "unconditional")]
11814 ;; Load the load/store double peephole optimizations.
11815 (include "ldrdstrd.md")
11817 ;; Load the load/store multiple patterns
11818 (include "ldmstm.md")
11820 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11821 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11822 ;; The operands are validated through the load_multiple_operation
11823 ;; match_parallel predicate rather than through constraints so enable it only
11825 (define_insn "*load_multiple"
11826 [(match_parallel 0 "load_multiple_operation"
11827 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11828 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11830 "TARGET_32BIT && reload_completed"
11833 arm_output_multireg_pop (operands, /*return_pc=*/false,
11834 /*cond=*/const_true_rtx,
11840 [(set_attr "predicable" "yes")]
11843 (define_expand "copysignsf3"
11844 [(match_operand:SF 0 "register_operand")
11845 (match_operand:SF 1 "register_operand")
11846 (match_operand:SF 2 "register_operand")]
11847 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11849 emit_move_insn (operands[0], operands[2]);
11850 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11851 GEN_INT (31), GEN_INT (0),
11852 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11857 (define_expand "copysigndf3"
11858 [(match_operand:DF 0 "register_operand")
11859 (match_operand:DF 1 "register_operand")
11860 (match_operand:DF 2 "register_operand")]
11861 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11863 rtx op0_low = gen_lowpart (SImode, operands[0]);
11864 rtx op0_high = gen_highpart (SImode, operands[0]);
11865 rtx op1_low = gen_lowpart (SImode, operands[1]);
11866 rtx op1_high = gen_highpart (SImode, operands[1]);
11867 rtx op2_high = gen_highpart (SImode, operands[2]);
11869 rtx scratch1 = gen_reg_rtx (SImode);
11870 rtx scratch2 = gen_reg_rtx (SImode);
11871 emit_move_insn (scratch1, op2_high);
11872 emit_move_insn (scratch2, op1_high);
11874 emit_insn(gen_rtx_SET(scratch1,
11875 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11876 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11877 emit_move_insn (op0_low, op1_low);
11878 emit_move_insn (op0_high, scratch2);
11884 ;; movmisalign patterns for HImode and SImode.
11885 (define_expand "movmisalign<mode>"
11886 [(match_operand:HSI 0 "general_operand")
11887 (match_operand:HSI 1 "general_operand")]
11890 /* This pattern is not permitted to fail during expansion: if both arguments
11891 are non-registers (e.g. memory := constant), force operand 1 into a
11893 rtx (* gen_unaligned_load)(rtx, rtx);
11894 rtx tmp_dest = operands[0];
11895 if (!s_register_operand (operands[0], <MODE>mode)
11896 && !s_register_operand (operands[1], <MODE>mode))
11897 operands[1] = force_reg (<MODE>mode, operands[1]);
11899 if (<MODE>mode == HImode)
11901 gen_unaligned_load = gen_unaligned_loadhiu;
11902 tmp_dest = gen_reg_rtx (SImode);
11905 gen_unaligned_load = gen_unaligned_loadsi;
11907 if (MEM_P (operands[1]))
11909 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11910 if (<MODE>mode == HImode)
11911 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11914 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11919 (define_insn "arm_<cdp>"
11920 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11921 (match_operand:SI 1 "immediate_operand" "n")
11922 (match_operand:SI 2 "immediate_operand" "n")
11923 (match_operand:SI 3 "immediate_operand" "n")
11924 (match_operand:SI 4 "immediate_operand" "n")
11925 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11926 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11928 arm_const_bounds (operands[0], 0, 16);
11929 arm_const_bounds (operands[1], 0, 16);
11930 arm_const_bounds (operands[2], 0, (1 << 5));
11931 arm_const_bounds (operands[3], 0, (1 << 5));
11932 arm_const_bounds (operands[4], 0, (1 << 5));
11933 arm_const_bounds (operands[5], 0, 8);
11934 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11936 [(set_attr "length" "4")
11937 (set_attr "type" "coproc")])
11939 (define_insn "*ldc"
11940 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11941 (match_operand:SI 1 "immediate_operand" "n")
11942 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
11943 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
11945 arm_const_bounds (operands[0], 0, 16);
11946 arm_const_bounds (operands[1], 0, (1 << 5));
11947 return "<ldc>\\tp%c0, CR%c1, %2";
11949 [(set_attr "length" "4")
11950 (set_attr "type" "coproc")])
11952 (define_insn "*stc"
11953 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11954 (match_operand:SI 1 "immediate_operand" "n")
11955 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
11956 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
11958 arm_const_bounds (operands[0], 0, 16);
11959 arm_const_bounds (operands[1], 0, (1 << 5));
11960 return "<stc>\\tp%c0, CR%c1, %2";
11962 [(set_attr "length" "4")
11963 (set_attr "type" "coproc")])
11965 (define_expand "arm_<ldc>"
11966 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11967 (match_operand:SI 1 "immediate_operand")
11968 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
11969 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
11971 (define_expand "arm_<stc>"
11972 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
11973 (match_operand:SI 1 "immediate_operand")
11974 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
11975 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
11977 (define_insn "arm_<mcr>"
11978 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11979 (match_operand:SI 1 "immediate_operand" "n")
11980 (match_operand:SI 2 "s_register_operand" "r")
11981 (match_operand:SI 3 "immediate_operand" "n")
11982 (match_operand:SI 4 "immediate_operand" "n")
11983 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
11984 (use (match_dup 2))]
11985 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
11987 arm_const_bounds (operands[0], 0, 16);
11988 arm_const_bounds (operands[1], 0, 8);
11989 arm_const_bounds (operands[3], 0, (1 << 5));
11990 arm_const_bounds (operands[4], 0, (1 << 5));
11991 arm_const_bounds (operands[5], 0, 8);
11992 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
11994 [(set_attr "length" "4")
11995 (set_attr "type" "coproc")])
11997 (define_insn "arm_<mrc>"
11998 [(set (match_operand:SI 0 "s_register_operand" "=r")
11999 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12000 (match_operand:SI 2 "immediate_operand" "n")
12001 (match_operand:SI 3 "immediate_operand" "n")
12002 (match_operand:SI 4 "immediate_operand" "n")
12003 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12004 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12006 arm_const_bounds (operands[1], 0, 16);
12007 arm_const_bounds (operands[2], 0, 8);
12008 arm_const_bounds (operands[3], 0, (1 << 5));
12009 arm_const_bounds (operands[4], 0, (1 << 5));
12010 arm_const_bounds (operands[5], 0, 8);
12011 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12013 [(set_attr "length" "4")
12014 (set_attr "type" "coproc")])
12016 (define_insn "arm_<mcrr>"
12017 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12018 (match_operand:SI 1 "immediate_operand" "n")
12019 (match_operand:DI 2 "s_register_operand" "r")
12020 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12021 (use (match_dup 2))]
12022 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12024 arm_const_bounds (operands[0], 0, 16);
12025 arm_const_bounds (operands[1], 0, 8);
12026 arm_const_bounds (operands[3], 0, (1 << 5));
12027 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12029 [(set_attr "length" "4")
12030 (set_attr "type" "coproc")])
12032 (define_insn "arm_<mrrc>"
12033 [(set (match_operand:DI 0 "s_register_operand" "=r")
12034 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12035 (match_operand:SI 2 "immediate_operand" "n")
12036 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12037 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12039 arm_const_bounds (operands[1], 0, 16);
12040 arm_const_bounds (operands[2], 0, 8);
12041 arm_const_bounds (operands[3], 0, (1 << 5));
12042 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12044 [(set_attr "length" "4")
12045 (set_attr "type" "coproc")])
12047 (define_expand "speculation_barrier"
12048 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12051 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12052 have a usable barrier (and probably don't need one in practice).
12053 But to be safe if such code is run on later architectures, call a
12054 helper function in libgcc that will do the thing for the active
12056 if (!(arm_arch7 || arm_arch8))
12058 arm_emit_speculation_barrier_function ();
12064 ;; Generate a hard speculation barrier when we have not enabled speculation
12066 (define_insn "*speculation_barrier_insn"
12067 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12068 "arm_arch7 || arm_arch8"
12070 [(set_attr "type" "block")
12071 (set_attr "length" "8")]
12074 ;; Vector bits common to IWMMXT and Neon
12075 (include "vec-common.md")
12076 ;; Load the Intel Wireless Multimedia Extension patterns
12077 (include "iwmmxt.md")
12078 ;; Load the VFP co-processor patterns
12080 ;; Thumb-1 patterns
12081 (include "thumb1.md")
12082 ;; Thumb-2 patterns
12083 (include "thumb2.md")
12085 (include "neon.md")
12087 (include "crypto.md")
12088 ;; Synchronization Primitives
12089 (include "sync.md")
12090 ;; Fixed-point patterns
12091 (include "arm-fixed.md")