1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling, set from the arm_ld_sched variable
264 ; initialized by arm_option_override()
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally exectuted instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
491 (define_expand "addvsi4"
492 [(match_operand:SI 0 "s_register_operand")
493 (match_operand:SI 1 "s_register_operand")
494 (match_operand:SI 2 "arm_add_operand")
495 (match_operand 3 "")]
498 if (CONST_INT_P (operands[2]))
499 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
501 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
507 (define_expand "addvdi4"
508 [(match_operand:DI 0 "s_register_operand")
509 (match_operand:DI 1 "s_register_operand")
510 (match_operand:DI 2 "reg_or_int_operand")
511 (match_operand 3 "")]
514 rtx lo_result, hi_result;
515 rtx lo_op1, hi_op1, lo_op2, hi_op2;
516 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
518 lo_result = gen_lowpart (SImode, operands[0]);
519 hi_result = gen_highpart (SImode, operands[0]);
521 if (lo_op2 == const0_rtx)
523 emit_move_insn (lo_result, lo_op1);
524 if (!arm_add_operand (hi_op2, SImode))
525 hi_op2 = force_reg (SImode, hi_op2);
527 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
531 if (!arm_add_operand (lo_op2, SImode))
532 lo_op2 = force_reg (SImode, lo_op2);
533 if (!arm_not_operand (hi_op2, SImode))
534 hi_op2 = force_reg (SImode, hi_op2);
536 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
538 if (hi_op2 == const0_rtx)
539 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
540 else if (CONST_INT_P (hi_op2))
541 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
543 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
545 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
551 (define_expand "addsi3_cin_vout_reg"
556 (plus:DI (match_dup 4)
557 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
558 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
559 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
561 (set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
566 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
567 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
568 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
569 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
573 (define_insn "*addsi3_cin_vout_reg_insn"
574 [(set (reg:CC_V CC_REGNUM)
578 (match_operand:DI 3 "arm_carry_operation" "")
579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
582 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
585 (set (match_operand:SI 0 "s_register_operand" "=l,r")
586 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
592 [(set_attr "type" "alus_sreg")
593 (set_attr "arch" "t2,*")
594 (set_attr "length" "2,4")]
597 (define_expand "addsi3_cin_vout_imm"
602 (plus:DI (match_dup 4)
603 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
605 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
607 (set (match_operand:SI 0 "s_register_operand")
608 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
609 (match_operand 2 "arm_adcimm_operand")))])]
612 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
613 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
614 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
615 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
619 (define_insn "*addsi3_cin_vout_imm_insn"
620 [(set (reg:CC_V CC_REGNUM)
624 (match_operand:DI 3 "arm_carry_operation" "")
625 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
626 (match_operand 2 "arm_adcimm_operand" "I,K"))
628 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
631 (set (match_operand:SI 0 "s_register_operand" "=r,r")
632 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
637 sbcs%?\\t%0, %1, #%B2"
638 [(set_attr "type" "alus_imm")]
641 (define_expand "addsi3_cin_vout_0"
645 (plus:DI (match_dup 3)
646 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
647 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
648 (set (match_operand:SI 0 "s_register_operand")
649 (plus:SI (match_dup 4) (match_dup 1)))])]
652 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
659 (define_insn "*addsi3_cin_vout_0_insn"
660 [(set (reg:CC_V CC_REGNUM)
663 (match_operand:DI 2 "arm_carry_operation" "")
664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
665 (sign_extend:DI (plus:SI
666 (match_operand:SI 3 "arm_carry_operation" "")
668 (set (match_operand:SI 0 "s_register_operand" "=r")
669 (plus:SI (match_dup 3) (match_dup 1)))]
671 "adcs%?\\t%0, %1, #0"
672 [(set_attr "type" "alus_imm")]
675 (define_expand "uaddvsi4"
676 [(match_operand:SI 0 "s_register_operand")
677 (match_operand:SI 1 "s_register_operand")
678 (match_operand:SI 2 "arm_add_operand")
679 (match_operand 3 "")]
682 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
683 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
688 (define_expand "uaddvdi4"
689 [(match_operand:DI 0 "s_register_operand")
690 (match_operand:DI 1 "s_register_operand")
691 (match_operand:DI 2 "reg_or_int_operand")
692 (match_operand 3 "")]
695 rtx lo_result, hi_result;
696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
697 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
699 lo_result = gen_lowpart (SImode, operands[0]);
700 hi_result = gen_highpart (SImode, operands[0]);
702 if (lo_op2 == const0_rtx)
704 emit_move_insn (lo_result, lo_op1);
705 if (!arm_add_operand (hi_op2, SImode))
706 hi_op2 = force_reg (SImode, hi_op2);
708 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
712 if (!arm_add_operand (lo_op2, SImode))
713 lo_op2 = force_reg (SImode, lo_op2);
714 if (!arm_not_operand (hi_op2, SImode))
715 hi_op2 = force_reg (SImode, hi_op2);
717 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
719 if (hi_op2 == const0_rtx)
720 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
721 else if (CONST_INT_P (hi_op2))
722 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
724 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
726 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
732 (define_expand "addsi3_cin_cout_reg"
737 (plus:DI (match_dup 4)
738 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
739 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
740 (const_int 4294967296)))
741 (set (match_operand:SI 0 "s_register_operand")
742 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
746 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
747 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
748 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
749 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
753 (define_insn "*addsi3_cin_cout_reg_insn"
754 [(set (reg:CC_ADC CC_REGNUM)
758 (match_operand:DI 3 "arm_carry_operation" "")
759 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
760 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
761 (const_int 4294967296)))
762 (set (match_operand:SI 0 "s_register_operand" "=l,r")
763 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
770 [(set_attr "type" "alus_sreg")
771 (set_attr "arch" "t2,*")
772 (set_attr "length" "2,4")]
775 (define_expand "addsi3_cin_cout_imm"
780 (plus:DI (match_dup 4)
781 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
783 (const_int 4294967296)))
784 (set (match_operand:SI 0 "s_register_operand")
785 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 (match_operand:SI 2 "arm_adcimm_operand")))])]
789 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
790 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
791 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
792 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
797 (define_insn "*addsi3_cin_cout_imm_insn"
798 [(set (reg:CC_ADC CC_REGNUM)
802 (match_operand:DI 3 "arm_carry_operation" "")
803 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
804 (match_operand:DI 5 "const_int_operand" "n,n"))
805 (const_int 4294967296)))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
809 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
811 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
814 sbcs%?\\t%0, %1, #%B2"
815 [(set_attr "type" "alus_imm")]
818 (define_expand "addsi3_cin_cout_0"
822 (plus:DI (match_dup 3)
823 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
824 (const_int 4294967296)))
825 (set (match_operand:SI 0 "s_register_operand")
826 (plus:SI (match_dup 4) (match_dup 1)))])]
829 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
836 (define_insn "*addsi3_cin_cout_0_insn"
837 [(set (reg:CC_ADC CC_REGNUM)
840 (match_operand:DI 2 "arm_carry_operation" "")
841 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
842 (const_int 4294967296)))
843 (set (match_operand:SI 0 "s_register_operand" "=r")
844 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
846 "adcs%?\\t%0, %1, #0"
847 [(set_attr "type" "alus_imm")]
850 (define_expand "addsi3"
851 [(set (match_operand:SI 0 "s_register_operand")
852 (plus:SI (match_operand:SI 1 "s_register_operand")
853 (match_operand:SI 2 "reg_or_int_operand")))]
856 if (TARGET_32BIT && CONST_INT_P (operands[2]))
858 arm_split_constant (PLUS, SImode, NULL_RTX,
859 INTVAL (operands[2]), operands[0], operands[1],
860 optimize && can_create_pseudo_p ());
866 ; If there is a scratch available, this will be faster than synthesizing the
869 [(match_scratch:SI 3 "r")
870 (set (match_operand:SI 0 "arm_general_register_operand" "")
871 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
872 (match_operand:SI 2 "const_int_operand" "")))]
874 !(const_ok_for_arm (INTVAL (operands[2]))
875 || const_ok_for_arm (-INTVAL (operands[2])))
876 && const_ok_for_arm (~INTVAL (operands[2]))"
877 [(set (match_dup 3) (match_dup 2))
878 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
882 ;; The r/r/k alternative is required when reloading the address
883 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
884 ;; put the duplicated register first, and not try the commutative version.
885 (define_insn_and_split "*arm_addsi3"
886 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
887 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
888 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
904 subw%?\\t%0, %1, #%n2
905 subw%?\\t%0, %1, #%n2
908 && CONST_INT_P (operands[2])
909 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
910 && (reload_completed || !arm_eliminable_register (operands[1]))"
911 [(clobber (const_int 0))]
913 arm_split_constant (PLUS, SImode, curr_insn,
914 INTVAL (operands[2]), operands[0],
918 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
919 (set_attr "predicable" "yes")
920 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
921 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
922 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
923 (const_string "alu_imm")
924 (const_string "alu_sreg")))
928 (define_insn "addsi3_compareV_reg"
929 [(set (reg:CC_V CC_REGNUM)
932 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
933 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
934 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
935 (set (match_operand:SI 0 "register_operand" "=l,r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
938 "adds%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "arch" "t2,t2,*")
941 (set_attr "length" "2,2,4")
942 (set_attr "type" "alus_sreg")]
945 (define_insn "*addsi3_compareV_reg_nosum"
946 [(set (reg:CC_V CC_REGNUM)
949 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
950 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
951 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
954 [(set_attr "conds" "set")
955 (set_attr "arch" "t2,*")
956 (set_attr "length" "2,4")
957 (set_attr "type" "alus_sreg")]
960 (define_insn "subvsi3_intmin"
961 [(set (reg:CC_V CC_REGNUM)
965 (match_operand:SI 1 "register_operand" "r"))
966 (const_int 2147483648))
967 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
968 (set (match_operand:SI 0 "register_operand" "=r")
969 (plus:SI (match_dup 1) (const_int -2147483648)))]
971 "subs%?\\t%0, %1, #-2147483648"
972 [(set_attr "conds" "set")
973 (set_attr "type" "alus_imm")]
976 (define_insn "addsi3_compareV_imm"
977 [(set (reg:CC_V CC_REGNUM)
981 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
982 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
983 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
984 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
985 (plus:SI (match_dup 1) (match_dup 2)))]
987 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
991 subs%?\\t%0, %1, #%n2
992 subs%?\\t%0, %0, #%n2
994 subs%?\\t%0, %1, #%n2"
995 [(set_attr "conds" "set")
996 (set_attr "arch" "t2,t2,t2,t2,*,*")
997 (set_attr "length" "2,2,2,2,4,4")
998 (set_attr "type" "alus_imm")]
1001 (define_insn "addsi3_compareV_imm_nosum"
1002 [(set (reg:CC_V CC_REGNUM)
1006 (match_operand:SI 0 "register_operand" "l,r,r"))
1007 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1008 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1010 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1015 [(set_attr "conds" "set")
1016 (set_attr "arch" "t2,*,*")
1017 (set_attr "length" "2,4,4")
1018 (set_attr "type" "alus_imm")]
1021 ;; We can handle more constants efficently if we can clobber either a scratch
1022 ;; or the other source operand. We deliberately leave this late as in
1023 ;; high register pressure situations it's not worth forcing any reloads.
1025 [(match_scratch:SI 2 "l")
1026 (set (reg:CC_V CC_REGNUM)
1030 (match_operand:SI 0 "low_register_operand"))
1031 (match_operand 1 "const_int_operand"))
1032 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1034 && satisfies_constraint_Pd (operands[1])"
1036 (set (reg:CC_V CC_REGNUM)
1038 (plus:DI (sign_extend:DI (match_dup 0))
1039 (sign_extend:DI (match_dup 1)))
1040 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1041 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1045 [(set (reg:CC_V CC_REGNUM)
1049 (match_operand:SI 0 "low_register_operand"))
1050 (match_operand 1 "const_int_operand"))
1051 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1053 && dead_or_set_p (peep2_next_insn (0), operands[0])
1054 && satisfies_constraint_Py (operands[1])"
1056 (set (reg:CC_V CC_REGNUM)
1058 (plus:DI (sign_extend:DI (match_dup 0))
1059 (sign_extend:DI (match_dup 1)))
1060 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1061 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1064 (define_insn "addsi3_compare0"
1065 [(set (reg:CC_NOOV CC_REGNUM)
1067 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1068 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1070 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1071 (plus:SI (match_dup 1) (match_dup 2)))]
1075 subs%?\\t%0, %1, #%n2
1076 adds%?\\t%0, %1, %2"
1077 [(set_attr "conds" "set")
1078 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1081 (define_insn "*addsi3_compare0_scratch"
1082 [(set (reg:CC_NOOV CC_REGNUM)
1084 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1085 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1092 [(set_attr "conds" "set")
1093 (set_attr "predicable" "yes")
1094 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1097 (define_insn "*compare_negsi_si"
1098 [(set (reg:CC_Z CC_REGNUM)
1100 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1101 (match_operand:SI 1 "s_register_operand" "l,r")))]
1104 [(set_attr "conds" "set")
1105 (set_attr "predicable" "yes")
1106 (set_attr "arch" "t2,*")
1107 (set_attr "length" "2,4")
1108 (set_attr "predicable_short_it" "yes,no")
1109 (set_attr "type" "alus_sreg")]
1112 ;; This is the canonicalization of subsi3_compare when the
1113 ;; addend is a constant.
1114 (define_insn "cmpsi2_addneg"
1115 [(set (reg:CC CC_REGNUM)
1117 (match_operand:SI 1 "s_register_operand" "r,r")
1118 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1119 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1120 (plus:SI (match_dup 1)
1121 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1123 && (INTVAL (operands[2])
1124 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1126 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1127 in different condition codes (like cmn rather than like cmp), so that
1128 alternative comes first. Both alternatives can match for any 0x??000000
1129 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1130 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1131 as it is shorter. */
1132 if (which_alternative == 0 && operands[3] != const1_rtx)
1133 return "subs%?\\t%0, %1, #%n3";
1135 return "adds%?\\t%0, %1, %3";
1137 [(set_attr "conds" "set")
1138 (set_attr "type" "alus_sreg")]
1141 ;; Convert the sequence
1143 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1147 ;; bcs dest ((unsigned)rn >= 1)
1148 ;; similarly for the beq variant using bcc.
1149 ;; This is a common looping idiom (while (n--))
1151 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1152 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1154 (set (match_operand 2 "cc_register" "")
1155 (compare (match_dup 0) (const_int -1)))
1157 (if_then_else (match_operator 3 "equality_operator"
1158 [(match_dup 2) (const_int 0)])
1159 (match_operand 4 "" "")
1160 (match_operand 5 "" "")))]
1161 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1165 (match_dup 1) (const_int 1)))
1166 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1168 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1171 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1172 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1175 operands[2], const0_rtx);"
1178 ;; The next four insns work because they compare the result with one of
1179 ;; the operands, and we know that the use of the condition code is
1180 ;; either GEU or LTU, so we can use the carry flag from the addition
1181 ;; instead of doing the compare a second time.
1182 (define_insn "addsi3_compare_op1"
1183 [(set (reg:CC_C CC_REGNUM)
1185 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1186 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1188 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1189 (plus:SI (match_dup 1) (match_dup 2)))]
1194 subs%?\\t%0, %1, #%n2
1195 subs%?\\t%0, %0, #%n2
1197 subs%?\\t%0, %1, #%n2"
1198 [(set_attr "conds" "set")
1199 (set_attr "arch" "t2,t2,t2,t2,*,*")
1200 (set_attr "length" "2,2,2,2,4,4")
1202 (if_then_else (match_operand 2 "const_int_operand")
1203 (const_string "alu_imm")
1204 (const_string "alu_sreg")))]
1207 (define_insn "*addsi3_compare_op2"
1208 [(set (reg:CC_C CC_REGNUM)
1210 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1211 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1213 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1214 (plus:SI (match_dup 1) (match_dup 2)))]
1219 subs%?\\t%0, %1, #%n2
1220 subs%?\\t%0, %0, #%n2
1222 subs%?\\t%0, %1, #%n2"
1223 [(set_attr "conds" "set")
1224 (set_attr "arch" "t2,t2,t2,t2,*,*")
1225 (set_attr "length" "2,2,2,2,4,4")
1227 (if_then_else (match_operand 2 "const_int_operand")
1228 (const_string "alu_imm")
1229 (const_string "alu_sreg")))]
1232 (define_insn "*compare_addsi2_op0"
1233 [(set (reg:CC_C CC_REGNUM)
1235 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1236 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1244 [(set_attr "conds" "set")
1245 (set_attr "predicable" "yes")
1246 (set_attr "arch" "t2,t2,*,*")
1247 (set_attr "predicable_short_it" "yes,yes,no,no")
1248 (set_attr "length" "2,2,4,4")
1250 (if_then_else (match_operand 1 "const_int_operand")
1251 (const_string "alu_imm")
1252 (const_string "alu_sreg")))]
1255 (define_insn "*compare_addsi2_op1"
1256 [(set (reg:CC_C CC_REGNUM)
1258 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1259 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1267 [(set_attr "conds" "set")
1268 (set_attr "predicable" "yes")
1269 (set_attr "arch" "t2,t2,*,*")
1270 (set_attr "predicable_short_it" "yes,yes,no,no")
1271 (set_attr "length" "2,2,4,4")
1273 (if_then_else (match_operand 1 "const_int_operand")
1274 (const_string "alu_imm")
1275 (const_string "alu_sreg")))]
1278 (define_insn "addsi3_carryin"
1279 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1280 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1281 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1282 (match_operand:SI 3 "arm_carry_operation" "")))]
1287 sbc%?\\t%0, %1, #%B2"
1288 [(set_attr "conds" "use")
1289 (set_attr "predicable" "yes")
1290 (set_attr "arch" "t2,*,*")
1291 (set_attr "length" "4")
1292 (set_attr "predicable_short_it" "yes,no,no")
1293 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1296 ;; Canonicalization of the above when the immediate is zero.
1297 (define_insn "add0si3_carryin"
1298 [(set (match_operand:SI 0 "s_register_operand" "=r")
1299 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1300 (match_operand:SI 1 "arm_not_operand" "r")))]
1302 "adc%?\\t%0, %1, #0"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set_attr "length" "4")
1306 (set_attr "type" "adc_imm")]
1309 (define_insn "*addsi3_carryin_alt2"
1310 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1311 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1312 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1313 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1318 sbc%?\\t%0, %1, #%B2"
1319 [(set_attr "conds" "use")
1320 (set_attr "predicable" "yes")
1321 (set_attr "arch" "t2,*,*")
1322 (set_attr "length" "4")
1323 (set_attr "predicable_short_it" "yes,no,no")
1324 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1327 (define_insn "*addsi3_carryin_shift"
1328 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1330 (match_operator:SI 2 "shift_operator"
1331 [(match_operand:SI 3 "s_register_operand" "r,r")
1332 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1333 (match_operand:SI 5 "arm_carry_operation" ""))
1334 (match_operand:SI 1 "s_register_operand" "r,r")))]
1336 "adc%?\\t%0, %1, %3%S2"
1337 [(set_attr "conds" "use")
1338 (set_attr "arch" "32,a")
1339 (set_attr "shift" "3")
1340 (set_attr "predicable" "yes")
1341 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1344 (define_insn "*addsi3_carryin_clobercc"
1345 [(set (match_operand:SI 0 "s_register_operand" "=r")
1346 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1347 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1348 (match_operand:SI 3 "arm_carry_operation" "")))
1349 (clobber (reg:CC CC_REGNUM))]
1351 "adcs%?\\t%0, %1, %2"
1352 [(set_attr "conds" "set")
1353 (set_attr "type" "adcs_reg")]
1356 (define_expand "subvsi4"
1357 [(match_operand:SI 0 "s_register_operand")
1358 (match_operand:SI 1 "arm_rhs_operand")
1359 (match_operand:SI 2 "arm_add_operand")
1360 (match_operand 3 "")]
1363 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1365 /* If both operands are constants we can decide the result statically. */
1366 wi::overflow_type overflow;
1367 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1368 rtx_mode_t (operands[2], SImode),
1370 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1371 if (overflow != wi::OVF_NONE)
1372 emit_jump_insn (gen_jump (operands[3]));
1375 else if (CONST_INT_P (operands[2]))
1377 operands[2] = GEN_INT (-INTVAL (operands[2]));
1378 /* Special case for INT_MIN. */
1379 if (INTVAL (operands[2]) == 0x80000000)
1380 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1382 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1385 else if (CONST_INT_P (operands[1]))
1386 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1388 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1390 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1394 (define_expand "subvdi4"
1395 [(match_operand:DI 0 "s_register_operand")
1396 (match_operand:DI 1 "reg_or_int_operand")
1397 (match_operand:DI 2 "reg_or_int_operand")
1398 (match_operand 3 "")]
1401 rtx lo_result, hi_result;
1402 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1403 lo_result = gen_lowpart (SImode, operands[0]);
1404 hi_result = gen_highpart (SImode, operands[0]);
1405 machine_mode mode = CCmode;
1407 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1409 /* If both operands are constants we can decide the result statically. */
1410 wi::overflow_type overflow;
1411 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1412 rtx_mode_t (operands[2], DImode),
1414 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1415 if (overflow != wi::OVF_NONE)
1416 emit_jump_insn (gen_jump (operands[3]));
1419 else if (CONST_INT_P (operands[1]))
1421 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1423 if (const_ok_for_arm (INTVAL (lo_op1)))
1425 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1426 GEN_INT (~UINTVAL (lo_op1))));
1427 /* We could potentially use RSC here in Arm state, but not
1428 in Thumb, so it's probably not worth the effort of handling
1430 hi_op1 = force_reg (SImode, hi_op1);
1434 operands[1] = force_reg (DImode, operands[1]);
1437 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1439 if (lo_op2 == const0_rtx)
1441 emit_move_insn (lo_result, lo_op1);
1442 if (!arm_add_operand (hi_op2, SImode))
1443 hi_op2 = force_reg (SImode, hi_op2);
1444 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1448 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1449 lo_op2 = force_reg (SImode, lo_op2);
1450 if (CONST_INT_P (lo_op2))
1451 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1452 GEN_INT (-INTVAL (lo_op2))));
1454 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1457 if (!arm_not_operand (hi_op2, SImode))
1458 hi_op2 = force_reg (SImode, hi_op2);
1459 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1460 if (CONST_INT_P (hi_op2))
1461 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1462 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1463 gen_rtx_LTU (DImode, ccreg,
1466 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1467 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1468 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1469 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1474 (define_expand "usubvsi4"
1475 [(match_operand:SI 0 "s_register_operand")
1476 (match_operand:SI 1 "arm_rhs_operand")
1477 (match_operand:SI 2 "arm_add_operand")
1478 (match_operand 3 "")]
1481 machine_mode mode = CCmode;
1482 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1484 /* If both operands are constants we can decide the result statically. */
1485 wi::overflow_type overflow;
1486 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1487 rtx_mode_t (operands[2], SImode),
1488 UNSIGNED, &overflow);
1489 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1490 if (overflow != wi::OVF_NONE)
1491 emit_jump_insn (gen_jump (operands[3]));
1494 else if (CONST_INT_P (operands[2]))
1495 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1496 GEN_INT (-INTVAL (operands[2]))));
1497 else if (CONST_INT_P (operands[1]))
1500 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1501 GEN_INT (~UINTVAL (operands[1]))));
1504 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1505 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1510 (define_expand "usubvdi4"
1511 [(match_operand:DI 0 "s_register_operand")
1512 (match_operand:DI 1 "reg_or_int_operand")
1513 (match_operand:DI 2 "reg_or_int_operand")
1514 (match_operand 3 "")]
1517 rtx lo_result, hi_result;
1518 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1519 lo_result = gen_lowpart (SImode, operands[0]);
1520 hi_result = gen_highpart (SImode, operands[0]);
1521 machine_mode mode = CCmode;
1523 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1525 /* If both operands are constants we can decide the result statically. */
1526 wi::overflow_type overflow;
1527 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1528 rtx_mode_t (operands[2], DImode),
1529 UNSIGNED, &overflow);
1530 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1531 if (overflow != wi::OVF_NONE)
1532 emit_jump_insn (gen_jump (operands[3]));
1535 else if (CONST_INT_P (operands[1]))
1537 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1539 if (const_ok_for_arm (INTVAL (lo_op1)))
1541 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1542 GEN_INT (~UINTVAL (lo_op1))));
1543 /* We could potentially use RSC here in Arm state, but not
1544 in Thumb, so it's probably not worth the effort of handling
1546 hi_op1 = force_reg (SImode, hi_op1);
1550 operands[1] = force_reg (DImode, operands[1]);
1553 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1555 if (lo_op2 == const0_rtx)
1557 emit_move_insn (lo_result, lo_op1);
1558 if (!arm_add_operand (hi_op2, SImode))
1559 hi_op2 = force_reg (SImode, hi_op2);
1560 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1564 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1565 lo_op2 = force_reg (SImode, lo_op2);
1566 if (CONST_INT_P (lo_op2))
1567 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1568 GEN_INT (-INTVAL (lo_op2))));
1570 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1573 if (!arm_not_operand (hi_op2, SImode))
1574 hi_op2 = force_reg (SImode, hi_op2);
1575 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1576 if (CONST_INT_P (hi_op2))
1577 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1578 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1579 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1580 gen_rtx_LTU (DImode, ccreg,
1583 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1584 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1585 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1586 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1591 (define_insn "subsi3_compare1"
1592 [(set (reg:CC CC_REGNUM)
1594 (match_operand:SI 1 "register_operand" "r")
1595 (match_operand:SI 2 "register_operand" "r")))
1596 (set (match_operand:SI 0 "register_operand" "=r")
1597 (minus:SI (match_dup 1) (match_dup 2)))]
1599 "subs%?\\t%0, %1, %2"
1600 [(set_attr "conds" "set")
1601 (set_attr "type" "alus_sreg")]
1604 (define_insn "subvsi3"
1605 [(set (reg:CC_V CC_REGNUM)
1608 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1609 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1610 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1611 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1612 (minus:SI (match_dup 1) (match_dup 2)))]
1614 "subs%?\\t%0, %1, %2"
1615 [(set_attr "conds" "set")
1616 (set_attr "arch" "t2,*")
1617 (set_attr "length" "2,4")
1618 (set_attr "type" "alus_sreg")]
1621 (define_insn "subvsi3_imm1"
1622 [(set (reg:CC_V CC_REGNUM)
1625 (match_operand 1 "arm_immediate_operand" "I")
1626 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1627 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1628 (set (match_operand:SI 0 "s_register_operand" "=r")
1629 (minus:SI (match_dup 1) (match_dup 2)))]
1631 "rsbs%?\\t%0, %2, %1"
1632 [(set_attr "conds" "set")
1633 (set_attr "type" "alus_imm")]
1636 (define_insn "subsi3_carryin"
1637 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1638 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1639 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1640 (match_operand:SI 3 "arm_borrow_operation" "")))]
1645 sbc%?\\t%0, %2, %2, lsl #1"
1646 [(set_attr "conds" "use")
1647 (set_attr "arch" "*,a,t2")
1648 (set_attr "predicable" "yes")
1649 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1652 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1653 [(set (reg:<CC_EXTEND> CC_REGNUM)
1654 (compare:<CC_EXTEND>
1655 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1656 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1657 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1658 (clobber (match_scratch:SI 0 "=l,r"))]
1661 [(set_attr "conds" "set")
1662 (set_attr "arch" "t2,*")
1663 (set_attr "length" "2,4")
1664 (set_attr "type" "adc_reg")]
1667 ;; Similar to the above, but handling a constant which has a different
1668 ;; canonicalization.
1669 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1670 [(set (reg:<CC_EXTEND> CC_REGNUM)
1671 (compare:<CC_EXTEND>
1672 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1673 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1674 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1675 (clobber (match_scratch:SI 0 "=l,r"))]
1679 adcs\\t%0, %1, #%B2"
1680 [(set_attr "conds" "set")
1681 (set_attr "type" "adc_imm")]
1684 ;; Further canonicalization when the constant is zero.
1685 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1686 [(set (reg:<CC_EXTEND> CC_REGNUM)
1687 (compare:<CC_EXTEND>
1688 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1689 (match_operand:DI 2 "arm_borrow_operation" "")))
1690 (clobber (match_scratch:SI 0 "=l,r"))]
1693 [(set_attr "conds" "set")
1694 (set_attr "type" "adc_imm")]
1697 (define_insn "*subsi3_carryin_const"
1698 [(set (match_operand:SI 0 "s_register_operand" "=r")
1700 (match_operand:SI 1 "s_register_operand" "r")
1701 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1702 (match_operand:SI 3 "arm_borrow_operation" "")))]
1704 "sbc\\t%0, %1, #%n2"
1705 [(set_attr "conds" "use")
1706 (set_attr "type" "adc_imm")]
1709 (define_insn "*subsi3_carryin_const0"
1710 [(set (match_operand:SI 0 "s_register_operand" "=r")
1711 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1712 (match_operand:SI 2 "arm_borrow_operation" "")))]
1715 [(set_attr "conds" "use")
1716 (set_attr "type" "adc_imm")]
1719 (define_insn "*subsi3_carryin_shift"
1720 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1722 (match_operand:SI 1 "s_register_operand" "r,r")
1723 (match_operator:SI 2 "shift_operator"
1724 [(match_operand:SI 3 "s_register_operand" "r,r")
1725 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1726 (match_operand:SI 5 "arm_borrow_operation" "")))]
1728 "sbc%?\\t%0, %1, %3%S2"
1729 [(set_attr "conds" "use")
1730 (set_attr "arch" "32,a")
1731 (set_attr "shift" "3")
1732 (set_attr "predicable" "yes")
1733 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1736 (define_insn "*subsi3_carryin_shift_alt"
1737 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1739 (match_operand:SI 1 "s_register_operand" "r,r")
1740 (match_operand:SI 5 "arm_borrow_operation" ""))
1741 (match_operator:SI 2 "shift_operator"
1742 [(match_operand:SI 3 "s_register_operand" "r,r")
1743 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1745 "sbc%?\\t%0, %1, %3%S2"
1746 [(set_attr "conds" "use")
1747 (set_attr "arch" "32,a")
1748 (set_attr "shift" "3")
1749 (set_attr "predicable" "yes")
1750 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1754 (define_insn "*rsbsi3_carryin_shift"
1755 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1757 (match_operator:SI 2 "shift_operator"
1758 [(match_operand:SI 3 "s_register_operand" "r,r")
1759 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1760 (match_operand:SI 1 "s_register_operand" "r,r"))
1761 (match_operand:SI 5 "arm_borrow_operation" "")))]
1763 "rsc%?\\t%0, %1, %3%S2"
1764 [(set_attr "conds" "use")
1765 (set_attr "predicable" "yes")
1766 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1769 (define_insn "*rsbsi3_carryin_shift_alt"
1770 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1772 (match_operator:SI 2 "shift_operator"
1773 [(match_operand:SI 3 "s_register_operand" "r,r")
1774 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1775 (match_operand:SI 5 "arm_borrow_operation" ""))
1776 (match_operand:SI 1 "s_register_operand" "r,r")))]
1778 "rsc%?\\t%0, %1, %3%S2"
1779 [(set_attr "conds" "use")
1780 (set_attr "predicable" "yes")
1781 (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1784 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
1786 [(set (match_operand:SI 0 "s_register_operand" "")
1787 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1788 (match_operand:SI 2 "s_register_operand" ""))
1790 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1792 [(set (match_dup 3) (match_dup 1))
1793 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1795 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1798 (define_expand "addsf3"
1799 [(set (match_operand:SF 0 "s_register_operand")
1800 (plus:SF (match_operand:SF 1 "s_register_operand")
1801 (match_operand:SF 2 "s_register_operand")))]
1802 "TARGET_32BIT && TARGET_HARD_FLOAT"
1806 (define_expand "adddf3"
1807 [(set (match_operand:DF 0 "s_register_operand")
1808 (plus:DF (match_operand:DF 1 "s_register_operand")
1809 (match_operand:DF 2 "s_register_operand")))]
1810 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1814 (define_expand "subdi3"
1816 [(set (match_operand:DI 0 "s_register_operand")
1817 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1818 (match_operand:DI 2 "s_register_operand")))
1819 (clobber (reg:CC CC_REGNUM))])]
1824 if (!REG_P (operands[1]))
1825 operands[1] = force_reg (DImode, operands[1]);
1829 rtx lo_result, hi_result, lo_dest, hi_dest;
1830 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1833 /* Since operands[1] may be an integer, pass it second, so that
1834 any necessary simplifications will be done on the decomposed
1836 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1838 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1839 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1841 if (!arm_rhs_operand (lo_op1, SImode))
1842 lo_op1 = force_reg (SImode, lo_op1);
1844 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1845 || !arm_rhs_operand (hi_op1, SImode))
1846 hi_op1 = force_reg (SImode, hi_op1);
1849 if (lo_op1 == const0_rtx)
1851 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1852 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1854 else if (CONST_INT_P (lo_op1))
1856 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1857 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1858 GEN_INT (~UINTVAL (lo_op1))));
1862 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1863 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1866 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1868 if (hi_op1 == const0_rtx)
1869 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1871 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1873 if (lo_result != lo_dest)
1874 emit_move_insn (lo_result, lo_dest);
1876 if (hi_result != hi_dest)
1877 emit_move_insn (hi_result, hi_dest);
1884 (define_expand "subsi3"
1885 [(set (match_operand:SI 0 "s_register_operand")
1886 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1887 (match_operand:SI 2 "s_register_operand")))]
1890 if (CONST_INT_P (operands[1]))
1894 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1895 operands[1] = force_reg (SImode, operands[1]);
1898 arm_split_constant (MINUS, SImode, NULL_RTX,
1899 INTVAL (operands[1]), operands[0],
1901 optimize && can_create_pseudo_p ());
1905 else /* TARGET_THUMB1 */
1906 operands[1] = force_reg (SImode, operands[1]);
1911 ; ??? Check Thumb-2 split length
1912 (define_insn_and_split "*arm_subsi3_insn"
1913 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1914 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1915 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1927 "&& (CONST_INT_P (operands[1])
1928 && !const_ok_for_arm (INTVAL (operands[1])))"
1929 [(clobber (const_int 0))]
1931 arm_split_constant (MINUS, SImode, curr_insn,
1932 INTVAL (operands[1]), operands[0], operands[2], 0);
1935 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1936 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1937 (set_attr "predicable" "yes")
1938 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1939 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
1943 [(match_scratch:SI 3 "r")
1944 (set (match_operand:SI 0 "arm_general_register_operand" "")
1945 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1946 (match_operand:SI 2 "arm_general_register_operand" "")))]
1948 && !const_ok_for_arm (INTVAL (operands[1]))
1949 && const_ok_for_arm (~INTVAL (operands[1]))"
1950 [(set (match_dup 3) (match_dup 1))
1951 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1955 (define_insn "subsi3_compare0"
1956 [(set (reg:CC_NOOV CC_REGNUM)
1958 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1959 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1961 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1962 (minus:SI (match_dup 1) (match_dup 2)))]
1967 rsbs%?\\t%0, %2, %1"
1968 [(set_attr "conds" "set")
1969 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
1972 (define_insn "subsi3_compare"
1973 [(set (reg:CC CC_REGNUM)
1974 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1975 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1976 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1977 (minus:SI (match_dup 1) (match_dup 2)))]
1982 rsbs%?\\t%0, %2, %1"
1983 [(set_attr "conds" "set")
1984 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1987 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1988 ;; rather than (0 cmp reg). This gives the same results for unsigned
1989 ;; and equality compares which is what we mostly need here.
1990 (define_insn "rsb_imm_compare"
1991 [(set (reg:CC_RSB CC_REGNUM)
1992 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1993 (match_operand 3 "const_int_operand" "")))
1994 (set (match_operand:SI 0 "s_register_operand" "=r")
1995 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
1997 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
1999 [(set_attr "conds" "set")
2000 (set_attr "type" "alus_imm")]
2003 ;; Similarly, but the result is unused.
2004 (define_insn "rsb_imm_compare_scratch"
2005 [(set (reg:CC_RSB CC_REGNUM)
2006 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2007 (match_operand 1 "arm_not_immediate_operand" "K")))
2008 (clobber (match_scratch:SI 0 "=r"))]
2010 "rsbs\\t%0, %2, #%B1"
2011 [(set_attr "conds" "set")
2012 (set_attr "type" "alus_imm")]
2015 ;; Compare the sum of a value plus a carry against a constant. Uses
2016 ;; RSC, so the result is swapped. Only available on Arm
2017 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2018 [(set (reg:CC_SWP CC_REGNUM)
2020 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2021 (match_operand:DI 3 "arm_borrow_operation" ""))
2022 (match_operand 1 "arm_immediate_operand" "I")))
2023 (clobber (match_scratch:SI 0 "=r"))]
2026 [(set_attr "conds" "set")
2027 (set_attr "type" "alus_imm")]
2030 (define_insn "usubvsi3_borrow"
2031 [(set (reg:CC_B CC_REGNUM)
2033 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2034 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2036 (match_operand:SI 2 "s_register_operand" "l,r")))))
2037 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2038 (minus:SI (match_dup 1)
2039 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2042 "sbcs%?\\t%0, %1, %2"
2043 [(set_attr "conds" "set")
2044 (set_attr "arch" "t2,*")
2045 (set_attr "length" "2,4")]
2048 (define_insn "usubvsi3_borrow_imm"
2049 [(set (reg:CC_B CC_REGNUM)
2051 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2052 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2053 (match_operand:DI 3 "const_int_operand" "n,n"))))
2054 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2055 (minus:SI (match_dup 1)
2056 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2057 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2059 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2062 adcs%?\\t%0, %1, #%B2"
2063 [(set_attr "conds" "set")
2064 (set_attr "type" "alus_imm")]
2067 (define_insn "subvsi3_borrow"
2068 [(set (reg:CC_V CC_REGNUM)
2072 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2073 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2074 (match_operand:DI 4 "arm_borrow_operation" ""))
2076 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2077 (match_operand:SI 3 "arm_borrow_operation" "")))))
2078 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2079 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2082 "sbcs%?\\t%0, %1, %2"
2083 [(set_attr "conds" "set")
2084 (set_attr "arch" "t2,*")
2085 (set_attr "length" "2,4")]
2088 (define_insn "subvsi3_borrow_imm"
2089 [(set (reg:CC_V CC_REGNUM)
2093 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2094 (match_operand 2 "arm_adcimm_operand" "I,K"))
2095 (match_operand:DI 4 "arm_borrow_operation" ""))
2097 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2098 (match_operand:SI 3 "arm_borrow_operation" "")))))
2099 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2100 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2103 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2106 adcs%?\\t%0, %1, #%B2"
2107 [(set_attr "conds" "set")
2108 (set_attr "type" "alus_imm")]
2111 (define_expand "subsf3"
2112 [(set (match_operand:SF 0 "s_register_operand")
2113 (minus:SF (match_operand:SF 1 "s_register_operand")
2114 (match_operand:SF 2 "s_register_operand")))]
2115 "TARGET_32BIT && TARGET_HARD_FLOAT"
2119 (define_expand "subdf3"
2120 [(set (match_operand:DF 0 "s_register_operand")
2121 (minus:DF (match_operand:DF 1 "s_register_operand")
2122 (match_operand:DF 2 "s_register_operand")))]
2123 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2128 ;; Multiplication insns
2130 (define_expand "mulhi3"
2131 [(set (match_operand:HI 0 "s_register_operand")
2132 (mult:HI (match_operand:HI 1 "s_register_operand")
2133 (match_operand:HI 2 "s_register_operand")))]
2134 "TARGET_DSP_MULTIPLY"
2137 rtx result = gen_reg_rtx (SImode);
2138 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2139 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2144 (define_expand "mulsi3"
2145 [(set (match_operand:SI 0 "s_register_operand")
2146 (mult:SI (match_operand:SI 2 "s_register_operand")
2147 (match_operand:SI 1 "s_register_operand")))]
2152 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
2154 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2155 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2156 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2158 "mul%?\\t%0, %2, %1"
2159 [(set_attr "type" "mul")
2160 (set_attr "predicable" "yes")
2161 (set_attr "arch" "t2,v6,nov6,nov6")
2162 (set_attr "length" "4")
2163 (set_attr "predicable_short_it" "yes,no,*,*")]
2166 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2167 ;; reusing the same register.
2170 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2172 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2173 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2174 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2176 "mla%?\\t%0, %3, %2, %1"
2177 [(set_attr "type" "mla")
2178 (set_attr "predicable" "yes")
2179 (set_attr "arch" "v6,nov6,nov6,nov6")]
2183 [(set (match_operand:SI 0 "s_register_operand" "=r")
2185 (match_operand:SI 1 "s_register_operand" "r")
2186 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2187 (match_operand:SI 2 "s_register_operand" "r"))))]
2188 "TARGET_32BIT && arm_arch_thumb2"
2189 "mls%?\\t%0, %3, %2, %1"
2190 [(set_attr "type" "mla")
2191 (set_attr "predicable" "yes")]
2194 (define_insn "*mulsi3_compare0"
2195 [(set (reg:CC_NOOV CC_REGNUM)
2196 (compare:CC_NOOV (mult:SI
2197 (match_operand:SI 2 "s_register_operand" "r,r")
2198 (match_operand:SI 1 "s_register_operand" "%0,r"))
2200 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2201 (mult:SI (match_dup 2) (match_dup 1)))]
2202 "TARGET_ARM && !arm_arch6"
2203 "muls%?\\t%0, %2, %1"
2204 [(set_attr "conds" "set")
2205 (set_attr "type" "muls")]
2208 (define_insn "*mulsi3_compare0_v6"
2209 [(set (reg:CC_NOOV CC_REGNUM)
2210 (compare:CC_NOOV (mult:SI
2211 (match_operand:SI 2 "s_register_operand" "r")
2212 (match_operand:SI 1 "s_register_operand" "r"))
2214 (set (match_operand:SI 0 "s_register_operand" "=r")
2215 (mult:SI (match_dup 2) (match_dup 1)))]
2216 "TARGET_ARM && arm_arch6 && optimize_size"
2217 "muls%?\\t%0, %2, %1"
2218 [(set_attr "conds" "set")
2219 (set_attr "type" "muls")]
2222 (define_insn "*mulsi_compare0_scratch"
2223 [(set (reg:CC_NOOV CC_REGNUM)
2224 (compare:CC_NOOV (mult:SI
2225 (match_operand:SI 2 "s_register_operand" "r,r")
2226 (match_operand:SI 1 "s_register_operand" "%0,r"))
2228 (clobber (match_scratch:SI 0 "=&r,&r"))]
2229 "TARGET_ARM && !arm_arch6"
2230 "muls%?\\t%0, %2, %1"
2231 [(set_attr "conds" "set")
2232 (set_attr "type" "muls")]
2235 (define_insn "*mulsi_compare0_scratch_v6"
2236 [(set (reg:CC_NOOV CC_REGNUM)
2237 (compare:CC_NOOV (mult:SI
2238 (match_operand:SI 2 "s_register_operand" "r")
2239 (match_operand:SI 1 "s_register_operand" "r"))
2241 (clobber (match_scratch:SI 0 "=r"))]
2242 "TARGET_ARM && arm_arch6 && optimize_size"
2243 "muls%?\\t%0, %2, %1"
2244 [(set_attr "conds" "set")
2245 (set_attr "type" "muls")]
2248 (define_insn "*mulsi3addsi_compare0"
2249 [(set (reg:CC_NOOV CC_REGNUM)
2252 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2253 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2254 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2256 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2257 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2259 "TARGET_ARM && arm_arch6"
2260 "mlas%?\\t%0, %2, %1, %3"
2261 [(set_attr "conds" "set")
2262 (set_attr "type" "mlas")]
2265 (define_insn "*mulsi3addsi_compare0_v6"
2266 [(set (reg:CC_NOOV CC_REGNUM)
2269 (match_operand:SI 2 "s_register_operand" "r")
2270 (match_operand:SI 1 "s_register_operand" "r"))
2271 (match_operand:SI 3 "s_register_operand" "r"))
2273 (set (match_operand:SI 0 "s_register_operand" "=r")
2274 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2276 "TARGET_ARM && arm_arch6 && optimize_size"
2277 "mlas%?\\t%0, %2, %1, %3"
2278 [(set_attr "conds" "set")
2279 (set_attr "type" "mlas")]
2282 (define_insn "*mulsi3addsi_compare0_scratch"
2283 [(set (reg:CC_NOOV CC_REGNUM)
2286 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2287 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2288 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2290 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2291 "TARGET_ARM && !arm_arch6"
2292 "mlas%?\\t%0, %2, %1, %3"
2293 [(set_attr "conds" "set")
2294 (set_attr "type" "mlas")]
2297 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2298 [(set (reg:CC_NOOV CC_REGNUM)
2301 (match_operand:SI 2 "s_register_operand" "r")
2302 (match_operand:SI 1 "s_register_operand" "r"))
2303 (match_operand:SI 3 "s_register_operand" "r"))
2305 (clobber (match_scratch:SI 0 "=r"))]
2306 "TARGET_ARM && arm_arch6 && optimize_size"
2307 "mlas%?\\t%0, %2, %1, %3"
2308 [(set_attr "conds" "set")
2309 (set_attr "type" "mlas")]
2312 ;; 32x32->64 widening multiply.
2313 ;; The only difference between the v3-5 and v6+ versions is the requirement
2314 ;; that the output does not overlap with either input.
2316 (define_expand "<Us>mulsidi3"
2317 [(set (match_operand:DI 0 "s_register_operand")
2319 (SE:DI (match_operand:SI 1 "s_register_operand"))
2320 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2323 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2324 gen_highpart (SImode, operands[0]),
2325 operands[1], operands[2]));
2330 (define_insn "<US>mull"
2331 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2333 (match_operand:SI 2 "s_register_operand" "%r,r")
2334 (match_operand:SI 3 "s_register_operand" "r,r")))
2335 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2338 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2341 "<US>mull%?\\t%0, %1, %2, %3"
2342 [(set_attr "type" "umull")
2343 (set_attr "predicable" "yes")
2344 (set_attr "arch" "v6,nov6")]
2347 (define_expand "<Us>maddsidi4"
2348 [(set (match_operand:DI 0 "s_register_operand")
2351 (SE:DI (match_operand:SI 1 "s_register_operand"))
2352 (SE:DI (match_operand:SI 2 "s_register_operand")))
2353 (match_operand:DI 3 "s_register_operand")))]
2356 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2357 gen_lowpart (SImode, operands[3]),
2358 gen_highpart (SImode, operands[0]),
2359 gen_highpart (SImode, operands[3]),
2360 operands[1], operands[2]));
2365 (define_insn "<US>mlal"
2366 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2369 (match_operand:SI 4 "s_register_operand" "%r,r")
2370 (match_operand:SI 5 "s_register_operand" "r,r"))
2371 (match_operand:SI 1 "s_register_operand" "0,0")))
2372 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2377 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2378 (zero_extend:DI (match_dup 1)))
2380 (match_operand:SI 3 "s_register_operand" "2,2")))]
2382 "<US>mlal%?\\t%0, %2, %4, %5"
2383 [(set_attr "type" "umlal")
2384 (set_attr "predicable" "yes")
2385 (set_attr "arch" "v6,nov6")]
2388 (define_expand "<US>mulsi3_highpart"
2390 [(set (match_operand:SI 0 "s_register_operand")
2394 (SE:DI (match_operand:SI 1 "s_register_operand"))
2395 (SE:DI (match_operand:SI 2 "s_register_operand")))
2397 (clobber (match_scratch:SI 3 ""))])]
2402 (define_insn "*<US>mull_high"
2403 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2407 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2408 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2410 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2412 "<US>mull%?\\t%3, %0, %2, %1"
2413 [(set_attr "type" "umull")
2414 (set_attr "predicable" "yes")
2415 (set_attr "arch" "v6,nov6,nov6")]
2418 (define_insn "mulhisi3"
2419 [(set (match_operand:SI 0 "s_register_operand" "=r")
2420 (mult:SI (sign_extend:SI
2421 (match_operand:HI 1 "s_register_operand" "%r"))
2423 (match_operand:HI 2 "s_register_operand" "r"))))]
2424 "TARGET_DSP_MULTIPLY"
2425 "smulbb%?\\t%0, %1, %2"
2426 [(set_attr "type" "smulxy")
2427 (set_attr "predicable" "yes")]
2430 (define_insn "*mulhisi3tb"
2431 [(set (match_operand:SI 0 "s_register_operand" "=r")
2432 (mult:SI (ashiftrt:SI
2433 (match_operand:SI 1 "s_register_operand" "r")
2436 (match_operand:HI 2 "s_register_operand" "r"))))]
2437 "TARGET_DSP_MULTIPLY"
2438 "smultb%?\\t%0, %1, %2"
2439 [(set_attr "type" "smulxy")
2440 (set_attr "predicable" "yes")]
2443 (define_insn "*mulhisi3bt"
2444 [(set (match_operand:SI 0 "s_register_operand" "=r")
2445 (mult:SI (sign_extend:SI
2446 (match_operand:HI 1 "s_register_operand" "r"))
2448 (match_operand:SI 2 "s_register_operand" "r")
2450 "TARGET_DSP_MULTIPLY"
2451 "smulbt%?\\t%0, %1, %2"
2452 [(set_attr "type" "smulxy")
2453 (set_attr "predicable" "yes")]
2456 (define_insn "*mulhisi3tt"
2457 [(set (match_operand:SI 0 "s_register_operand" "=r")
2458 (mult:SI (ashiftrt:SI
2459 (match_operand:SI 1 "s_register_operand" "r")
2462 (match_operand:SI 2 "s_register_operand" "r")
2464 "TARGET_DSP_MULTIPLY"
2465 "smultt%?\\t%0, %1, %2"
2466 [(set_attr "type" "smulxy")
2467 (set_attr "predicable" "yes")]
2470 (define_insn "maddhisi4"
2471 [(set (match_operand:SI 0 "s_register_operand" "=r")
2472 (plus:SI (mult:SI (sign_extend:SI
2473 (match_operand:HI 1 "s_register_operand" "r"))
2475 (match_operand:HI 2 "s_register_operand" "r")))
2476 (match_operand:SI 3 "s_register_operand" "r")))]
2477 "TARGET_DSP_MULTIPLY"
2478 "smlabb%?\\t%0, %1, %2, %3"
2479 [(set_attr "type" "smlaxy")
2480 (set_attr "predicable" "yes")]
2483 ;; Note: there is no maddhisi4ibt because this one is canonical form
2484 (define_insn "*maddhisi4tb"
2485 [(set (match_operand:SI 0 "s_register_operand" "=r")
2486 (plus:SI (mult:SI (ashiftrt:SI
2487 (match_operand:SI 1 "s_register_operand" "r")
2490 (match_operand:HI 2 "s_register_operand" "r")))
2491 (match_operand:SI 3 "s_register_operand" "r")))]
2492 "TARGET_DSP_MULTIPLY"
2493 "smlatb%?\\t%0, %1, %2, %3"
2494 [(set_attr "type" "smlaxy")
2495 (set_attr "predicable" "yes")]
2498 (define_insn "*maddhisi4tt"
2499 [(set (match_operand:SI 0 "s_register_operand" "=r")
2500 (plus:SI (mult:SI (ashiftrt:SI
2501 (match_operand:SI 1 "s_register_operand" "r")
2504 (match_operand:SI 2 "s_register_operand" "r")
2506 (match_operand:SI 3 "s_register_operand" "r")))]
2507 "TARGET_DSP_MULTIPLY"
2508 "smlatt%?\\t%0, %1, %2, %3"
2509 [(set_attr "type" "smlaxy")
2510 (set_attr "predicable" "yes")]
2513 (define_insn "maddhidi4"
2514 [(set (match_operand:DI 0 "s_register_operand" "=r")
2516 (mult:DI (sign_extend:DI
2517 (match_operand:HI 1 "s_register_operand" "r"))
2519 (match_operand:HI 2 "s_register_operand" "r")))
2520 (match_operand:DI 3 "s_register_operand" "0")))]
2521 "TARGET_DSP_MULTIPLY"
2522 "smlalbb%?\\t%Q0, %R0, %1, %2"
2523 [(set_attr "type" "smlalxy")
2524 (set_attr "predicable" "yes")])
2526 ;; Note: there is no maddhidi4ibt because this one is canonical form
2527 (define_insn "*maddhidi4tb"
2528 [(set (match_operand:DI 0 "s_register_operand" "=r")
2530 (mult:DI (sign_extend:DI
2532 (match_operand:SI 1 "s_register_operand" "r")
2535 (match_operand:HI 2 "s_register_operand" "r")))
2536 (match_operand:DI 3 "s_register_operand" "0")))]
2537 "TARGET_DSP_MULTIPLY"
2538 "smlaltb%?\\t%Q0, %R0, %1, %2"
2539 [(set_attr "type" "smlalxy")
2540 (set_attr "predicable" "yes")])
2542 (define_insn "*maddhidi4tt"
2543 [(set (match_operand:DI 0 "s_register_operand" "=r")
2545 (mult:DI (sign_extend:DI
2547 (match_operand:SI 1 "s_register_operand" "r")
2551 (match_operand:SI 2 "s_register_operand" "r")
2553 (match_operand:DI 3 "s_register_operand" "0")))]
2554 "TARGET_DSP_MULTIPLY"
2555 "smlaltt%?\\t%Q0, %R0, %1, %2"
2556 [(set_attr "type" "smlalxy")
2557 (set_attr "predicable" "yes")])
2559 (define_expand "mulsf3"
2560 [(set (match_operand:SF 0 "s_register_operand")
2561 (mult:SF (match_operand:SF 1 "s_register_operand")
2562 (match_operand:SF 2 "s_register_operand")))]
2563 "TARGET_32BIT && TARGET_HARD_FLOAT"
2567 (define_expand "muldf3"
2568 [(set (match_operand:DF 0 "s_register_operand")
2569 (mult:DF (match_operand:DF 1 "s_register_operand")
2570 (match_operand:DF 2 "s_register_operand")))]
2571 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2577 (define_expand "divsf3"
2578 [(set (match_operand:SF 0 "s_register_operand")
2579 (div:SF (match_operand:SF 1 "s_register_operand")
2580 (match_operand:SF 2 "s_register_operand")))]
2581 "TARGET_32BIT && TARGET_HARD_FLOAT"
2584 (define_expand "divdf3"
2585 [(set (match_operand:DF 0 "s_register_operand")
2586 (div:DF (match_operand:DF 1 "s_register_operand")
2587 (match_operand:DF 2 "s_register_operand")))]
2588 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2592 ; Expand logical operations. The mid-end expander does not split off memory
2593 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2594 ; So an explicit expander is needed to generate better code.
2596 (define_expand "<LOGICAL:optab>di3"
2597 [(set (match_operand:DI 0 "s_register_operand")
2598 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2599 (match_operand:DI 2 "arm_<optab>di_operand")))]
2602 rtx low = simplify_gen_binary (<CODE>, SImode,
2603 gen_lowpart (SImode, operands[1]),
2604 gen_lowpart (SImode, operands[2]));
2605 rtx high = simplify_gen_binary (<CODE>, SImode,
2606 gen_highpart (SImode, operands[1]),
2607 gen_highpart_mode (SImode, DImode,
2610 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2611 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2616 (define_expand "one_cmpldi2"
2617 [(set (match_operand:DI 0 "s_register_operand")
2618 (not:DI (match_operand:DI 1 "s_register_operand")))]
2621 rtx low = simplify_gen_unary (NOT, SImode,
2622 gen_lowpart (SImode, operands[1]),
2624 rtx high = simplify_gen_unary (NOT, SImode,
2625 gen_highpart_mode (SImode, DImode,
2629 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2630 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2635 ;; Split DImode and, ior, xor operations. Simply perform the logical
2636 ;; operation on the upper and lower halves of the registers.
2637 ;; This is needed for atomic operations in arm_split_atomic_op.
2638 ;; Avoid splitting IWMMXT instructions.
2640 [(set (match_operand:DI 0 "s_register_operand" "")
2641 (match_operator:DI 6 "logical_binary_operator"
2642 [(match_operand:DI 1 "s_register_operand" "")
2643 (match_operand:DI 2 "s_register_operand" "")]))]
2644 "TARGET_32BIT && reload_completed
2645 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2646 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2647 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2650 operands[3] = gen_highpart (SImode, operands[0]);
2651 operands[0] = gen_lowpart (SImode, operands[0]);
2652 operands[4] = gen_highpart (SImode, operands[1]);
2653 operands[1] = gen_lowpart (SImode, operands[1]);
2654 operands[5] = gen_highpart (SImode, operands[2]);
2655 operands[2] = gen_lowpart (SImode, operands[2]);
2659 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2660 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
2662 [(set (match_operand:DI 0 "s_register_operand")
2663 (not:DI (match_operand:DI 1 "s_register_operand")))]
2665 [(set (match_dup 0) (not:SI (match_dup 1)))
2666 (set (match_dup 2) (not:SI (match_dup 3)))]
2669 operands[2] = gen_highpart (SImode, operands[0]);
2670 operands[0] = gen_lowpart (SImode, operands[0]);
2671 operands[3] = gen_highpart (SImode, operands[1]);
2672 operands[1] = gen_lowpart (SImode, operands[1]);
2676 (define_expand "andsi3"
2677 [(set (match_operand:SI 0 "s_register_operand")
2678 (and:SI (match_operand:SI 1 "s_register_operand")
2679 (match_operand:SI 2 "reg_or_int_operand")))]
2684 if (CONST_INT_P (operands[2]))
2686 if (INTVAL (operands[2]) == 255 && arm_arch6)
2688 operands[1] = convert_to_mode (QImode, operands[1], 1);
2689 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2693 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2694 operands[2] = force_reg (SImode, operands[2]);
2697 arm_split_constant (AND, SImode, NULL_RTX,
2698 INTVAL (operands[2]), operands[0],
2700 optimize && can_create_pseudo_p ());
2706 else /* TARGET_THUMB1 */
2708 if (!CONST_INT_P (operands[2]))
2710 rtx tmp = force_reg (SImode, operands[2]);
2711 if (rtx_equal_p (operands[0], operands[1]))
2715 operands[2] = operands[1];
2723 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2725 operands[2] = force_reg (SImode,
2726 GEN_INT (~INTVAL (operands[2])));
2728 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2733 for (i = 9; i <= 31; i++)
2735 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2737 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2741 else if ((HOST_WIDE_INT_1 << i) - 1
2742 == ~INTVAL (operands[2]))
2744 rtx shift = GEN_INT (i);
2745 rtx reg = gen_reg_rtx (SImode);
2747 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2748 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2754 operands[2] = force_reg (SImode, operands[2]);
2760 ; ??? Check split length for Thumb-2
2761 (define_insn_and_split "*arm_andsi3_insn"
2762 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2763 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2764 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2769 bic%?\\t%0, %1, #%B2
2773 && CONST_INT_P (operands[2])
2774 && !(const_ok_for_arm (INTVAL (operands[2]))
2775 || const_ok_for_arm (~INTVAL (operands[2])))"
2776 [(clobber (const_int 0))]
2778 arm_split_constant (AND, SImode, curr_insn,
2779 INTVAL (operands[2]), operands[0], operands[1], 0);
2782 [(set_attr "length" "4,4,4,4,16")
2783 (set_attr "predicable" "yes")
2784 (set_attr "predicable_short_it" "no,yes,no,no,no")
2785 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
2788 (define_insn "*andsi3_compare0"
2789 [(set (reg:CC_NOOV CC_REGNUM)
2791 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2792 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2794 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2795 (and:SI (match_dup 1) (match_dup 2)))]
2799 bics%?\\t%0, %1, #%B2
2800 ands%?\\t%0, %1, %2"
2801 [(set_attr "conds" "set")
2802 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2805 (define_insn "*andsi3_compare0_scratch"
2806 [(set (reg:CC_NOOV CC_REGNUM)
2808 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2809 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2811 (clobber (match_scratch:SI 2 "=X,r,X"))]
2815 bics%?\\t%2, %0, #%B1
2817 [(set_attr "conds" "set")
2818 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
2821 (define_insn "*zeroextractsi_compare0_scratch"
2822 [(set (reg:CC_NOOV CC_REGNUM)
2823 (compare:CC_NOOV (zero_extract:SI
2824 (match_operand:SI 0 "s_register_operand" "r")
2825 (match_operand 1 "const_int_operand" "n")
2826 (match_operand 2 "const_int_operand" "n"))
2829 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2830 && INTVAL (operands[1]) > 0
2831 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2832 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2834 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2835 << INTVAL (operands[2]));
2836 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2839 [(set_attr "conds" "set")
2840 (set_attr "predicable" "yes")
2841 (set_attr "type" "logics_imm")]
2844 (define_insn_and_split "*ne_zeroextractsi"
2845 [(set (match_operand:SI 0 "s_register_operand" "=r")
2846 (ne:SI (zero_extract:SI
2847 (match_operand:SI 1 "s_register_operand" "r")
2848 (match_operand:SI 2 "const_int_operand" "n")
2849 (match_operand:SI 3 "const_int_operand" "n"))
2851 (clobber (reg:CC CC_REGNUM))]
2853 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2854 && INTVAL (operands[2]) > 0
2855 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2856 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2859 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2860 && INTVAL (operands[2]) > 0
2861 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2862 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2863 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2864 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2866 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2868 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2869 (match_dup 0) (const_int 1)))]
2871 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2872 << INTVAL (operands[3]));
2874 [(set_attr "conds" "clob")
2875 (set (attr "length")
2876 (if_then_else (eq_attr "is_thumb" "yes")
2879 (set_attr "type" "multiple")]
2882 (define_insn_and_split "*ne_zeroextractsi_shifted"
2883 [(set (match_operand:SI 0 "s_register_operand" "=r")
2884 (ne:SI (zero_extract:SI
2885 (match_operand:SI 1 "s_register_operand" "r")
2886 (match_operand:SI 2 "const_int_operand" "n")
2889 (clobber (reg:CC CC_REGNUM))]
2893 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2894 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2896 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2898 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2899 (match_dup 0) (const_int 1)))]
2901 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2903 [(set_attr "conds" "clob")
2904 (set_attr "length" "8")
2905 (set_attr "type" "multiple")]
2908 (define_insn_and_split "*ite_ne_zeroextractsi"
2909 [(set (match_operand:SI 0 "s_register_operand" "=r")
2910 (if_then_else:SI (ne (zero_extract:SI
2911 (match_operand:SI 1 "s_register_operand" "r")
2912 (match_operand:SI 2 "const_int_operand" "n")
2913 (match_operand:SI 3 "const_int_operand" "n"))
2915 (match_operand:SI 4 "arm_not_operand" "rIK")
2917 (clobber (reg:CC CC_REGNUM))]
2919 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2920 && INTVAL (operands[2]) > 0
2921 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2922 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2923 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2926 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2927 && INTVAL (operands[2]) > 0
2928 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2929 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2930 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2931 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2932 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2934 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2936 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2937 (match_dup 0) (match_dup 4)))]
2939 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2940 << INTVAL (operands[3]));
2942 [(set_attr "conds" "clob")
2943 (set_attr "length" "8")
2944 (set_attr "type" "multiple")]
2947 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2948 [(set (match_operand:SI 0 "s_register_operand" "=r")
2949 (if_then_else:SI (ne (zero_extract:SI
2950 (match_operand:SI 1 "s_register_operand" "r")
2951 (match_operand:SI 2 "const_int_operand" "n")
2954 (match_operand:SI 3 "arm_not_operand" "rIK")
2956 (clobber (reg:CC CC_REGNUM))]
2957 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2959 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2960 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2961 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2963 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2965 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2966 (match_dup 0) (match_dup 3)))]
2968 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2970 [(set_attr "conds" "clob")
2971 (set_attr "length" "8")
2972 (set_attr "type" "multiple")]
2975 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2977 [(set (match_operand:SI 0 "s_register_operand" "")
2978 (match_operator:SI 1 "shiftable_operator"
2979 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2980 (match_operand:SI 3 "const_int_operand" "")
2981 (match_operand:SI 4 "const_int_operand" ""))
2982 (match_operand:SI 5 "s_register_operand" "")]))
2983 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2985 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2988 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2991 HOST_WIDE_INT temp = INTVAL (operands[3]);
2993 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2994 operands[4] = GEN_INT (32 - temp);
2999 [(set (match_operand:SI 0 "s_register_operand" "")
3000 (match_operator:SI 1 "shiftable_operator"
3001 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3002 (match_operand:SI 3 "const_int_operand" "")
3003 (match_operand:SI 4 "const_int_operand" ""))
3004 (match_operand:SI 5 "s_register_operand" "")]))
3005 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3007 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3010 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3013 HOST_WIDE_INT temp = INTVAL (operands[3]);
3015 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3016 operands[4] = GEN_INT (32 - temp);
3020 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3021 ;;; represented by the bitfield, then this will produce incorrect results.
3022 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3023 ;;; which have a real bit-field insert instruction, the truncation happens
3024 ;;; in the bit-field insert instruction itself. Since arm does not have a
3025 ;;; bit-field insert instruction, we would have to emit code here to truncate
3026 ;;; the value before we insert. This loses some of the advantage of having
3027 ;;; this insv pattern, so this pattern needs to be reevalutated.
3029 (define_expand "insv"
3030 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3031 (match_operand 1 "general_operand")
3032 (match_operand 2 "general_operand"))
3033 (match_operand 3 "reg_or_int_operand"))]
3034 "TARGET_ARM || arm_arch_thumb2"
3037 int start_bit = INTVAL (operands[2]);
3038 int width = INTVAL (operands[1]);
3039 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3040 rtx target, subtarget;
3042 if (arm_arch_thumb2)
3044 if (unaligned_access && MEM_P (operands[0])
3045 && s_register_operand (operands[3], GET_MODE (operands[3]))
3046 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3050 if (BYTES_BIG_ENDIAN)
3051 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3056 base_addr = adjust_address (operands[0], SImode,
3057 start_bit / BITS_PER_UNIT);
3058 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3062 rtx tmp = gen_reg_rtx (HImode);
3064 base_addr = adjust_address (operands[0], HImode,
3065 start_bit / BITS_PER_UNIT);
3066 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3067 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3071 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3073 bool use_bfi = TRUE;
3075 if (CONST_INT_P (operands[3]))
3077 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3081 emit_insn (gen_insv_zero (operands[0], operands[1],
3086 /* See if the set can be done with a single orr instruction. */
3087 if (val == mask && const_ok_for_arm (val << start_bit))
3093 if (!REG_P (operands[3]))
3094 operands[3] = force_reg (SImode, operands[3]);
3096 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3105 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3108 target = copy_rtx (operands[0]);
3109 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3110 subreg as the final target. */
3111 if (GET_CODE (target) == SUBREG)
3113 subtarget = gen_reg_rtx (SImode);
3114 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3115 < GET_MODE_SIZE (SImode))
3116 target = SUBREG_REG (target);
3121 if (CONST_INT_P (operands[3]))
3123 /* Since we are inserting a known constant, we may be able to
3124 reduce the number of bits that we have to clear so that
3125 the mask becomes simple. */
3126 /* ??? This code does not check to see if the new mask is actually
3127 simpler. It may not be. */
3128 rtx op1 = gen_reg_rtx (SImode);
3129 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3130 start of this pattern. */
3131 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3132 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3134 emit_insn (gen_andsi3 (op1, operands[0],
3135 gen_int_mode (~mask2, SImode)));
3136 emit_insn (gen_iorsi3 (subtarget, op1,
3137 gen_int_mode (op3_value << start_bit, SImode)));
3139 else if (start_bit == 0
3140 && !(const_ok_for_arm (mask)
3141 || const_ok_for_arm (~mask)))
3143 /* A Trick, since we are setting the bottom bits in the word,
3144 we can shift operand[3] up, operand[0] down, OR them together
3145 and rotate the result back again. This takes 3 insns, and
3146 the third might be mergeable into another op. */
3147 /* The shift up copes with the possibility that operand[3] is
3148 wider than the bitfield. */
3149 rtx op0 = gen_reg_rtx (SImode);
3150 rtx op1 = gen_reg_rtx (SImode);
3152 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3153 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3154 emit_insn (gen_iorsi3 (op1, op1, op0));
3155 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3157 else if ((width + start_bit == 32)
3158 && !(const_ok_for_arm (mask)
3159 || const_ok_for_arm (~mask)))
3161 /* Similar trick, but slightly less efficient. */
3163 rtx op0 = gen_reg_rtx (SImode);
3164 rtx op1 = gen_reg_rtx (SImode);
3166 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3167 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3168 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3169 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3173 rtx op0 = gen_int_mode (mask, SImode);
3174 rtx op1 = gen_reg_rtx (SImode);
3175 rtx op2 = gen_reg_rtx (SImode);
3177 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3179 rtx tmp = gen_reg_rtx (SImode);
3181 emit_insn (gen_movsi (tmp, op0));
3185 /* Mask out any bits in operand[3] that are not needed. */
3186 emit_insn (gen_andsi3 (op1, operands[3], op0));
3188 if (CONST_INT_P (op0)
3189 && (const_ok_for_arm (mask << start_bit)
3190 || const_ok_for_arm (~(mask << start_bit))))
3192 op0 = gen_int_mode (~(mask << start_bit), SImode);
3193 emit_insn (gen_andsi3 (op2, operands[0], op0));
3197 if (CONST_INT_P (op0))
3199 rtx tmp = gen_reg_rtx (SImode);
3201 emit_insn (gen_movsi (tmp, op0));
3206 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3208 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3212 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3214 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3217 if (subtarget != target)
3219 /* If TARGET is still a SUBREG, then it must be wider than a word,
3220 so we must be careful only to set the subword we were asked to. */
3221 if (GET_CODE (target) == SUBREG)
3222 emit_move_insn (target, subtarget);
3224 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3231 (define_insn "insv_zero"
3232 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3233 (match_operand:SI 1 "const_int_M_operand" "M")
3234 (match_operand:SI 2 "const_int_M_operand" "M"))
3238 [(set_attr "length" "4")
3239 (set_attr "predicable" "yes")
3240 (set_attr "type" "bfm")]
3243 (define_insn "insv_t2"
3244 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3245 (match_operand:SI 1 "const_int_M_operand" "M")
3246 (match_operand:SI 2 "const_int_M_operand" "M"))
3247 (match_operand:SI 3 "s_register_operand" "r"))]
3249 "bfi%?\t%0, %3, %2, %1"
3250 [(set_attr "length" "4")
3251 (set_attr "predicable" "yes")
3252 (set_attr "type" "bfm")]
3255 (define_insn "andsi_notsi_si"
3256 [(set (match_operand:SI 0 "s_register_operand" "=r")
3257 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3258 (match_operand:SI 1 "s_register_operand" "r")))]
3260 "bic%?\\t%0, %1, %2"
3261 [(set_attr "predicable" "yes")
3262 (set_attr "type" "logic_reg")]
3265 (define_insn "andsi_not_shiftsi_si"
3266 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3267 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3268 [(match_operand:SI 2 "s_register_operand" "r,r")
3269 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3270 (match_operand:SI 1 "s_register_operand" "r,r")))]
3272 "bic%?\\t%0, %1, %2%S4"
3273 [(set_attr "predicable" "yes")
3274 (set_attr "shift" "2")
3275 (set_attr "arch" "32,a")
3276 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3279 ;; Shifted bics pattern used to set up CC status register and not reusing
3280 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3281 ;; does not support shift by register.
3282 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3283 [(set (reg:CC_NOOV CC_REGNUM)
3285 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3286 [(match_operand:SI 1 "s_register_operand" "r,r")
3287 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3288 (match_operand:SI 3 "s_register_operand" "r,r"))
3290 (clobber (match_scratch:SI 4 "=r,r"))]
3292 "bics%?\\t%4, %3, %1%S0"
3293 [(set_attr "predicable" "yes")
3294 (set_attr "arch" "32,a")
3295 (set_attr "conds" "set")
3296 (set_attr "shift" "1")
3297 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3300 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3301 ;; getting reused later.
3302 (define_insn "andsi_not_shiftsi_si_scc"
3303 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3305 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3306 [(match_operand:SI 1 "s_register_operand" "r,r")
3307 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3308 (match_operand:SI 3 "s_register_operand" "r,r"))
3310 (set (match_operand:SI 4 "s_register_operand" "=r,r")
3311 (and:SI (not:SI (match_op_dup 0
3316 "bics%?\\t%4, %3, %1%S0"
3317 [(set_attr "predicable" "yes")
3318 (set_attr "arch" "32,a")
3319 (set_attr "conds" "set")
3320 (set_attr "shift" "1")
3321 (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3324 (define_insn "*andsi_notsi_si_compare0"
3325 [(set (reg:CC_NOOV CC_REGNUM)
3327 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3328 (match_operand:SI 1 "s_register_operand" "r"))
3330 (set (match_operand:SI 0 "s_register_operand" "=r")
3331 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3334 [(set_attr "conds" "set")
3335 (set_attr "type" "logics_shift_reg")]
3338 (define_insn "*andsi_notsi_si_compare0_scratch"
3339 [(set (reg:CC_NOOV CC_REGNUM)
3341 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3342 (match_operand:SI 1 "s_register_operand" "r"))
3344 (clobber (match_scratch:SI 0 "=r"))]
3347 [(set_attr "conds" "set")
3348 (set_attr "type" "logics_shift_reg")]
3351 (define_expand "iorsi3"
3352 [(set (match_operand:SI 0 "s_register_operand")
3353 (ior:SI (match_operand:SI 1 "s_register_operand")
3354 (match_operand:SI 2 "reg_or_int_operand")))]
3357 if (CONST_INT_P (operands[2]))
3361 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3362 operands[2] = force_reg (SImode, operands[2]);
3365 arm_split_constant (IOR, SImode, NULL_RTX,
3366 INTVAL (operands[2]), operands[0],
3368 optimize && can_create_pseudo_p ());
3372 else /* TARGET_THUMB1 */
3374 rtx tmp = force_reg (SImode, operands[2]);
3375 if (rtx_equal_p (operands[0], operands[1]))
3379 operands[2] = operands[1];
3387 (define_insn_and_split "*iorsi3_insn"
3388 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3389 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3390 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3395 orn%?\\t%0, %1, #%B2
3399 && CONST_INT_P (operands[2])
3400 && !(const_ok_for_arm (INTVAL (operands[2]))
3401 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3402 [(clobber (const_int 0))]
3404 arm_split_constant (IOR, SImode, curr_insn,
3405 INTVAL (operands[2]), operands[0], operands[1], 0);
3408 [(set_attr "length" "4,4,4,4,16")
3409 (set_attr "arch" "32,t2,t2,32,32")
3410 (set_attr "predicable" "yes")
3411 (set_attr "predicable_short_it" "no,yes,no,no,no")
3412 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3416 [(match_scratch:SI 3 "r")
3417 (set (match_operand:SI 0 "arm_general_register_operand" "")
3418 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3419 (match_operand:SI 2 "const_int_operand" "")))]
3421 && !const_ok_for_arm (INTVAL (operands[2]))
3422 && const_ok_for_arm (~INTVAL (operands[2]))"
3423 [(set (match_dup 3) (match_dup 2))
3424 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3428 (define_insn "*iorsi3_compare0"
3429 [(set (reg:CC_NOOV CC_REGNUM)
3431 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3432 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3434 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3435 (ior:SI (match_dup 1) (match_dup 2)))]
3437 "orrs%?\\t%0, %1, %2"
3438 [(set_attr "conds" "set")
3439 (set_attr "arch" "*,t2,*")
3440 (set_attr "length" "4,2,4")
3441 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3444 (define_insn "*iorsi3_compare0_scratch"
3445 [(set (reg:CC_NOOV CC_REGNUM)
3447 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3448 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3450 (clobber (match_scratch:SI 0 "=r,l,r"))]
3452 "orrs%?\\t%0, %1, %2"
3453 [(set_attr "conds" "set")
3454 (set_attr "arch" "*,t2,*")
3455 (set_attr "length" "4,2,4")
3456 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3459 (define_expand "xorsi3"
3460 [(set (match_operand:SI 0 "s_register_operand")
3461 (xor:SI (match_operand:SI 1 "s_register_operand")
3462 (match_operand:SI 2 "reg_or_int_operand")))]
3464 "if (CONST_INT_P (operands[2]))
3468 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3469 operands[2] = force_reg (SImode, operands[2]);
3472 arm_split_constant (XOR, SImode, NULL_RTX,
3473 INTVAL (operands[2]), operands[0],
3475 optimize && can_create_pseudo_p ());
3479 else /* TARGET_THUMB1 */
3481 rtx tmp = force_reg (SImode, operands[2]);
3482 if (rtx_equal_p (operands[0], operands[1]))
3486 operands[2] = operands[1];
3493 (define_insn_and_split "*arm_xorsi3"
3494 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3495 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3496 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3504 && CONST_INT_P (operands[2])
3505 && !const_ok_for_arm (INTVAL (operands[2]))"
3506 [(clobber (const_int 0))]
3508 arm_split_constant (XOR, SImode, curr_insn,
3509 INTVAL (operands[2]), operands[0], operands[1], 0);
3512 [(set_attr "length" "4,4,4,16")
3513 (set_attr "predicable" "yes")
3514 (set_attr "predicable_short_it" "no,yes,no,no")
3515 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
3518 (define_insn "*xorsi3_compare0"
3519 [(set (reg:CC_NOOV CC_REGNUM)
3520 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3521 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3523 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3524 (xor:SI (match_dup 1) (match_dup 2)))]
3526 "eors%?\\t%0, %1, %2"
3527 [(set_attr "conds" "set")
3528 (set_attr "type" "logics_imm,logics_reg")]
3531 (define_insn "*xorsi3_compare0_scratch"
3532 [(set (reg:CC_NOOV CC_REGNUM)
3533 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3534 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3538 [(set_attr "conds" "set")
3539 (set_attr "type" "logics_imm,logics_reg")]
3542 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3543 ; (NOT D) we can sometimes merge the final NOT into one of the following
3547 [(set (match_operand:SI 0 "s_register_operand" "")
3548 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3549 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3550 (match_operand:SI 3 "arm_rhs_operand" "")))
3551 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3553 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3554 (not:SI (match_dup 3))))
3555 (set (match_dup 0) (not:SI (match_dup 4)))]
3559 (define_insn_and_split "*andsi_iorsi3_notsi"
3560 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3561 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3562 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3563 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3565 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3566 "&& reload_completed"
3567 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3568 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3570 /* If operands[3] is a constant make sure to fold the NOT into it
3571 to avoid creating a NOT of a CONST_INT. */
3572 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3573 if (CONST_INT_P (not_rtx))
3575 operands[4] = operands[0];
3576 operands[5] = not_rtx;
3580 operands[5] = operands[0];
3581 operands[4] = not_rtx;
3584 [(set_attr "length" "8")
3585 (set_attr "ce_count" "2")
3586 (set_attr "predicable" "yes")
3587 (set_attr "type" "multiple")]
3590 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3591 ; insns are available?
3593 [(set (match_operand:SI 0 "s_register_operand" "")
3594 (match_operator:SI 1 "logical_binary_operator"
3595 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3596 (match_operand:SI 3 "const_int_operand" "")
3597 (match_operand:SI 4 "const_int_operand" ""))
3598 (match_operator:SI 9 "logical_binary_operator"
3599 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3600 (match_operand:SI 6 "const_int_operand" ""))
3601 (match_operand:SI 7 "s_register_operand" "")])]))
3602 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3604 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3605 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3608 [(ashift:SI (match_dup 2) (match_dup 4))
3612 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3615 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3619 [(set (match_operand:SI 0 "s_register_operand" "")
3620 (match_operator:SI 1 "logical_binary_operator"
3621 [(match_operator:SI 9 "logical_binary_operator"
3622 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3623 (match_operand:SI 6 "const_int_operand" ""))
3624 (match_operand:SI 7 "s_register_operand" "")])
3625 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3626 (match_operand:SI 3 "const_int_operand" "")
3627 (match_operand:SI 4 "const_int_operand" ""))]))
3628 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3630 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3631 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3634 [(ashift:SI (match_dup 2) (match_dup 4))
3638 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3641 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3645 [(set (match_operand:SI 0 "s_register_operand" "")
3646 (match_operator:SI 1 "logical_binary_operator"
3647 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3648 (match_operand:SI 3 "const_int_operand" "")
3649 (match_operand:SI 4 "const_int_operand" ""))
3650 (match_operator:SI 9 "logical_binary_operator"
3651 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3652 (match_operand:SI 6 "const_int_operand" ""))
3653 (match_operand:SI 7 "s_register_operand" "")])]))
3654 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3656 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3657 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3660 [(ashift:SI (match_dup 2) (match_dup 4))
3664 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3667 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3671 [(set (match_operand:SI 0 "s_register_operand" "")
3672 (match_operator:SI 1 "logical_binary_operator"
3673 [(match_operator:SI 9 "logical_binary_operator"
3674 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3675 (match_operand:SI 6 "const_int_operand" ""))
3676 (match_operand:SI 7 "s_register_operand" "")])
3677 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3678 (match_operand:SI 3 "const_int_operand" "")
3679 (match_operand:SI 4 "const_int_operand" ""))]))
3680 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3682 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3683 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3686 [(ashift:SI (match_dup 2) (match_dup 4))
3690 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3693 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3697 ;; Minimum and maximum insns
3699 (define_expand "smaxsi3"
3701 (set (match_operand:SI 0 "s_register_operand")
3702 (smax:SI (match_operand:SI 1 "s_register_operand")
3703 (match_operand:SI 2 "arm_rhs_operand")))
3704 (clobber (reg:CC CC_REGNUM))])]
3707 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3709 /* No need for a clobber of the condition code register here. */
3710 emit_insn (gen_rtx_SET (operands[0],
3711 gen_rtx_SMAX (SImode, operands[1],
3717 (define_insn "*smax_0"
3718 [(set (match_operand:SI 0 "s_register_operand" "=r")
3719 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3722 "bic%?\\t%0, %1, %1, asr #31"
3723 [(set_attr "predicable" "yes")
3724 (set_attr "type" "logic_shift_reg")]
3727 (define_insn "*smax_m1"
3728 [(set (match_operand:SI 0 "s_register_operand" "=r")
3729 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3732 "orr%?\\t%0, %1, %1, asr #31"
3733 [(set_attr "predicable" "yes")
3734 (set_attr "type" "logic_shift_reg")]
3737 (define_insn_and_split "*arm_smax_insn"
3738 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3739 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3740 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3741 (clobber (reg:CC CC_REGNUM))]
3744 ; cmp\\t%1, %2\;movlt\\t%0, %2
3745 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3747 [(set (reg:CC CC_REGNUM)
3748 (compare:CC (match_dup 1) (match_dup 2)))
3750 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3754 [(set_attr "conds" "clob")
3755 (set_attr "length" "8,12")
3756 (set_attr "type" "multiple")]
3759 (define_expand "sminsi3"
3761 (set (match_operand:SI 0 "s_register_operand")
3762 (smin:SI (match_operand:SI 1 "s_register_operand")
3763 (match_operand:SI 2 "arm_rhs_operand")))
3764 (clobber (reg:CC CC_REGNUM))])]
3767 if (operands[2] == const0_rtx)
3769 /* No need for a clobber of the condition code register here. */
3770 emit_insn (gen_rtx_SET (operands[0],
3771 gen_rtx_SMIN (SImode, operands[1],
3777 (define_insn "*smin_0"
3778 [(set (match_operand:SI 0 "s_register_operand" "=r")
3779 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3782 "and%?\\t%0, %1, %1, asr #31"
3783 [(set_attr "predicable" "yes")
3784 (set_attr "type" "logic_shift_reg")]
3787 (define_insn_and_split "*arm_smin_insn"
3788 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3789 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3790 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3791 (clobber (reg:CC CC_REGNUM))]
3794 ; cmp\\t%1, %2\;movge\\t%0, %2
3795 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3797 [(set (reg:CC CC_REGNUM)
3798 (compare:CC (match_dup 1) (match_dup 2)))
3800 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3804 [(set_attr "conds" "clob")
3805 (set_attr "length" "8,12")
3806 (set_attr "type" "multiple,multiple")]
3809 (define_expand "umaxsi3"
3811 (set (match_operand:SI 0 "s_register_operand")
3812 (umax:SI (match_operand:SI 1 "s_register_operand")
3813 (match_operand:SI 2 "arm_rhs_operand")))
3814 (clobber (reg:CC CC_REGNUM))])]
3819 (define_insn_and_split "*arm_umaxsi3"
3820 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3821 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3822 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3823 (clobber (reg:CC CC_REGNUM))]
3826 ; cmp\\t%1, %2\;movcc\\t%0, %2
3827 ; cmp\\t%1, %2\;movcs\\t%0, %1
3828 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3830 [(set (reg:CC CC_REGNUM)
3831 (compare:CC (match_dup 1) (match_dup 2)))
3833 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3837 [(set_attr "conds" "clob")
3838 (set_attr "length" "8,8,12")
3839 (set_attr "type" "store_4")]
3842 (define_expand "uminsi3"
3844 (set (match_operand:SI 0 "s_register_operand")
3845 (umin:SI (match_operand:SI 1 "s_register_operand")
3846 (match_operand:SI 2 "arm_rhs_operand")))
3847 (clobber (reg:CC CC_REGNUM))])]
3852 (define_insn_and_split "*arm_uminsi3"
3853 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3854 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3855 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3856 (clobber (reg:CC CC_REGNUM))]
3859 ; cmp\\t%1, %2\;movcs\\t%0, %2
3860 ; cmp\\t%1, %2\;movcc\\t%0, %1
3861 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3863 [(set (reg:CC CC_REGNUM)
3864 (compare:CC (match_dup 1) (match_dup 2)))
3866 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3870 [(set_attr "conds" "clob")
3871 (set_attr "length" "8,8,12")
3872 (set_attr "type" "store_4")]
3875 (define_insn "*store_minmaxsi"
3876 [(set (match_operand:SI 0 "memory_operand" "=m")
3877 (match_operator:SI 3 "minmax_operator"
3878 [(match_operand:SI 1 "s_register_operand" "r")
3879 (match_operand:SI 2 "s_register_operand" "r")]))
3880 (clobber (reg:CC CC_REGNUM))]
3881 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3883 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3884 operands[1], operands[2]);
3885 output_asm_insn (\"cmp\\t%1, %2\", operands);
3887 output_asm_insn (\"ite\t%d3\", operands);
3888 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3889 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3892 [(set_attr "conds" "clob")
3893 (set (attr "length")
3894 (if_then_else (eq_attr "is_thumb" "yes")
3897 (set_attr "type" "store_4")]
3900 ; Reject the frame pointer in operand[1], since reloading this after
3901 ; it has been eliminated can cause carnage.
3902 (define_insn "*minmax_arithsi"
3903 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3904 (match_operator:SI 4 "shiftable_operator"
3905 [(match_operator:SI 5 "minmax_operator"
3906 [(match_operand:SI 2 "s_register_operand" "r,r")
3907 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3908 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3909 (clobber (reg:CC CC_REGNUM))]
3910 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3913 enum rtx_code code = GET_CODE (operands[4]);
3916 if (which_alternative != 0 || operands[3] != const0_rtx
3917 || (code != PLUS && code != IOR && code != XOR))
3922 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3923 operands[2], operands[3]);
3924 output_asm_insn (\"cmp\\t%2, %3\", operands);
3928 output_asm_insn (\"ite\\t%d5\", operands);
3930 output_asm_insn (\"it\\t%d5\", operands);
3932 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3934 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3937 [(set_attr "conds" "clob")
3938 (set (attr "length")
3939 (if_then_else (eq_attr "is_thumb" "yes")
3942 (set_attr "type" "multiple")]
3945 ; Reject the frame pointer in operand[1], since reloading this after
3946 ; it has been eliminated can cause carnage.
3947 (define_insn_and_split "*minmax_arithsi_non_canon"
3948 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3950 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3951 (match_operator:SI 4 "minmax_operator"
3952 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3953 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3954 (clobber (reg:CC CC_REGNUM))]
3955 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3956 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3958 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3959 [(set (reg:CC CC_REGNUM)
3960 (compare:CC (match_dup 2) (match_dup 3)))
3962 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3964 (minus:SI (match_dup 1)
3966 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3970 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3971 operands[2], operands[3]);
3972 enum rtx_code rc = minmax_code (operands[4]);
3973 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3974 operands[2], operands[3]);
3976 if (mode == CCFPmode || mode == CCFPEmode)
3977 rc = reverse_condition_maybe_unordered (rc);
3979 rc = reverse_condition (rc);
3980 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3981 if (CONST_INT_P (operands[3]))
3982 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3984 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3986 [(set_attr "conds" "clob")
3987 (set (attr "length")
3988 (if_then_else (eq_attr "is_thumb" "yes")
3991 (set_attr "type" "multiple")]
3994 (define_code_iterator SAT [smin smax])
3995 (define_code_attr SATrev [(smin "smax") (smax "smin")])
3996 (define_code_attr SATlo [(smin "1") (smax "2")])
3997 (define_code_attr SAThi [(smin "2") (smax "1")])
3999 (define_insn "*satsi_<SAT:code>"
4000 [(set (match_operand:SI 0 "s_register_operand" "=r")
4001 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4002 (match_operand:SI 1 "const_int_operand" "i"))
4003 (match_operand:SI 2 "const_int_operand" "i")))]
4004 "TARGET_32BIT && arm_arch6
4005 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4009 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4010 &mask, &signed_sat))
4013 operands[1] = GEN_INT (mask);
4015 return "ssat%?\t%0, %1, %3";
4017 return "usat%?\t%0, %1, %3";
4019 [(set_attr "predicable" "yes")
4020 (set_attr "type" "alus_imm")]
4023 (define_insn "*satsi_<SAT:code>_shift"
4024 [(set (match_operand:SI 0 "s_register_operand" "=r")
4025 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4026 [(match_operand:SI 4 "s_register_operand" "r")
4027 (match_operand:SI 5 "const_int_operand" "i")])
4028 (match_operand:SI 1 "const_int_operand" "i"))
4029 (match_operand:SI 2 "const_int_operand" "i")))]
4030 "TARGET_32BIT && arm_arch6
4031 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4035 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4036 &mask, &signed_sat))
4039 operands[1] = GEN_INT (mask);
4041 return "ssat%?\t%0, %1, %4%S3";
4043 return "usat%?\t%0, %1, %4%S3";
4045 [(set_attr "predicable" "yes")
4046 (set_attr "shift" "3")
4047 (set_attr "type" "logic_shift_reg")])
4049 ;; Shift and rotation insns
4051 (define_expand "ashldi3"
4052 [(set (match_operand:DI 0 "s_register_operand")
4053 (ashift:DI (match_operand:DI 1 "s_register_operand")
4054 (match_operand:SI 2 "reg_or_int_operand")))]
4057 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4058 operands[2], gen_reg_rtx (SImode),
4059 gen_reg_rtx (SImode));
4063 (define_expand "ashlsi3"
4064 [(set (match_operand:SI 0 "s_register_operand")
4065 (ashift:SI (match_operand:SI 1 "s_register_operand")
4066 (match_operand:SI 2 "arm_rhs_operand")))]
4069 if (CONST_INT_P (operands[2])
4070 && (UINTVAL (operands[2])) > 31)
4072 emit_insn (gen_movsi (operands[0], const0_rtx));
4078 (define_expand "ashrdi3"
4079 [(set (match_operand:DI 0 "s_register_operand")
4080 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4081 (match_operand:SI 2 "reg_or_int_operand")))]
4084 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4085 operands[2], gen_reg_rtx (SImode),
4086 gen_reg_rtx (SImode));
4090 (define_expand "ashrsi3"
4091 [(set (match_operand:SI 0 "s_register_operand")
4092 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4093 (match_operand:SI 2 "arm_rhs_operand")))]
4096 if (CONST_INT_P (operands[2])
4097 && UINTVAL (operands[2]) > 31)
4098 operands[2] = GEN_INT (31);
4102 (define_expand "lshrdi3"
4103 [(set (match_operand:DI 0 "s_register_operand")
4104 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4105 (match_operand:SI 2 "reg_or_int_operand")))]
4108 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4109 operands[2], gen_reg_rtx (SImode),
4110 gen_reg_rtx (SImode));
4114 (define_expand "lshrsi3"
4115 [(set (match_operand:SI 0 "s_register_operand")
4116 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4117 (match_operand:SI 2 "arm_rhs_operand")))]
4120 if (CONST_INT_P (operands[2])
4121 && (UINTVAL (operands[2])) > 31)
4123 emit_insn (gen_movsi (operands[0], const0_rtx));
4129 (define_expand "rotlsi3"
4130 [(set (match_operand:SI 0 "s_register_operand")
4131 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4132 (match_operand:SI 2 "reg_or_int_operand")))]
4135 if (CONST_INT_P (operands[2]))
4136 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4139 rtx reg = gen_reg_rtx (SImode);
4140 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4146 (define_expand "rotrsi3"
4147 [(set (match_operand:SI 0 "s_register_operand")
4148 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4149 (match_operand:SI 2 "arm_rhs_operand")))]
4154 if (CONST_INT_P (operands[2])
4155 && UINTVAL (operands[2]) > 31)
4156 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4158 else /* TARGET_THUMB1 */
4160 if (CONST_INT_P (operands [2]))
4161 operands [2] = force_reg (SImode, operands[2]);
4166 (define_insn "*arm_shiftsi3"
4167 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4168 (match_operator:SI 3 "shift_operator"
4169 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4170 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4172 "* return arm_output_shift(operands, 0);"
4173 [(set_attr "predicable" "yes")
4174 (set_attr "arch" "t2,t2,*,*")
4175 (set_attr "predicable_short_it" "yes,yes,no,no")
4176 (set_attr "length" "4")
4177 (set_attr "shift" "1")
4178 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4181 (define_insn "*shiftsi3_compare0"
4182 [(set (reg:CC_NOOV CC_REGNUM)
4183 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4184 [(match_operand:SI 1 "s_register_operand" "r,r")
4185 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4187 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4188 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4190 "* return arm_output_shift(operands, 1);"
4191 [(set_attr "conds" "set")
4192 (set_attr "shift" "1")
4193 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4196 (define_insn "*shiftsi3_compare0_scratch"
4197 [(set (reg:CC_NOOV CC_REGNUM)
4198 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4199 [(match_operand:SI 1 "s_register_operand" "r,r")
4200 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4202 (clobber (match_scratch:SI 0 "=r,r"))]
4204 "* return arm_output_shift(operands, 1);"
4205 [(set_attr "conds" "set")
4206 (set_attr "shift" "1")
4207 (set_attr "type" "shift_imm,shift_reg")]
4210 (define_insn "*not_shiftsi"
4211 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4212 (not:SI (match_operator:SI 3 "shift_operator"
4213 [(match_operand:SI 1 "s_register_operand" "r,r")
4214 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4217 [(set_attr "predicable" "yes")
4218 (set_attr "shift" "1")
4219 (set_attr "arch" "32,a")
4220 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4222 (define_insn "*not_shiftsi_compare0"
4223 [(set (reg:CC_NOOV CC_REGNUM)
4225 (not:SI (match_operator:SI 3 "shift_operator"
4226 [(match_operand:SI 1 "s_register_operand" "r,r")
4227 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4229 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4230 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4232 "mvns%?\\t%0, %1%S3"
4233 [(set_attr "conds" "set")
4234 (set_attr "shift" "1")
4235 (set_attr "arch" "32,a")
4236 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4238 (define_insn "*not_shiftsi_compare0_scratch"
4239 [(set (reg:CC_NOOV CC_REGNUM)
4241 (not:SI (match_operator:SI 3 "shift_operator"
4242 [(match_operand:SI 1 "s_register_operand" "r,r")
4243 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4245 (clobber (match_scratch:SI 0 "=r,r"))]
4247 "mvns%?\\t%0, %1%S3"
4248 [(set_attr "conds" "set")
4249 (set_attr "shift" "1")
4250 (set_attr "arch" "32,a")
4251 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4253 ;; We don't really have extzv, but defining this using shifts helps
4254 ;; to reduce register pressure later on.
4256 (define_expand "extzv"
4257 [(set (match_operand 0 "s_register_operand")
4258 (zero_extract (match_operand 1 "nonimmediate_operand")
4259 (match_operand 2 "const_int_operand")
4260 (match_operand 3 "const_int_operand")))]
4261 "TARGET_THUMB1 || arm_arch_thumb2"
4264 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4265 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4267 if (arm_arch_thumb2)
4269 HOST_WIDE_INT width = INTVAL (operands[2]);
4270 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4272 if (unaligned_access && MEM_P (operands[1])
4273 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4277 if (BYTES_BIG_ENDIAN)
4278 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4283 base_addr = adjust_address (operands[1], SImode,
4284 bitpos / BITS_PER_UNIT);
4285 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4289 rtx dest = operands[0];
4290 rtx tmp = gen_reg_rtx (SImode);
4292 /* We may get a paradoxical subreg here. Strip it off. */
4293 if (GET_CODE (dest) == SUBREG
4294 && GET_MODE (dest) == SImode
4295 && GET_MODE (SUBREG_REG (dest)) == HImode)
4296 dest = SUBREG_REG (dest);
4298 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4301 base_addr = adjust_address (operands[1], HImode,
4302 bitpos / BITS_PER_UNIT);
4303 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4304 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4308 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4310 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4318 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4321 operands[3] = GEN_INT (rshift);
4325 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4329 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4330 operands[3], gen_reg_rtx (SImode)));
4335 ;; Helper for extzv, for the Thumb-1 register-shifts case.
4337 (define_expand "extzv_t1"
4338 [(set (match_operand:SI 4 "s_register_operand")
4339 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4340 (match_operand:SI 2 "const_int_operand")))
4341 (set (match_operand:SI 0 "s_register_operand")
4342 (lshiftrt:SI (match_dup 4)
4343 (match_operand:SI 3 "const_int_operand")))]
4347 (define_expand "extv"
4348 [(set (match_operand 0 "s_register_operand")
4349 (sign_extract (match_operand 1 "nonimmediate_operand")
4350 (match_operand 2 "const_int_operand")
4351 (match_operand 3 "const_int_operand")))]
4354 HOST_WIDE_INT width = INTVAL (operands[2]);
4355 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4357 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4358 && (bitpos % BITS_PER_UNIT) == 0)
4362 if (BYTES_BIG_ENDIAN)
4363 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4367 base_addr = adjust_address (operands[1], SImode,
4368 bitpos / BITS_PER_UNIT);
4369 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4373 rtx dest = operands[0];
4374 rtx tmp = gen_reg_rtx (SImode);
4376 /* We may get a paradoxical subreg here. Strip it off. */
4377 if (GET_CODE (dest) == SUBREG
4378 && GET_MODE (dest) == SImode
4379 && GET_MODE (SUBREG_REG (dest)) == HImode)
4380 dest = SUBREG_REG (dest);
4382 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4385 base_addr = adjust_address (operands[1], HImode,
4386 bitpos / BITS_PER_UNIT);
4387 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4388 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4393 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4395 else if (GET_MODE (operands[0]) == SImode
4396 && GET_MODE (operands[1]) == SImode)
4398 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4406 ; Helper to expand register forms of extv with the proper modes.
4408 (define_expand "extv_regsi"
4409 [(set (match_operand:SI 0 "s_register_operand")
4410 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4411 (match_operand 2 "const_int_operand")
4412 (match_operand 3 "const_int_operand")))]
4417 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4419 (define_insn "unaligned_loaddi"
4420 [(set (match_operand:DI 0 "s_register_operand" "=r")
4421 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4422 UNSPEC_UNALIGNED_LOAD))]
4423 "TARGET_32BIT && TARGET_LDRD"
4425 return output_move_double (operands, true, NULL);
4427 [(set_attr "length" "8")
4428 (set_attr "type" "load_8")])
4430 (define_insn "unaligned_loadsi"
4431 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4432 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4433 UNSPEC_UNALIGNED_LOAD))]
4436 ldr\t%0, %1\t@ unaligned
4437 ldr%?\t%0, %1\t@ unaligned
4438 ldr%?\t%0, %1\t@ unaligned"
4439 [(set_attr "arch" "t1,t2,32")
4440 (set_attr "length" "2,2,4")
4441 (set_attr "predicable" "no,yes,yes")
4442 (set_attr "predicable_short_it" "no,yes,no")
4443 (set_attr "type" "load_4")])
4445 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4446 ;; address (there's no immediate format). That's tricky to support
4447 ;; here and we don't really need this pattern for that case, so only
4448 ;; enable for 32-bit ISAs.
4449 (define_insn "unaligned_loadhis"
4450 [(set (match_operand:SI 0 "s_register_operand" "=r")
4452 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4453 UNSPEC_UNALIGNED_LOAD)))]
4454 "unaligned_access && TARGET_32BIT"
4455 "ldrsh%?\t%0, %1\t@ unaligned"
4456 [(set_attr "predicable" "yes")
4457 (set_attr "type" "load_byte")])
4459 (define_insn "unaligned_loadhiu"
4460 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4462 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4463 UNSPEC_UNALIGNED_LOAD)))]
4466 ldrh\t%0, %1\t@ unaligned
4467 ldrh%?\t%0, %1\t@ unaligned
4468 ldrh%?\t%0, %1\t@ unaligned"
4469 [(set_attr "arch" "t1,t2,32")
4470 (set_attr "length" "2,2,4")
4471 (set_attr "predicable" "no,yes,yes")
4472 (set_attr "predicable_short_it" "no,yes,no")
4473 (set_attr "type" "load_byte")])
4475 (define_insn "unaligned_storedi"
4476 [(set (match_operand:DI 0 "memory_operand" "=m")
4477 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4478 UNSPEC_UNALIGNED_STORE))]
4479 "TARGET_32BIT && TARGET_LDRD"
4481 return output_move_double (operands, true, NULL);
4483 [(set_attr "length" "8")
4484 (set_attr "type" "store_8")])
4486 (define_insn "unaligned_storesi"
4487 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4488 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4489 UNSPEC_UNALIGNED_STORE))]
4492 str\t%1, %0\t@ unaligned
4493 str%?\t%1, %0\t@ unaligned
4494 str%?\t%1, %0\t@ unaligned"
4495 [(set_attr "arch" "t1,t2,32")
4496 (set_attr "length" "2,2,4")
4497 (set_attr "predicable" "no,yes,yes")
4498 (set_attr "predicable_short_it" "no,yes,no")
4499 (set_attr "type" "store_4")])
4501 (define_insn "unaligned_storehi"
4502 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4503 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4504 UNSPEC_UNALIGNED_STORE))]
4507 strh\t%1, %0\t@ unaligned
4508 strh%?\t%1, %0\t@ unaligned
4509 strh%?\t%1, %0\t@ unaligned"
4510 [(set_attr "arch" "t1,t2,32")
4511 (set_attr "length" "2,2,4")
4512 (set_attr "predicable" "no,yes,yes")
4513 (set_attr "predicable_short_it" "no,yes,no")
4514 (set_attr "type" "store_4")])
4517 (define_insn "*extv_reg"
4518 [(set (match_operand:SI 0 "s_register_operand" "=r")
4519 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4520 (match_operand:SI 2 "const_int_operand" "n")
4521 (match_operand:SI 3 "const_int_operand" "n")))]
4523 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4524 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4525 "sbfx%?\t%0, %1, %3, %2"
4526 [(set_attr "length" "4")
4527 (set_attr "predicable" "yes")
4528 (set_attr "type" "bfm")]
4531 (define_insn "extzv_t2"
4532 [(set (match_operand:SI 0 "s_register_operand" "=r")
4533 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4534 (match_operand:SI 2 "const_int_operand" "n")
4535 (match_operand:SI 3 "const_int_operand" "n")))]
4537 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4538 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4539 "ubfx%?\t%0, %1, %3, %2"
4540 [(set_attr "length" "4")
4541 (set_attr "predicable" "yes")
4542 (set_attr "type" "bfm")]
4546 ;; Division instructions
4547 (define_insn "divsi3"
4548 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4549 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4550 (match_operand:SI 2 "s_register_operand" "r,r")))]
4555 [(set_attr "arch" "32,v8mb")
4556 (set_attr "predicable" "yes")
4557 (set_attr "type" "sdiv")]
4560 (define_insn "udivsi3"
4561 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4562 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4563 (match_operand:SI 2 "s_register_operand" "r,r")))]
4568 [(set_attr "arch" "32,v8mb")
4569 (set_attr "predicable" "yes")
4570 (set_attr "type" "udiv")]
4574 ;; Unary arithmetic insns
4576 (define_expand "negv<SIDI:mode>3"
4577 [(match_operand:SIDI 0 "s_register_operand")
4578 (match_operand:SIDI 1 "s_register_operand")
4579 (match_operand 2 "")]
4582 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
4587 (define_expand "negsi2"
4588 [(set (match_operand:SI 0 "s_register_operand")
4589 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4594 (define_insn "*arm_negsi2"
4595 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4596 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4598 "rsb%?\\t%0, %1, #0"
4599 [(set_attr "predicable" "yes")
4600 (set_attr "predicable_short_it" "yes,no")
4601 (set_attr "arch" "t2,*")
4602 (set_attr "length" "4")
4603 (set_attr "type" "alu_imm")]
4606 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4607 ;; rather than (0 cmp reg). This gives the same results for unsigned
4608 ;; and equality compares which is what we mostly need here.
4609 (define_insn "negsi2_0compare"
4610 [(set (reg:CC_RSB CC_REGNUM)
4611 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4613 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4614 (neg:SI (match_dup 1)))]
4619 [(set_attr "conds" "set")
4620 (set_attr "arch" "t2,*")
4621 (set_attr "length" "2,*")
4622 (set_attr "type" "alus_imm")]
4625 (define_insn "negsi2_carryin"
4626 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4627 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4628 (match_operand:SI 2 "arm_borrow_operation" "")))]
4632 sbc\\t%0, %1, %1, lsl #1"
4633 [(set_attr "conds" "use")
4634 (set_attr "arch" "a,t2")
4635 (set_attr "type" "adc_imm,adc_reg")]
4638 (define_expand "negsf2"
4639 [(set (match_operand:SF 0 "s_register_operand")
4640 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4641 "TARGET_32BIT && TARGET_HARD_FLOAT"
4645 (define_expand "negdf2"
4646 [(set (match_operand:DF 0 "s_register_operand")
4647 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4648 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4651 ;; abssi2 doesn't really clobber the condition codes if a different register
4652 ;; is being set. To keep things simple, assume during rtl manipulations that
4653 ;; it does, but tell the final scan operator the truth. Similarly for
4656 (define_expand "abssi2"
4658 [(set (match_operand:SI 0 "s_register_operand")
4659 (abs:SI (match_operand:SI 1 "s_register_operand")))
4660 (clobber (match_dup 2))])]
4664 operands[2] = gen_rtx_SCRATCH (SImode);
4666 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4669 (define_insn_and_split "*arm_abssi2"
4670 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4671 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4672 (clobber (reg:CC CC_REGNUM))]
4675 "&& reload_completed"
4678 /* if (which_alternative == 0) */
4679 if (REGNO(operands[0]) == REGNO(operands[1]))
4681 /* Emit the pattern:
4682 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4683 [(set (reg:CC CC_REGNUM)
4684 (compare:CC (match_dup 0) (const_int 0)))
4685 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4686 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4688 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4689 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4690 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4691 (gen_rtx_LT (SImode,
4692 gen_rtx_REG (CCmode, CC_REGNUM),
4694 (gen_rtx_SET (operands[0],
4695 (gen_rtx_MINUS (SImode,
4702 /* Emit the pattern:
4703 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4705 (xor:SI (match_dup 1)
4706 (ashiftrt:SI (match_dup 1) (const_int 31))))
4708 (minus:SI (match_dup 0)
4709 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4711 emit_insn (gen_rtx_SET (operands[0],
4712 gen_rtx_XOR (SImode,
4713 gen_rtx_ASHIFTRT (SImode,
4717 emit_insn (gen_rtx_SET (operands[0],
4718 gen_rtx_MINUS (SImode,
4720 gen_rtx_ASHIFTRT (SImode,
4726 [(set_attr "conds" "clob,*")
4727 (set_attr "shift" "1")
4728 (set_attr "predicable" "no, yes")
4729 (set_attr "length" "8")
4730 (set_attr "type" "multiple")]
4733 (define_insn_and_split "*arm_neg_abssi2"
4734 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4735 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4736 (clobber (reg:CC CC_REGNUM))]
4739 "&& reload_completed"
4742 /* if (which_alternative == 0) */
4743 if (REGNO (operands[0]) == REGNO (operands[1]))
4745 /* Emit the pattern:
4746 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4748 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4749 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4750 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4752 gen_rtx_REG (CCmode, CC_REGNUM),
4754 gen_rtx_SET (operands[0],
4755 (gen_rtx_MINUS (SImode,
4761 /* Emit the pattern:
4762 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4764 emit_insn (gen_rtx_SET (operands[0],
4765 gen_rtx_XOR (SImode,
4766 gen_rtx_ASHIFTRT (SImode,
4770 emit_insn (gen_rtx_SET (operands[0],
4771 gen_rtx_MINUS (SImode,
4772 gen_rtx_ASHIFTRT (SImode,
4779 [(set_attr "conds" "clob,*")
4780 (set_attr "shift" "1")
4781 (set_attr "predicable" "no, yes")
4782 (set_attr "length" "8")
4783 (set_attr "type" "multiple")]
4786 (define_expand "abssf2"
4787 [(set (match_operand:SF 0 "s_register_operand")
4788 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4789 "TARGET_32BIT && TARGET_HARD_FLOAT"
4792 (define_expand "absdf2"
4793 [(set (match_operand:DF 0 "s_register_operand")
4794 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4795 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4798 (define_expand "sqrtsf2"
4799 [(set (match_operand:SF 0 "s_register_operand")
4800 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4801 "TARGET_32BIT && TARGET_HARD_FLOAT"
4804 (define_expand "sqrtdf2"
4805 [(set (match_operand:DF 0 "s_register_operand")
4806 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4807 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4810 (define_expand "one_cmplsi2"
4811 [(set (match_operand:SI 0 "s_register_operand")
4812 (not:SI (match_operand:SI 1 "s_register_operand")))]
4817 (define_insn "*arm_one_cmplsi2"
4818 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4819 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4822 [(set_attr "predicable" "yes")
4823 (set_attr "predicable_short_it" "yes,no")
4824 (set_attr "arch" "t2,*")
4825 (set_attr "length" "4")
4826 (set_attr "type" "mvn_reg")]
4829 (define_insn "*notsi_compare0"
4830 [(set (reg:CC_NOOV CC_REGNUM)
4831 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4833 (set (match_operand:SI 0 "s_register_operand" "=r")
4834 (not:SI (match_dup 1)))]
4837 [(set_attr "conds" "set")
4838 (set_attr "type" "mvn_reg")]
4841 (define_insn "*notsi_compare0_scratch"
4842 [(set (reg:CC_NOOV CC_REGNUM)
4843 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4845 (clobber (match_scratch:SI 0 "=r"))]
4848 [(set_attr "conds" "set")
4849 (set_attr "type" "mvn_reg")]
4852 ;; Fixed <--> Floating conversion insns
4854 (define_expand "floatsihf2"
4855 [(set (match_operand:HF 0 "general_operand")
4856 (float:HF (match_operand:SI 1 "general_operand")))]
4860 rtx op1 = gen_reg_rtx (SFmode);
4861 expand_float (op1, operands[1], 0);
4862 op1 = convert_to_mode (HFmode, op1, 0);
4863 emit_move_insn (operands[0], op1);
4868 (define_expand "floatdihf2"
4869 [(set (match_operand:HF 0 "general_operand")
4870 (float:HF (match_operand:DI 1 "general_operand")))]
4874 rtx op1 = gen_reg_rtx (SFmode);
4875 expand_float (op1, operands[1], 0);
4876 op1 = convert_to_mode (HFmode, op1, 0);
4877 emit_move_insn (operands[0], op1);
4882 (define_expand "floatsisf2"
4883 [(set (match_operand:SF 0 "s_register_operand")
4884 (float:SF (match_operand:SI 1 "s_register_operand")))]
4885 "TARGET_32BIT && TARGET_HARD_FLOAT"
4889 (define_expand "floatsidf2"
4890 [(set (match_operand:DF 0 "s_register_operand")
4891 (float:DF (match_operand:SI 1 "s_register_operand")))]
4892 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4896 (define_expand "fix_trunchfsi2"
4897 [(set (match_operand:SI 0 "general_operand")
4898 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4902 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4903 expand_fix (operands[0], op1, 0);
4908 (define_expand "fix_trunchfdi2"
4909 [(set (match_operand:DI 0 "general_operand")
4910 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4914 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4915 expand_fix (operands[0], op1, 0);
4920 (define_expand "fix_truncsfsi2"
4921 [(set (match_operand:SI 0 "s_register_operand")
4922 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4923 "TARGET_32BIT && TARGET_HARD_FLOAT"
4927 (define_expand "fix_truncdfsi2"
4928 [(set (match_operand:SI 0 "s_register_operand")
4929 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4930 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4936 (define_expand "truncdfsf2"
4937 [(set (match_operand:SF 0 "s_register_operand")
4939 (match_operand:DF 1 "s_register_operand")))]
4940 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4944 ;; DFmode to HFmode conversions on targets without a single-step hardware
4945 ;; instruction for it would have to go through SFmode. This is dangerous
4946 ;; as it introduces double rounding.
4948 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4949 ;; a single-step instruction.
4951 (define_expand "truncdfhf2"
4952 [(set (match_operand:HF 0 "s_register_operand")
4954 (match_operand:DF 1 "s_register_operand")))]
4955 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4956 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4958 /* We don't have a direct instruction for this, so we must be in
4959 an unsafe math mode, and going via SFmode. */
4961 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
4964 op1 = convert_to_mode (SFmode, operands[1], 0);
4965 op1 = convert_to_mode (HFmode, op1, 0);
4966 emit_move_insn (operands[0], op1);
4969 /* Otherwise, we will pick this up as a single instruction with
4970 no intermediary rounding. */
4974 ;; Zero and sign extension instructions.
4976 (define_expand "zero_extend<mode>di2"
4977 [(set (match_operand:DI 0 "s_register_operand" "")
4978 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
4979 "TARGET_32BIT <qhs_zextenddi_cond>"
4981 rtx res_lo, res_hi, op0_lo, op0_hi;
4982 res_lo = gen_lowpart (SImode, operands[0]);
4983 res_hi = gen_highpart (SImode, operands[0]);
4984 if (can_create_pseudo_p ())
4986 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
4987 op0_hi = gen_reg_rtx (SImode);
4991 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
4994 if (<MODE>mode != SImode)
4995 emit_insn (gen_rtx_SET (op0_lo,
4996 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4997 emit_insn (gen_movsi (op0_hi, const0_rtx));
4998 if (res_lo != op0_lo)
4999 emit_move_insn (res_lo, op0_lo);
5000 if (res_hi != op0_hi)
5001 emit_move_insn (res_hi, op0_hi);
5006 (define_expand "extend<mode>di2"
5007 [(set (match_operand:DI 0 "s_register_operand" "")
5008 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5009 "TARGET_32BIT <qhs_sextenddi_cond>"
5011 rtx res_lo, res_hi, op0_lo, op0_hi;
5012 res_lo = gen_lowpart (SImode, operands[0]);
5013 res_hi = gen_highpart (SImode, operands[0]);
5014 if (can_create_pseudo_p ())
5016 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5017 op0_hi = gen_reg_rtx (SImode);
5021 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5024 if (<MODE>mode != SImode)
5025 emit_insn (gen_rtx_SET (op0_lo,
5026 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5027 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5028 if (res_lo != op0_lo)
5029 emit_move_insn (res_lo, op0_lo);
5030 if (res_hi != op0_hi)
5031 emit_move_insn (res_hi, op0_hi);
5036 ;; Splits for all extensions to DImode
5038 [(set (match_operand:DI 0 "s_register_operand" "")
5039 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5041 [(set (match_dup 0) (match_dup 1))]
5043 rtx lo_part = gen_lowpart (SImode, operands[0]);
5044 machine_mode src_mode = GET_MODE (operands[1]);
5046 if (src_mode == SImode)
5047 emit_move_insn (lo_part, operands[1]);
5049 emit_insn (gen_rtx_SET (lo_part,
5050 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5051 operands[0] = gen_highpart (SImode, operands[0]);
5052 operands[1] = const0_rtx;
5056 [(set (match_operand:DI 0 "s_register_operand" "")
5057 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5059 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5061 rtx lo_part = gen_lowpart (SImode, operands[0]);
5062 machine_mode src_mode = GET_MODE (operands[1]);
5064 if (src_mode == SImode)
5065 emit_move_insn (lo_part, operands[1]);
5067 emit_insn (gen_rtx_SET (lo_part,
5068 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5069 operands[1] = lo_part;
5070 operands[0] = gen_highpart (SImode, operands[0]);
5073 (define_expand "zero_extendhisi2"
5074 [(set (match_operand:SI 0 "s_register_operand")
5075 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5078 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5080 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5083 if (!arm_arch6 && !MEM_P (operands[1]))
5085 rtx t = gen_lowpart (SImode, operands[1]);
5086 rtx tmp = gen_reg_rtx (SImode);
5087 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5088 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5094 [(set (match_operand:SI 0 "s_register_operand" "")
5095 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5096 "!TARGET_THUMB2 && !arm_arch6"
5097 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5098 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5100 operands[2] = gen_lowpart (SImode, operands[1]);
5103 (define_insn "*arm_zero_extendhisi2"
5104 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5105 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5106 "TARGET_ARM && arm_arch4 && !arm_arch6"
5110 [(set_attr "type" "alu_shift_reg,load_byte")
5111 (set_attr "predicable" "yes")]
5114 (define_insn "*arm_zero_extendhisi2_v6"
5115 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5116 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5117 "TARGET_ARM && arm_arch6"
5121 [(set_attr "predicable" "yes")
5122 (set_attr "type" "extend,load_byte")]
5125 (define_insn "*arm_zero_extendhisi2addsi"
5126 [(set (match_operand:SI 0 "s_register_operand" "=r")
5127 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5128 (match_operand:SI 2 "s_register_operand" "r")))]
5130 "uxtah%?\\t%0, %2, %1"
5131 [(set_attr "type" "alu_shift_reg")
5132 (set_attr "predicable" "yes")]
5135 (define_expand "zero_extendqisi2"
5136 [(set (match_operand:SI 0 "s_register_operand")
5137 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5140 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5142 emit_insn (gen_andsi3 (operands[0],
5143 gen_lowpart (SImode, operands[1]),
5147 if (!arm_arch6 && !MEM_P (operands[1]))
5149 rtx t = gen_lowpart (SImode, operands[1]);
5150 rtx tmp = gen_reg_rtx (SImode);
5151 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5152 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5158 [(set (match_operand:SI 0 "s_register_operand" "")
5159 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5161 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5162 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5164 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5167 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5172 (define_insn "*arm_zero_extendqisi2"
5173 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5174 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5175 "TARGET_ARM && !arm_arch6"
5178 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5179 [(set_attr "length" "8,4")
5180 (set_attr "type" "alu_shift_reg,load_byte")
5181 (set_attr "predicable" "yes")]
5184 (define_insn "*arm_zero_extendqisi2_v6"
5185 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5186 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5187 "TARGET_ARM && arm_arch6"
5190 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5191 [(set_attr "type" "extend,load_byte")
5192 (set_attr "predicable" "yes")]
5195 (define_insn "*arm_zero_extendqisi2addsi"
5196 [(set (match_operand:SI 0 "s_register_operand" "=r")
5197 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5198 (match_operand:SI 2 "s_register_operand" "r")))]
5200 "uxtab%?\\t%0, %2, %1"
5201 [(set_attr "predicable" "yes")
5202 (set_attr "type" "alu_shift_reg")]
5206 [(set (match_operand:SI 0 "s_register_operand" "")
5207 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5208 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5209 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5210 [(set (match_dup 2) (match_dup 1))
5211 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5216 [(set (match_operand:SI 0 "s_register_operand" "")
5217 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5218 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5219 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5220 [(set (match_dup 2) (match_dup 1))
5221 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5227 [(set (match_operand:SI 0 "s_register_operand" "")
5228 (IOR_XOR:SI (and:SI (ashift:SI
5229 (match_operand:SI 1 "s_register_operand" "")
5230 (match_operand:SI 2 "const_int_operand" ""))
5231 (match_operand:SI 3 "const_int_operand" ""))
5233 (match_operator 5 "subreg_lowpart_operator"
5234 [(match_operand:SI 4 "s_register_operand" "")]))))]
5236 && (UINTVAL (operands[3])
5237 == (GET_MODE_MASK (GET_MODE (operands[5]))
5238 & (GET_MODE_MASK (GET_MODE (operands[5]))
5239 << (INTVAL (operands[2])))))"
5240 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5242 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5243 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5246 (define_insn "*compareqi_eq0"
5247 [(set (reg:CC_Z CC_REGNUM)
5248 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5252 [(set_attr "conds" "set")
5253 (set_attr "predicable" "yes")
5254 (set_attr "type" "logic_imm")]
5257 (define_expand "extendhisi2"
5258 [(set (match_operand:SI 0 "s_register_operand")
5259 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5264 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5267 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5269 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5273 if (!arm_arch6 && !MEM_P (operands[1]))
5275 rtx t = gen_lowpart (SImode, operands[1]);
5276 rtx tmp = gen_reg_rtx (SImode);
5277 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5278 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5285 [(set (match_operand:SI 0 "register_operand" "")
5286 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5287 (clobber (match_scratch:SI 2 ""))])]
5289 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5290 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5292 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5295 ;; This pattern will only be used when ldsh is not available
5296 (define_expand "extendhisi2_mem"
5297 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5299 (zero_extend:SI (match_dup 7)))
5300 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5301 (set (match_operand:SI 0 "" "")
5302 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5307 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5309 mem1 = change_address (operands[1], QImode, addr);
5310 mem2 = change_address (operands[1], QImode,
5311 plus_constant (Pmode, addr, 1));
5312 operands[0] = gen_lowpart (SImode, operands[0]);
5314 operands[2] = gen_reg_rtx (SImode);
5315 operands[3] = gen_reg_rtx (SImode);
5316 operands[6] = gen_reg_rtx (SImode);
5319 if (BYTES_BIG_ENDIAN)
5321 operands[4] = operands[2];
5322 operands[5] = operands[3];
5326 operands[4] = operands[3];
5327 operands[5] = operands[2];
5333 [(set (match_operand:SI 0 "register_operand" "")
5334 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5336 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5337 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5339 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5342 (define_insn "*arm_extendhisi2"
5343 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5344 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5345 "TARGET_ARM && arm_arch4 && !arm_arch6"
5349 [(set_attr "length" "8,4")
5350 (set_attr "type" "alu_shift_reg,load_byte")
5351 (set_attr "predicable" "yes")]
5354 ;; ??? Check Thumb-2 pool range
5355 (define_insn "*arm_extendhisi2_v6"
5356 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5357 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5358 "TARGET_32BIT && arm_arch6"
5362 [(set_attr "type" "extend,load_byte")
5363 (set_attr "predicable" "yes")]
5366 (define_insn "*arm_extendhisi2addsi"
5367 [(set (match_operand:SI 0 "s_register_operand" "=r")
5368 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5369 (match_operand:SI 2 "s_register_operand" "r")))]
5371 "sxtah%?\\t%0, %2, %1"
5372 [(set_attr "type" "alu_shift_reg")]
5375 (define_expand "extendqihi2"
5377 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5379 (set (match_operand:HI 0 "s_register_operand")
5380 (ashiftrt:SI (match_dup 2)
5385 if (arm_arch4 && MEM_P (operands[1]))
5387 emit_insn (gen_rtx_SET (operands[0],
5388 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5391 if (!s_register_operand (operands[1], QImode))
5392 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5393 operands[0] = gen_lowpart (SImode, operands[0]);
5394 operands[1] = gen_lowpart (SImode, operands[1]);
5395 operands[2] = gen_reg_rtx (SImode);
5399 (define_insn "*arm_extendqihi_insn"
5400 [(set (match_operand:HI 0 "s_register_operand" "=r")
5401 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5402 "TARGET_ARM && arm_arch4"
5404 [(set_attr "type" "load_byte")
5405 (set_attr "predicable" "yes")]
5408 (define_expand "extendqisi2"
5409 [(set (match_operand:SI 0 "s_register_operand")
5410 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5413 if (!arm_arch4 && MEM_P (operands[1]))
5414 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5416 if (!arm_arch6 && !MEM_P (operands[1]))
5418 rtx t = gen_lowpart (SImode, operands[1]);
5419 rtx tmp = gen_reg_rtx (SImode);
5420 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5421 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5427 [(set (match_operand:SI 0 "register_operand" "")
5428 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5430 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5431 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5433 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5436 (define_insn "*arm_extendqisi"
5437 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5438 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5439 "TARGET_ARM && arm_arch4 && !arm_arch6"
5443 [(set_attr "length" "8,4")
5444 (set_attr "type" "alu_shift_reg,load_byte")
5445 (set_attr "predicable" "yes")]
5448 (define_insn "*arm_extendqisi_v6"
5449 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5451 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5452 "TARGET_ARM && arm_arch6"
5456 [(set_attr "type" "extend,load_byte")
5457 (set_attr "predicable" "yes")]
5460 (define_insn "*arm_extendqisi2addsi"
5461 [(set (match_operand:SI 0 "s_register_operand" "=r")
5462 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5463 (match_operand:SI 2 "s_register_operand" "r")))]
5465 "sxtab%?\\t%0, %2, %1"
5466 [(set_attr "type" "alu_shift_reg")
5467 (set_attr "predicable" "yes")]
5470 (define_insn "arm_<sup>xtb16"
5471 [(set (match_operand:SI 0 "s_register_operand" "=r")
5473 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5475 "<sup>xtb16%?\\t%0, %1"
5476 [(set_attr "predicable" "yes")
5477 (set_attr "type" "alu_dsp_reg")])
5479 (define_insn "arm_<simd32_op>"
5480 [(set (match_operand:SI 0 "s_register_operand" "=r")
5482 [(match_operand:SI 1 "s_register_operand" "r")
5483 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5485 "<simd32_op>%?\\t%0, %1, %2"
5486 [(set_attr "predicable" "yes")
5487 (set_attr "type" "alu_dsp_reg")])
5489 (define_insn "arm_usada8"
5490 [(set (match_operand:SI 0 "s_register_operand" "=r")
5492 [(match_operand:SI 1 "s_register_operand" "r")
5493 (match_operand:SI 2 "s_register_operand" "r")
5494 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5496 "usada8%?\\t%0, %1, %2, %3"
5497 [(set_attr "predicable" "yes")
5498 (set_attr "type" "alu_dsp_reg")])
5500 (define_insn "arm_<simd32_op>"
5501 [(set (match_operand:DI 0 "s_register_operand" "=r")
5503 [(match_operand:SI 1 "s_register_operand" "r")
5504 (match_operand:SI 2 "s_register_operand" "r")
5505 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5507 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5508 [(set_attr "predicable" "yes")
5509 (set_attr "type" "smlald")])
5511 (define_expand "extendsfdf2"
5512 [(set (match_operand:DF 0 "s_register_operand")
5513 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5514 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5518 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5519 ;; must go through SFmode.
5521 ;; This is always safe for an extend.
5523 (define_expand "extendhfdf2"
5524 [(set (match_operand:DF 0 "s_register_operand")
5525 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5528 /* We don't have a direct instruction for this, so go via SFmode. */
5529 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5532 op1 = convert_to_mode (SFmode, operands[1], 0);
5533 op1 = convert_to_mode (DFmode, op1, 0);
5534 emit_insn (gen_movdf (operands[0], op1));
5537 /* Otherwise, we're done producing RTL and will pick up the correct
5538 pattern to do this with one rounding-step in a single instruction. */
5542 ;; Move insns (including loads and stores)
5544 ;; XXX Just some ideas about movti.
5545 ;; I don't think these are a good idea on the arm, there just aren't enough
5547 ;;(define_expand "loadti"
5548 ;; [(set (match_operand:TI 0 "s_register_operand")
5549 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5552 ;;(define_expand "storeti"
5553 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5554 ;; (match_operand:TI 1 "s_register_operand"))]
5557 ;;(define_expand "movti"
5558 ;; [(set (match_operand:TI 0 "general_operand")
5559 ;; (match_operand:TI 1 "general_operand"))]
5565 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5566 ;; operands[1] = copy_to_reg (operands[1]);
5567 ;; if (MEM_P (operands[0]))
5568 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5569 ;; else if (MEM_P (operands[1]))
5570 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5574 ;; emit_insn (insn);
5578 ;; Recognize garbage generated above.
5581 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5582 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5586 ;; register mem = (which_alternative < 3);
5587 ;; register const char *template;
5589 ;; operands[mem] = XEXP (operands[mem], 0);
5590 ;; switch (which_alternative)
5592 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5593 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5594 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5595 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5596 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5597 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5599 ;; output_asm_insn (template, operands);
5603 (define_expand "movdi"
5604 [(set (match_operand:DI 0 "general_operand")
5605 (match_operand:DI 1 "general_operand"))]
5608 gcc_checking_assert (aligned_operand (operands[0], DImode));
5609 gcc_checking_assert (aligned_operand (operands[1], DImode));
5610 if (can_create_pseudo_p ())
5612 if (!REG_P (operands[0]))
5613 operands[1] = force_reg (DImode, operands[1]);
5615 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5616 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5618 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5619 when expanding function calls. */
5620 gcc_assert (can_create_pseudo_p ());
5621 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5623 /* Perform load into legal reg pair first, then move. */
5624 rtx reg = gen_reg_rtx (DImode);
5625 emit_insn (gen_movdi (reg, operands[1]));
5628 emit_move_insn (gen_lowpart (SImode, operands[0]),
5629 gen_lowpart (SImode, operands[1]));
5630 emit_move_insn (gen_highpart (SImode, operands[0]),
5631 gen_highpart (SImode, operands[1]));
5634 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5635 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5637 /* Avoid STRD's from an odd-numbered register pair in ARM state
5638 when expanding function prologue. */
5639 gcc_assert (can_create_pseudo_p ());
5640 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5641 ? gen_reg_rtx (DImode)
5643 emit_move_insn (gen_lowpart (SImode, split_dest),
5644 gen_lowpart (SImode, operands[1]));
5645 emit_move_insn (gen_highpart (SImode, split_dest),
5646 gen_highpart (SImode, operands[1]));
5647 if (split_dest != operands[0])
5648 emit_insn (gen_movdi (operands[0], split_dest));
5654 (define_insn "*arm_movdi"
5655 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5656 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5658 && !(TARGET_HARD_FLOAT)
5660 && ( register_operand (operands[0], DImode)
5661 || register_operand (operands[1], DImode))"
5663 switch (which_alternative)
5670 /* Cannot load it directly, split to load it via MOV / MOVT. */
5671 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5675 return output_move_double (operands, true, NULL);
5678 [(set_attr "length" "8,12,16,8,8")
5679 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5680 (set_attr "arm_pool_range" "*,*,*,1020,*")
5681 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5682 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5683 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5687 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5688 (match_operand:ANY64 1 "immediate_operand" ""))]
5691 && (arm_disable_literal_pool
5692 || (arm_const_double_inline_cost (operands[1])
5693 <= arm_max_const_double_inline_cost ()))"
5696 arm_split_constant (SET, SImode, curr_insn,
5697 INTVAL (gen_lowpart (SImode, operands[1])),
5698 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5699 arm_split_constant (SET, SImode, curr_insn,
5700 INTVAL (gen_highpart_mode (SImode,
5701 GET_MODE (operands[0]),
5703 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5708 ; If optimizing for size, or if we have load delay slots, then
5709 ; we want to split the constant into two separate operations.
5710 ; In both cases this may split a trivial part into a single data op
5711 ; leaving a single complex constant to load. We can also get longer
5712 ; offsets in a LDR which means we get better chances of sharing the pool
5713 ; entries. Finally, we can normally do a better job of scheduling
5714 ; LDR instructions than we can with LDM.
5715 ; This pattern will only match if the one above did not.
5717 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5718 (match_operand:ANY64 1 "const_double_operand" ""))]
5719 "TARGET_ARM && reload_completed
5720 && arm_const_double_by_parts (operands[1])"
5721 [(set (match_dup 0) (match_dup 1))
5722 (set (match_dup 2) (match_dup 3))]
5724 operands[2] = gen_highpart (SImode, operands[0]);
5725 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5727 operands[0] = gen_lowpart (SImode, operands[0]);
5728 operands[1] = gen_lowpart (SImode, operands[1]);
5733 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5734 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5735 "TARGET_EITHER && reload_completed"
5736 [(set (match_dup 0) (match_dup 1))
5737 (set (match_dup 2) (match_dup 3))]
5739 operands[2] = gen_highpart (SImode, operands[0]);
5740 operands[3] = gen_highpart (SImode, operands[1]);
5741 operands[0] = gen_lowpart (SImode, operands[0]);
5742 operands[1] = gen_lowpart (SImode, operands[1]);
5744 /* Handle a partial overlap. */
5745 if (rtx_equal_p (operands[0], operands[3]))
5747 rtx tmp0 = operands[0];
5748 rtx tmp1 = operands[1];
5750 operands[0] = operands[2];
5751 operands[1] = operands[3];
5758 ;; We can't actually do base+index doubleword loads if the index and
5759 ;; destination overlap. Split here so that we at least have chance to
5762 [(set (match_operand:DI 0 "s_register_operand" "")
5763 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5764 (match_operand:SI 2 "s_register_operand" ""))))]
5766 && reg_overlap_mentioned_p (operands[0], operands[1])
5767 && reg_overlap_mentioned_p (operands[0], operands[2])"
5769 (plus:SI (match_dup 1)
5772 (mem:DI (match_dup 4)))]
5774 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5778 (define_expand "movsi"
5779 [(set (match_operand:SI 0 "general_operand")
5780 (match_operand:SI 1 "general_operand"))]
5784 rtx base, offset, tmp;
5786 gcc_checking_assert (aligned_operand (operands[0], SImode));
5787 gcc_checking_assert (aligned_operand (operands[1], SImode));
5788 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5790 /* Everything except mem = const or mem = mem can be done easily. */
5791 if (MEM_P (operands[0]))
5792 operands[1] = force_reg (SImode, operands[1]);
5793 if (arm_general_register_operand (operands[0], SImode)
5794 && CONST_INT_P (operands[1])
5795 && !(const_ok_for_arm (INTVAL (operands[1]))
5796 || const_ok_for_arm (~INTVAL (operands[1]))))
5798 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
5800 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5805 arm_split_constant (SET, SImode, NULL_RTX,
5806 INTVAL (operands[1]), operands[0], NULL_RTX,
5807 optimize && can_create_pseudo_p ());
5812 else /* Target doesn't have MOVT... */
5814 if (can_create_pseudo_p ())
5816 if (!REG_P (operands[0]))
5817 operands[1] = force_reg (SImode, operands[1]);
5821 split_const (operands[1], &base, &offset);
5822 if (INTVAL (offset) != 0
5823 && targetm.cannot_force_const_mem (SImode, operands[1]))
5825 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5826 emit_move_insn (tmp, base);
5827 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5831 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5833 /* Recognize the case where operand[1] is a reference to thread-local
5834 data and load its address to a register. Offsets have been split off
5836 if (arm_tls_referenced_p (operands[1]))
5837 operands[1] = legitimize_tls_address (operands[1], tmp);
5839 && (CONSTANT_P (operands[1])
5840 || symbol_mentioned_p (operands[1])
5841 || label_mentioned_p (operands[1])))
5843 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5848 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5849 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5850 ;; so this does not matter.
5851 (define_insn "*arm_movt"
5852 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5853 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5854 (match_operand:SI 2 "general_operand" "i,i")))]
5855 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5857 movt%?\t%0, #:upper16:%c2
5858 movt\t%0, #:upper16:%c2"
5859 [(set_attr "arch" "32,v8mb")
5860 (set_attr "predicable" "yes")
5861 (set_attr "length" "4")
5862 (set_attr "type" "alu_sreg")]
5865 (define_insn "*arm_movsi_insn"
5866 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5867 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5868 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5869 && ( register_operand (operands[0], SImode)
5870 || register_operand (operands[1], SImode))"
5878 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5879 (set_attr "predicable" "yes")
5880 (set_attr "arch" "*,*,*,v6t2,*,*")
5881 (set_attr "pool_range" "*,*,*,*,4096,*")
5882 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5886 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5887 (match_operand:SI 1 "const_int_operand" ""))]
5888 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5889 && (!(const_ok_for_arm (INTVAL (operands[1]))
5890 || const_ok_for_arm (~INTVAL (operands[1]))))"
5891 [(clobber (const_int 0))]
5893 arm_split_constant (SET, SImode, NULL_RTX,
5894 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5899 ;; A normal way to do (symbol + offset) requires three instructions at least
5900 ;; (depends on how big the offset is) as below:
5901 ;; movw r0, #:lower16:g
5902 ;; movw r0, #:upper16:g
5905 ;; A better way would be:
5906 ;; movw r0, #:lower16:g+4
5907 ;; movw r0, #:upper16:g+4
5909 ;; The limitation of this way is that the length of offset should be a 16-bit
5910 ;; signed value, because current assembler only supports REL type relocation for
5911 ;; such case. If the more powerful RELA type is supported in future, we should
5912 ;; update this pattern to go with better way.
5914 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5915 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5916 (match_operand:SI 2 "const_int_operand" ""))))]
5919 && arm_disable_literal_pool
5921 && GET_CODE (operands[1]) == SYMBOL_REF"
5922 [(clobber (const_int 0))]
5924 int offset = INTVAL (operands[2]);
5926 if (offset < -0x8000 || offset > 0x7fff)
5928 arm_emit_movpair (operands[0], operands[1]);
5929 emit_insn (gen_rtx_SET (operands[0],
5930 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5934 rtx op = gen_rtx_CONST (SImode,
5935 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5936 arm_emit_movpair (operands[0], op);
5941 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5942 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5943 ;; and lo_sum would be merged back into memory load at cprop. However,
5944 ;; if the default is to prefer movt/movw rather than a load from the constant
5945 ;; pool, the performance is better.
5947 [(set (match_operand:SI 0 "arm_general_register_operand" "")
5948 (match_operand:SI 1 "general_operand" ""))]
5949 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5950 && !target_word_relocations
5951 && !arm_tls_referenced_p (operands[1])"
5952 [(clobber (const_int 0))]
5954 arm_emit_movpair (operands[0], operands[1]);
5958 ;; When generating pic, we need to load the symbol offset into a register.
5959 ;; So that the optimizer does not confuse this with a normal symbol load
5960 ;; we use an unspec. The offset will be loaded from a constant pool entry,
5961 ;; since that is the only type of relocation we can use.
5963 ;; Wrap calculation of the whole PIC address in a single pattern for the
5964 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
5965 ;; a PIC address involves two loads from memory, so we want to CSE it
5966 ;; as often as possible.
5967 ;; This pattern will be split into one of the pic_load_addr_* patterns
5968 ;; and a move after GCSE optimizations.
5970 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5971 (define_expand "calculate_pic_address"
5972 [(set (match_operand:SI 0 "register_operand")
5973 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
5974 (unspec:SI [(match_operand:SI 2 "" "")]
5979 ;; Split calculate_pic_address into pic_load_addr_* and a move.
5981 [(set (match_operand:SI 0 "register_operand" "")
5982 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5983 (unspec:SI [(match_operand:SI 2 "" "")]
5986 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5987 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5988 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5991 ;; operand1 is the memory address to go into
5992 ;; pic_load_addr_32bit.
5993 ;; operand2 is the PIC label to be emitted
5994 ;; from pic_add_dot_plus_eight.
5995 ;; We do this to allow hoisting of the entire insn.
5996 (define_insn_and_split "pic_load_addr_unified"
5997 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5998 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5999 (match_operand:SI 2 "" "")]
6000 UNSPEC_PIC_UNIFIED))]
6003 "&& reload_completed"
6004 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6005 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6006 (match_dup 2)] UNSPEC_PIC_BASE))]
6007 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6008 [(set_attr "type" "load_4,load_4,load_4")
6009 (set_attr "pool_range" "4096,4094,1022")
6010 (set_attr "neg_pool_range" "4084,0,0")
6011 (set_attr "arch" "a,t2,t1")
6012 (set_attr "length" "8,6,4")]
6015 ;; The rather odd constraints on the following are to force reload to leave
6016 ;; the insn alone, and to force the minipool generation pass to then move
6017 ;; the GOT symbol to memory.
6019 (define_insn "pic_load_addr_32bit"
6020 [(set (match_operand:SI 0 "s_register_operand" "=r")
6021 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6022 "TARGET_32BIT && flag_pic"
6024 [(set_attr "type" "load_4")
6025 (set (attr "pool_range")
6026 (if_then_else (eq_attr "is_thumb" "no")
6029 (set (attr "neg_pool_range")
6030 (if_then_else (eq_attr "is_thumb" "no")
6035 (define_insn "pic_load_addr_thumb1"
6036 [(set (match_operand:SI 0 "s_register_operand" "=l")
6037 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6038 "TARGET_THUMB1 && flag_pic"
6040 [(set_attr "type" "load_4")
6041 (set (attr "pool_range") (const_int 1018))]
6044 (define_insn "pic_add_dot_plus_four"
6045 [(set (match_operand:SI 0 "register_operand" "=r")
6046 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6048 (match_operand 2 "" "")]
6052 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6053 INTVAL (operands[2]));
6054 return \"add\\t%0, %|pc\";
6056 [(set_attr "length" "2")
6057 (set_attr "type" "alu_sreg")]
6060 (define_insn "pic_add_dot_plus_eight"
6061 [(set (match_operand:SI 0 "register_operand" "=r")
6062 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6064 (match_operand 2 "" "")]
6068 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6069 INTVAL (operands[2]));
6070 return \"add%?\\t%0, %|pc, %1\";
6072 [(set_attr "predicable" "yes")
6073 (set_attr "type" "alu_sreg")]
6076 (define_insn "tls_load_dot_plus_eight"
6077 [(set (match_operand:SI 0 "register_operand" "=r")
6078 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6080 (match_operand 2 "" "")]
6084 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6085 INTVAL (operands[2]));
6086 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6088 [(set_attr "predicable" "yes")
6089 (set_attr "type" "load_4")]
6092 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6093 ;; followed by a load. These sequences can be crunched down to
6094 ;; tls_load_dot_plus_eight by a peephole.
6097 [(set (match_operand:SI 0 "register_operand" "")
6098 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6100 (match_operand 1 "" "")]
6102 (set (match_operand:SI 2 "arm_general_register_operand" "")
6103 (mem:SI (match_dup 0)))]
6104 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6106 (mem:SI (unspec:SI [(match_dup 3)
6113 (define_insn "pic_offset_arm"
6114 [(set (match_operand:SI 0 "register_operand" "=r")
6115 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6116 (unspec:SI [(match_operand:SI 2 "" "X")]
6117 UNSPEC_PIC_OFFSET))))]
6118 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6119 "ldr%?\\t%0, [%1,%2]"
6120 [(set_attr "type" "load_4")]
6123 (define_expand "builtin_setjmp_receiver"
6124 [(label_ref (match_operand 0 "" ""))]
6128 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6130 if (arm_pic_register != INVALID_REGNUM)
6131 arm_load_pic_register (1UL << 3, NULL_RTX);
6135 ;; If copying one reg to another we can set the condition codes according to
6136 ;; its value. Such a move is common after a return from subroutine and the
6137 ;; result is being tested against zero.
6139 (define_insn "*movsi_compare0"
6140 [(set (reg:CC CC_REGNUM)
6141 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6143 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6148 subs%?\\t%0, %1, #0"
6149 [(set_attr "conds" "set")
6150 (set_attr "type" "alus_imm,alus_imm")]
6153 ;; Subroutine to store a half word from a register into memory.
6154 ;; Operand 0 is the source register (HImode)
6155 ;; Operand 1 is the destination address in a register (SImode)
6157 ;; In both this routine and the next, we must be careful not to spill
6158 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6159 ;; can generate unrecognizable rtl.
6161 (define_expand "storehi"
6162 [;; store the low byte
6163 (set (match_operand 1 "" "") (match_dup 3))
6164 ;; extract the high byte
6166 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6167 ;; store the high byte
6168 (set (match_dup 4) (match_dup 5))]
6172 rtx op1 = operands[1];
6173 rtx addr = XEXP (op1, 0);
6174 enum rtx_code code = GET_CODE (addr);
6176 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6178 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6180 operands[4] = adjust_address (op1, QImode, 1);
6181 operands[1] = adjust_address (operands[1], QImode, 0);
6182 operands[3] = gen_lowpart (QImode, operands[0]);
6183 operands[0] = gen_lowpart (SImode, operands[0]);
6184 operands[2] = gen_reg_rtx (SImode);
6185 operands[5] = gen_lowpart (QImode, operands[2]);
6189 (define_expand "storehi_bigend"
6190 [(set (match_dup 4) (match_dup 3))
6192 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6193 (set (match_operand 1 "" "") (match_dup 5))]
6197 rtx op1 = operands[1];
6198 rtx addr = XEXP (op1, 0);
6199 enum rtx_code code = GET_CODE (addr);
6201 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6203 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6205 operands[4] = adjust_address (op1, QImode, 1);
6206 operands[1] = adjust_address (operands[1], QImode, 0);
6207 operands[3] = gen_lowpart (QImode, operands[0]);
6208 operands[0] = gen_lowpart (SImode, operands[0]);
6209 operands[2] = gen_reg_rtx (SImode);
6210 operands[5] = gen_lowpart (QImode, operands[2]);
6214 ;; Subroutine to store a half word integer constant into memory.
6215 (define_expand "storeinthi"
6216 [(set (match_operand 0 "" "")
6217 (match_operand 1 "" ""))
6218 (set (match_dup 3) (match_dup 2))]
6222 HOST_WIDE_INT value = INTVAL (operands[1]);
6223 rtx addr = XEXP (operands[0], 0);
6224 rtx op0 = operands[0];
6225 enum rtx_code code = GET_CODE (addr);
6227 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6229 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6231 operands[1] = gen_reg_rtx (SImode);
6232 if (BYTES_BIG_ENDIAN)
6234 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6235 if ((value & 255) == ((value >> 8) & 255))
6236 operands[2] = operands[1];
6239 operands[2] = gen_reg_rtx (SImode);
6240 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6245 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6246 if ((value & 255) == ((value >> 8) & 255))
6247 operands[2] = operands[1];
6250 operands[2] = gen_reg_rtx (SImode);
6251 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6255 operands[3] = adjust_address (op0, QImode, 1);
6256 operands[0] = adjust_address (operands[0], QImode, 0);
6257 operands[2] = gen_lowpart (QImode, operands[2]);
6258 operands[1] = gen_lowpart (QImode, operands[1]);
6262 (define_expand "storehi_single_op"
6263 [(set (match_operand:HI 0 "memory_operand")
6264 (match_operand:HI 1 "general_operand"))]
6265 "TARGET_32BIT && arm_arch4"
6267 if (!s_register_operand (operands[1], HImode))
6268 operands[1] = copy_to_mode_reg (HImode, operands[1]);
6272 (define_expand "movhi"
6273 [(set (match_operand:HI 0 "general_operand")
6274 (match_operand:HI 1 "general_operand"))]
6277 gcc_checking_assert (aligned_operand (operands[0], HImode));
6278 gcc_checking_assert (aligned_operand (operands[1], HImode));
6281 if (can_create_pseudo_p ())
6283 if (MEM_P (operands[0]))
6287 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6290 if (CONST_INT_P (operands[1]))
6291 emit_insn (gen_storeinthi (operands[0], operands[1]));
6294 if (MEM_P (operands[1]))
6295 operands[1] = force_reg (HImode, operands[1]);
6296 if (BYTES_BIG_ENDIAN)
6297 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6299 emit_insn (gen_storehi (operands[1], operands[0]));
6303 /* Sign extend a constant, and keep it in an SImode reg. */
6304 else if (CONST_INT_P (operands[1]))
6306 rtx reg = gen_reg_rtx (SImode);
6307 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6309 /* If the constant is already valid, leave it alone. */
6310 if (!const_ok_for_arm (val))
6312 /* If setting all the top bits will make the constant
6313 loadable in a single instruction, then set them.
6314 Otherwise, sign extend the number. */
6316 if (const_ok_for_arm (~(val | ~0xffff)))
6318 else if (val & 0x8000)
6322 emit_insn (gen_movsi (reg, GEN_INT (val)));
6323 operands[1] = gen_lowpart (HImode, reg);
6325 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6326 && MEM_P (operands[1]))
6328 rtx reg = gen_reg_rtx (SImode);
6330 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6331 operands[1] = gen_lowpart (HImode, reg);
6333 else if (!arm_arch4)
6335 if (MEM_P (operands[1]))
6338 rtx offset = const0_rtx;
6339 rtx reg = gen_reg_rtx (SImode);
6341 if ((REG_P (base = XEXP (operands[1], 0))
6342 || (GET_CODE (base) == PLUS
6343 && (CONST_INT_P (offset = XEXP (base, 1)))
6344 && ((INTVAL(offset) & 1) != 1)
6345 && REG_P (base = XEXP (base, 0))))
6346 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6350 new_rtx = widen_memory_access (operands[1], SImode,
6351 ((INTVAL (offset) & ~3)
6352 - INTVAL (offset)));
6353 emit_insn (gen_movsi (reg, new_rtx));
6354 if (((INTVAL (offset) & 2) != 0)
6355 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6357 rtx reg2 = gen_reg_rtx (SImode);
6359 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6364 emit_insn (gen_movhi_bytes (reg, operands[1]));
6366 operands[1] = gen_lowpart (HImode, reg);
6370 /* Handle loading a large integer during reload. */
6371 else if (CONST_INT_P (operands[1])
6372 && !const_ok_for_arm (INTVAL (operands[1]))
6373 && !const_ok_for_arm (~INTVAL (operands[1])))
6375 /* Writing a constant to memory needs a scratch, which should
6376 be handled with SECONDARY_RELOADs. */
6377 gcc_assert (REG_P (operands[0]));
6379 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6380 emit_insn (gen_movsi (operands[0], operands[1]));
6384 else if (TARGET_THUMB2)
6386 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6387 if (can_create_pseudo_p ())
6389 if (!REG_P (operands[0]))
6390 operands[1] = force_reg (HImode, operands[1]);
6391 /* Zero extend a constant, and keep it in an SImode reg. */
6392 else if (CONST_INT_P (operands[1]))
6394 rtx reg = gen_reg_rtx (SImode);
6395 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6397 emit_insn (gen_movsi (reg, GEN_INT (val)));
6398 operands[1] = gen_lowpart (HImode, reg);
6402 else /* TARGET_THUMB1 */
6404 if (can_create_pseudo_p ())
6406 if (CONST_INT_P (operands[1]))
6408 rtx reg = gen_reg_rtx (SImode);
6410 emit_insn (gen_movsi (reg, operands[1]));
6411 operands[1] = gen_lowpart (HImode, reg);
6414 /* ??? We shouldn't really get invalid addresses here, but this can
6415 happen if we are passed a SP (never OK for HImode/QImode) or
6416 virtual register (also rejected as illegitimate for HImode/QImode)
6417 relative address. */
6418 /* ??? This should perhaps be fixed elsewhere, for instance, in
6419 fixup_stack_1, by checking for other kinds of invalid addresses,
6420 e.g. a bare reference to a virtual register. This may confuse the
6421 alpha though, which must handle this case differently. */
6422 if (MEM_P (operands[0])
6423 && !memory_address_p (GET_MODE (operands[0]),
6424 XEXP (operands[0], 0)))
6426 = replace_equiv_address (operands[0],
6427 copy_to_reg (XEXP (operands[0], 0)));
6429 if (MEM_P (operands[1])
6430 && !memory_address_p (GET_MODE (operands[1]),
6431 XEXP (operands[1], 0)))
6433 = replace_equiv_address (operands[1],
6434 copy_to_reg (XEXP (operands[1], 0)));
6436 if (MEM_P (operands[1]) && optimize > 0)
6438 rtx reg = gen_reg_rtx (SImode);
6440 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6441 operands[1] = gen_lowpart (HImode, reg);
6444 if (MEM_P (operands[0]))
6445 operands[1] = force_reg (HImode, operands[1]);
6447 else if (CONST_INT_P (operands[1])
6448 && !satisfies_constraint_I (operands[1]))
6450 /* Handle loading a large integer during reload. */
6452 /* Writing a constant to memory needs a scratch, which should
6453 be handled with SECONDARY_RELOADs. */
6454 gcc_assert (REG_P (operands[0]));
6456 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6457 emit_insn (gen_movsi (operands[0], operands[1]));
6464 (define_expand "movhi_bytes"
6465 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6467 (zero_extend:SI (match_dup 6)))
6468 (set (match_operand:SI 0 "" "")
6469 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6474 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6476 mem1 = change_address (operands[1], QImode, addr);
6477 mem2 = change_address (operands[1], QImode,
6478 plus_constant (Pmode, addr, 1));
6479 operands[0] = gen_lowpart (SImode, operands[0]);
6481 operands[2] = gen_reg_rtx (SImode);
6482 operands[3] = gen_reg_rtx (SImode);
6485 if (BYTES_BIG_ENDIAN)
6487 operands[4] = operands[2];
6488 operands[5] = operands[3];
6492 operands[4] = operands[3];
6493 operands[5] = operands[2];
6498 (define_expand "movhi_bigend"
6500 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6503 (ashiftrt:SI (match_dup 2) (const_int 16)))
6504 (set (match_operand:HI 0 "s_register_operand")
6508 operands[2] = gen_reg_rtx (SImode);
6509 operands[3] = gen_reg_rtx (SImode);
6510 operands[4] = gen_lowpart (HImode, operands[3]);
6514 ;; Pattern to recognize insn generated default case above
6515 (define_insn "*movhi_insn_arch4"
6516 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6517 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6519 && arm_arch4 && !TARGET_HARD_FLOAT
6520 && (register_operand (operands[0], HImode)
6521 || register_operand (operands[1], HImode))"
6523 mov%?\\t%0, %1\\t%@ movhi
6524 mvn%?\\t%0, #%B1\\t%@ movhi
6525 movw%?\\t%0, %L1\\t%@ movhi
6526 strh%?\\t%1, %0\\t%@ movhi
6527 ldrh%?\\t%0, %1\\t%@ movhi"
6528 [(set_attr "predicable" "yes")
6529 (set_attr "pool_range" "*,*,*,*,256")
6530 (set_attr "neg_pool_range" "*,*,*,*,244")
6531 (set_attr "arch" "*,*,v6t2,*,*")
6532 (set_attr_alternative "type"
6533 [(if_then_else (match_operand 1 "const_int_operand" "")
6534 (const_string "mov_imm" )
6535 (const_string "mov_reg"))
6536 (const_string "mvn_imm")
6537 (const_string "mov_imm")
6538 (const_string "store_4")
6539 (const_string "load_4")])]
6542 (define_insn "*movhi_bytes"
6543 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6544 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6545 "TARGET_ARM && !TARGET_HARD_FLOAT"
6547 mov%?\\t%0, %1\\t%@ movhi
6548 mov%?\\t%0, %1\\t%@ movhi
6549 mvn%?\\t%0, #%B1\\t%@ movhi"
6550 [(set_attr "predicable" "yes")
6551 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6554 ;; We use a DImode scratch because we may occasionally need an additional
6555 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6556 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6557 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6558 ;; to be correctly handled in default_secondary_reload function.
6559 (define_expand "reload_outhi"
6560 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6561 (match_operand:HI 1 "s_register_operand" "r")
6562 (match_operand:DI 2 "s_register_operand" "=&l")])]
6565 arm_reload_out_hi (operands);
6567 thumb_reload_out_hi (operands);
6572 (define_expand "reload_inhi"
6573 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6574 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6575 (match_operand:DI 2 "s_register_operand" "=&r")])]
6579 arm_reload_in_hi (operands);
6581 thumb_reload_out_hi (operands);
6585 (define_expand "movqi"
6586 [(set (match_operand:QI 0 "general_operand")
6587 (match_operand:QI 1 "general_operand"))]
6590 /* Everything except mem = const or mem = mem can be done easily */
6592 if (can_create_pseudo_p ())
6594 if (CONST_INT_P (operands[1]))
6596 rtx reg = gen_reg_rtx (SImode);
6598 /* For thumb we want an unsigned immediate, then we are more likely
6599 to be able to use a movs insn. */
6601 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6603 emit_insn (gen_movsi (reg, operands[1]));
6604 operands[1] = gen_lowpart (QImode, reg);
6609 /* ??? We shouldn't really get invalid addresses here, but this can
6610 happen if we are passed a SP (never OK for HImode/QImode) or
6611 virtual register (also rejected as illegitimate for HImode/QImode)
6612 relative address. */
6613 /* ??? This should perhaps be fixed elsewhere, for instance, in
6614 fixup_stack_1, by checking for other kinds of invalid addresses,
6615 e.g. a bare reference to a virtual register. This may confuse the
6616 alpha though, which must handle this case differently. */
6617 if (MEM_P (operands[0])
6618 && !memory_address_p (GET_MODE (operands[0]),
6619 XEXP (operands[0], 0)))
6621 = replace_equiv_address (operands[0],
6622 copy_to_reg (XEXP (operands[0], 0)));
6623 if (MEM_P (operands[1])
6624 && !memory_address_p (GET_MODE (operands[1]),
6625 XEXP (operands[1], 0)))
6627 = replace_equiv_address (operands[1],
6628 copy_to_reg (XEXP (operands[1], 0)));
6631 if (MEM_P (operands[1]) && optimize > 0)
6633 rtx reg = gen_reg_rtx (SImode);
6635 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6636 operands[1] = gen_lowpart (QImode, reg);
6639 if (MEM_P (operands[0]))
6640 operands[1] = force_reg (QImode, operands[1]);
6642 else if (TARGET_THUMB
6643 && CONST_INT_P (operands[1])
6644 && !satisfies_constraint_I (operands[1]))
6646 /* Handle loading a large integer during reload. */
6648 /* Writing a constant to memory needs a scratch, which should
6649 be handled with SECONDARY_RELOADs. */
6650 gcc_assert (REG_P (operands[0]));
6652 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6653 emit_insn (gen_movsi (operands[0], operands[1]));
6659 (define_insn "*arm_movqi_insn"
6660 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6661 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6663 && ( register_operand (operands[0], QImode)
6664 || register_operand (operands[1], QImode))"
6675 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6676 (set_attr "predicable" "yes")
6677 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6678 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6679 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
6683 (define_expand "movhf"
6684 [(set (match_operand:HF 0 "general_operand")
6685 (match_operand:HF 1 "general_operand"))]
6688 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6689 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6692 if (MEM_P (operands[0]))
6693 operands[1] = force_reg (HFmode, operands[1]);
6695 else /* TARGET_THUMB1 */
6697 if (can_create_pseudo_p ())
6699 if (!REG_P (operands[0]))
6700 operands[1] = force_reg (HFmode, operands[1]);
6706 (define_insn "*arm32_movhf"
6707 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6708 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6709 "TARGET_32BIT && !TARGET_HARD_FLOAT
6710 && ( s_register_operand (operands[0], HFmode)
6711 || s_register_operand (operands[1], HFmode))"
6713 switch (which_alternative)
6715 case 0: /* ARM register from memory */
6716 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6717 case 1: /* memory from ARM register */
6718 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6719 case 2: /* ARM register from ARM register */
6720 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6721 case 3: /* ARM register from constant */
6726 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6728 ops[0] = operands[0];
6729 ops[1] = GEN_INT (bits);
6730 ops[2] = GEN_INT (bits & 0xff00);
6731 ops[3] = GEN_INT (bits & 0x00ff);
6733 if (arm_arch_thumb2)
6734 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6736 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6743 [(set_attr "conds" "unconditional")
6744 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6745 (set_attr "length" "4,4,4,8")
6746 (set_attr "predicable" "yes")]
6749 (define_expand "movsf"
6750 [(set (match_operand:SF 0 "general_operand")
6751 (match_operand:SF 1 "general_operand"))]
6754 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6755 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6758 if (MEM_P (operands[0]))
6759 operands[1] = force_reg (SFmode, operands[1]);
6761 else /* TARGET_THUMB1 */
6763 if (can_create_pseudo_p ())
6765 if (!REG_P (operands[0]))
6766 operands[1] = force_reg (SFmode, operands[1]);
6770 /* Cannot load it directly, generate a load with clobber so that it can be
6771 loaded via GPR with MOV / MOVT. */
6772 if (arm_disable_literal_pool
6773 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6774 && CONST_DOUBLE_P (operands[1])
6775 && TARGET_HARD_FLOAT
6776 && !vfp3_const_double_rtx (operands[1]))
6778 rtx clobreg = gen_reg_rtx (SFmode);
6779 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6786 ;; Transform a floating-point move of a constant into a core register into
6787 ;; an SImode operation.
6789 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6790 (match_operand:SF 1 "immediate_operand" ""))]
6793 && CONST_DOUBLE_P (operands[1])"
6794 [(set (match_dup 2) (match_dup 3))]
6796 operands[2] = gen_lowpart (SImode, operands[0]);
6797 operands[3] = gen_lowpart (SImode, operands[1]);
6798 if (operands[2] == 0 || operands[3] == 0)
6803 (define_insn "*arm_movsf_soft_insn"
6804 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6805 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6807 && TARGET_SOFT_FLOAT
6808 && (!MEM_P (operands[0])
6809 || register_operand (operands[1], SFmode))"
6811 switch (which_alternative)
6813 case 0: return \"mov%?\\t%0, %1\";
6815 /* Cannot load it directly, split to load it via MOV / MOVT. */
6816 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6818 return \"ldr%?\\t%0, %1\\t%@ float\";
6819 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6820 default: gcc_unreachable ();
6823 [(set_attr "predicable" "yes")
6824 (set_attr "type" "mov_reg,load_4,store_4")
6825 (set_attr "arm_pool_range" "*,4096,*")
6826 (set_attr "thumb2_pool_range" "*,4094,*")
6827 (set_attr "arm_neg_pool_range" "*,4084,*")
6828 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6831 ;; Splitter for the above.
6833 [(set (match_operand:SF 0 "s_register_operand")
6834 (match_operand:SF 1 "const_double_operand"))]
6835 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6839 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6840 rtx cst = gen_int_mode (buf, SImode);
6841 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
6846 (define_expand "movdf"
6847 [(set (match_operand:DF 0 "general_operand")
6848 (match_operand:DF 1 "general_operand"))]
6851 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6852 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6855 if (MEM_P (operands[0]))
6856 operands[1] = force_reg (DFmode, operands[1]);
6858 else /* TARGET_THUMB */
6860 if (can_create_pseudo_p ())
6862 if (!REG_P (operands[0]))
6863 operands[1] = force_reg (DFmode, operands[1]);
6867 /* Cannot load it directly, generate a load with clobber so that it can be
6868 loaded via GPR with MOV / MOVT. */
6869 if (arm_disable_literal_pool
6870 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6871 && CONSTANT_P (operands[1])
6872 && TARGET_HARD_FLOAT
6873 && !arm_const_double_rtx (operands[1])
6874 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
6876 rtx clobreg = gen_reg_rtx (DFmode);
6877 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6884 ;; Reloading a df mode value stored in integer regs to memory can require a
6886 ;; Another reload_out<m> pattern that requires special constraints.
6887 (define_expand "reload_outdf"
6888 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6889 (match_operand:DF 1 "s_register_operand" "r")
6890 (match_operand:SI 2 "s_register_operand" "=&r")]
6894 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6897 operands[2] = XEXP (operands[0], 0);
6898 else if (code == POST_INC || code == PRE_DEC)
6900 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6901 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6902 emit_insn (gen_movdi (operands[0], operands[1]));
6905 else if (code == PRE_INC)
6907 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6909 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6912 else if (code == POST_DEC)
6913 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6915 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6916 XEXP (XEXP (operands[0], 0), 1)));
6918 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6921 if (code == POST_DEC)
6922 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6928 (define_insn "*movdf_soft_insn"
6929 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6930 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6931 "TARGET_32BIT && TARGET_SOFT_FLOAT
6932 && ( register_operand (operands[0], DFmode)
6933 || register_operand (operands[1], DFmode))"
6935 switch (which_alternative)
6942 /* Cannot load it directly, split to load it via MOV / MOVT. */
6943 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6947 return output_move_double (operands, true, NULL);
6950 [(set_attr "length" "8,12,16,8,8")
6951 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6952 (set_attr "arm_pool_range" "*,*,*,1020,*")
6953 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6954 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6955 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6958 ;; Splitter for the above.
6960 [(set (match_operand:DF 0 "s_register_operand")
6961 (match_operand:DF 1 "const_double_operand"))]
6962 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6966 int order = BYTES_BIG_ENDIAN ? 1 : 0;
6967 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
6968 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
6969 ival |= (zext_hwi (buf[1 - order], 32) << 32);
6970 rtx cst = gen_int_mode (ival, DImode);
6971 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
6977 ;; load- and store-multiple insns
6978 ;; The arm can load/store any set of registers, provided that they are in
6979 ;; ascending order, but these expanders assume a contiguous set.
6981 (define_expand "load_multiple"
6982 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6983 (match_operand:SI 1 "" ""))
6984 (use (match_operand:SI 2 "" ""))])]
6987 HOST_WIDE_INT offset = 0;
6989 /* Support only fixed point registers. */
6990 if (!CONST_INT_P (operands[2])
6991 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
6992 || INTVAL (operands[2]) < 2
6993 || !MEM_P (operands[1])
6994 || !REG_P (operands[0])
6995 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6996 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7000 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7001 INTVAL (operands[2]),
7002 force_reg (SImode, XEXP (operands[1], 0)),
7003 FALSE, operands[1], &offset);
7006 (define_expand "store_multiple"
7007 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7008 (match_operand:SI 1 "" ""))
7009 (use (match_operand:SI 2 "" ""))])]
7012 HOST_WIDE_INT offset = 0;
7014 /* Support only fixed point registers. */
7015 if (!CONST_INT_P (operands[2])
7016 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7017 || INTVAL (operands[2]) < 2
7018 || !REG_P (operands[1])
7019 || !MEM_P (operands[0])
7020 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7021 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7025 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7026 INTVAL (operands[2]),
7027 force_reg (SImode, XEXP (operands[0], 0)),
7028 FALSE, operands[0], &offset);
7032 (define_expand "setmemsi"
7033 [(match_operand:BLK 0 "general_operand")
7034 (match_operand:SI 1 "const_int_operand")
7035 (match_operand:SI 2 "const_int_operand")
7036 (match_operand:SI 3 "const_int_operand")]
7039 if (arm_gen_setmem (operands))
7046 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7047 ;; We could let this apply for blocks of less than this, but it clobbers so
7048 ;; many registers that there is then probably a better way.
7050 (define_expand "cpymemqi"
7051 [(match_operand:BLK 0 "general_operand")
7052 (match_operand:BLK 1 "general_operand")
7053 (match_operand:SI 2 "const_int_operand")
7054 (match_operand:SI 3 "const_int_operand")]
7059 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7060 && !optimize_function_for_size_p (cfun))
7062 if (gen_cpymem_ldrd_strd (operands))
7067 if (arm_gen_cpymemqi (operands))
7071 else /* TARGET_THUMB1 */
7073 if ( INTVAL (operands[3]) != 4
7074 || INTVAL (operands[2]) > 48)
7077 thumb_expand_cpymemqi (operands);
7084 ;; Compare & branch insns
7085 ;; The range calculations are based as follows:
7086 ;; For forward branches, the address calculation returns the address of
7087 ;; the next instruction. This is 2 beyond the branch instruction.
7088 ;; For backward branches, the address calculation returns the address of
7089 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7090 ;; instruction for the shortest sequence, and 4 before the branch instruction
7091 ;; if we have to jump around an unconditional branch.
7092 ;; To the basic branch range the PC offset must be added (this is +4).
7093 ;; So for forward branches we have
7094 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7095 ;; And for backward branches we have
7096 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7098 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7099 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
7101 (define_expand "cbranchsi4"
7102 [(set (pc) (if_then_else
7103 (match_operator 0 "expandable_comparison_operator"
7104 [(match_operand:SI 1 "s_register_operand")
7105 (match_operand:SI 2 "nonmemory_operand")])
7106 (label_ref (match_operand 3 "" ""))
7112 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7114 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7118 if (thumb1_cmpneg_operand (operands[2], SImode))
7120 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7121 operands[3], operands[0]));
7124 if (!thumb1_cmp_operand (operands[2], SImode))
7125 operands[2] = force_reg (SImode, operands[2]);
7128 (define_expand "cbranchsf4"
7129 [(set (pc) (if_then_else
7130 (match_operator 0 "expandable_comparison_operator"
7131 [(match_operand:SF 1 "s_register_operand")
7132 (match_operand:SF 2 "vfp_compare_operand")])
7133 (label_ref (match_operand 3 "" ""))
7135 "TARGET_32BIT && TARGET_HARD_FLOAT"
7136 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7137 operands[3])); DONE;"
7140 (define_expand "cbranchdf4"
7141 [(set (pc) (if_then_else
7142 (match_operator 0 "expandable_comparison_operator"
7143 [(match_operand:DF 1 "s_register_operand")
7144 (match_operand:DF 2 "vfp_compare_operand")])
7145 (label_ref (match_operand 3 "" ""))
7147 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7148 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7149 operands[3])); DONE;"
7152 (define_expand "cbranchdi4"
7153 [(set (pc) (if_then_else
7154 (match_operator 0 "expandable_comparison_operator"
7155 [(match_operand:DI 1 "s_register_operand")
7156 (match_operand:DI 2 "reg_or_int_operand")])
7157 (label_ref (match_operand 3 "" ""))
7161 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7163 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7169 ;; Comparison and test insns
7171 (define_insn "*arm_cmpsi_insn"
7172 [(set (reg:CC CC_REGNUM)
7173 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7174 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7182 [(set_attr "conds" "set")
7183 (set_attr "arch" "t2,t2,any,any,any")
7184 (set_attr "length" "2,2,4,4,4")
7185 (set_attr "predicable" "yes")
7186 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7187 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7190 (define_insn "*cmpsi_shiftsi"
7191 [(set (reg:CC CC_REGNUM)
7192 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7193 (match_operator:SI 3 "shift_operator"
7194 [(match_operand:SI 1 "s_register_operand" "r,r")
7195 (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7198 [(set_attr "conds" "set")
7199 (set_attr "shift" "1")
7200 (set_attr "arch" "32,a")
7201 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7203 (define_insn "*cmpsi_shiftsi_swp"
7204 [(set (reg:CC_SWP CC_REGNUM)
7205 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7206 [(match_operand:SI 1 "s_register_operand" "r,r")
7207 (match_operand:SI 2 "shift_amount_operand" "M,r")])
7208 (match_operand:SI 0 "s_register_operand" "r,r")))]
7211 [(set_attr "conds" "set")
7212 (set_attr "shift" "1")
7213 (set_attr "arch" "32,a")
7214 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7216 (define_insn "*arm_cmpsi_negshiftsi_si"
7217 [(set (reg:CC_Z CC_REGNUM)
7219 (neg:SI (match_operator:SI 1 "shift_operator"
7220 [(match_operand:SI 2 "s_register_operand" "r,r")
7221 (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7222 (match_operand:SI 0 "s_register_operand" "r,r")))]
7225 [(set_attr "conds" "set")
7226 (set_attr "arch" "32,a")
7227 (set_attr "shift" "2")
7228 (set_attr "type" "alus_shift_imm,alus_shift_reg")
7229 (set_attr "predicable" "yes")]
7232 ; This insn allows redundant compares to be removed by cse, nothing should
7233 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7234 ; is deleted later on. The match_dup will match the mode here, so that
7235 ; mode changes of the condition codes aren't lost by this even though we don't
7236 ; specify what they are.
7238 (define_insn "*deleted_compare"
7239 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7241 "\\t%@ deleted compare"
7242 [(set_attr "conds" "set")
7243 (set_attr "length" "0")
7244 (set_attr "type" "no_insn")]
7248 ;; Conditional branch insns
7250 (define_expand "cbranch_cc"
7252 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7253 (match_operand 2 "" "")])
7254 (label_ref (match_operand 3 "" ""))
7257 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7258 operands[1], operands[2], NULL_RTX);
7259 operands[2] = const0_rtx;"
7263 ;; Patterns to match conditional branch insns.
7266 (define_insn "arm_cond_branch"
7268 (if_then_else (match_operator 1 "arm_comparison_operator"
7269 [(match_operand 2 "cc_register" "") (const_int 0)])
7270 (label_ref (match_operand 0 "" ""))
7274 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7276 arm_ccfsm_state += 2;
7279 return \"b%d1\\t%l0\";
7281 [(set_attr "conds" "use")
7282 (set_attr "type" "branch")
7283 (set (attr "length")
7285 (and (match_test "TARGET_THUMB2")
7286 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7287 (le (minus (match_dup 0) (pc)) (const_int 256))))
7292 (define_insn "*arm_cond_branch_reversed"
7294 (if_then_else (match_operator 1 "arm_comparison_operator"
7295 [(match_operand 2 "cc_register" "") (const_int 0)])
7297 (label_ref (match_operand 0 "" ""))))]
7300 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7302 arm_ccfsm_state += 2;
7305 return \"b%D1\\t%l0\";
7307 [(set_attr "conds" "use")
7308 (set_attr "type" "branch")
7309 (set (attr "length")
7311 (and (match_test "TARGET_THUMB2")
7312 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7313 (le (minus (match_dup 0) (pc)) (const_int 256))))
7322 (define_expand "cstore_cc"
7323 [(set (match_operand:SI 0 "s_register_operand")
7324 (match_operator:SI 1 "" [(match_operand 2 "" "")
7325 (match_operand 3 "" "")]))]
7327 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7328 operands[2], operands[3], NULL_RTX);
7329 operands[3] = const0_rtx;"
7332 (define_insn_and_split "*mov_scc"
7333 [(set (match_operand:SI 0 "s_register_operand" "=r")
7334 (match_operator:SI 1 "arm_comparison_operator_mode"
7335 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7337 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7340 (if_then_else:SI (match_dup 1)
7344 [(set_attr "conds" "use")
7345 (set_attr "length" "8")
7346 (set_attr "type" "multiple")]
7349 (define_insn "*negscc_borrow"
7350 [(set (match_operand:SI 0 "s_register_operand" "=r")
7351 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7354 [(set_attr "conds" "use")
7355 (set_attr "length" "4")
7356 (set_attr "type" "adc_reg")]
7359 (define_insn_and_split "*mov_negscc"
7360 [(set (match_operand:SI 0 "s_register_operand" "=r")
7361 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7362 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7363 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7364 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7367 (if_then_else:SI (match_dup 1)
7371 operands[3] = GEN_INT (~0);
7373 [(set_attr "conds" "use")
7374 (set_attr "length" "8")
7375 (set_attr "type" "multiple")]
7378 (define_insn_and_split "*mov_notscc"
7379 [(set (match_operand:SI 0 "s_register_operand" "=r")
7380 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7381 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7383 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7386 (if_then_else:SI (match_dup 1)
7390 operands[3] = GEN_INT (~1);
7391 operands[4] = GEN_INT (~0);
7393 [(set_attr "conds" "use")
7394 (set_attr "length" "8")
7395 (set_attr "type" "multiple")]
7398 (define_expand "cstoresi4"
7399 [(set (match_operand:SI 0 "s_register_operand")
7400 (match_operator:SI 1 "expandable_comparison_operator"
7401 [(match_operand:SI 2 "s_register_operand")
7402 (match_operand:SI 3 "reg_or_int_operand")]))]
7403 "TARGET_32BIT || TARGET_THUMB1"
7405 rtx op3, scratch, scratch2;
7409 if (!arm_add_operand (operands[3], SImode))
7410 operands[3] = force_reg (SImode, operands[3]);
7411 emit_insn (gen_cstore_cc (operands[0], operands[1],
7412 operands[2], operands[3]));
7416 if (operands[3] == const0_rtx)
7418 switch (GET_CODE (operands[1]))
7421 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7425 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7429 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7430 NULL_RTX, 0, OPTAB_WIDEN);
7431 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7432 NULL_RTX, 0, OPTAB_WIDEN);
7433 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7434 operands[0], 1, OPTAB_WIDEN);
7438 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7440 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7441 NULL_RTX, 1, OPTAB_WIDEN);
7445 scratch = expand_binop (SImode, ashr_optab, operands[2],
7446 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7447 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7448 NULL_RTX, 0, OPTAB_WIDEN);
7449 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7453 /* LT is handled by generic code. No need for unsigned with 0. */
7460 switch (GET_CODE (operands[1]))
7463 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7464 NULL_RTX, 0, OPTAB_WIDEN);
7465 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7469 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7470 NULL_RTX, 0, OPTAB_WIDEN);
7471 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7475 op3 = force_reg (SImode, operands[3]);
7477 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7478 NULL_RTX, 1, OPTAB_WIDEN);
7479 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7480 NULL_RTX, 0, OPTAB_WIDEN);
7481 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7487 if (!thumb1_cmp_operand (op3, SImode))
7488 op3 = force_reg (SImode, op3);
7489 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7490 NULL_RTX, 0, OPTAB_WIDEN);
7491 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7492 NULL_RTX, 1, OPTAB_WIDEN);
7493 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7498 op3 = force_reg (SImode, operands[3]);
7499 scratch = force_reg (SImode, const0_rtx);
7500 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7506 if (!thumb1_cmp_operand (op3, SImode))
7507 op3 = force_reg (SImode, op3);
7508 scratch = force_reg (SImode, const0_rtx);
7509 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7515 if (!thumb1_cmp_operand (op3, SImode))
7516 op3 = force_reg (SImode, op3);
7517 scratch = gen_reg_rtx (SImode);
7518 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7522 op3 = force_reg (SImode, operands[3]);
7523 scratch = gen_reg_rtx (SImode);
7524 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7527 /* No good sequences for GT, LT. */
7534 (define_expand "cstorehf4"
7535 [(set (match_operand:SI 0 "s_register_operand")
7536 (match_operator:SI 1 "expandable_comparison_operator"
7537 [(match_operand:HF 2 "s_register_operand")
7538 (match_operand:HF 3 "vfp_compare_operand")]))]
7539 "TARGET_VFP_FP16INST"
7541 if (!arm_validize_comparison (&operands[1],
7546 emit_insn (gen_cstore_cc (operands[0], operands[1],
7547 operands[2], operands[3]));
7552 (define_expand "cstoresf4"
7553 [(set (match_operand:SI 0 "s_register_operand")
7554 (match_operator:SI 1 "expandable_comparison_operator"
7555 [(match_operand:SF 2 "s_register_operand")
7556 (match_operand:SF 3 "vfp_compare_operand")]))]
7557 "TARGET_32BIT && TARGET_HARD_FLOAT"
7558 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7559 operands[2], operands[3])); DONE;"
7562 (define_expand "cstoredf4"
7563 [(set (match_operand:SI 0 "s_register_operand")
7564 (match_operator:SI 1 "expandable_comparison_operator"
7565 [(match_operand:DF 2 "s_register_operand")
7566 (match_operand:DF 3 "vfp_compare_operand")]))]
7567 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7568 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7569 operands[2], operands[3])); DONE;"
7572 (define_expand "cstoredi4"
7573 [(set (match_operand:SI 0 "s_register_operand")
7574 (match_operator:SI 1 "expandable_comparison_operator"
7575 [(match_operand:DI 2 "s_register_operand")
7576 (match_operand:DI 3 "reg_or_int_operand")]))]
7579 if (!arm_validize_comparison (&operands[1],
7583 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7590 ;; Conditional move insns
7592 (define_expand "movsicc"
7593 [(set (match_operand:SI 0 "s_register_operand")
7594 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7595 (match_operand:SI 2 "arm_not_operand")
7596 (match_operand:SI 3 "arm_not_operand")))]
7603 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7604 &XEXP (operands[1], 1)))
7607 code = GET_CODE (operands[1]);
7608 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7609 XEXP (operands[1], 1), NULL_RTX);
7610 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7614 (define_expand "movhfcc"
7615 [(set (match_operand:HF 0 "s_register_operand")
7616 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7617 (match_operand:HF 2 "s_register_operand")
7618 (match_operand:HF 3 "s_register_operand")))]
7619 "TARGET_VFP_FP16INST"
7622 enum rtx_code code = GET_CODE (operands[1]);
7625 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7626 &XEXP (operands[1], 1)))
7629 code = GET_CODE (operands[1]);
7630 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7631 XEXP (operands[1], 1), NULL_RTX);
7632 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7636 (define_expand "movsfcc"
7637 [(set (match_operand:SF 0 "s_register_operand")
7638 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7639 (match_operand:SF 2 "s_register_operand")
7640 (match_operand:SF 3 "s_register_operand")))]
7641 "TARGET_32BIT && TARGET_HARD_FLOAT"
7644 enum rtx_code code = GET_CODE (operands[1]);
7647 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7648 &XEXP (operands[1], 1)))
7651 code = GET_CODE (operands[1]);
7652 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7653 XEXP (operands[1], 1), NULL_RTX);
7654 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7658 (define_expand "movdfcc"
7659 [(set (match_operand:DF 0 "s_register_operand")
7660 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7661 (match_operand:DF 2 "s_register_operand")
7662 (match_operand:DF 3 "s_register_operand")))]
7663 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7666 enum rtx_code code = GET_CODE (operands[1]);
7669 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7670 &XEXP (operands[1], 1)))
7672 code = GET_CODE (operands[1]);
7673 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7674 XEXP (operands[1], 1), NULL_RTX);
7675 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
7679 (define_insn "*cmov<mode>"
7680 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7681 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7682 [(match_operand 2 "cc_register" "") (const_int 0)])
7683 (match_operand:SDF 3 "s_register_operand"
7685 (match_operand:SDF 4 "s_register_operand"
7686 "<F_constraint>")))]
7687 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7690 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7697 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7702 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7708 [(set_attr "conds" "use")
7709 (set_attr "type" "fcsel")]
7712 (define_insn "*cmovhf"
7713 [(set (match_operand:HF 0 "s_register_operand" "=t")
7714 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7715 [(match_operand 2 "cc_register" "") (const_int 0)])
7716 (match_operand:HF 3 "s_register_operand" "t")
7717 (match_operand:HF 4 "s_register_operand" "t")))]
7718 "TARGET_VFP_FP16INST"
7721 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7728 return \"vsel%d1.f16\\t%0, %3, %4\";
7733 return \"vsel%D1.f16\\t%0, %4, %3\";
7739 [(set_attr "conds" "use")
7740 (set_attr "type" "fcsel")]
7743 (define_insn_and_split "*movsicc_insn"
7744 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7746 (match_operator 3 "arm_comparison_operator"
7747 [(match_operand 4 "cc_register" "") (const_int 0)])
7748 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7749 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7760 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7761 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7762 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7763 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7764 "&& reload_completed"
7767 enum rtx_code rev_code;
7771 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7773 gen_rtx_SET (operands[0], operands[1])));
7775 rev_code = GET_CODE (operands[3]);
7776 mode = GET_MODE (operands[4]);
7777 if (mode == CCFPmode || mode == CCFPEmode)
7778 rev_code = reverse_condition_maybe_unordered (rev_code);
7780 rev_code = reverse_condition (rev_code);
7782 rev_cond = gen_rtx_fmt_ee (rev_code,
7786 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7788 gen_rtx_SET (operands[0], operands[2])));
7791 [(set_attr "length" "4,4,4,4,8,8,8,8")
7792 (set_attr "conds" "use")
7793 (set_attr_alternative "type"
7794 [(if_then_else (match_operand 2 "const_int_operand" "")
7795 (const_string "mov_imm")
7796 (const_string "mov_reg"))
7797 (const_string "mvn_imm")
7798 (if_then_else (match_operand 1 "const_int_operand" "")
7799 (const_string "mov_imm")
7800 (const_string "mov_reg"))
7801 (const_string "mvn_imm")
7802 (const_string "multiple")
7803 (const_string "multiple")
7804 (const_string "multiple")
7805 (const_string "multiple")])]
7808 (define_insn "*movsfcc_soft_insn"
7809 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7810 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
7811 [(match_operand 4 "cc_register" "") (const_int 0)])
7812 (match_operand:SF 1 "s_register_operand" "0,r")
7813 (match_operand:SF 2 "s_register_operand" "r,0")))]
7814 "TARGET_ARM && TARGET_SOFT_FLOAT"
7818 [(set_attr "conds" "use")
7819 (set_attr "type" "mov_reg")]
7823 ;; Jump and linkage insns
7825 (define_expand "jump"
7827 (label_ref (match_operand 0 "" "")))]
7832 (define_insn "*arm_jump"
7834 (label_ref (match_operand 0 "" "")))]
7838 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7840 arm_ccfsm_state += 2;
7843 return \"b%?\\t%l0\";
7846 [(set_attr "predicable" "yes")
7847 (set (attr "length")
7849 (and (match_test "TARGET_THUMB2")
7850 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7851 (le (minus (match_dup 0) (pc)) (const_int 2048))))
7854 (set_attr "type" "branch")]
7857 (define_expand "call"
7858 [(parallel [(call (match_operand 0 "memory_operand")
7859 (match_operand 1 "general_operand"))
7860 (use (match_operand 2 "" ""))
7861 (clobber (reg:SI LR_REGNUM))])]
7866 tree addr = MEM_EXPR (operands[0]);
7868 /* In an untyped call, we can get NULL for operand 2. */
7869 if (operands[2] == NULL_RTX)
7870 operands[2] = const0_rtx;
7872 /* Decide if we should generate indirect calls by loading the
7873 32-bit address of the callee into a register before performing the
7875 callee = XEXP (operands[0], 0);
7876 if (GET_CODE (callee) == SYMBOL_REF
7877 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7879 XEXP (operands[0], 0) = force_reg (Pmode, callee);
7881 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7882 /* Indirect call: set r9 with FDPIC value of callee. */
7883 XEXP (operands[0], 0)
7884 = arm_load_function_descriptor (XEXP (operands[0], 0));
7886 if (detect_cmse_nonsecure_call (addr))
7888 pat = gen_nonsecure_call_internal (operands[0], operands[1],
7890 emit_call_insn (pat);
7894 pat = gen_call_internal (operands[0], operands[1], operands[2]);
7895 arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7898 /* Restore FDPIC register (r9) after call. */
7901 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7902 rtx initial_fdpic_reg
7903 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7905 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7906 initial_fdpic_reg));
7913 (define_insn "restore_pic_register_after_call"
7914 [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7915 (unspec:SI [(match_dup 0)
7916 (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7917 UNSPEC_PIC_RESTORE))]
7924 (define_expand "call_internal"
7925 [(parallel [(call (match_operand 0 "memory_operand")
7926 (match_operand 1 "general_operand"))
7927 (use (match_operand 2 "" ""))
7928 (clobber (reg:SI LR_REGNUM))])])
7930 (define_expand "nonsecure_call_internal"
7931 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7932 UNSPEC_NONSECURE_MEM)
7933 (match_operand 1 "general_operand"))
7934 (use (match_operand 2 "" ""))
7935 (clobber (reg:SI LR_REGNUM))])]
7940 tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7941 gen_rtx_REG (SImode, R4_REGNUM),
7944 operands[0] = replace_equiv_address (operands[0], tmp);
7947 (define_insn "*call_reg_armv5"
7948 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7949 (match_operand 1 "" ""))
7950 (use (match_operand 2 "" ""))
7951 (clobber (reg:SI LR_REGNUM))]
7952 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7954 [(set_attr "type" "call")]
7957 (define_insn "*call_reg_arm"
7958 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7959 (match_operand 1 "" ""))
7960 (use (match_operand 2 "" ""))
7961 (clobber (reg:SI LR_REGNUM))]
7962 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
7964 return output_call (operands);
7966 ;; length is worst case, normally it is only two
7967 [(set_attr "length" "12")
7968 (set_attr "type" "call")]
7972 (define_expand "call_value"
7973 [(parallel [(set (match_operand 0 "" "")
7974 (call (match_operand 1 "memory_operand")
7975 (match_operand 2 "general_operand")))
7976 (use (match_operand 3 "" ""))
7977 (clobber (reg:SI LR_REGNUM))])]
7982 tree addr = MEM_EXPR (operands[1]);
7984 /* In an untyped call, we can get NULL for operand 2. */
7985 if (operands[3] == 0)
7986 operands[3] = const0_rtx;
7988 /* Decide if we should generate indirect calls by loading the
7989 32-bit address of the callee into a register before performing the
7991 callee = XEXP (operands[1], 0);
7992 if (GET_CODE (callee) == SYMBOL_REF
7993 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7995 XEXP (operands[1], 0) = force_reg (Pmode, callee);
7997 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
7998 /* Indirect call: set r9 with FDPIC value of callee. */
7999 XEXP (operands[1], 0)
8000 = arm_load_function_descriptor (XEXP (operands[1], 0));
8002 if (detect_cmse_nonsecure_call (addr))
8004 pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8005 operands[2], operands[3]);
8006 emit_call_insn (pat);
8010 pat = gen_call_value_internal (operands[0], operands[1],
8011 operands[2], operands[3]);
8012 arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8015 /* Restore FDPIC register (r9) after call. */
8018 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8019 rtx initial_fdpic_reg
8020 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8022 emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8023 initial_fdpic_reg));
8030 (define_expand "call_value_internal"
8031 [(parallel [(set (match_operand 0 "" "")
8032 (call (match_operand 1 "memory_operand")
8033 (match_operand 2 "general_operand")))
8034 (use (match_operand 3 "" ""))
8035 (clobber (reg:SI LR_REGNUM))])])
8037 (define_expand "nonsecure_call_value_internal"
8038 [(parallel [(set (match_operand 0 "" "")
8039 (call (unspec:SI [(match_operand 1 "memory_operand")]
8040 UNSPEC_NONSECURE_MEM)
8041 (match_operand 2 "general_operand")))
8042 (use (match_operand 3 "" ""))
8043 (clobber (reg:SI LR_REGNUM))])]
8048 tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
8049 gen_rtx_REG (SImode, R4_REGNUM),
8052 operands[1] = replace_equiv_address (operands[1], tmp);
8055 (define_insn "*call_value_reg_armv5"
8056 [(set (match_operand 0 "" "")
8057 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8058 (match_operand 2 "" "")))
8059 (use (match_operand 3 "" ""))
8060 (clobber (reg:SI LR_REGNUM))]
8061 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8063 [(set_attr "type" "call")]
8066 (define_insn "*call_value_reg_arm"
8067 [(set (match_operand 0 "" "")
8068 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8069 (match_operand 2 "" "")))
8070 (use (match_operand 3 "" ""))
8071 (clobber (reg:SI LR_REGNUM))]
8072 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8074 return output_call (&operands[1]);
8076 [(set_attr "length" "12")
8077 (set_attr "type" "call")]
8080 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8081 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8083 (define_insn "*call_symbol"
8084 [(call (mem:SI (match_operand:SI 0 "" ""))
8085 (match_operand 1 "" ""))
8086 (use (match_operand 2 "" ""))
8087 (clobber (reg:SI LR_REGNUM))]
8089 && !SIBLING_CALL_P (insn)
8090 && (GET_CODE (operands[0]) == SYMBOL_REF)
8091 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8094 rtx op = operands[0];
8096 /* Switch mode now when possible. */
8097 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8098 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8099 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8101 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8103 [(set_attr "type" "call")]
8106 (define_insn "*call_value_symbol"
8107 [(set (match_operand 0 "" "")
8108 (call (mem:SI (match_operand:SI 1 "" ""))
8109 (match_operand:SI 2 "" "")))
8110 (use (match_operand 3 "" ""))
8111 (clobber (reg:SI LR_REGNUM))]
8113 && !SIBLING_CALL_P (insn)
8114 && (GET_CODE (operands[1]) == SYMBOL_REF)
8115 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8118 rtx op = operands[1];
8120 /* Switch mode now when possible. */
8121 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8122 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8123 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8125 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8127 [(set_attr "type" "call")]
8130 (define_expand "sibcall_internal"
8131 [(parallel [(call (match_operand 0 "memory_operand")
8132 (match_operand 1 "general_operand"))
8134 (use (match_operand 2 "" ""))])])
8136 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8137 (define_expand "sibcall"
8138 [(parallel [(call (match_operand 0 "memory_operand")
8139 (match_operand 1 "general_operand"))
8141 (use (match_operand 2 "" ""))])]
8147 if ((!REG_P (XEXP (operands[0], 0))
8148 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8149 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8150 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8151 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8153 if (operands[2] == NULL_RTX)
8154 operands[2] = const0_rtx;
8156 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8157 arm_emit_call_insn (pat, operands[0], true);
8162 (define_expand "sibcall_value_internal"
8163 [(parallel [(set (match_operand 0 "" "")
8164 (call (match_operand 1 "memory_operand")
8165 (match_operand 2 "general_operand")))
8167 (use (match_operand 3 "" ""))])])
8169 (define_expand "sibcall_value"
8170 [(parallel [(set (match_operand 0 "" "")
8171 (call (match_operand 1 "memory_operand")
8172 (match_operand 2 "general_operand")))
8174 (use (match_operand 3 "" ""))])]
8180 if ((!REG_P (XEXP (operands[1], 0))
8181 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8182 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8183 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8184 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8186 if (operands[3] == NULL_RTX)
8187 operands[3] = const0_rtx;
8189 pat = gen_sibcall_value_internal (operands[0], operands[1],
8190 operands[2], operands[3]);
8191 arm_emit_call_insn (pat, operands[1], true);
8196 (define_insn "*sibcall_insn"
8197 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8198 (match_operand 1 "" ""))
8200 (use (match_operand 2 "" ""))]
8201 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8203 if (which_alternative == 1)
8204 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8207 if (arm_arch5t || arm_arch4t)
8208 return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8210 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8213 [(set_attr "type" "call")]
8216 (define_insn "*sibcall_value_insn"
8217 [(set (match_operand 0 "" "")
8218 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8219 (match_operand 2 "" "")))
8221 (use (match_operand 3 "" ""))]
8222 "TARGET_32BIT && SIBLING_CALL_P (insn)"
8224 if (which_alternative == 1)
8225 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8228 if (arm_arch5t || arm_arch4t)
8229 return \"bx%?\\t%1\";
8231 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8234 [(set_attr "type" "call")]
8237 (define_expand "<return_str>return"
8239 "(TARGET_ARM || (TARGET_THUMB2
8240 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8241 && !IS_STACKALIGN (arm_current_func_type ())))
8242 <return_cond_false>"
8247 thumb2_expand_return (<return_simple_p>);
8254 ;; Often the return insn will be the same as loading from memory, so set attr
8255 (define_insn "*arm_return"
8257 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8260 if (arm_ccfsm_state == 2)
8262 arm_ccfsm_state += 2;
8265 return output_return_instruction (const_true_rtx, true, false, false);
8267 [(set_attr "type" "load_4")
8268 (set_attr "length" "12")
8269 (set_attr "predicable" "yes")]
8272 (define_insn "*cond_<return_str>return"
8274 (if_then_else (match_operator 0 "arm_comparison_operator"
8275 [(match_operand 1 "cc_register" "") (const_int 0)])
8278 "TARGET_ARM <return_cond_true>"
8281 if (arm_ccfsm_state == 2)
8283 arm_ccfsm_state += 2;
8286 return output_return_instruction (operands[0], true, false,
8289 [(set_attr "conds" "use")
8290 (set_attr "length" "12")
8291 (set_attr "type" "load_4")]
8294 (define_insn "*cond_<return_str>return_inverted"
8296 (if_then_else (match_operator 0 "arm_comparison_operator"
8297 [(match_operand 1 "cc_register" "") (const_int 0)])
8300 "TARGET_ARM <return_cond_true>"
8303 if (arm_ccfsm_state == 2)
8305 arm_ccfsm_state += 2;
8308 return output_return_instruction (operands[0], true, true,
8311 [(set_attr "conds" "use")
8312 (set_attr "length" "12")
8313 (set_attr "type" "load_4")]
8316 (define_insn "*arm_simple_return"
8321 if (arm_ccfsm_state == 2)
8323 arm_ccfsm_state += 2;
8326 return output_return_instruction (const_true_rtx, true, false, true);
8328 [(set_attr "type" "branch")
8329 (set_attr "length" "4")
8330 (set_attr "predicable" "yes")]
8333 ;; Generate a sequence of instructions to determine if the processor is
8334 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8337 (define_expand "return_addr_mask"
8339 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8341 (set (match_operand:SI 0 "s_register_operand")
8342 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8344 (const_int 67108860)))] ; 0x03fffffc
8347 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8350 (define_insn "*check_arch2"
8351 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8352 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8355 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8356 [(set_attr "length" "8")
8357 (set_attr "conds" "set")
8358 (set_attr "type" "multiple")]
8361 ;; Call subroutine returning any type.
8363 (define_expand "untyped_call"
8364 [(parallel [(call (match_operand 0 "" "")
8366 (match_operand 1 "" "")
8367 (match_operand 2 "" "")])]
8368 "TARGET_EITHER && !TARGET_FDPIC"
8372 rtx par = gen_rtx_PARALLEL (VOIDmode,
8373 rtvec_alloc (XVECLEN (operands[2], 0)));
8374 rtx addr = gen_reg_rtx (Pmode);
8378 emit_move_insn (addr, XEXP (operands[1], 0));
8379 mem = change_address (operands[1], BLKmode, addr);
8381 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8383 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8385 /* Default code only uses r0 as a return value, but we could
8386 be using anything up to 4 registers. */
8387 if (REGNO (src) == R0_REGNUM)
8388 src = gen_rtx_REG (TImode, R0_REGNUM);
8390 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8392 size += GET_MODE_SIZE (GET_MODE (src));
8395 emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8399 for (i = 0; i < XVECLEN (par, 0); i++)
8401 HOST_WIDE_INT offset = 0;
8402 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8405 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8407 mem = change_address (mem, GET_MODE (reg), NULL);
8408 if (REGNO (reg) == R0_REGNUM)
8410 /* On thumb we have to use a write-back instruction. */
8411 emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8412 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8413 size = TARGET_ARM ? 16 : 0;
8417 emit_move_insn (mem, reg);
8418 size = GET_MODE_SIZE (GET_MODE (reg));
8422 /* The optimizer does not know that the call sets the function value
8423 registers we stored in the result block. We avoid problems by
8424 claiming that all hard registers are used and clobbered at this
8426 emit_insn (gen_blockage ());
8432 (define_expand "untyped_return"
8433 [(match_operand:BLK 0 "memory_operand")
8434 (match_operand 1 "" "")]
8435 "TARGET_EITHER && !TARGET_FDPIC"
8439 rtx addr = gen_reg_rtx (Pmode);
8443 emit_move_insn (addr, XEXP (operands[0], 0));
8444 mem = change_address (operands[0], BLKmode, addr);
8446 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8448 HOST_WIDE_INT offset = 0;
8449 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8452 emit_move_insn (addr, plus_constant (Pmode, addr, size));
8454 mem = change_address (mem, GET_MODE (reg), NULL);
8455 if (REGNO (reg) == R0_REGNUM)
8457 /* On thumb we have to use a write-back instruction. */
8458 emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8459 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8460 size = TARGET_ARM ? 16 : 0;
8464 emit_move_insn (reg, mem);
8465 size = GET_MODE_SIZE (GET_MODE (reg));
8469 /* Emit USE insns before the return. */
8470 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8471 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8473 /* Construct the return. */
8474 expand_naked_return ();
8480 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8481 ;; all of memory. This blocks insns from being moved across this point.
8483 (define_insn "blockage"
8484 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8487 [(set_attr "length" "0")
8488 (set_attr "type" "block")]
8491 ;; Since we hard code r0 here use the 'o' constraint to prevent
8492 ;; provoking undefined behaviour in the hardware with putting out
8493 ;; auto-increment operations with potentially r0 as the base register.
8494 (define_insn "probe_stack"
8495 [(set (match_operand:SI 0 "memory_operand" "=o")
8496 (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8499 [(set_attr "type" "store_4")
8500 (set_attr "predicable" "yes")]
8503 (define_insn "probe_stack_range"
8504 [(set (match_operand:SI 0 "register_operand" "=r")
8505 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8506 (match_operand:SI 2 "register_operand" "r")]
8507 VUNSPEC_PROBE_STACK_RANGE))]
8510 return output_probe_stack_range (operands[0], operands[2]);
8512 [(set_attr "type" "multiple")
8513 (set_attr "conds" "clob")]
8516 ;; Named patterns for stack smashing protection.
8517 (define_expand "stack_protect_combined_set"
8519 [(set (match_operand:SI 0 "memory_operand")
8520 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8522 (clobber (match_scratch:SI 2 ""))
8523 (clobber (match_scratch:SI 3 ""))])]
8528 ;; Use a separate insn from the above expand to be able to have the mem outside
8529 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8530 ;; try to reload the guard since we need to control how PIC access is done in
8531 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8532 ;; legitimize_pic_address ()).
8533 (define_insn_and_split "*stack_protect_combined_set_insn"
8534 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8535 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8537 (clobber (match_scratch:SI 2 "=&l,&r"))
8538 (clobber (match_scratch:SI 3 "=&l,&r"))]
8542 [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8544 (clobber (match_dup 2))])]
8552 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8554 pic_reg = operands[3];
8556 /* Forces recomputing of GOT base now. */
8557 legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8558 true /*compute_now*/);
8562 if (address_operand (operands[1], SImode))
8563 operands[2] = operands[1];
8566 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8567 emit_move_insn (operands[2], mem);
8571 [(set_attr "arch" "t1,32")]
8574 ;; DO NOT SPLIT THIS INSN. It's important for security reasons that the
8575 ;; canary value does not live beyond the life of this sequence.
8576 (define_insn "*stack_protect_set_insn"
8577 [(set (match_operand:SI 0 "memory_operand" "=m,m")
8578 (unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8580 (clobber (match_dup 1))]
8583 ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8584 ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8585 [(set_attr "length" "8,12")
8586 (set_attr "conds" "clob,nocond")
8587 (set_attr "type" "multiple")
8588 (set_attr "arch" "t1,32")]
8591 (define_expand "stack_protect_combined_test"
8595 (eq (match_operand:SI 0 "memory_operand")
8596 (unspec:SI [(match_operand:SI 1 "guard_operand")]
8598 (label_ref (match_operand 2))
8600 (clobber (match_scratch:SI 3 ""))
8601 (clobber (match_scratch:SI 4 ""))
8602 (clobber (reg:CC CC_REGNUM))])]
8607 ;; Use a separate insn from the above expand to be able to have the mem outside
8608 ;; the operand #1 when register allocation comes. This is needed to avoid LRA
8609 ;; try to reload the guard since we need to control how PIC access is done in
8610 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8611 ;; legitimize_pic_address ()).
8612 (define_insn_and_split "*stack_protect_combined_test_insn"
8615 (eq (match_operand:SI 0 "memory_operand" "m,m")
8616 (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8618 (label_ref (match_operand 2))
8620 (clobber (match_scratch:SI 3 "=&l,&r"))
8621 (clobber (match_scratch:SI 4 "=&l,&r"))
8622 (clobber (reg:CC CC_REGNUM))]
8635 pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8637 pic_reg = operands[4];
8639 /* Forces recomputing of GOT base now. */
8640 legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8641 true /*compute_now*/);
8645 if (address_operand (operands[1], SImode))
8646 operands[3] = operands[1];
8649 rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8650 emit_move_insn (operands[3], mem);
8655 emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8657 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8658 eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8659 emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8663 emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8665 eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8666 emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8671 [(set_attr "arch" "t1,32")]
8674 (define_insn "arm_stack_protect_test_insn"
8675 [(set (reg:CC_Z CC_REGNUM)
8676 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8677 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8680 (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8681 (clobber (match_dup 2))]
8683 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8684 [(set_attr "length" "8,12")
8685 (set_attr "conds" "set")
8686 (set_attr "type" "multiple")
8687 (set_attr "arch" "t,32")]
8690 (define_expand "casesi"
8691 [(match_operand:SI 0 "s_register_operand") ; index to jump on
8692 (match_operand:SI 1 "const_int_operand") ; lower bound
8693 (match_operand:SI 2 "const_int_operand") ; total range
8694 (match_operand:SI 3 "" "") ; table label
8695 (match_operand:SI 4 "" "")] ; Out of range label
8696 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8699 enum insn_code code;
8700 if (operands[1] != const0_rtx)
8702 rtx reg = gen_reg_rtx (SImode);
8704 emit_insn (gen_addsi3 (reg, operands[0],
8705 gen_int_mode (-INTVAL (operands[1]),
8711 code = CODE_FOR_arm_casesi_internal;
8712 else if (TARGET_THUMB1)
8713 code = CODE_FOR_thumb1_casesi_internal_pic;
8715 code = CODE_FOR_thumb2_casesi_internal_pic;
8717 code = CODE_FOR_thumb2_casesi_internal;
8719 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8720 operands[2] = force_reg (SImode, operands[2]);
8722 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8723 operands[3], operands[4]));
8728 ;; The USE in this pattern is needed to tell flow analysis that this is
8729 ;; a CASESI insn. It has no other purpose.
8730 (define_expand "arm_casesi_internal"
8731 [(parallel [(set (pc)
8733 (leu (match_operand:SI 0 "s_register_operand")
8734 (match_operand:SI 1 "arm_rhs_operand"))
8736 (label_ref:SI (match_operand 3 ""))))
8737 (clobber (reg:CC CC_REGNUM))
8738 (use (label_ref:SI (match_operand 2 "")))])]
8741 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8742 operands[4] = gen_rtx_PLUS (SImode, operands[4],
8743 gen_rtx_LABEL_REF (SImode, operands[2]));
8744 operands[4] = gen_rtx_MEM (SImode, operands[4]);
8745 MEM_READONLY_P (operands[4]) = 1;
8746 MEM_NOTRAP_P (operands[4]) = 1;
8749 (define_insn "*arm_casesi_internal"
8750 [(parallel [(set (pc)
8752 (leu (match_operand:SI 0 "s_register_operand" "r")
8753 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8754 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8755 (label_ref:SI (match_operand 2 "" ""))))
8756 (label_ref:SI (match_operand 3 "" ""))))
8757 (clobber (reg:CC CC_REGNUM))
8758 (use (label_ref:SI (match_dup 2)))])]
8762 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8763 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8765 [(set_attr "conds" "clob")
8766 (set_attr "length" "12")
8767 (set_attr "type" "multiple")]
8770 (define_expand "indirect_jump"
8772 (match_operand:SI 0 "s_register_operand"))]
8775 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8776 address and use bx. */
8780 tmp = gen_reg_rtx (SImode);
8781 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8787 ;; NB Never uses BX.
8788 (define_insn "*arm_indirect_jump"
8790 (match_operand:SI 0 "s_register_operand" "r"))]
8792 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8793 [(set_attr "predicable" "yes")
8794 (set_attr "type" "branch")]
8797 (define_insn "*load_indirect_jump"
8799 (match_operand:SI 0 "memory_operand" "m"))]
8801 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8802 [(set_attr "type" "load_4")
8803 (set_attr "pool_range" "4096")
8804 (set_attr "neg_pool_range" "4084")
8805 (set_attr "predicable" "yes")]
8815 [(set (attr "length")
8816 (if_then_else (eq_attr "is_thumb" "yes")
8819 (set_attr "type" "mov_reg")]
8823 [(trap_if (const_int 1) (const_int 0))]
8827 return \".inst\\t0xe7f000f0\";
8829 return \".inst\\t0xdeff\";
8831 [(set (attr "length")
8832 (if_then_else (eq_attr "is_thumb" "yes")
8835 (set_attr "type" "trap")
8836 (set_attr "conds" "unconditional")]
8840 ;; Patterns to allow combination of arithmetic, cond code and shifts
8842 (define_insn "*<arith_shift_insn>_multsi"
8843 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8845 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8846 (match_operand:SI 3 "power_of_two_operand" ""))
8847 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8849 "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8850 [(set_attr "predicable" "yes")
8851 (set_attr "shift" "2")
8852 (set_attr "arch" "a,t2")
8853 (set_attr "type" "alu_shift_imm")])
8855 (define_insn "*<arith_shift_insn>_shiftsi"
8856 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8858 (match_operator:SI 2 "shift_nomul_operator"
8859 [(match_operand:SI 3 "s_register_operand" "r,r,r")
8860 (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8861 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8862 "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8863 "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8864 [(set_attr "predicable" "yes")
8865 (set_attr "shift" "3")
8866 (set_attr "arch" "a,t2,a")
8867 (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8870 [(set (match_operand:SI 0 "s_register_operand" "")
8871 (match_operator:SI 1 "shiftable_operator"
8872 [(match_operator:SI 2 "shiftable_operator"
8873 [(match_operator:SI 3 "shift_operator"
8874 [(match_operand:SI 4 "s_register_operand" "")
8875 (match_operand:SI 5 "reg_or_int_operand" "")])
8876 (match_operand:SI 6 "s_register_operand" "")])
8877 (match_operand:SI 7 "arm_rhs_operand" "")]))
8878 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8881 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8884 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8887 (define_insn "*arith_shiftsi_compare0"
8888 [(set (reg:CC_NOOV CC_REGNUM)
8890 (match_operator:SI 1 "shiftable_operator"
8891 [(match_operator:SI 3 "shift_operator"
8892 [(match_operand:SI 4 "s_register_operand" "r,r")
8893 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8894 (match_operand:SI 2 "s_register_operand" "r,r")])
8896 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8897 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8900 "%i1s%?\\t%0, %2, %4%S3"
8901 [(set_attr "conds" "set")
8902 (set_attr "shift" "4")
8903 (set_attr "arch" "32,a")
8904 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8906 (define_insn "*arith_shiftsi_compare0_scratch"
8907 [(set (reg:CC_NOOV CC_REGNUM)
8909 (match_operator:SI 1 "shiftable_operator"
8910 [(match_operator:SI 3 "shift_operator"
8911 [(match_operand:SI 4 "s_register_operand" "r,r")
8912 (match_operand:SI 5 "shift_amount_operand" "M,r")])
8913 (match_operand:SI 2 "s_register_operand" "r,r")])
8915 (clobber (match_scratch:SI 0 "=r,r"))]
8917 "%i1s%?\\t%0, %2, %4%S3"
8918 [(set_attr "conds" "set")
8919 (set_attr "shift" "4")
8920 (set_attr "arch" "32,a")
8921 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8923 (define_insn "*sub_shiftsi"
8924 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8925 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8926 (match_operator:SI 2 "shift_operator"
8927 [(match_operand:SI 3 "s_register_operand" "r,r")
8928 (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8930 "sub%?\\t%0, %1, %3%S2"
8931 [(set_attr "predicable" "yes")
8932 (set_attr "predicable_short_it" "no")
8933 (set_attr "shift" "3")
8934 (set_attr "arch" "32,a")
8935 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8937 (define_insn "*sub_shiftsi_compare0"
8938 [(set (reg:CC_NOOV CC_REGNUM)
8940 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8941 (match_operator:SI 2 "shift_operator"
8942 [(match_operand:SI 3 "s_register_operand" "r,r")
8943 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
8945 (set (match_operand:SI 0 "s_register_operand" "=r,r")
8946 (minus:SI (match_dup 1)
8947 (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8949 "subs%?\\t%0, %1, %3%S2"
8950 [(set_attr "conds" "set")
8951 (set_attr "shift" "3")
8952 (set_attr "arch" "32,a")
8953 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8955 (define_insn "*sub_shiftsi_compare0_scratch"
8956 [(set (reg:CC_NOOV CC_REGNUM)
8958 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8959 (match_operator:SI 2 "shift_operator"
8960 [(match_operand:SI 3 "s_register_operand" "r,r")
8961 (match_operand:SI 4 "shift_amount_operand" "M,r")]))
8963 (clobber (match_scratch:SI 0 "=r,r"))]
8965 "subs%?\\t%0, %1, %3%S2"
8966 [(set_attr "conds" "set")
8967 (set_attr "shift" "3")
8968 (set_attr "arch" "32,a")
8969 (set_attr "type" "alus_shift_imm,alus_shift_reg")])
8972 (define_insn_and_split "*and_scc"
8973 [(set (match_operand:SI 0 "s_register_operand" "=r")
8974 (and:SI (match_operator:SI 1 "arm_comparison_operator"
8975 [(match_operand 2 "cc_register" "") (const_int 0)])
8976 (match_operand:SI 3 "s_register_operand" "r")))]
8978 "#" ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
8979 "&& reload_completed"
8980 [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
8981 (cond_exec (match_dup 4) (set (match_dup 0)
8982 (and:SI (match_dup 3) (const_int 1))))]
8984 machine_mode mode = GET_MODE (operands[2]);
8985 enum rtx_code rc = GET_CODE (operands[1]);
8987 /* Note that operands[4] is the same as operands[1],
8988 but with VOIDmode as the result. */
8989 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8990 if (mode == CCFPmode || mode == CCFPEmode)
8991 rc = reverse_condition_maybe_unordered (rc);
8993 rc = reverse_condition (rc);
8994 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
8996 [(set_attr "conds" "use")
8997 (set_attr "type" "multiple")
8998 (set_attr "length" "8")]
9001 (define_insn_and_split "*ior_scc"
9002 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9003 (ior:SI (match_operator:SI 1 "arm_comparison_operator"
9004 [(match_operand 2 "cc_register" "") (const_int 0)])
9005 (match_operand:SI 3 "s_register_operand" "0,?r")))]
9010 "&& reload_completed
9011 && REGNO (operands [0]) != REGNO (operands[3])"
9012 ;; && which_alternative == 1
9013 ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9014 [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9015 (cond_exec (match_dup 4) (set (match_dup 0)
9016 (ior:SI (match_dup 3) (const_int 1))))]
9018 machine_mode mode = GET_MODE (operands[2]);
9019 enum rtx_code rc = GET_CODE (operands[1]);
9021 /* Note that operands[4] is the same as operands[1],
9022 but with VOIDmode as the result. */
9023 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9024 if (mode == CCFPmode || mode == CCFPEmode)
9025 rc = reverse_condition_maybe_unordered (rc);
9027 rc = reverse_condition (rc);
9028 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9030 [(set_attr "conds" "use")
9031 (set_attr "length" "4,8")
9032 (set_attr "type" "logic_imm,multiple")]
9035 ; A series of splitters for the compare_scc pattern below. Note that
9036 ; order is important.
9038 [(set (match_operand:SI 0 "s_register_operand" "")
9039 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9041 (clobber (reg:CC CC_REGNUM))]
9042 "TARGET_32BIT && reload_completed"
9043 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9046 [(set (match_operand:SI 0 "s_register_operand" "")
9047 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9049 (clobber (reg:CC CC_REGNUM))]
9050 "TARGET_32BIT && reload_completed"
9051 [(set (match_dup 0) (not:SI (match_dup 1)))
9052 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9055 [(set (match_operand:SI 0 "s_register_operand" "")
9056 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9058 (clobber (reg:CC CC_REGNUM))]
9059 "arm_arch5t && TARGET_32BIT"
9060 [(set (match_dup 0) (clz:SI (match_dup 1)))
9061 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9065 [(set (match_operand:SI 0 "s_register_operand" "")
9066 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9068 (clobber (reg:CC CC_REGNUM))]
9069 "TARGET_32BIT && reload_completed"
9071 [(set (reg:CC CC_REGNUM)
9072 (compare:CC (const_int 1) (match_dup 1)))
9074 (minus:SI (const_int 1) (match_dup 1)))])
9075 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9076 (set (match_dup 0) (const_int 0)))])
9079 [(set (match_operand:SI 0 "s_register_operand" "")
9080 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9081 (match_operand:SI 2 "const_int_operand" "")))
9082 (clobber (reg:CC CC_REGNUM))]
9083 "TARGET_32BIT && reload_completed"
9085 [(set (reg:CC CC_REGNUM)
9086 (compare:CC (match_dup 1) (match_dup 2)))
9087 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9088 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9089 (set (match_dup 0) (const_int 1)))]
9091 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9095 [(set (match_operand:SI 0 "s_register_operand" "")
9096 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9097 (match_operand:SI 2 "arm_add_operand" "")))
9098 (clobber (reg:CC CC_REGNUM))]
9099 "TARGET_32BIT && reload_completed"
9101 [(set (reg:CC_NOOV CC_REGNUM)
9102 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9104 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9105 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9106 (set (match_dup 0) (const_int 1)))])
9108 (define_insn_and_split "*compare_scc"
9109 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9110 (match_operator:SI 1 "arm_comparison_operator"
9111 [(match_operand:SI 2 "s_register_operand" "r,r")
9112 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9113 (clobber (reg:CC CC_REGNUM))]
9116 "&& reload_completed"
9117 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9118 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9119 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9122 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9123 operands[2], operands[3]);
9124 enum rtx_code rc = GET_CODE (operands[1]);
9126 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9128 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9129 if (mode == CCFPmode || mode == CCFPEmode)
9130 rc = reverse_condition_maybe_unordered (rc);
9132 rc = reverse_condition (rc);
9133 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9135 [(set_attr "type" "multiple")]
9138 ;; Attempt to improve the sequence generated by the compare_scc splitters
9139 ;; not to use conditional execution.
9141 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
9145 [(set (reg:CC CC_REGNUM)
9146 (compare:CC (match_operand:SI 1 "register_operand" "")
9148 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9149 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9150 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9151 (set (match_dup 0) (const_int 1)))]
9152 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9153 [(set (match_dup 0) (clz:SI (match_dup 1)))
9154 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9157 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
9161 [(set (reg:CC CC_REGNUM)
9162 (compare:CC (match_operand:SI 1 "register_operand" "")
9164 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9165 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9166 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9167 (set (match_dup 0) (const_int 1)))
9168 (match_scratch:SI 2 "r")]
9169 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9171 [(set (reg:CC CC_REGNUM)
9172 (compare:CC (const_int 0) (match_dup 1)))
9173 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9175 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9176 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9179 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9180 ;; sub Rd, Reg1, reg2
9184 [(set (reg:CC CC_REGNUM)
9185 (compare:CC (match_operand:SI 1 "register_operand" "")
9186 (match_operand:SI 2 "arm_rhs_operand" "")))
9187 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9188 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9189 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9190 (set (match_dup 0) (const_int 1)))]
9191 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9192 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9193 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9194 (set (match_dup 0) (clz:SI (match_dup 0)))
9195 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9199 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9200 ;; sub T1, Reg1, reg2
9204 [(set (reg:CC CC_REGNUM)
9205 (compare:CC (match_operand:SI 1 "register_operand" "")
9206 (match_operand:SI 2 "arm_rhs_operand" "")))
9207 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9208 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9209 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9210 (set (match_dup 0) (const_int 1)))
9211 (match_scratch:SI 3 "r")]
9212 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9213 [(set (match_dup 3) (match_dup 4))
9215 [(set (reg:CC CC_REGNUM)
9216 (compare:CC (const_int 0) (match_dup 3)))
9217 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9219 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9220 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9222 if (CONST_INT_P (operands[2]))
9223 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9225 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9228 (define_insn "*cond_move"
9229 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9230 (if_then_else:SI (match_operator 3 "equality_operator"
9231 [(match_operator 4 "arm_comparison_operator"
9232 [(match_operand 5 "cc_register" "") (const_int 0)])
9234 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9235 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9238 if (GET_CODE (operands[3]) == NE)
9240 if (which_alternative != 1)
9241 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9242 if (which_alternative != 0)
9243 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9246 if (which_alternative != 0)
9247 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9248 if (which_alternative != 1)
9249 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9252 [(set_attr "conds" "use")
9253 (set_attr_alternative "type"
9254 [(if_then_else (match_operand 2 "const_int_operand" "")
9255 (const_string "mov_imm")
9256 (const_string "mov_reg"))
9257 (if_then_else (match_operand 1 "const_int_operand" "")
9258 (const_string "mov_imm")
9259 (const_string "mov_reg"))
9260 (const_string "multiple")])
9261 (set_attr "length" "4,4,8")]
9264 (define_insn "*cond_arith"
9265 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9266 (match_operator:SI 5 "shiftable_operator"
9267 [(match_operator:SI 4 "arm_comparison_operator"
9268 [(match_operand:SI 2 "s_register_operand" "r,r")
9269 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9270 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9271 (clobber (reg:CC CC_REGNUM))]
9274 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9275 return \"%i5\\t%0, %1, %2, lsr #31\";
9277 output_asm_insn (\"cmp\\t%2, %3\", operands);
9278 if (GET_CODE (operands[5]) == AND)
9279 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9280 else if (GET_CODE (operands[5]) == MINUS)
9281 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9282 else if (which_alternative != 0)
9283 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9284 return \"%i5%d4\\t%0, %1, #1\";
9286 [(set_attr "conds" "clob")
9287 (set_attr "length" "12")
9288 (set_attr "type" "multiple")]
9291 (define_insn "*cond_sub"
9292 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9293 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9294 (match_operator:SI 4 "arm_comparison_operator"
9295 [(match_operand:SI 2 "s_register_operand" "r,r")
9296 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9297 (clobber (reg:CC CC_REGNUM))]
9300 output_asm_insn (\"cmp\\t%2, %3\", operands);
9301 if (which_alternative != 0)
9302 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9303 return \"sub%d4\\t%0, %1, #1\";
9305 [(set_attr "conds" "clob")
9306 (set_attr "length" "8,12")
9307 (set_attr "type" "multiple")]
9310 (define_insn "*cmp_ite0"
9311 [(set (match_operand 6 "dominant_cc_register" "")
9314 (match_operator 4 "arm_comparison_operator"
9315 [(match_operand:SI 0 "s_register_operand"
9316 "l,l,l,r,r,r,r,r,r")
9317 (match_operand:SI 1 "arm_add_operand"
9318 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9319 (match_operator:SI 5 "arm_comparison_operator"
9320 [(match_operand:SI 2 "s_register_operand"
9321 "l,r,r,l,l,r,r,r,r")
9322 (match_operand:SI 3 "arm_add_operand"
9323 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9329 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9331 {\"cmp%d5\\t%0, %1\",
9332 \"cmp%d4\\t%2, %3\"},
9333 {\"cmn%d5\\t%0, #%n1\",
9334 \"cmp%d4\\t%2, %3\"},
9335 {\"cmp%d5\\t%0, %1\",
9336 \"cmn%d4\\t%2, #%n3\"},
9337 {\"cmn%d5\\t%0, #%n1\",
9338 \"cmn%d4\\t%2, #%n3\"}
9340 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9345 \"cmn\\t%0, #%n1\"},
9346 {\"cmn\\t%2, #%n3\",
9348 {\"cmn\\t%2, #%n3\",
9351 static const char * const ite[2] =
9356 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9357 CMP_CMP, CMN_CMP, CMP_CMP,
9358 CMN_CMP, CMP_CMN, CMN_CMN};
9360 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9362 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9363 if (TARGET_THUMB2) {
9364 output_asm_insn (ite[swap], operands);
9366 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9369 [(set_attr "conds" "set")
9370 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9371 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9372 (set_attr "type" "multiple")
9373 (set_attr_alternative "length"
9379 (if_then_else (eq_attr "is_thumb" "no")
9382 (if_then_else (eq_attr "is_thumb" "no")
9385 (if_then_else (eq_attr "is_thumb" "no")
9388 (if_then_else (eq_attr "is_thumb" "no")
9393 (define_insn "*cmp_ite1"
9394 [(set (match_operand 6 "dominant_cc_register" "")
9397 (match_operator 4 "arm_comparison_operator"
9398 [(match_operand:SI 0 "s_register_operand"
9399 "l,l,l,r,r,r,r,r,r")
9400 (match_operand:SI 1 "arm_add_operand"
9401 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9402 (match_operator:SI 5 "arm_comparison_operator"
9403 [(match_operand:SI 2 "s_register_operand"
9404 "l,r,r,l,l,r,r,r,r")
9405 (match_operand:SI 3 "arm_add_operand"
9406 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9412 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9416 {\"cmn\\t%0, #%n1\",
9419 \"cmn\\t%2, #%n3\"},
9420 {\"cmn\\t%0, #%n1\",
9423 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9425 {\"cmp%d4\\t%2, %3\",
9426 \"cmp%D5\\t%0, %1\"},
9427 {\"cmp%d4\\t%2, %3\",
9428 \"cmn%D5\\t%0, #%n1\"},
9429 {\"cmn%d4\\t%2, #%n3\",
9430 \"cmp%D5\\t%0, %1\"},
9431 {\"cmn%d4\\t%2, #%n3\",
9432 \"cmn%D5\\t%0, #%n1\"}
9434 static const char * const ite[2] =
9439 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9440 CMP_CMP, CMN_CMP, CMP_CMP,
9441 CMN_CMP, CMP_CMN, CMN_CMN};
9443 comparison_dominates_p (GET_CODE (operands[5]),
9444 reverse_condition (GET_CODE (operands[4])));
9446 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9447 if (TARGET_THUMB2) {
9448 output_asm_insn (ite[swap], operands);
9450 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9453 [(set_attr "conds" "set")
9454 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9455 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9456 (set_attr_alternative "length"
9462 (if_then_else (eq_attr "is_thumb" "no")
9465 (if_then_else (eq_attr "is_thumb" "no")
9468 (if_then_else (eq_attr "is_thumb" "no")
9471 (if_then_else (eq_attr "is_thumb" "no")
9474 (set_attr "type" "multiple")]
9477 (define_insn "*cmp_and"
9478 [(set (match_operand 6 "dominant_cc_register" "")
9481 (match_operator 4 "arm_comparison_operator"
9482 [(match_operand:SI 0 "s_register_operand"
9483 "l,l,l,r,r,r,r,r,r,r")
9484 (match_operand:SI 1 "arm_add_operand"
9485 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9486 (match_operator:SI 5 "arm_comparison_operator"
9487 [(match_operand:SI 2 "s_register_operand"
9488 "l,r,r,l,l,r,r,r,r,r")
9489 (match_operand:SI 3 "arm_add_operand"
9490 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9495 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9497 {\"cmp%d5\\t%0, %1\",
9498 \"cmp%d4\\t%2, %3\"},
9499 {\"cmn%d5\\t%0, #%n1\",
9500 \"cmp%d4\\t%2, %3\"},
9501 {\"cmp%d5\\t%0, %1\",
9502 \"cmn%d4\\t%2, #%n3\"},
9503 {\"cmn%d5\\t%0, #%n1\",
9504 \"cmn%d4\\t%2, #%n3\"}
9506 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9511 \"cmn\\t%0, #%n1\"},
9512 {\"cmn\\t%2, #%n3\",
9514 {\"cmn\\t%2, #%n3\",
9517 static const char *const ite[2] =
9522 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9523 CMP_CMP, CMN_CMP, CMP_CMP,
9524 CMP_CMP, CMN_CMP, CMP_CMN,
9527 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9529 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9530 if (TARGET_THUMB2) {
9531 output_asm_insn (ite[swap], operands);
9533 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9536 [(set_attr "conds" "set")
9537 (set_attr "predicable" "no")
9538 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9539 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9540 (set_attr_alternative "length"
9547 (if_then_else (eq_attr "is_thumb" "no")
9550 (if_then_else (eq_attr "is_thumb" "no")
9553 (if_then_else (eq_attr "is_thumb" "no")
9556 (if_then_else (eq_attr "is_thumb" "no")
9559 (set_attr "type" "multiple")]
9562 (define_insn "*cmp_ior"
9563 [(set (match_operand 6 "dominant_cc_register" "")
9566 (match_operator 4 "arm_comparison_operator"
9567 [(match_operand:SI 0 "s_register_operand"
9568 "l,l,l,r,r,r,r,r,r,r")
9569 (match_operand:SI 1 "arm_add_operand"
9570 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9571 (match_operator:SI 5 "arm_comparison_operator"
9572 [(match_operand:SI 2 "s_register_operand"
9573 "l,r,r,l,l,r,r,r,r,r")
9574 (match_operand:SI 3 "arm_add_operand"
9575 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9580 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9584 {\"cmn\\t%0, #%n1\",
9587 \"cmn\\t%2, #%n3\"},
9588 {\"cmn\\t%0, #%n1\",
9591 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9593 {\"cmp%D4\\t%2, %3\",
9594 \"cmp%D5\\t%0, %1\"},
9595 {\"cmp%D4\\t%2, %3\",
9596 \"cmn%D5\\t%0, #%n1\"},
9597 {\"cmn%D4\\t%2, #%n3\",
9598 \"cmp%D5\\t%0, %1\"},
9599 {\"cmn%D4\\t%2, #%n3\",
9600 \"cmn%D5\\t%0, #%n1\"}
9602 static const char *const ite[2] =
9607 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9608 CMP_CMP, CMN_CMP, CMP_CMP,
9609 CMP_CMP, CMN_CMP, CMP_CMN,
9612 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9614 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9615 if (TARGET_THUMB2) {
9616 output_asm_insn (ite[swap], operands);
9618 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9622 [(set_attr "conds" "set")
9623 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9624 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9625 (set_attr_alternative "length"
9632 (if_then_else (eq_attr "is_thumb" "no")
9635 (if_then_else (eq_attr "is_thumb" "no")
9638 (if_then_else (eq_attr "is_thumb" "no")
9641 (if_then_else (eq_attr "is_thumb" "no")
9644 (set_attr "type" "multiple")]
9647 (define_insn_and_split "*ior_scc_scc"
9648 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9649 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9650 [(match_operand:SI 1 "s_register_operand" "l,r")
9651 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9652 (match_operator:SI 6 "arm_comparison_operator"
9653 [(match_operand:SI 4 "s_register_operand" "l,r")
9654 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9655 (clobber (reg:CC CC_REGNUM))]
9657 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9660 "TARGET_32BIT && reload_completed"
9664 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9665 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9667 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9669 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9672 [(set_attr "conds" "clob")
9673 (set_attr "enabled_for_short_it" "yes,no")
9674 (set_attr "length" "16")
9675 (set_attr "type" "multiple")]
9678 ; If the above pattern is followed by a CMP insn, then the compare is
9679 ; redundant, since we can rework the conditional instruction that follows.
9680 (define_insn_and_split "*ior_scc_scc_cmp"
9681 [(set (match_operand 0 "dominant_cc_register" "")
9682 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9683 [(match_operand:SI 1 "s_register_operand" "l,r")
9684 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9685 (match_operator:SI 6 "arm_comparison_operator"
9686 [(match_operand:SI 4 "s_register_operand" "l,r")
9687 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9689 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9690 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9691 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9694 "TARGET_32BIT && reload_completed"
9698 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9699 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9701 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9703 [(set_attr "conds" "set")
9704 (set_attr "enabled_for_short_it" "yes,no")
9705 (set_attr "length" "16")
9706 (set_attr "type" "multiple")]
9709 (define_insn_and_split "*and_scc_scc"
9710 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9711 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9712 [(match_operand:SI 1 "s_register_operand" "l,r")
9713 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9714 (match_operator:SI 6 "arm_comparison_operator"
9715 [(match_operand:SI 4 "s_register_operand" "l,r")
9716 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9717 (clobber (reg:CC CC_REGNUM))]
9719 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9722 "TARGET_32BIT && reload_completed
9723 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9728 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9729 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9731 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9733 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9736 [(set_attr "conds" "clob")
9737 (set_attr "enabled_for_short_it" "yes,no")
9738 (set_attr "length" "16")
9739 (set_attr "type" "multiple")]
9742 ; If the above pattern is followed by a CMP insn, then the compare is
9743 ; redundant, since we can rework the conditional instruction that follows.
9744 (define_insn_and_split "*and_scc_scc_cmp"
9745 [(set (match_operand 0 "dominant_cc_register" "")
9746 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9747 [(match_operand:SI 1 "s_register_operand" "l,r")
9748 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9749 (match_operator:SI 6 "arm_comparison_operator"
9750 [(match_operand:SI 4 "s_register_operand" "l,r")
9751 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9753 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9754 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9755 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9758 "TARGET_32BIT && reload_completed"
9762 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9763 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9765 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9767 [(set_attr "conds" "set")
9768 (set_attr "enabled_for_short_it" "yes,no")
9769 (set_attr "length" "16")
9770 (set_attr "type" "multiple")]
9773 ;; If there is no dominance in the comparison, then we can still save an
9774 ;; instruction in the AND case, since we can know that the second compare
9775 ;; need only zero the value if false (if true, then the value is already
9777 (define_insn_and_split "*and_scc_scc_nodom"
9778 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9779 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9780 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9781 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9782 (match_operator:SI 6 "arm_comparison_operator"
9783 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9784 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9785 (clobber (reg:CC CC_REGNUM))]
9787 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9790 "TARGET_32BIT && reload_completed"
9791 [(parallel [(set (match_dup 0)
9792 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9793 (clobber (reg:CC CC_REGNUM))])
9794 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9796 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9799 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9800 operands[4], operands[5]),
9802 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9804 [(set_attr "conds" "clob")
9805 (set_attr "length" "20")
9806 (set_attr "type" "multiple")]
9810 [(set (reg:CC_NOOV CC_REGNUM)
9811 (compare:CC_NOOV (ior:SI
9812 (and:SI (match_operand:SI 0 "s_register_operand" "")
9814 (match_operator:SI 1 "arm_comparison_operator"
9815 [(match_operand:SI 2 "s_register_operand" "")
9816 (match_operand:SI 3 "arm_add_operand" "")]))
9818 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9821 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9823 (set (reg:CC_NOOV CC_REGNUM)
9824 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9829 [(set (reg:CC_NOOV CC_REGNUM)
9830 (compare:CC_NOOV (ior:SI
9831 (match_operator:SI 1 "arm_comparison_operator"
9832 [(match_operand:SI 2 "s_register_operand" "")
9833 (match_operand:SI 3 "arm_add_operand" "")])
9834 (and:SI (match_operand:SI 0 "s_register_operand" "")
9837 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9840 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9842 (set (reg:CC_NOOV CC_REGNUM)
9843 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9846 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9848 (define_insn_and_split "*negscc"
9849 [(set (match_operand:SI 0 "s_register_operand" "=r")
9850 (neg:SI (match_operator 3 "arm_comparison_operator"
9851 [(match_operand:SI 1 "s_register_operand" "r")
9852 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9853 (clobber (reg:CC CC_REGNUM))]
9856 "&& reload_completed"
9859 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9861 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9863 /* Emit mov\\t%0, %1, asr #31 */
9864 emit_insn (gen_rtx_SET (operands[0],
9865 gen_rtx_ASHIFTRT (SImode,
9870 else if (GET_CODE (operands[3]) == NE)
9872 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9873 if (CONST_INT_P (operands[2]))
9874 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9875 gen_int_mode (-INTVAL (operands[2]),
9878 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9880 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9884 gen_rtx_SET (operands[0],
9890 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9891 emit_insn (gen_rtx_SET (cc_reg,
9892 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9893 enum rtx_code rc = GET_CODE (operands[3]);
9895 rc = reverse_condition (rc);
9896 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9901 gen_rtx_SET (operands[0], const0_rtx)));
9902 rc = GET_CODE (operands[3]);
9903 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9908 gen_rtx_SET (operands[0],
9914 [(set_attr "conds" "clob")
9915 (set_attr "length" "12")
9916 (set_attr "type" "multiple")]
9919 (define_insn_and_split "movcond_addsi"
9920 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9922 (match_operator 5 "comparison_operator"
9923 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9924 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9926 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9927 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9928 (clobber (reg:CC CC_REGNUM))]
9931 "&& reload_completed"
9932 [(set (reg:CC_NOOV CC_REGNUM)
9934 (plus:SI (match_dup 3)
9937 (set (match_dup 0) (match_dup 1))
9938 (cond_exec (match_dup 6)
9939 (set (match_dup 0) (match_dup 2)))]
9942 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9943 operands[3], operands[4]);
9944 enum rtx_code rc = GET_CODE (operands[5]);
9945 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9946 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9947 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9948 rc = reverse_condition (rc);
9950 std::swap (operands[1], operands[2]);
9952 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9955 [(set_attr "conds" "clob")
9956 (set_attr "enabled_for_short_it" "no,yes,yes")
9957 (set_attr "type" "multiple")]
9960 (define_insn "movcond"
9961 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9963 (match_operator 5 "arm_comparison_operator"
9964 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9965 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9966 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9967 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9968 (clobber (reg:CC CC_REGNUM))]
9971 if (GET_CODE (operands[5]) == LT
9972 && (operands[4] == const0_rtx))
9974 if (which_alternative != 1 && REG_P (operands[1]))
9976 if (operands[2] == const0_rtx)
9977 return \"and\\t%0, %1, %3, asr #31\";
9978 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9980 else if (which_alternative != 0 && REG_P (operands[2]))
9982 if (operands[1] == const0_rtx)
9983 return \"bic\\t%0, %2, %3, asr #31\";
9984 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9986 /* The only case that falls through to here is when both ops 1 & 2
9990 if (GET_CODE (operands[5]) == GE
9991 && (operands[4] == const0_rtx))
9993 if (which_alternative != 1 && REG_P (operands[1]))
9995 if (operands[2] == const0_rtx)
9996 return \"bic\\t%0, %1, %3, asr #31\";
9997 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9999 else if (which_alternative != 0 && REG_P (operands[2]))
10001 if (operands[1] == const0_rtx)
10002 return \"and\\t%0, %2, %3, asr #31\";
10003 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10005 /* The only case that falls through to here is when both ops 1 & 2
10008 if (CONST_INT_P (operands[4])
10009 && !const_ok_for_arm (INTVAL (operands[4])))
10010 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10012 output_asm_insn (\"cmp\\t%3, %4\", operands);
10013 if (which_alternative != 0)
10014 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10015 if (which_alternative != 1)
10016 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10019 [(set_attr "conds" "clob")
10020 (set_attr "length" "8,8,12")
10021 (set_attr "type" "multiple")]
10024 ;; ??? The patterns below need checking for Thumb-2 usefulness.
10026 (define_insn "*ifcompare_plus_move"
10027 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10028 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10029 [(match_operand:SI 4 "s_register_operand" "r,r")
10030 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10032 (match_operand:SI 2 "s_register_operand" "r,r")
10033 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10034 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10035 (clobber (reg:CC CC_REGNUM))]
10038 [(set_attr "conds" "clob")
10039 (set_attr "length" "8,12")
10040 (set_attr "type" "multiple")]
10043 (define_insn "*if_plus_move"
10044 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10046 (match_operator 4 "arm_comparison_operator"
10047 [(match_operand 5 "cc_register" "") (const_int 0)])
10049 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10050 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10051 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10054 add%d4\\t%0, %2, %3
10055 sub%d4\\t%0, %2, #%n3
10056 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10057 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10058 [(set_attr "conds" "use")
10059 (set_attr "length" "4,4,8,8")
10060 (set_attr_alternative "type"
10061 [(if_then_else (match_operand 3 "const_int_operand" "")
10062 (const_string "alu_imm" )
10063 (const_string "alu_sreg"))
10064 (const_string "alu_imm")
10065 (const_string "multiple")
10066 (const_string "multiple")])]
10069 (define_insn "*ifcompare_move_plus"
10070 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10071 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10072 [(match_operand:SI 4 "s_register_operand" "r,r")
10073 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10074 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10076 (match_operand:SI 2 "s_register_operand" "r,r")
10077 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10078 (clobber (reg:CC CC_REGNUM))]
10081 [(set_attr "conds" "clob")
10082 (set_attr "length" "8,12")
10083 (set_attr "type" "multiple")]
10086 (define_insn "*if_move_plus"
10087 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10089 (match_operator 4 "arm_comparison_operator"
10090 [(match_operand 5 "cc_register" "") (const_int 0)])
10091 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10093 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10094 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10097 add%D4\\t%0, %2, %3
10098 sub%D4\\t%0, %2, #%n3
10099 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10100 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10101 [(set_attr "conds" "use")
10102 (set_attr "length" "4,4,8,8")
10103 (set_attr_alternative "type"
10104 [(if_then_else (match_operand 3 "const_int_operand" "")
10105 (const_string "alu_imm" )
10106 (const_string "alu_sreg"))
10107 (const_string "alu_imm")
10108 (const_string "multiple")
10109 (const_string "multiple")])]
10112 (define_insn "*ifcompare_arith_arith"
10113 [(set (match_operand:SI 0 "s_register_operand" "=r")
10114 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10115 [(match_operand:SI 5 "s_register_operand" "r")
10116 (match_operand:SI 6 "arm_add_operand" "rIL")])
10117 (match_operator:SI 8 "shiftable_operator"
10118 [(match_operand:SI 1 "s_register_operand" "r")
10119 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10120 (match_operator:SI 7 "shiftable_operator"
10121 [(match_operand:SI 3 "s_register_operand" "r")
10122 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10123 (clobber (reg:CC CC_REGNUM))]
10126 [(set_attr "conds" "clob")
10127 (set_attr "length" "12")
10128 (set_attr "type" "multiple")]
10131 (define_insn "*if_arith_arith"
10132 [(set (match_operand:SI 0 "s_register_operand" "=r")
10133 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10134 [(match_operand 8 "cc_register" "") (const_int 0)])
10135 (match_operator:SI 6 "shiftable_operator"
10136 [(match_operand:SI 1 "s_register_operand" "r")
10137 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10138 (match_operator:SI 7 "shiftable_operator"
10139 [(match_operand:SI 3 "s_register_operand" "r")
10140 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10142 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10143 [(set_attr "conds" "use")
10144 (set_attr "length" "8")
10145 (set_attr "type" "multiple")]
10148 (define_insn "*ifcompare_arith_move"
10149 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10150 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10151 [(match_operand:SI 2 "s_register_operand" "r,r")
10152 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10153 (match_operator:SI 7 "shiftable_operator"
10154 [(match_operand:SI 4 "s_register_operand" "r,r")
10155 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10156 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10157 (clobber (reg:CC CC_REGNUM))]
10160 /* If we have an operation where (op x 0) is the identity operation and
10161 the conditional operator is LT or GE and we are comparing against zero and
10162 everything is in registers then we can do this in two instructions. */
10163 if (operands[3] == const0_rtx
10164 && GET_CODE (operands[7]) != AND
10165 && REG_P (operands[5])
10166 && REG_P (operands[1])
10167 && REGNO (operands[1]) == REGNO (operands[4])
10168 && REGNO (operands[4]) != REGNO (operands[0]))
10170 if (GET_CODE (operands[6]) == LT)
10171 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10172 else if (GET_CODE (operands[6]) == GE)
10173 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10175 if (CONST_INT_P (operands[3])
10176 && !const_ok_for_arm (INTVAL (operands[3])))
10177 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10179 output_asm_insn (\"cmp\\t%2, %3\", operands);
10180 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10181 if (which_alternative != 0)
10182 return \"mov%D6\\t%0, %1\";
10185 [(set_attr "conds" "clob")
10186 (set_attr "length" "8,12")
10187 (set_attr "type" "multiple")]
10190 (define_insn "*if_arith_move"
10191 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10192 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10193 [(match_operand 6 "cc_register" "") (const_int 0)])
10194 (match_operator:SI 5 "shiftable_operator"
10195 [(match_operand:SI 2 "s_register_operand" "r,r")
10196 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10197 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10200 %I5%d4\\t%0, %2, %3
10201 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10202 [(set_attr "conds" "use")
10203 (set_attr "length" "4,8")
10204 (set_attr_alternative "type"
10205 [(if_then_else (match_operand 3 "const_int_operand" "")
10206 (const_string "alu_shift_imm" )
10207 (const_string "alu_shift_reg"))
10208 (const_string "multiple")])]
10211 (define_insn "*ifcompare_move_arith"
10212 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10213 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10214 [(match_operand:SI 4 "s_register_operand" "r,r")
10215 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10216 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10217 (match_operator:SI 7 "shiftable_operator"
10218 [(match_operand:SI 2 "s_register_operand" "r,r")
10219 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10220 (clobber (reg:CC CC_REGNUM))]
10223 /* If we have an operation where (op x 0) is the identity operation and
10224 the conditional operator is LT or GE and we are comparing against zero and
10225 everything is in registers then we can do this in two instructions */
10226 if (operands[5] == const0_rtx
10227 && GET_CODE (operands[7]) != AND
10228 && REG_P (operands[3])
10229 && REG_P (operands[1])
10230 && REGNO (operands[1]) == REGNO (operands[2])
10231 && REGNO (operands[2]) != REGNO (operands[0]))
10233 if (GET_CODE (operands[6]) == GE)
10234 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10235 else if (GET_CODE (operands[6]) == LT)
10236 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10239 if (CONST_INT_P (operands[5])
10240 && !const_ok_for_arm (INTVAL (operands[5])))
10241 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10243 output_asm_insn (\"cmp\\t%4, %5\", operands);
10245 if (which_alternative != 0)
10246 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10247 return \"%I7%D6\\t%0, %2, %3\";
10249 [(set_attr "conds" "clob")
10250 (set_attr "length" "8,12")
10251 (set_attr "type" "multiple")]
10254 (define_insn "*if_move_arith"
10255 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10257 (match_operator 4 "arm_comparison_operator"
10258 [(match_operand 6 "cc_register" "") (const_int 0)])
10259 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10260 (match_operator:SI 5 "shiftable_operator"
10261 [(match_operand:SI 2 "s_register_operand" "r,r")
10262 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10265 %I5%D4\\t%0, %2, %3
10266 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10267 [(set_attr "conds" "use")
10268 (set_attr "length" "4,8")
10269 (set_attr_alternative "type"
10270 [(if_then_else (match_operand 3 "const_int_operand" "")
10271 (const_string "alu_shift_imm" )
10272 (const_string "alu_shift_reg"))
10273 (const_string "multiple")])]
10276 (define_insn "*ifcompare_move_not"
10277 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10279 (match_operator 5 "arm_comparison_operator"
10280 [(match_operand:SI 3 "s_register_operand" "r,r")
10281 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10282 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10284 (match_operand:SI 2 "s_register_operand" "r,r"))))
10285 (clobber (reg:CC CC_REGNUM))]
10288 [(set_attr "conds" "clob")
10289 (set_attr "length" "8,12")
10290 (set_attr "type" "multiple")]
10293 (define_insn "*if_move_not"
10294 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10296 (match_operator 4 "arm_comparison_operator"
10297 [(match_operand 3 "cc_register" "") (const_int 0)])
10298 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10299 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10303 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10304 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10305 [(set_attr "conds" "use")
10306 (set_attr "type" "mvn_reg")
10307 (set_attr "length" "4,8,8")
10308 (set_attr "type" "mvn_reg,multiple,multiple")]
10311 (define_insn "*ifcompare_not_move"
10312 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10314 (match_operator 5 "arm_comparison_operator"
10315 [(match_operand:SI 3 "s_register_operand" "r,r")
10316 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10318 (match_operand:SI 2 "s_register_operand" "r,r"))
10319 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10320 (clobber (reg:CC CC_REGNUM))]
10323 [(set_attr "conds" "clob")
10324 (set_attr "length" "8,12")
10325 (set_attr "type" "multiple")]
10328 (define_insn "*if_not_move"
10329 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10331 (match_operator 4 "arm_comparison_operator"
10332 [(match_operand 3 "cc_register" "") (const_int 0)])
10333 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10334 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10338 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10339 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10340 [(set_attr "conds" "use")
10341 (set_attr "type" "mvn_reg,multiple,multiple")
10342 (set_attr "length" "4,8,8")]
10345 (define_insn "*ifcompare_shift_move"
10346 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10348 (match_operator 6 "arm_comparison_operator"
10349 [(match_operand:SI 4 "s_register_operand" "r,r")
10350 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10351 (match_operator:SI 7 "shift_operator"
10352 [(match_operand:SI 2 "s_register_operand" "r,r")
10353 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10354 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10355 (clobber (reg:CC CC_REGNUM))]
10358 [(set_attr "conds" "clob")
10359 (set_attr "length" "8,12")
10360 (set_attr "type" "multiple")]
10363 (define_insn "*if_shift_move"
10364 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10366 (match_operator 5 "arm_comparison_operator"
10367 [(match_operand 6 "cc_register" "") (const_int 0)])
10368 (match_operator:SI 4 "shift_operator"
10369 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10370 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10371 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10375 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10376 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10377 [(set_attr "conds" "use")
10378 (set_attr "shift" "2")
10379 (set_attr "length" "4,8,8")
10380 (set_attr_alternative "type"
10381 [(if_then_else (match_operand 3 "const_int_operand" "")
10382 (const_string "mov_shift" )
10383 (const_string "mov_shift_reg"))
10384 (const_string "multiple")
10385 (const_string "multiple")])]
10388 (define_insn "*ifcompare_move_shift"
10389 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10391 (match_operator 6 "arm_comparison_operator"
10392 [(match_operand:SI 4 "s_register_operand" "r,r")
10393 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10394 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10395 (match_operator:SI 7 "shift_operator"
10396 [(match_operand:SI 2 "s_register_operand" "r,r")
10397 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10398 (clobber (reg:CC CC_REGNUM))]
10401 [(set_attr "conds" "clob")
10402 (set_attr "length" "8,12")
10403 (set_attr "type" "multiple")]
10406 (define_insn "*if_move_shift"
10407 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10409 (match_operator 5 "arm_comparison_operator"
10410 [(match_operand 6 "cc_register" "") (const_int 0)])
10411 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10412 (match_operator:SI 4 "shift_operator"
10413 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10414 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10418 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10419 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10420 [(set_attr "conds" "use")
10421 (set_attr "shift" "2")
10422 (set_attr "length" "4,8,8")
10423 (set_attr_alternative "type"
10424 [(if_then_else (match_operand 3 "const_int_operand" "")
10425 (const_string "mov_shift" )
10426 (const_string "mov_shift_reg"))
10427 (const_string "multiple")
10428 (const_string "multiple")])]
10431 (define_insn "*ifcompare_shift_shift"
10432 [(set (match_operand:SI 0 "s_register_operand" "=r")
10434 (match_operator 7 "arm_comparison_operator"
10435 [(match_operand:SI 5 "s_register_operand" "r")
10436 (match_operand:SI 6 "arm_add_operand" "rIL")])
10437 (match_operator:SI 8 "shift_operator"
10438 [(match_operand:SI 1 "s_register_operand" "r")
10439 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10440 (match_operator:SI 9 "shift_operator"
10441 [(match_operand:SI 3 "s_register_operand" "r")
10442 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10443 (clobber (reg:CC CC_REGNUM))]
10446 [(set_attr "conds" "clob")
10447 (set_attr "length" "12")
10448 (set_attr "type" "multiple")]
10451 (define_insn "*if_shift_shift"
10452 [(set (match_operand:SI 0 "s_register_operand" "=r")
10454 (match_operator 5 "arm_comparison_operator"
10455 [(match_operand 8 "cc_register" "") (const_int 0)])
10456 (match_operator:SI 6 "shift_operator"
10457 [(match_operand:SI 1 "s_register_operand" "r")
10458 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10459 (match_operator:SI 7 "shift_operator"
10460 [(match_operand:SI 3 "s_register_operand" "r")
10461 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10463 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10464 [(set_attr "conds" "use")
10465 (set_attr "shift" "1")
10466 (set_attr "length" "8")
10467 (set (attr "type") (if_then_else
10468 (and (match_operand 2 "const_int_operand" "")
10469 (match_operand 4 "const_int_operand" ""))
10470 (const_string "mov_shift")
10471 (const_string "mov_shift_reg")))]
10474 (define_insn "*ifcompare_not_arith"
10475 [(set (match_operand:SI 0 "s_register_operand" "=r")
10477 (match_operator 6 "arm_comparison_operator"
10478 [(match_operand:SI 4 "s_register_operand" "r")
10479 (match_operand:SI 5 "arm_add_operand" "rIL")])
10480 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10481 (match_operator:SI 7 "shiftable_operator"
10482 [(match_operand:SI 2 "s_register_operand" "r")
10483 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10484 (clobber (reg:CC CC_REGNUM))]
10487 [(set_attr "conds" "clob")
10488 (set_attr "length" "12")
10489 (set_attr "type" "multiple")]
10492 (define_insn "*if_not_arith"
10493 [(set (match_operand:SI 0 "s_register_operand" "=r")
10495 (match_operator 5 "arm_comparison_operator"
10496 [(match_operand 4 "cc_register" "") (const_int 0)])
10497 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10498 (match_operator:SI 6 "shiftable_operator"
10499 [(match_operand:SI 2 "s_register_operand" "r")
10500 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10502 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10503 [(set_attr "conds" "use")
10504 (set_attr "type" "mvn_reg")
10505 (set_attr "length" "8")]
10508 (define_insn "*ifcompare_arith_not"
10509 [(set (match_operand:SI 0 "s_register_operand" "=r")
10511 (match_operator 6 "arm_comparison_operator"
10512 [(match_operand:SI 4 "s_register_operand" "r")
10513 (match_operand:SI 5 "arm_add_operand" "rIL")])
10514 (match_operator:SI 7 "shiftable_operator"
10515 [(match_operand:SI 2 "s_register_operand" "r")
10516 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10517 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10518 (clobber (reg:CC CC_REGNUM))]
10521 [(set_attr "conds" "clob")
10522 (set_attr "length" "12")
10523 (set_attr "type" "multiple")]
10526 (define_insn "*if_arith_not"
10527 [(set (match_operand:SI 0 "s_register_operand" "=r")
10529 (match_operator 5 "arm_comparison_operator"
10530 [(match_operand 4 "cc_register" "") (const_int 0)])
10531 (match_operator:SI 6 "shiftable_operator"
10532 [(match_operand:SI 2 "s_register_operand" "r")
10533 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10534 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10536 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10537 [(set_attr "conds" "use")
10538 (set_attr "type" "multiple")
10539 (set_attr "length" "8")]
10542 (define_insn "*ifcompare_neg_move"
10543 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10545 (match_operator 5 "arm_comparison_operator"
10546 [(match_operand:SI 3 "s_register_operand" "r,r")
10547 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10548 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10549 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10550 (clobber (reg:CC CC_REGNUM))]
10553 [(set_attr "conds" "clob")
10554 (set_attr "length" "8,12")
10555 (set_attr "type" "multiple")]
10558 (define_insn_and_split "*if_neg_move"
10559 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10561 (match_operator 4 "arm_comparison_operator"
10562 [(match_operand 3 "cc_register" "") (const_int 0)])
10563 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10564 (match_operand:SI 1 "s_register_operand" "0,0")))]
10567 "&& reload_completed"
10568 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10569 (set (match_dup 0) (neg:SI (match_dup 2))))]
10571 [(set_attr "conds" "use")
10572 (set_attr "length" "4")
10573 (set_attr "arch" "t2,32")
10574 (set_attr "enabled_for_short_it" "yes,no")
10575 (set_attr "type" "logic_shift_imm")]
10578 (define_insn "*ifcompare_move_neg"
10579 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10581 (match_operator 5 "arm_comparison_operator"
10582 [(match_operand:SI 3 "s_register_operand" "r,r")
10583 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10584 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10585 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10586 (clobber (reg:CC CC_REGNUM))]
10589 [(set_attr "conds" "clob")
10590 (set_attr "length" "8,12")
10591 (set_attr "type" "multiple")]
10594 (define_insn_and_split "*if_move_neg"
10595 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10597 (match_operator 4 "arm_comparison_operator"
10598 [(match_operand 3 "cc_register" "") (const_int 0)])
10599 (match_operand:SI 1 "s_register_operand" "0,0")
10600 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10603 "&& reload_completed"
10604 [(cond_exec (match_dup 5)
10605 (set (match_dup 0) (neg:SI (match_dup 2))))]
10607 machine_mode mode = GET_MODE (operands[3]);
10608 rtx_code rc = GET_CODE (operands[4]);
10610 if (mode == CCFPmode || mode == CCFPEmode)
10611 rc = reverse_condition_maybe_unordered (rc);
10613 rc = reverse_condition (rc);
10615 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10617 [(set_attr "conds" "use")
10618 (set_attr "length" "4")
10619 (set_attr "arch" "t2,32")
10620 (set_attr "enabled_for_short_it" "yes,no")
10621 (set_attr "type" "logic_shift_imm")]
10624 (define_insn "*arith_adjacentmem"
10625 [(set (match_operand:SI 0 "s_register_operand" "=r")
10626 (match_operator:SI 1 "shiftable_operator"
10627 [(match_operand:SI 2 "memory_operand" "m")
10628 (match_operand:SI 3 "memory_operand" "m")]))
10629 (clobber (match_scratch:SI 4 "=r"))]
10630 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10636 HOST_WIDE_INT val1 = 0, val2 = 0;
10638 if (REGNO (operands[0]) > REGNO (operands[4]))
10640 ldm[1] = operands[4];
10641 ldm[2] = operands[0];
10645 ldm[1] = operands[0];
10646 ldm[2] = operands[4];
10649 base_reg = XEXP (operands[2], 0);
10651 if (!REG_P (base_reg))
10653 val1 = INTVAL (XEXP (base_reg, 1));
10654 base_reg = XEXP (base_reg, 0);
10657 if (!REG_P (XEXP (operands[3], 0)))
10658 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10660 arith[0] = operands[0];
10661 arith[3] = operands[1];
10675 if (val1 !=0 && val2 != 0)
10679 if (val1 == 4 || val2 == 4)
10680 /* Other val must be 8, since we know they are adjacent and neither
10682 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10683 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10685 ldm[0] = ops[0] = operands[4];
10687 ops[2] = GEN_INT (val1);
10688 output_add_immediate (ops);
10690 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10692 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10696 /* Offset is out of range for a single add, so use two ldr. */
10699 ops[2] = GEN_INT (val1);
10700 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10702 ops[2] = GEN_INT (val2);
10703 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10706 else if (val1 != 0)
10709 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10711 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10716 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10718 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10720 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10723 [(set_attr "length" "12")
10724 (set_attr "predicable" "yes")
10725 (set_attr "type" "load_4")]
10728 ; This pattern is never tried by combine, so do it as a peephole
10731 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10732 (match_operand:SI 1 "arm_general_register_operand" ""))
10733 (set (reg:CC CC_REGNUM)
10734 (compare:CC (match_dup 1) (const_int 0)))]
10736 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10737 (set (match_dup 0) (match_dup 1))])]
10742 [(set (match_operand:SI 0 "s_register_operand" "")
10743 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10745 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10746 [(match_operand:SI 3 "s_register_operand" "")
10747 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10748 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10750 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10751 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10756 ;; This split can be used because CC_Z mode implies that the following
10757 ;; branch will be an equality, or an unsigned inequality, so the sign
10758 ;; extension is not needed.
10761 [(set (reg:CC_Z CC_REGNUM)
10763 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10765 (match_operand 1 "const_int_operand" "")))
10766 (clobber (match_scratch:SI 2 ""))]
10768 && ((UINTVAL (operands[1]))
10769 == ((UINTVAL (operands[1])) >> 24) << 24)"
10770 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10771 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10773 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10776 ;; ??? Check the patterns above for Thumb-2 usefulness
10778 (define_expand "prologue"
10779 [(clobber (const_int 0))]
10782 arm_expand_prologue ();
10784 thumb1_expand_prologue ();
10789 (define_expand "epilogue"
10790 [(clobber (const_int 0))]
10793 if (crtl->calls_eh_return)
10794 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10797 thumb1_expand_epilogue ();
10798 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10799 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10801 else if (HAVE_return)
10803 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10804 no need for explicit testing again. */
10805 emit_jump_insn (gen_return ());
10807 else if (TARGET_32BIT)
10809 arm_expand_epilogue (true);
10815 ;; Note - although unspec_volatile's USE all hard registers,
10816 ;; USEs are ignored after relaod has completed. Thus we need
10817 ;; to add an unspec of the link register to ensure that flow
10818 ;; does not think that it is unused by the sibcall branch that
10819 ;; will replace the standard function epilogue.
10820 (define_expand "sibcall_epilogue"
10821 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10822 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10825 arm_expand_epilogue (false);
10830 (define_expand "eh_epilogue"
10831 [(use (match_operand:SI 0 "register_operand"))
10832 (use (match_operand:SI 1 "register_operand"))
10833 (use (match_operand:SI 2 "register_operand"))]
10837 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10838 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10840 rtx ra = gen_rtx_REG (Pmode, 2);
10842 emit_move_insn (ra, operands[2]);
10845 /* This is a hack -- we may have crystalized the function type too
10847 cfun->machine->func_type = 0;
10851 ;; This split is only used during output to reduce the number of patterns
10852 ;; that need assembler instructions adding to them. We allowed the setting
10853 ;; of the conditions to be implicit during rtl generation so that
10854 ;; the conditional compare patterns would work. However this conflicts to
10855 ;; some extent with the conditional data operations, so we have to split them
10858 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10859 ;; conditional execution sufficient?
10862 [(set (match_operand:SI 0 "s_register_operand" "")
10863 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10864 [(match_operand 2 "" "") (match_operand 3 "" "")])
10866 (match_operand 4 "" "")))
10867 (clobber (reg:CC CC_REGNUM))]
10868 "TARGET_ARM && reload_completed"
10869 [(set (match_dup 5) (match_dup 6))
10870 (cond_exec (match_dup 7)
10871 (set (match_dup 0) (match_dup 4)))]
10874 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10875 operands[2], operands[3]);
10876 enum rtx_code rc = GET_CODE (operands[1]);
10878 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10879 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10880 if (mode == CCFPmode || mode == CCFPEmode)
10881 rc = reverse_condition_maybe_unordered (rc);
10883 rc = reverse_condition (rc);
10885 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10890 [(set (match_operand:SI 0 "s_register_operand" "")
10891 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10892 [(match_operand 2 "" "") (match_operand 3 "" "")])
10893 (match_operand 4 "" "")
10895 (clobber (reg:CC CC_REGNUM))]
10896 "TARGET_ARM && reload_completed"
10897 [(set (match_dup 5) (match_dup 6))
10898 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10899 (set (match_dup 0) (match_dup 4)))]
10902 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10903 operands[2], operands[3]);
10905 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10906 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10911 [(set (match_operand:SI 0 "s_register_operand" "")
10912 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10913 [(match_operand 2 "" "") (match_operand 3 "" "")])
10914 (match_operand 4 "" "")
10915 (match_operand 5 "" "")))
10916 (clobber (reg:CC CC_REGNUM))]
10917 "TARGET_ARM && reload_completed"
10918 [(set (match_dup 6) (match_dup 7))
10919 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10920 (set (match_dup 0) (match_dup 4)))
10921 (cond_exec (match_dup 8)
10922 (set (match_dup 0) (match_dup 5)))]
10925 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10926 operands[2], operands[3]);
10927 enum rtx_code rc = GET_CODE (operands[1]);
10929 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10930 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10931 if (mode == CCFPmode || mode == CCFPEmode)
10932 rc = reverse_condition_maybe_unordered (rc);
10934 rc = reverse_condition (rc);
10936 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10941 [(set (match_operand:SI 0 "s_register_operand" "")
10942 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10943 [(match_operand:SI 2 "s_register_operand" "")
10944 (match_operand:SI 3 "arm_add_operand" "")])
10945 (match_operand:SI 4 "arm_rhs_operand" "")
10947 (match_operand:SI 5 "s_register_operand" ""))))
10948 (clobber (reg:CC CC_REGNUM))]
10949 "TARGET_ARM && reload_completed"
10950 [(set (match_dup 6) (match_dup 7))
10951 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10952 (set (match_dup 0) (match_dup 4)))
10953 (cond_exec (match_dup 8)
10954 (set (match_dup 0) (not:SI (match_dup 5))))]
10957 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10958 operands[2], operands[3]);
10959 enum rtx_code rc = GET_CODE (operands[1]);
10961 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10962 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10963 if (mode == CCFPmode || mode == CCFPEmode)
10964 rc = reverse_condition_maybe_unordered (rc);
10966 rc = reverse_condition (rc);
10968 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10972 (define_insn "*cond_move_not"
10973 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10974 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10975 [(match_operand 3 "cc_register" "") (const_int 0)])
10976 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10978 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10982 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10983 [(set_attr "conds" "use")
10984 (set_attr "type" "mvn_reg,multiple")
10985 (set_attr "length" "4,8")]
10988 ;; The next two patterns occur when an AND operation is followed by a
10989 ;; scc insn sequence
10991 (define_insn "*sign_extract_onebit"
10992 [(set (match_operand:SI 0 "s_register_operand" "=r")
10993 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10995 (match_operand:SI 2 "const_int_operand" "n")))
10996 (clobber (reg:CC CC_REGNUM))]
10999 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11000 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11001 return \"mvnne\\t%0, #0\";
11003 [(set_attr "conds" "clob")
11004 (set_attr "length" "8")
11005 (set_attr "type" "multiple")]
11008 (define_insn "*not_signextract_onebit"
11009 [(set (match_operand:SI 0 "s_register_operand" "=r")
11011 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11013 (match_operand:SI 2 "const_int_operand" "n"))))
11014 (clobber (reg:CC CC_REGNUM))]
11017 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11018 output_asm_insn (\"tst\\t%1, %2\", operands);
11019 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11020 return \"movne\\t%0, #0\";
11022 [(set_attr "conds" "clob")
11023 (set_attr "length" "12")
11024 (set_attr "type" "multiple")]
11026 ;; ??? The above patterns need auditing for Thumb-2
11028 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11029 ;; expressions. For simplicity, the first register is also in the unspec
11031 ;; To avoid the usage of GNU extension, the length attribute is computed
11032 ;; in a C function arm_attr_length_push_multi.
11033 (define_insn "*push_multi"
11034 [(match_parallel 2 "multi_register_push"
11035 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11036 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11037 UNSPEC_PUSH_MULT))])]
11041 int num_saves = XVECLEN (operands[2], 0);
11043 /* For the StrongARM at least it is faster to
11044 use STR to store only a single register.
11045 In Thumb mode always use push, and the assembler will pick
11046 something appropriate. */
11047 if (num_saves == 1 && TARGET_ARM)
11048 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11055 strcpy (pattern, \"push%?\\t{%1\");
11057 strcpy (pattern, \"push\\t{%1\");
11059 for (i = 1; i < num_saves; i++)
11061 strcat (pattern, \", %|\");
11063 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11066 strcat (pattern, \"}\");
11067 output_asm_insn (pattern, operands);
11072 [(set_attr "type" "store_16")
11073 (set (attr "length")
11074 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11077 (define_insn "stack_tie"
11078 [(set (mem:BLK (scratch))
11079 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11080 (match_operand:SI 1 "s_register_operand" "rk")]
11084 [(set_attr "length" "0")
11085 (set_attr "type" "block")]
11088 ;; Pop (as used in epilogue RTL)
11090 (define_insn "*load_multiple_with_writeback"
11091 [(match_parallel 0 "load_multiple_operation"
11092 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11093 (plus:SI (match_dup 1)
11094 (match_operand:SI 2 "const_int_I_operand" "I")))
11095 (set (match_operand:SI 3 "s_register_operand" "=rk")
11096 (mem:SI (match_dup 1)))
11098 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11101 arm_output_multireg_pop (operands, /*return_pc=*/false,
11102 /*cond=*/const_true_rtx,
11108 [(set_attr "type" "load_16")
11109 (set_attr "predicable" "yes")
11110 (set (attr "length")
11111 (symbol_ref "arm_attr_length_pop_multi (operands,
11112 /*return_pc=*/false,
11113 /*write_back_p=*/true)"))]
11116 ;; Pop with return (as used in epilogue RTL)
11118 ;; This instruction is generated when the registers are popped at the end of
11119 ;; epilogue. Here, instead of popping the value into LR and then generating
11120 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
11122 (define_insn "*pop_multiple_with_writeback_and_return"
11123 [(match_parallel 0 "pop_multiple_return"
11125 (set (match_operand:SI 1 "s_register_operand" "+rk")
11126 (plus:SI (match_dup 1)
11127 (match_operand:SI 2 "const_int_I_operand" "I")))
11128 (set (match_operand:SI 3 "s_register_operand" "=rk")
11129 (mem:SI (match_dup 1)))
11131 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11134 arm_output_multireg_pop (operands, /*return_pc=*/true,
11135 /*cond=*/const_true_rtx,
11141 [(set_attr "type" "load_16")
11142 (set_attr "predicable" "yes")
11143 (set (attr "length")
11144 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11145 /*write_back_p=*/true)"))]
11148 (define_insn "*pop_multiple_with_return"
11149 [(match_parallel 0 "pop_multiple_return"
11151 (set (match_operand:SI 2 "s_register_operand" "=rk")
11152 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11154 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11157 arm_output_multireg_pop (operands, /*return_pc=*/true,
11158 /*cond=*/const_true_rtx,
11164 [(set_attr "type" "load_16")
11165 (set_attr "predicable" "yes")
11166 (set (attr "length")
11167 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11168 /*write_back_p=*/false)"))]
11171 ;; Load into PC and return
11172 (define_insn "*ldr_with_return"
11174 (set (reg:SI PC_REGNUM)
11175 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11176 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11177 "ldr%?\t%|pc, [%0], #4"
11178 [(set_attr "type" "load_4")
11179 (set_attr "predicable" "yes")]
11181 ;; Pop for floating point registers (as used in epilogue RTL)
11182 (define_insn "*vfp_pop_multiple_with_writeback"
11183 [(match_parallel 0 "pop_multiple_fp"
11184 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11185 (plus:SI (match_dup 1)
11186 (match_operand:SI 2 "const_int_I_operand" "I")))
11187 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11188 (mem:DF (match_dup 1)))])]
11189 "TARGET_32BIT && TARGET_HARD_FLOAT"
11192 int num_regs = XVECLEN (operands[0], 0);
11195 strcpy (pattern, \"vldm\\t\");
11196 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11197 strcat (pattern, \"!, {\");
11198 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11199 strcat (pattern, \"%P0\");
11200 if ((num_regs - 1) > 1)
11202 strcat (pattern, \"-%P1\");
11203 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11206 strcat (pattern, \"}\");
11207 output_asm_insn (pattern, op_list);
11211 [(set_attr "type" "load_16")
11212 (set_attr "conds" "unconditional")
11213 (set_attr "predicable" "no")]
11216 ;; Special patterns for dealing with the constant pool
11218 (define_insn "align_4"
11219 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11222 assemble_align (32);
11225 [(set_attr "type" "no_insn")]
11228 (define_insn "align_8"
11229 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11232 assemble_align (64);
11235 [(set_attr "type" "no_insn")]
11238 (define_insn "consttable_end"
11239 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11242 making_const_table = FALSE;
11245 [(set_attr "type" "no_insn")]
11248 (define_insn "consttable_1"
11249 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11252 making_const_table = TRUE;
11253 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11254 assemble_zeros (3);
11257 [(set_attr "length" "4")
11258 (set_attr "type" "no_insn")]
11261 (define_insn "consttable_2"
11262 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11266 rtx x = operands[0];
11267 making_const_table = TRUE;
11268 switch (GET_MODE_CLASS (GET_MODE (x)))
11271 arm_emit_fp16_const (x);
11274 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11275 assemble_zeros (2);
11280 [(set_attr "length" "4")
11281 (set_attr "type" "no_insn")]
11284 (define_insn "consttable_4"
11285 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11289 rtx x = operands[0];
11290 making_const_table = TRUE;
11291 scalar_float_mode float_mode;
11292 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11293 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11296 /* XXX: Sometimes gcc does something really dumb and ends up with
11297 a HIGH in a constant pool entry, usually because it's trying to
11298 load into a VFP register. We know this will always be used in
11299 combination with a LO_SUM which ignores the high bits, so just
11300 strip off the HIGH. */
11301 if (GET_CODE (x) == HIGH)
11303 assemble_integer (x, 4, BITS_PER_WORD, 1);
11304 mark_symbol_refs_as_used (x);
11308 [(set_attr "length" "4")
11309 (set_attr "type" "no_insn")]
11312 (define_insn "consttable_8"
11313 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11317 making_const_table = TRUE;
11318 scalar_float_mode float_mode;
11319 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11320 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11321 float_mode, BITS_PER_WORD);
11323 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11326 [(set_attr "length" "8")
11327 (set_attr "type" "no_insn")]
11330 (define_insn "consttable_16"
11331 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11335 making_const_table = TRUE;
11336 scalar_float_mode float_mode;
11337 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11338 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11339 float_mode, BITS_PER_WORD);
11341 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11344 [(set_attr "length" "16")
11345 (set_attr "type" "no_insn")]
11348 ;; V5 Instructions,
11350 (define_insn "clzsi2"
11351 [(set (match_operand:SI 0 "s_register_operand" "=r")
11352 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11353 "TARGET_32BIT && arm_arch5t"
11355 [(set_attr "predicable" "yes")
11356 (set_attr "type" "clz")])
11358 (define_insn "rbitsi2"
11359 [(set (match_operand:SI 0 "s_register_operand" "=r")
11360 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11361 "TARGET_32BIT && arm_arch_thumb2"
11363 [(set_attr "predicable" "yes")
11364 (set_attr "type" "clz")])
11366 ;; Keep this as a CTZ expression until after reload and then split
11367 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11368 ;; to fold with any other expression.
11370 (define_insn_and_split "ctzsi2"
11371 [(set (match_operand:SI 0 "s_register_operand" "=r")
11372 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11373 "TARGET_32BIT && arm_arch_thumb2"
11375 "&& reload_completed"
11378 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11379 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11383 ;; V5E instructions.
11385 (define_insn "prefetch"
11386 [(prefetch (match_operand:SI 0 "address_operand" "p")
11387 (match_operand:SI 1 "" "")
11388 (match_operand:SI 2 "" ""))]
11389 "TARGET_32BIT && arm_arch5te"
11391 [(set_attr "type" "load_4")]
11394 ;; General predication pattern
11397 [(match_operator 0 "arm_comparison_operator"
11398 [(match_operand 1 "cc_register" "")
11401 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11403 [(set_attr "predicated" "yes")]
11406 (define_insn "force_register_use"
11407 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11410 [(set_attr "length" "0")
11411 (set_attr "type" "no_insn")]
11415 ;; Patterns for exception handling
11417 (define_expand "eh_return"
11418 [(use (match_operand 0 "general_operand"))]
11423 emit_insn (gen_arm_eh_return (operands[0]));
11425 emit_insn (gen_thumb_eh_return (operands[0]));
11430 ;; We can't expand this before we know where the link register is stored.
11431 (define_insn_and_split "arm_eh_return"
11432 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11434 (clobber (match_scratch:SI 1 "=&r"))]
11437 "&& reload_completed"
11441 arm_set_return_address (operands[0], operands[1]);
11449 (define_insn "load_tp_hard"
11450 [(set (match_operand:SI 0 "register_operand" "=r")
11451 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11453 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11454 [(set_attr "predicable" "yes")
11455 (set_attr "type" "mrs")]
11458 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11459 (define_insn "load_tp_soft_fdpic"
11460 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11461 (clobber (reg:SI FDPIC_REGNUM))
11462 (clobber (reg:SI LR_REGNUM))
11463 (clobber (reg:SI IP_REGNUM))
11464 (clobber (reg:CC CC_REGNUM))]
11465 "TARGET_SOFT_TP && TARGET_FDPIC"
11466 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11467 [(set_attr "conds" "clob")
11468 (set_attr "type" "branch")]
11471 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
11472 (define_insn "load_tp_soft"
11473 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11474 (clobber (reg:SI LR_REGNUM))
11475 (clobber (reg:SI IP_REGNUM))
11476 (clobber (reg:CC CC_REGNUM))]
11477 "TARGET_SOFT_TP && !TARGET_FDPIC"
11478 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11479 [(set_attr "conds" "clob")
11480 (set_attr "type" "branch")]
11483 ;; tls descriptor call
11484 (define_insn "tlscall"
11485 [(set (reg:SI R0_REGNUM)
11486 (unspec:SI [(reg:SI R0_REGNUM)
11487 (match_operand:SI 0 "" "X")
11488 (match_operand 1 "" "")] UNSPEC_TLS))
11489 (clobber (reg:SI R1_REGNUM))
11490 (clobber (reg:SI LR_REGNUM))
11491 (clobber (reg:SI CC_REGNUM))]
11494 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11495 INTVAL (operands[1]));
11496 return "bl\\t%c0(tlscall)";
11498 [(set_attr "conds" "clob")
11499 (set_attr "length" "4")
11500 (set_attr "type" "branch")]
11503 ;; For thread pointer builtin
11504 (define_expand "get_thread_pointersi"
11505 [(match_operand:SI 0 "s_register_operand")]
11509 arm_load_tp (operands[0]);
11515 ;; We only care about the lower 16 bits of the constant
11516 ;; being inserted into the upper 16 bits of the register.
11517 (define_insn "*arm_movtas_ze"
11518 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11521 (match_operand:SI 1 "const_int_operand" ""))]
11526 [(set_attr "arch" "32,v8mb")
11527 (set_attr "predicable" "yes")
11528 (set_attr "length" "4")
11529 (set_attr "type" "alu_sreg")]
11532 (define_insn "*arm_rev"
11533 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11534 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11540 [(set_attr "arch" "t1,t2,32")
11541 (set_attr "length" "2,2,4")
11542 (set_attr "predicable" "no,yes,yes")
11543 (set_attr "type" "rev")]
11546 (define_expand "arm_legacy_rev"
11547 [(set (match_operand:SI 2 "s_register_operand")
11548 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11552 (lshiftrt:SI (match_dup 2)
11554 (set (match_operand:SI 3 "s_register_operand")
11555 (rotatert:SI (match_dup 1)
11558 (and:SI (match_dup 2)
11559 (const_int -65281)))
11560 (set (match_operand:SI 0 "s_register_operand")
11561 (xor:SI (match_dup 3)
11567 ;; Reuse temporaries to keep register pressure down.
11568 (define_expand "thumb_legacy_rev"
11569 [(set (match_operand:SI 2 "s_register_operand")
11570 (ashift:SI (match_operand:SI 1 "s_register_operand")
11572 (set (match_operand:SI 3 "s_register_operand")
11573 (lshiftrt:SI (match_dup 1)
11576 (ior:SI (match_dup 3)
11578 (set (match_operand:SI 4 "s_register_operand")
11580 (set (match_operand:SI 5 "s_register_operand")
11581 (rotatert:SI (match_dup 1)
11584 (ashift:SI (match_dup 5)
11587 (lshiftrt:SI (match_dup 5)
11590 (ior:SI (match_dup 5)
11593 (rotatert:SI (match_dup 5)
11595 (set (match_operand:SI 0 "s_register_operand")
11596 (ior:SI (match_dup 5)
11602 ;; ARM-specific expansion of signed mod by power of 2
11603 ;; using conditional negate.
11604 ;; For r0 % n where n is a power of 2 produce:
11606 ;; and r0, r0, #(n - 1)
11607 ;; and r1, r1, #(n - 1)
11608 ;; rsbpl r0, r1, #0
11610 (define_expand "modsi3"
11611 [(match_operand:SI 0 "register_operand")
11612 (match_operand:SI 1 "register_operand")
11613 (match_operand:SI 2 "const_int_operand")]
11616 HOST_WIDE_INT val = INTVAL (operands[2]);
11619 || exact_log2 (val) <= 0)
11622 rtx mask = GEN_INT (val - 1);
11624 /* In the special case of x0 % 2 we can do the even shorter:
11627 rsblt r0, r0, #0. */
11631 rtx cc_reg = arm_gen_compare_reg (LT,
11632 operands[1], const0_rtx, NULL_RTX);
11633 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11634 rtx masked = gen_reg_rtx (SImode);
11636 emit_insn (gen_andsi3 (masked, operands[1], mask));
11637 emit_move_insn (operands[0],
11638 gen_rtx_IF_THEN_ELSE (SImode, cond,
11639 gen_rtx_NEG (SImode,
11645 rtx neg_op = gen_reg_rtx (SImode);
11646 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11649 /* Extract the condition register and mode. */
11650 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11651 rtx cc_reg = SET_DEST (cmp);
11652 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11654 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11656 rtx masked_neg = gen_reg_rtx (SImode);
11657 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11659 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11660 during expand does not always work. Do an IF_THEN_ELSE instead. */
11661 emit_move_insn (operands[0],
11662 gen_rtx_IF_THEN_ELSE (SImode, cond,
11663 gen_rtx_NEG (SImode, masked_neg),
11671 (define_expand "bswapsi2"
11672 [(set (match_operand:SI 0 "s_register_operand")
11673 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11674 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11678 rtx op2 = gen_reg_rtx (SImode);
11679 rtx op3 = gen_reg_rtx (SImode);
11683 rtx op4 = gen_reg_rtx (SImode);
11684 rtx op5 = gen_reg_rtx (SImode);
11686 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11687 op2, op3, op4, op5));
11691 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11700 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11701 ;; and unsigned variants, respectively. For rev16, expose
11702 ;; byte-swapping in the lower 16 bits only.
11703 (define_insn "*arm_revsh"
11704 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11705 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11711 [(set_attr "arch" "t1,t2,32")
11712 (set_attr "length" "2,2,4")
11713 (set_attr "type" "rev")]
11716 (define_insn "*arm_rev16"
11717 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11718 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11724 [(set_attr "arch" "t1,t2,32")
11725 (set_attr "length" "2,2,4")
11726 (set_attr "type" "rev")]
11729 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11730 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11731 ;; each valid permutation.
11733 (define_insn "arm_rev16si2"
11734 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11735 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11737 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11738 (and:SI (lshiftrt:SI (match_dup 1)
11740 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11742 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11743 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11745 [(set_attr "arch" "t1,t2,32")
11746 (set_attr "length" "2,2,4")
11747 (set_attr "type" "rev")]
11750 (define_insn "arm_rev16si2_alt"
11751 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11752 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11754 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11755 (and:SI (ashift:SI (match_dup 1)
11757 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11759 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11760 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11762 [(set_attr "arch" "t1,t2,32")
11763 (set_attr "length" "2,2,4")
11764 (set_attr "type" "rev")]
11767 (define_expand "bswaphi2"
11768 [(set (match_operand:HI 0 "s_register_operand")
11769 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11774 ;; Patterns for LDRD/STRD in Thumb2 mode
11776 (define_insn "*thumb2_ldrd"
11777 [(set (match_operand:SI 0 "s_register_operand" "=r")
11778 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11779 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11780 (set (match_operand:SI 3 "s_register_operand" "=r")
11781 (mem:SI (plus:SI (match_dup 1)
11782 (match_operand:SI 4 "const_int_operand" ""))))]
11783 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11784 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11785 && (operands_ok_ldrd_strd (operands[0], operands[3],
11786 operands[1], INTVAL (operands[2]),
11788 "ldrd%?\t%0, %3, [%1, %2]"
11789 [(set_attr "type" "load_8")
11790 (set_attr "predicable" "yes")])
11792 (define_insn "*thumb2_ldrd_base"
11793 [(set (match_operand:SI 0 "s_register_operand" "=r")
11794 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11795 (set (match_operand:SI 2 "s_register_operand" "=r")
11796 (mem:SI (plus:SI (match_dup 1)
11798 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11799 && (operands_ok_ldrd_strd (operands[0], operands[2],
11800 operands[1], 0, false, true))"
11801 "ldrd%?\t%0, %2, [%1]"
11802 [(set_attr "type" "load_8")
11803 (set_attr "predicable" "yes")])
11805 (define_insn "*thumb2_ldrd_base_neg"
11806 [(set (match_operand:SI 0 "s_register_operand" "=r")
11807 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11809 (set (match_operand:SI 2 "s_register_operand" "=r")
11810 (mem:SI (match_dup 1)))]
11811 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11812 && (operands_ok_ldrd_strd (operands[0], operands[2],
11813 operands[1], -4, false, true))"
11814 "ldrd%?\t%0, %2, [%1, #-4]"
11815 [(set_attr "type" "load_8")
11816 (set_attr "predicable" "yes")])
11818 (define_insn "*thumb2_strd"
11819 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11820 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11821 (match_operand:SI 2 "s_register_operand" "r"))
11822 (set (mem:SI (plus:SI (match_dup 0)
11823 (match_operand:SI 3 "const_int_operand" "")))
11824 (match_operand:SI 4 "s_register_operand" "r"))]
11825 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11826 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11827 && (operands_ok_ldrd_strd (operands[2], operands[4],
11828 operands[0], INTVAL (operands[1]),
11830 "strd%?\t%2, %4, [%0, %1]"
11831 [(set_attr "type" "store_8")
11832 (set_attr "predicable" "yes")])
11834 (define_insn "*thumb2_strd_base"
11835 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11836 (match_operand:SI 1 "s_register_operand" "r"))
11837 (set (mem:SI (plus:SI (match_dup 0)
11839 (match_operand:SI 2 "s_register_operand" "r"))]
11840 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11841 && (operands_ok_ldrd_strd (operands[1], operands[2],
11842 operands[0], 0, false, false))"
11843 "strd%?\t%1, %2, [%0]"
11844 [(set_attr "type" "store_8")
11845 (set_attr "predicable" "yes")])
11847 (define_insn "*thumb2_strd_base_neg"
11848 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11850 (match_operand:SI 1 "s_register_operand" "r"))
11851 (set (mem:SI (match_dup 0))
11852 (match_operand:SI 2 "s_register_operand" "r"))]
11853 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11854 && (operands_ok_ldrd_strd (operands[1], operands[2],
11855 operands[0], -4, false, false))"
11856 "strd%?\t%1, %2, [%0, #-4]"
11857 [(set_attr "type" "store_8")
11858 (set_attr "predicable" "yes")])
11860 ;; ARMv8 CRC32 instructions.
11861 (define_insn "arm_<crc_variant>"
11862 [(set (match_operand:SI 0 "s_register_operand" "=r")
11863 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11864 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11867 "<crc_variant>\\t%0, %1, %2"
11868 [(set_attr "type" "crc")
11869 (set_attr "conds" "unconditional")]
11872 ;; Load the load/store double peephole optimizations.
11873 (include "ldrdstrd.md")
11875 ;; Load the load/store multiple patterns
11876 (include "ldmstm.md")
11878 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11879 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11880 ;; The operands are validated through the load_multiple_operation
11881 ;; match_parallel predicate rather than through constraints so enable it only
11883 (define_insn "*load_multiple"
11884 [(match_parallel 0 "load_multiple_operation"
11885 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11886 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11888 "TARGET_32BIT && reload_completed"
11891 arm_output_multireg_pop (operands, /*return_pc=*/false,
11892 /*cond=*/const_true_rtx,
11898 [(set_attr "predicable" "yes")]
11901 (define_expand "copysignsf3"
11902 [(match_operand:SF 0 "register_operand")
11903 (match_operand:SF 1 "register_operand")
11904 (match_operand:SF 2 "register_operand")]
11905 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11907 emit_move_insn (operands[0], operands[2]);
11908 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11909 GEN_INT (31), GEN_INT (0),
11910 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
11915 (define_expand "copysigndf3"
11916 [(match_operand:DF 0 "register_operand")
11917 (match_operand:DF 1 "register_operand")
11918 (match_operand:DF 2 "register_operand")]
11919 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11921 rtx op0_low = gen_lowpart (SImode, operands[0]);
11922 rtx op0_high = gen_highpart (SImode, operands[0]);
11923 rtx op1_low = gen_lowpart (SImode, operands[1]);
11924 rtx op1_high = gen_highpart (SImode, operands[1]);
11925 rtx op2_high = gen_highpart (SImode, operands[2]);
11927 rtx scratch1 = gen_reg_rtx (SImode);
11928 rtx scratch2 = gen_reg_rtx (SImode);
11929 emit_move_insn (scratch1, op2_high);
11930 emit_move_insn (scratch2, op1_high);
11932 emit_insn(gen_rtx_SET(scratch1,
11933 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
11934 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11935 emit_move_insn (op0_low, op1_low);
11936 emit_move_insn (op0_high, scratch2);
11942 ;; movmisalign patterns for HImode and SImode.
11943 (define_expand "movmisalign<mode>"
11944 [(match_operand:HSI 0 "general_operand")
11945 (match_operand:HSI 1 "general_operand")]
11948 /* This pattern is not permitted to fail during expansion: if both arguments
11949 are non-registers (e.g. memory := constant), force operand 1 into a
11951 rtx (* gen_unaligned_load)(rtx, rtx);
11952 rtx tmp_dest = operands[0];
11953 if (!s_register_operand (operands[0], <MODE>mode)
11954 && !s_register_operand (operands[1], <MODE>mode))
11955 operands[1] = force_reg (<MODE>mode, operands[1]);
11957 if (<MODE>mode == HImode)
11959 gen_unaligned_load = gen_unaligned_loadhiu;
11960 tmp_dest = gen_reg_rtx (SImode);
11963 gen_unaligned_load = gen_unaligned_loadsi;
11965 if (MEM_P (operands[1]))
11967 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
11968 if (<MODE>mode == HImode)
11969 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
11972 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
11977 (define_insn "arm_<cdp>"
11978 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11979 (match_operand:SI 1 "immediate_operand" "n")
11980 (match_operand:SI 2 "immediate_operand" "n")
11981 (match_operand:SI 3 "immediate_operand" "n")
11982 (match_operand:SI 4 "immediate_operand" "n")
11983 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
11984 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
11986 arm_const_bounds (operands[0], 0, 16);
11987 arm_const_bounds (operands[1], 0, 16);
11988 arm_const_bounds (operands[2], 0, (1 << 5));
11989 arm_const_bounds (operands[3], 0, (1 << 5));
11990 arm_const_bounds (operands[4], 0, (1 << 5));
11991 arm_const_bounds (operands[5], 0, 8);
11992 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
11994 [(set_attr "length" "4")
11995 (set_attr "type" "coproc")])
11997 (define_insn "*ldc"
11998 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
11999 (match_operand:SI 1 "immediate_operand" "n")
12000 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12001 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12003 arm_const_bounds (operands[0], 0, 16);
12004 arm_const_bounds (operands[1], 0, (1 << 5));
12005 return "<ldc>\\tp%c0, CR%c1, %2";
12007 [(set_attr "length" "4")
12008 (set_attr "type" "coproc")])
12010 (define_insn "*stc"
12011 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12012 (match_operand:SI 1 "immediate_operand" "n")
12013 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12014 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12016 arm_const_bounds (operands[0], 0, 16);
12017 arm_const_bounds (operands[1], 0, (1 << 5));
12018 return "<stc>\\tp%c0, CR%c1, %2";
12020 [(set_attr "length" "4")
12021 (set_attr "type" "coproc")])
12023 (define_expand "arm_<ldc>"
12024 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12025 (match_operand:SI 1 "immediate_operand")
12026 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12027 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12029 (define_expand "arm_<stc>"
12030 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12031 (match_operand:SI 1 "immediate_operand")
12032 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12033 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
12035 (define_insn "arm_<mcr>"
12036 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12037 (match_operand:SI 1 "immediate_operand" "n")
12038 (match_operand:SI 2 "s_register_operand" "r")
12039 (match_operand:SI 3 "immediate_operand" "n")
12040 (match_operand:SI 4 "immediate_operand" "n")
12041 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12042 (use (match_dup 2))]
12043 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12045 arm_const_bounds (operands[0], 0, 16);
12046 arm_const_bounds (operands[1], 0, 8);
12047 arm_const_bounds (operands[3], 0, (1 << 5));
12048 arm_const_bounds (operands[4], 0, (1 << 5));
12049 arm_const_bounds (operands[5], 0, 8);
12050 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12052 [(set_attr "length" "4")
12053 (set_attr "type" "coproc")])
12055 (define_insn "arm_<mrc>"
12056 [(set (match_operand:SI 0 "s_register_operand" "=r")
12057 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12058 (match_operand:SI 2 "immediate_operand" "n")
12059 (match_operand:SI 3 "immediate_operand" "n")
12060 (match_operand:SI 4 "immediate_operand" "n")
12061 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12062 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12064 arm_const_bounds (operands[1], 0, 16);
12065 arm_const_bounds (operands[2], 0, 8);
12066 arm_const_bounds (operands[3], 0, (1 << 5));
12067 arm_const_bounds (operands[4], 0, (1 << 5));
12068 arm_const_bounds (operands[5], 0, 8);
12069 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12071 [(set_attr "length" "4")
12072 (set_attr "type" "coproc")])
12074 (define_insn "arm_<mcrr>"
12075 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12076 (match_operand:SI 1 "immediate_operand" "n")
12077 (match_operand:DI 2 "s_register_operand" "r")
12078 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12079 (use (match_dup 2))]
12080 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12082 arm_const_bounds (operands[0], 0, 16);
12083 arm_const_bounds (operands[1], 0, 8);
12084 arm_const_bounds (operands[3], 0, (1 << 5));
12085 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12087 [(set_attr "length" "4")
12088 (set_attr "type" "coproc")])
12090 (define_insn "arm_<mrrc>"
12091 [(set (match_operand:DI 0 "s_register_operand" "=r")
12092 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12093 (match_operand:SI 2 "immediate_operand" "n")
12094 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12095 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12097 arm_const_bounds (operands[1], 0, 16);
12098 arm_const_bounds (operands[2], 0, 8);
12099 arm_const_bounds (operands[3], 0, (1 << 5));
12100 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12102 [(set_attr "length" "4")
12103 (set_attr "type" "coproc")])
12105 (define_expand "speculation_barrier"
12106 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12109 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12110 have a usable barrier (and probably don't need one in practice).
12111 But to be safe if such code is run on later architectures, call a
12112 helper function in libgcc that will do the thing for the active
12114 if (!(arm_arch7 || arm_arch8))
12116 arm_emit_speculation_barrier_function ();
12122 ;; Generate a hard speculation barrier when we have not enabled speculation
12124 (define_insn "*speculation_barrier_insn"
12125 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12126 "arm_arch7 || arm_arch8"
12128 [(set_attr "type" "block")
12129 (set_attr "length" "8")]
12132 ;; Vector bits common to IWMMXT and Neon
12133 (include "vec-common.md")
12134 ;; Load the Intel Wireless Multimedia Extension patterns
12135 (include "iwmmxt.md")
12136 ;; Load the VFP co-processor patterns
12138 ;; Thumb-1 patterns
12139 (include "thumb1.md")
12140 ;; Thumb-2 patterns
12141 (include "thumb2.md")
12143 (include "neon.md")
12145 (include "crypto.md")
12146 ;; Synchronization Primitives
12147 (include "sync.md")
12148 ;; Fixed-point patterns
12149 (include "arm-fixed.md")